Use of org.codehaus.jackson.JsonGenerator in project hive by apache.
The class SystemConfigurationServlet, method doGet:
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
  JsonGenerator jg = null;
  PrintWriter writer = null;
  if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(), request, response)) {
    response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
    return;
  }
  setResponseHeader(response);
  boolean refresh = Boolean.parseBoolean(request.getParameter("refresh"));
  try {
    writer = response.getWriter();
    jg = jsonFactory.createJsonGenerator(writer);
    jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
    jg.useDefaultPrettyPrinter();
    jg.writeStartObject();
    jg.writeObjectField("pid", LlapDaemonInfo.INSTANCE.getPID());
    jg.writeObjectField("os.name", System.getProperty("os.name"));
    if (Shell.WINDOWS) {
      jg.writeObjectField("net.core.somaxconn", NetUtil.SOMAXCONN);
    } else {
      String sysctlCmd = "sysctl -a";
      try {
        if (sysctlOutRef.get() == null || refresh) {
          LOG.info("Reading kernel configs via sysctl..");
          String sysctlOutput = Shell.execCommand(sysctlCmd.split("\\s+"));
          sysctlOutRef.set(sysctlOutput);
        }
      } catch (IOException e) {
        LOG.warn("Unable to execute '{}' command", sysctlCmd, e);
        // failures will not be retried (to avoid fork + exec running sysctl command)
        sysctlOutRef.set(FAILED);
        jg.writeObjectField("sysctl", FAILED);
        jg.writeObjectField("sysctl-failure-reason", e.getMessage());
      }
      if (sysctlOutRef.get() != null && !sysctlOutRef.get().equals(FAILED)) {
        String[] lines = sysctlOutRef.get().split("\\r?\\n");
        for (String line : lines) {
          int sepIdx = line.indexOf(SYSCTL_KV_SEPARATOR);
          String key = sepIdx == -1 ? line.trim() : line.substring(0, sepIdx).trim();
          String value = sepIdx == -1 ? null : line.substring(sepIdx + 1).trim().replaceAll("\t", " ");
          if (!key.isEmpty()) {
            jg.writeObjectField(key, value);
          }
        }
      }
      if (!Shell.MAC) {
        // Red Hat: /sys/kernel/mm/redhat_transparent_hugepage/enabled
        //          /sys/kernel/mm/redhat_transparent_hugepage/defrag
        // CentOS/Ubuntu/Debian, OEL, SLES: /sys/kernel/mm/transparent_hugepage/enabled
        //                                  /sys/kernel/mm/transparent_hugepage/defrag
        String thpFileName = "/sys/kernel/mm/transparent_hugepage/enabled";
        String thpFileStr = PrivilegedFileReader.read(thpFileName);
        if (thpFileStr == null) {
          LOG.warn("Unable to read contents of {}", thpFileName);
          thpFileName = "/sys/kernel/mm/redhat_transparent_hugepage/enabled";
          thpFileStr = PrivilegedFileReader.read(thpFileName);
        }
        if (thpFileStr != null) {
          // Format: "always madvise [never]"
          int strIdx = thpFileStr.indexOf('[');
          int endIdx = thpFileStr.indexOf(']');
          jg.writeObjectField(thpFileName, thpFileStr.substring(strIdx + 1, endIdx));
        } else {
          LOG.warn("Unable to read contents of {}", thpFileName);
        }
        String thpDefragFileName = "/sys/kernel/mm/transparent_hugepage/defrag";
        String thpDefragFileStr = PrivilegedFileReader.read(thpDefragFileName);
        if (thpDefragFileStr == null) {
          LOG.warn("Unable to read contents of {}", thpDefragFileName);
          thpDefragFileName = "/sys/kernel/mm/redhat_transparent_hugepage/defrag";
          thpDefragFileStr = PrivilegedFileReader.read(thpDefragFileName);
        }
        if (thpDefragFileStr != null) {
          // Format: "always madvise [never]"
          int strIdx = thpDefragFileStr.indexOf('[');
          int endIdx = thpDefragFileStr.indexOf(']');
          jg.writeObjectField(thpDefragFileName, thpDefragFileStr.substring(strIdx + 1, endIdx));
        } else {
          LOG.warn("Unable to read contents of {}", thpDefragFileName);
        }
      }
    }
    jg.writeEndObject();
    response.setStatus(HttpServletResponse.SC_OK);
  } catch (Exception e) {
    LOG.error("Caught exception while processing llap /system web service request", e);
    response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
  } finally {
    if (jg != null) {
      jg.close();
    }
    if (writer != null) {
      writer.close();
    }
  }
}
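The key generator setup here is the AUTO_CLOSE_TARGET feature: with it disabled, closing the generator flushes buffered JSON but leaves the response writer under the servlet container's control. A minimal, self-contained sketch of that pattern (Jackson 1.x API; the class name and use of System.out are illustrative):

import java.io.PrintWriter;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonGenerator;

public class AutoCloseTargetSketch {
  public static void main(String[] args) throws Exception {
    PrintWriter writer = new PrintWriter(System.out);
    JsonGenerator jg = new JsonFactory().createJsonGenerator(writer);
    // By default close() would also close the underlying writer; disabling
    // AUTO_CLOSE_TARGET keeps ownership of the stream with the caller.
    jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
    jg.useDefaultPrettyPrinter();
    jg.writeStartObject();
    jg.writeStringField("os.name", System.getProperty("os.name"));
    jg.writeEndObject();
    jg.close(); // flushes the JSON but leaves 'writer' open
    writer.flush();
  }
}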
Use of org.codehaus.jackson.JsonGenerator in project perun by CESNET.
The class JsonSerializer, method writePerunRuntimeException:
@Override
public void writePerunRuntimeException(PerunRuntimeException prex) throws IOException {
  JsonGenerator gen = jsonFactory.createJsonGenerator(out, JsonEncoding.UTF8);
  if (prex == null) {
    throw new IllegalArgumentException("prex is null");
  } else {
    gen.writeObject(prex);
    gen.flush();
  }
  gen.close();
}
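Note that writeObject() only works when the generator carries an ObjectCodec; a generator from a plain new JsonFactory() has none and cannot serialize a POJO such as PerunRuntimeException, so the jsonFactory field here is presumably a MappingJsonFactory or equivalent. A standalone sketch of that distinction (names illustrative):

import java.io.ByteArrayOutputStream;
import org.codehaus.jackson.JsonEncoding;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.MappingJsonFactory;

public class WriteObjectSketch {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // MappingJsonFactory wires an ObjectMapper into each generator it creates,
    // which is what makes writeObject() on arbitrary beans and maps possible.
    JsonGenerator gen = new MappingJsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
    gen.writeObject(java.util.Collections.singletonMap("message", "boom"));
    gen.flush();
    gen.close();
    System.out.println(out.toString("UTF-8")); // {"message":"boom"}
  }
}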
Use of org.codehaus.jackson.JsonGenerator in project perun by CESNET.
The class JsonSerializerGWT, method write:
@Override
public void write(Object object) throws RpcException, IOException {
  JsonGenerator gen = jsonFactory.createJsonGenerator(out, JsonEncoding.UTF8);
  if (object instanceof Throwable) {
    throw new IllegalArgumentException("Tried to serialize a throwable object using write()", (Throwable) object);
  }
  try {
    gen.writeRaw(callback + "(");
    gen.writeObject(object);
    gen.writeRaw(");");
    gen.flush();
    gen.close();
  } catch (JsonProcessingException ex) {
    throw new RpcException(RpcException.Type.CANNOT_SERIALIZE_VALUE, ex);
  }
}
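This variant produces JSONP: writeRaw() emits text verbatim, with no quoting or escaping, so the JSON value ends up wrapped in a JavaScript function call. A standalone sketch of the resulting output shape (the callback name is an assumed request parameter, not the project's actual wiring):

import java.io.ByteArrayOutputStream;
import org.codehaus.jackson.JsonEncoding;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.MappingJsonFactory;

public class JsonpSketch {
  public static void main(String[] args) throws Exception {
    String callback = "handleReply"; // illustrative; normally taken from the request
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonGenerator gen = new MappingJsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
    gen.writeRaw(callback + "("); // raw prefix, not JSON
    gen.writeObject(java.util.Collections.singletonMap("status", "ok"));
    gen.writeRaw(");");           // raw suffix closing the call
    gen.flush();
    gen.close();
    System.out.println(out.toString("UTF-8")); // handleReply({"status":"ok"});
  }
}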
Use of org.codehaus.jackson.JsonGenerator in project perun by CESNET.
The class JsonSerializerJSONSIMPLE, method writePerunRuntimeException:
@Override
public void writePerunRuntimeException(PerunRuntimeException prex) throws IOException {
  JsonGenerator gen = jsonFactory.createJsonGenerator(out, JsonEncoding.UTF8);
  if (prex == null) {
    throw new IllegalArgumentException("prex is null");
  } else {
    gen.writeObject(prex);
    gen.flush();
  }
  gen.close();
}
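One detail shared by both writePerunRuntimeException variants: if writeObject() throws, gen.close() is never reached. A standalone sketch of the exception-safe shape (an assumed refactor for illustration, not the project's code):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.codehaus.jackson.JsonEncoding;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.MappingJsonFactory;

public class SafeCloseSketch {
  public static void writeSafely(Object value) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonGenerator gen = new MappingJsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
    try {
      gen.writeObject(value);
      gen.flush();
    } finally {
      gen.close(); // runs even if writeObject() fails mid-stream
    }
  }
}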
Use of org.codehaus.jackson.JsonGenerator in project hive by apache.
The class EximUtil, method createExportDump:
public static void createExportDump(FileSystem fs, Path metadataPath, org.apache.hadoop.hive.ql.metadata.Table tableHandle, Iterable<org.apache.hadoop.hive.ql.metadata.Partition> partitions, ReplicationSpec replicationSpec) throws SemanticException, IOException {
  if (replicationSpec == null) {
    // instantiate default values if not specified
    replicationSpec = new ReplicationSpec();
  }
  if (tableHandle == null) {
    replicationSpec.setNoop(true);
  }
  OutputStream out = fs.create(metadataPath);
  JsonGenerator jgen = (new JsonFactory()).createJsonGenerator(out);
  jgen.writeStartObject();
  jgen.writeStringField("version", METADATA_FORMAT_VERSION);
  if (METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION != null) {
    jgen.writeStringField("fcversion", METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION);
  }
  if (replicationSpec.isInReplicationScope()) {
    for (ReplicationSpec.KEY key : ReplicationSpec.KEY.values()) {
      String value = replicationSpec.get(key);
      if (value != null) {
        jgen.writeStringField(key.toString(), value);
      }
    }
    if (tableHandle != null) {
      Table ttable = tableHandle.getTTable();
      ttable.putToParameters(ReplicationSpec.KEY.CURR_STATE_ID.toString(), replicationSpec.getCurrentReplicationState());
      if ((ttable.getParameters().containsKey("EXTERNAL")) && (ttable.getParameters().get("EXTERNAL").equalsIgnoreCase("TRUE"))) {
        // Replication destination will not be external - override if set
        ttable.putToParameters("EXTERNAL", "FALSE");
      }
      if (ttable.isSetTableType() && ttable.getTableType().equalsIgnoreCase(TableType.EXTERNAL_TABLE.toString())) {
        // Replication dest will not be external - override if set
        ttable.setTableType(TableType.MANAGED_TABLE.toString());
      }
    }
  } else {
    // ReplicationSpec.KEY scopeKey = ReplicationSpec.KEY.REPL_SCOPE;
    // write(out, ",\""+ scopeKey.toString() +"\":\"" + replicationSpec.get(scopeKey) + "\"");
    // TODO: if we want to be explicit about this dump not being a replication dump, we can
    // uncomment this else section, but it is currently unneeded. Will require a lot of golden
    // file regen if we do so.
  }
  if ((tableHandle != null) && (!replicationSpec.isNoop())) {
    TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
    try {
      jgen.writeStringField("table", serializer.toString(tableHandle.getTTable(), "UTF-8"));
      jgen.writeFieldName("partitions");
      jgen.writeStartArray();
      if (partitions != null) {
        for (org.apache.hadoop.hive.ql.metadata.Partition partition : partitions) {
          Partition tptn = partition.getTPartition();
          if (replicationSpec.isInReplicationScope()) {
            tptn.putToParameters(ReplicationSpec.KEY.CURR_STATE_ID.toString(), replicationSpec.getCurrentReplicationState());
            if ((tptn.getParameters().containsKey("EXTERNAL")) && (tptn.getParameters().get("EXTERNAL").equalsIgnoreCase("TRUE"))) {
              // Replication destination will not be external
              tptn.putToParameters("EXTERNAL", "FALSE");
            }
          }
          jgen.writeString(serializer.toString(tptn, "UTF-8"));
          jgen.flush();
        }
      }
      jgen.writeEndArray();
    } catch (TException e) {
      throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METASTORE.getMsg(), e);
    }
  }
  jgen.writeEndObject();
  // JsonGenerator owns the OutputStream, so it closes it when we call close.
  jgen.close();
}
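The "table" and "partitions" entries above are Thrift objects rendered as JSON strings via TSerializer with TJSONProtocol, nested inside the Jackson-written envelope. A sketch of how such a string could be decoded again (standard libthrift API; illustrates the round trip, not Hive's actual import path):

import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.protocol.TJSONProtocol;

public class MetadataRoundTripSketch {
  // Given the string stored under "table" above, rebuild the Thrift Table.
  static Table decodeTable(String tableJson) throws Exception {
    Table t = new Table();
    new TDeserializer(new TJSONProtocol.Factory()).deserialize(t, tableJson, "UTF-8");
    return t;
  }
}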