Search in sources :

Example 31 with JsonGenerator

use of org.codehaus.jackson.JsonGenerator in project hive by apache.

In the class SystemConfigurationServlet, the method doGet:

/**
 * Serves the llap /system web service endpoint: emits a JSON object of
 * system-level configuration (pid, os.name, kernel sysctl settings, and
 * transparent-hugepage state on non-Mac Unix).
 *
 * Query parameter {@code refresh=true} forces re-reading sysctl output;
 * otherwise a cached copy in {@code sysctlOutRef} is reused. A previous
 * sysctl failure is cached as {@code FAILED} and never retried (avoids
 * repeated fork+exec of the sysctl command).
 *
 * @param request  incoming HTTP request
 * @param response HTTP response; 401 when instrumentation access is denied,
 *                 200 with the JSON body on success, 500 on internal error
 */
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    JsonGenerator jg = null;
    PrintWriter writer = null;
    if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(), request, response)) {
        response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
        return;
    }
    setResponseHeader(response);
    boolean refresh = Boolean.parseBoolean(request.getParameter("refresh"));
    try {
        writer = response.getWriter();
        jg = jsonFactory.createJsonGenerator(writer);
        // The servlet container owns the writer; don't let jg.close() close it.
        jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
        jg.useDefaultPrettyPrinter();
        jg.writeStartObject();
        jg.writeObjectField("pid", LlapDaemonInfo.INSTANCE.getPID());
        jg.writeObjectField("os.name", System.getProperty("os.name"));
        if (Shell.WINDOWS) {
            jg.writeObjectField("net.core.somaxconn", NetUtil.SOMAXCONN);
        } else {
            String sysctlCmd = "sysctl -a";
            try {
                if (sysctlOutRef.get() == null || refresh) {
                    LOG.info("Reading kernel configs via sysctl..");
                    String sysctlOutput = Shell.execCommand(sysctlCmd.split("\\s+"));
                    sysctlOutRef.set(sysctlOutput);
                }
            } catch (IOException e) {
                LOG.warn("Unable to execute '{}' command", sysctlCmd, e);
                // failures will not be retried (to avoid fork + exec running sysctl command)
                sysctlOutRef.set(FAILED);
                jg.writeObjectField("sysctl", FAILED);
                jg.writeObjectField("sysctl-failure-reason", e.getMessage());
            }
            if (sysctlOutRef.get() != null && !sysctlOutRef.get().equals(FAILED)) {
                // One "key <sep> value" pair per line; tabs in values are
                // flattened to spaces so the JSON stays single-line per field.
                String[] lines = sysctlOutRef.get().split("\\r?\\n");
                for (String line : lines) {
                    int sepIdx = line.indexOf(SYSCTL_KV_SEPARATOR);
                    String key = sepIdx == -1 ? line.trim() : line.substring(0, sepIdx).trim();
                    String value = sepIdx == -1 ? null : line.substring(sepIdx + 1).trim().replaceAll("\t", "  ");
                    if (!key.isEmpty()) {
                        jg.writeObjectField(key, value);
                    }
                }
            }
            if (!Shell.MAC) {
                // Red Hat: /sys/kernel/mm/redhat_transparent_hugepage/enabled
                // /sys/kernel/mm/redhat_transparent_hugepage/defrag
                // CentOS/Ubuntu/Debian, OEL, SLES: /sys/kernel/mm/transparent_hugepage/enabled
                // /sys/kernel/mm/transparent_hugepage/defrag
                writeThpSetting(jg, "/sys/kernel/mm/transparent_hugepage/enabled",
                    "/sys/kernel/mm/redhat_transparent_hugepage/enabled");
                writeThpSetting(jg, "/sys/kernel/mm/transparent_hugepage/defrag",
                    "/sys/kernel/mm/redhat_transparent_hugepage/defrag");
            }
        }
        jg.writeEndObject();
        response.setStatus(HttpServletResponse.SC_OK);
    } catch (Exception e) {
        LOG.error("Caught exception while processing llap /system web service request", e);
        response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
    } finally {
        if (jg != null) {
            jg.close();
        }
        if (writer != null) {
            writer.close();
        }
    }
}

/**
 * Reads a transparent-hugepage setting file (falling back to the Red Hat
 * path when the primary path is unreadable) and writes the active option
 * as a JSON field keyed by the file actually read. Missing or malformed
 * content is logged and skipped instead of failing the whole response.
 */
private static void writeThpSetting(JsonGenerator jg, String primaryPath, String redhatPath) throws IOException {
    String fileName = primaryPath;
    String contents = PrivilegedFileReader.read(fileName);
    if (contents == null) {
        LOG.warn("Unable to read contents of {}", fileName);
        fileName = redhatPath;
        contents = PrivilegedFileReader.read(fileName);
    }
    if (contents == null) {
        LOG.warn("Unable to read contents of {}", fileName);
        return;
    }
    // Format: "always madvise [never]" — the bracketed token is the active one.
    String active = extractBracketedValue(contents);
    if (active != null) {
        jg.writeObjectField(fileName, active);
    } else {
        // Previously an unguarded substring here could throw
        // StringIndexOutOfBoundsException and turn the response into a 500.
        LOG.warn("Unexpected format in {}: {}", fileName, contents);
    }
}

/**
 * Returns the text inside the first well-formed "[...]" token of {@code s},
 * or null when no such token exists.
 */
private static String extractBracketedValue(String s) {
    int start = s.indexOf('[');
    int end = s.indexOf(']');
    return (start >= 0 && end > start) ? s.substring(start + 1, end) : null;
}
Also used : JsonGenerator(org.codehaus.jackson.JsonGenerator) IOException(java.io.IOException) ServletException(javax.servlet.ServletException) IOException(java.io.IOException) PrintWriter(java.io.PrintWriter)

Example 32 with JsonGenerator

use of org.codehaus.jackson.JsonGenerator in project perun by CESNET.

In the class JsonSerializer, the method writePerunRuntimeException:

/**
 * Serializes the given PerunRuntimeException as JSON onto {@code out}.
 *
 * @param prex exception to serialize; must not be null
 * @throws IOException              on a serialization/write failure
 * @throws IllegalArgumentException when {@code prex} is null
 */
@Override
public void writePerunRuntimeException(PerunRuntimeException prex) throws IOException {
    // Validate before allocating the generator: the original created it
    // first and leaked it (never closed) on the null path.
    if (prex == null) {
        throw new IllegalArgumentException("prex is null");
    }
    JsonGenerator gen = jsonFactory.createJsonGenerator(out, JsonEncoding.UTF8);
    try {
        gen.writeObject(prex);
        gen.flush();
    } finally {
        // Release the generator even if writeObject fails mid-stream.
        gen.close();
    }
}
Also used : JsonGenerator(org.codehaus.jackson.JsonGenerator)

Example 33 with JsonGenerator

use of org.codehaus.jackson.JsonGenerator in project perun by CESNET.

In the class JsonSerializerGWT, the method write:

/**
 * Serializes {@code object} as a JSONP payload ({@code callback(<json>);})
 * onto {@code out}.
 *
 * @param object value to serialize; throwables are rejected — use the
 *               dedicated exception writer instead
 * @throws RpcException             wrapping any Jackson serialization failure
 * @throws IOException              on a raw write/flush failure
 * @throws IllegalArgumentException when {@code object} is a Throwable
 */
@Override
public void write(Object object) throws RpcException, IOException {
    // Reject throwables before allocating the generator: the original
    // created it first and leaked it on this path.
    if (object instanceof Throwable) {
        throw new IllegalArgumentException("Tried to serialize a throwable object using write()", (Throwable) object);
    }
    JsonGenerator gen = jsonFactory.createJsonGenerator(out, JsonEncoding.UTF8);
    try {
        gen.writeRaw(callback + "(");
        gen.writeObject(object);
        gen.writeRaw(");");
        gen.flush();
    } catch (JsonProcessingException ex) {
        // The original closed the generator inside the try block, so this
        // path leaked it; close now happens in finally either way.
        throw new RpcException(RpcException.Type.CANNOT_SERIALIZE_VALUE, ex);
    } finally {
        gen.close();
    }
}
Also used : RpcException(cz.metacentrum.perun.core.api.exceptions.RpcException) JsonGenerator(org.codehaus.jackson.JsonGenerator) JsonProcessingException(org.codehaus.jackson.JsonProcessingException)

Example 34 with JsonGenerator

use of org.codehaus.jackson.JsonGenerator in project perun by CESNET.

In the class JsonSerializerJSONSIMPLE, the method writePerunRuntimeException:

/**
 * Serializes the given PerunRuntimeException as JSON onto {@code out}.
 *
 * @param prex exception to serialize; must not be null
 * @throws IOException              on a serialization/write failure
 * @throws IllegalArgumentException when {@code prex} is null
 */
@Override
public void writePerunRuntimeException(PerunRuntimeException prex) throws IOException {
    // Validate before allocating the generator: the original created it
    // first and leaked it (never closed) on the null path.
    if (prex == null) {
        throw new IllegalArgumentException("prex is null");
    }
    JsonGenerator gen = jsonFactory.createJsonGenerator(out, JsonEncoding.UTF8);
    try {
        gen.writeObject(prex);
        gen.flush();
    } finally {
        // Release the generator even if writeObject fails mid-stream.
        gen.close();
    }
}
Also used : JsonGenerator(org.codehaus.jackson.JsonGenerator)

Example 35 with JsonGenerator

use of org.codehaus.jackson.JsonGenerator in project hive by apache.

In the class EximUtil, the method createExportDump:

/**
 * Writes the export/replication metadata dump for a table (and optionally
 * its partitions) as a JSON document at {@code metadataPath}.
 *
 * The JSON contains the format version, optional replication-scope keys,
 * a Thrift-JSON-serialized table, and an array of Thrift-JSON-serialized
 * partitions. When in replication scope, EXTERNAL tables/partitions are
 * rewritten as managed, since the replication destination is never external.
 *
 * @param fs              filesystem to create the dump file on
 * @param metadataPath    destination path of the JSON metadata file
 * @param tableHandle     table to dump; null marks the spec as a no-op
 * @param partitions      partitions to dump; may be null for none
 * @param replicationSpec replication settings; null means defaults
 * @throws SemanticException when Thrift serialization fails
 * @throws IOException       on filesystem/write failures
 */
public static void createExportDump(FileSystem fs, Path metadataPath, org.apache.hadoop.hive.ql.metadata.Table tableHandle, Iterable<org.apache.hadoop.hive.ql.metadata.Partition> partitions, ReplicationSpec replicationSpec) throws SemanticException, IOException {
    if (replicationSpec == null) {
        // instantiate default values if not specified
        replicationSpec = new ReplicationSpec();
    }
    if (tableHandle == null) {
        replicationSpec.setNoop(true);
    }
    OutputStream out = fs.create(metadataPath);
    JsonGenerator jgen = null;
    try {
        jgen = (new JsonFactory()).createJsonGenerator(out);
        jgen.writeStartObject();
        jgen.writeStringField("version", METADATA_FORMAT_VERSION);
        if (METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION != null) {
            jgen.writeStringField("fcversion", METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION);
        }
        if (replicationSpec.isInReplicationScope()) {
            for (ReplicationSpec.KEY key : ReplicationSpec.KEY.values()) {
                String value = replicationSpec.get(key);
                if (value != null) {
                    jgen.writeStringField(key.toString(), value);
                }
            }
            if (tableHandle != null) {
                Table ttable = tableHandle.getTTable();
                ttable.putToParameters(ReplicationSpec.KEY.CURR_STATE_ID.toString(), replicationSpec.getCurrentReplicationState());
                if ((ttable.getParameters().containsKey("EXTERNAL")) && (ttable.getParameters().get("EXTERNAL").equalsIgnoreCase("TRUE"))) {
                    // Replication destination will not be external - override if set
                    ttable.putToParameters("EXTERNAL", "FALSE");
                }
                if (ttable.isSetTableType() && ttable.getTableType().equalsIgnoreCase(TableType.EXTERNAL_TABLE.toString())) {
                    // Replication dest will not be external - override if set
                    ttable.setTableType(TableType.MANAGED_TABLE.toString());
                }
            }
        } else {
        // ReplicationSpec.KEY scopeKey = ReplicationSpec.KEY.REPL_SCOPE;
        // write(out, ",\""+ scopeKey.toString() +"\":\"" + replicationSpec.get(scopeKey) + "\"");
        // TODO: if we want to be explicit about this dump not being a replication dump, we can
        // uncomment this else section, but currently unneeded. Will require a lot of golden file
        // regen if we do so.
        }
        if ((tableHandle != null) && (!replicationSpec.isNoop())) {
            TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
            try {
                jgen.writeStringField("table", serializer.toString(tableHandle.getTTable(), "UTF-8"));
                jgen.writeFieldName("partitions");
                jgen.writeStartArray();
                if (partitions != null) {
                    for (org.apache.hadoop.hive.ql.metadata.Partition partition : partitions) {
                        Partition tptn = partition.getTPartition();
                        if (replicationSpec.isInReplicationScope()) {
                            tptn.putToParameters(ReplicationSpec.KEY.CURR_STATE_ID.toString(), replicationSpec.getCurrentReplicationState());
                            if ((tptn.getParameters().containsKey("EXTERNAL")) && (tptn.getParameters().get("EXTERNAL").equalsIgnoreCase("TRUE"))) {
                                // Replication destination will not be external
                                tptn.putToParameters("EXTERNAL", "FALSE");
                            }
                        }
                        jgen.writeString(serializer.toString(tptn, "UTF-8"));
                        jgen.flush();
                    }
                }
                jgen.writeEndArray();
            } catch (TException e) {
                throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METASTORE.getMsg(), e);
            }
        }
        jgen.writeEndObject();
    } finally {
        // Previously neither stream was closed when an exception escaped
        // (e.g. the SemanticException above), leaking the FS output stream.
        if (jgen != null) {
            // JsonGenerator owns the OutputStream, so it closes it when we call close.
            jgen.close();
        } else {
            // Generator creation itself failed; close the raw stream directly.
            out.close();
        }
    }
}
Also used : TException(org.apache.thrift.TException) Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) ByteArrayOutputStream(java.io.ByteArrayOutputStream) OutputStream(java.io.OutputStream) JsonFactory(org.codehaus.jackson.JsonFactory) TSerializer(org.apache.thrift.TSerializer) TJSONProtocol(org.apache.thrift.protocol.TJSONProtocol) JsonGenerator(org.codehaus.jackson.JsonGenerator)

Aggregations

JsonGenerator (org.codehaus.jackson.JsonGenerator)54 JsonFactory (org.codehaus.jackson.JsonFactory)15 IOException (java.io.IOException)14 ByteArrayOutputStream (java.io.ByteArrayOutputStream)12 ObjectMapper (org.codehaus.jackson.map.ObjectMapper)11 StringWriter (java.io.StringWriter)10 JsonProcessingException (org.codehaus.jackson.JsonProcessingException)8 RpcException (cz.metacentrum.perun.core.api.exceptions.RpcException)6 JsonGenerationException (org.codehaus.jackson.JsonGenerationException)5 JsonNode (org.codehaus.jackson.JsonNode)5 OutputStreamWriter (java.io.OutputStreamWriter)4 HashMap (java.util.HashMap)4 File (java.io.File)3 PrintWriter (java.io.PrintWriter)3 GET (javax.ws.rs.GET)3 Response (javax.ws.rs.core.Response)3 GenericRecord (org.apache.avro.generic.GenericRecord)3 BufferedWriter (java.io.BufferedWriter)2 DataOutputStream (java.io.DataOutputStream)2 ArrayList (java.util.ArrayList)2