
Example 41 with JsonWriter

use of com.google.gson.stream.JsonWriter in project cdap-ingest by caskdata.

the class FileTailerStateProcessorImpl method saveState.

@Override
public void saveState(FileTailerState state) throws FileTailerStateProcessorException {
    try {
        Preconditions.checkNotNull(state);
    } catch (NullPointerException e) {
        LOG.info("Cannot save null state");
        return;
    }
    createDirs(stateDir);
    LOG.debug("Start saving File Tailer state ..");
    try {
        JsonWriter jsonWriter = new JsonWriter(Files.newWriter(stateFile, UTF_8));
        try {
            GSON.toJson(state, FileTailerState.class, jsonWriter);
            LOG.debug("File Tailer state saved successfully");
        } finally {
            try {
                jsonWriter.close();
            } catch (IOException e) {
                LOG.error("Cannot close JSON Writer for file {}: {}", stateFile.getAbsolutePath(), e.getMessage(), e);
            }
        }
    } catch (IOException e) {
        LOG.error("Cannot close JSON Writer for file {}: {}", stateFile.getAbsolutePath(), e.getMessage(), e);
    }
}
Also used : IOException(java.io.IOException) JsonWriter(com.google.gson.stream.JsonWriter)
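
Since JsonWriter implements Closeable, the nested try/finally above can be collapsed with try-with-resources. The following is an illustrative rewrite, not the project's code; it assumes the GSON, stateFile, stateDir, UTF_8 and LOG fields and the createDirs helper from the snippet above.

// Illustrative rewrite using try-with-resources (assumes GSON, stateFile,
// stateDir, UTF_8, LOG and createDirs from the snippet above).
public void saveStateSketch(FileTailerState state) {
    if (state == null) {
        LOG.info("Cannot save null state");
        return;
    }
    createDirs(stateDir);
    try (JsonWriter jsonWriter = new JsonWriter(Files.newWriter(stateFile, UTF_8))) {
        // Serialize the state object directly to the file through the streaming writer
        GSON.toJson(state, FileTailerState.class, jsonWriter);
        LOG.debug("File Tailer state saved successfully");
    } catch (IOException e) {
        LOG.error("Cannot save File Tailer state to file {}: {}", stateFile.getAbsolutePath(), e.getMessage(), e);
    }
}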

Example 42 with JsonWriter

use of com.google.gson.stream.JsonWriter in project cdap by caskdata.

the class Configuration method dumpConfiguration.

/**
   *  Writes out all the parameters and their properties (final and resource) to
   *  the given {@link Writer}.
   *  The format of the output is:
   *  { "properties" : [ {key1,value1,key1.isFinal,key1.resource}, {key2,value2,
   *  key2.isFinal,key2.resource}... ] }
   *  It does not output the parameters of the configuration object which is
   *  loaded from an input stream.
   * @param config the configuration to write out
   * @param out the Writer to write to
   * @throws IOException if writing to the given {@link Writer} fails
   */
public static void dumpConfiguration(Configuration config, Writer out) throws IOException {
    JsonWriter dumpGenerator = new JsonWriter(out);
    dumpGenerator.beginObject();
    dumpGenerator.name("properties");
    dumpGenerator.beginArray();
    dumpGenerator.flush();
    synchronized (config) {
        for (Map.Entry<Object, Object> item : config.getProps().entrySet()) {
            dumpGenerator.beginObject();
            dumpGenerator.name("key").value((String) item.getKey());
            dumpGenerator.name("value").value(config.get((String) item.getKey()));
            dumpGenerator.name("isFinal").value(config.finalParameters.contains(item.getKey()));
            dumpGenerator.name("resource").value(config.updatingResource.get(item.getKey()));
            dumpGenerator.endObject();
        }
    }
    dumpGenerator.endArray();
    dumpGenerator.endObject();
    dumpGenerator.flush();
}
Also used : JsonWriter(com.google.gson.stream.JsonWriter) HashMap(java.util.HashMap) Map(java.util.Map) WeakHashMap(java.util.WeakHashMap)
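
The streaming calls above produce the documented { "properties" : [ ... ] } shape. A small standalone sketch of the same pattern over a plain Map (a hypothetical helper, not part of CDAP; the isFinal and resource values are placeholders, and it needs java.io.StringWriter and java.io.IOException in addition to the imports listed above):

// Hypothetical helper mirroring dumpConfiguration's output shape for a plain Map.
public static String dumpProps(Map<String, String> props) throws IOException {
    StringWriter out = new StringWriter();
    JsonWriter w = new JsonWriter(out);
    w.beginObject();
    w.name("properties");
    w.beginArray();
    for (Map.Entry<String, String> e : props.entrySet()) {
        w.beginObject();
        w.name("key").value(e.getKey());
        w.name("value").value(e.getValue());
        // Placeholder values: this sketch does not track final parameters or resources
        w.name("isFinal").value(false);
        w.name("resource").value("programmatically");
        w.endObject();
    }
    w.endArray();
    w.endObject();
    w.flush();
    return out.toString();
}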

Example 43 with JsonWriter

use of com.google.gson.stream.JsonWriter in project hive by apache.

the class LineageLogger method run.

@Override
public void run(HookContext hookContext) {
    assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);
    QueryPlan plan = hookContext.getQueryPlan();
    Index index = hookContext.getIndex();
    SessionState ss = SessionState.get();
    if (ss != null && index != null && OPERATION_NAMES.contains(plan.getOperationName()) && !plan.isExplain()) {
        try {
            StringBuilderWriter out = new StringBuilderWriter(1024);
            JsonWriter writer = new JsonWriter(out);
            String queryStr = plan.getQueryStr().trim();
            writer.beginObject();
            writer.name("version").value(FORMAT_VERSION);
            HiveConf conf = ss.getConf();
            boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
            if (!testMode) {
                // Don't emit user/timestamp info in test mode,
                // so that the test golden output file is fixed.
                long queryTime = plan.getQueryStartTime().longValue();
                if (queryTime == 0)
                    queryTime = System.currentTimeMillis();
                long duration = System.currentTimeMillis() - queryTime;
                writer.name("user").value(hookContext.getUgi().getUserName());
                writer.name("timestamp").value(queryTime / 1000);
                writer.name("duration").value(duration);
                writer.name("jobIds");
                writer.beginArray();
                List<TaskRunner> tasks = hookContext.getCompleteTaskList();
                if (tasks != null && !tasks.isEmpty()) {
                    for (TaskRunner task : tasks) {
                        String jobId = task.getTask().getJobID();
                        if (jobId != null) {
                            writer.value(jobId);
                        }
                    }
                }
                writer.endArray();
            }
            writer.name("engine").value(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE));
            writer.name("database").value(ss.getCurrentDatabase());
            writer.name("hash").value(getQueryHash(queryStr));
            writer.name("queryText").value(queryStr);
            List<Edge> edges = getEdges(plan, index);
            Set<Vertex> vertices = getVertices(edges);
            writeEdges(writer, edges, hookContext.getConf());
            writeVertices(writer, vertices);
            writer.endObject();
            writer.close();
            // Log the lineage info
            String lineage = out.toString();
            if (testMode) {
                // Log to console
                log(lineage);
            } else {
                // In non-test mode, emit to a log file,
                // which can be different from the normal hive.log.
                // For example, using NoDeleteRollingFileAppender to
                // log to some file with different rolling policy.
                LOG.info(lineage);
            }
        } catch (Throwable t) {
            // Don't fail the query just because of any lineage issue.
            log("Failed to log lineage graph, query is not affected\n" + org.apache.hadoop.util.StringUtils.stringifyException(t));
        }
    }
}
Also used : SessionState(org.apache.hadoop.hive.ql.session.SessionState) StringBuilderWriter(org.apache.commons.io.output.StringBuilderWriter) Index(org.apache.hadoop.hive.ql.optimizer.lineage.LineageCtx.Index) QueryPlan(org.apache.hadoop.hive.ql.QueryPlan) JsonWriter(com.google.gson.stream.JsonWriter) TaskRunner(org.apache.hadoop.hive.ql.exec.TaskRunner) HiveConf(org.apache.hadoop.hive.conf.HiveConf)
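
For readers new to the streaming API, here is a minimal, self-contained sketch of the pattern the hook uses above: scalar name/value pairs followed by a nested array, written directly to the output with no intermediate object model. All values are illustrative.

import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.io.StringWriter;

public class JsonWriterShapeDemo {
    public static void main(String[] args) throws IOException {
        StringWriter out = new StringWriter();
        JsonWriter writer = new JsonWriter(out);
        writer.beginObject();
        writer.name("version").value("1.0");
        writer.name("engine").value("mr");
        writer.name("jobIds");
        writer.beginArray();
        writer.value("job_0001");
        writer.value("job_0002");
        writer.endArray();
        writer.endObject();
        writer.close();
        // Prints: {"version":"1.0","engine":"mr","jobIds":["job_0001","job_0002"]}
        System.out.println(out);
    }
}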

Example 44 with JsonWriter

use of com.google.gson.stream.JsonWriter in project gradle by gradle.

the class ModuleMetadataFileGenerator method generateTo.

public void generateTo(PublicationInternal publication, Collection<? extends PublicationInternal> publications, Writer writer) throws IOException {
    // Collect a map from component to coordinates. This might be better moved to the component or to a publications model
    Map<SoftwareComponent, ComponentData> coordinates = new HashMap<SoftwareComponent, ComponentData>();
    collectCoordinates(publications, coordinates);
    // Collect a map from component to its owning component. This might be better moved to the component or to a publications model
    Map<SoftwareComponent, SoftwareComponent> owners = new HashMap<SoftwareComponent, SoftwareComponent>();
    collectOwners(publications, owners);
    // Write the output
    JsonWriter jsonWriter = new JsonWriter(writer);
    jsonWriter.setHtmlSafe(false);
    jsonWriter.setIndent("  ");
    writeComponentWithVariants(publication, publication.getComponent(), coordinates, owners, jsonWriter);
    jsonWriter.flush();
    writer.append('\n');
}
Also used : HashMap(java.util.HashMap) JsonWriter(com.google.gson.stream.JsonWriter) SoftwareComponent(org.gradle.api.component.SoftwareComponent)
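
The two setters configure the writer's output: setHtmlSafe(false) turns off the extra Unicode escaping of HTML-sensitive characters such as < and &, and setIndent("  ") switches from compact to pretty-printed output. A quick illustration (statements assume an enclosing method that declares IOException; "formatVersion" is just an example name, not the Gradle metadata format):

StringWriter out = new StringWriter();
JsonWriter jsonWriter = new JsonWriter(out);
jsonWriter.setHtmlSafe(false);   // do not escape HTML-sensitive characters as unicode sequences
jsonWriter.setIndent("  ");      // pretty-print with two-space indentation
jsonWriter.beginObject();
jsonWriter.name("formatVersion").value("1.0");
jsonWriter.endObject();
jsonWriter.flush();
// out.toString() now contains:
// {
//   "formatVersion": "1.0"
// }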

Example 45 with JsonWriter

use of com.google.gson.stream.JsonWriter in project linuxtools by eclipse.

the class TestOSIORestPostNewComment method testPostNewComment.

@Test
public void testPostNewComment() throws Exception {
    TestData testData = new TestData();
    TestUtils.initSpaces(requestProvider, testData);
    OSIORestClient client = connector.getClient(repository, requestProvider);
    OSIORestConfiguration config = client.getConfiguration(repository, new NullOperationMonitor());
    config.setSpaces(testData.spaceMap);
    connector.setConfiguration(config);
    RepositoryLocation location = client.getClient().getLocation();
    location.setProperty(IOSIORestConstants.REPOSITORY_AUTH_ID, "user");
    location.setProperty(IOSIORestConstants.REPOSITORY_AUTH_TOKEN, "xxxxxxTokenxxxxxx");
    AbstractTaskDataHandler taskDataHandler = connector.getTaskDataHandler();
    TaskAttributeMapper mapper = taskDataHandler.getAttributeMapper(repository);
    TaskData taskData = new TaskData(mapper, repository.getConnectorKind(), repository.getRepositoryUrl(), "");
    OSIORestTaskSchema.getDefault().initialize(taskData);
    Set<TaskAttribute> attributes = new LinkedHashSet<>();
    TaskAttribute newComment = taskData.getRoot().getAttribute(OSIORestTaskSchema.getDefault().NEW_COMMENT.getKey());
    newComment.setValue("This is a test comment");
    attributes.add(newComment);
    OSIORestPostNewCommentTask data = new OSIORestPostNewCommentTask(client.getClient(), taskData, attributes);
    OSIORestPostNewCommentTask.TaskAttributeTypeAdapter adapter = data.new TaskAttributeTypeAdapter(location);
    OSIORestPostNewCommentTask.OldAttributes oldAttributes = data.new OldAttributes(attributes);
    StringWriter s = new StringWriter();
    JsonWriter writer = new JsonWriter(s);
    adapter.write(writer, oldAttributes);
    assertEquals("{\"data\":{\"attributes\":{\"body\":\"This is a test comment\",\"markup\":\"Markdown\"},\"type\":\"comments\"},\"included\":[]}", s.getBuffer().toString());
}
Also used : LinkedHashSet(java.util.LinkedHashSet) TaskAttribute(org.eclipse.mylyn.tasks.core.data.TaskAttribute) TestData(org.eclipse.linuxtools.mylyn.osio.rest.test.support.TestData) OSIORestPostNewCommentTask(org.eclipse.linuxtools.internal.mylyn.osio.rest.core.OSIORestPostNewCommentTask) OSIORestConfiguration(org.eclipse.linuxtools.internal.mylyn.osio.rest.core.OSIORestConfiguration) AbstractTaskDataHandler(org.eclipse.mylyn.tasks.core.data.AbstractTaskDataHandler) JsonWriter(com.google.gson.stream.JsonWriter) RepositoryLocation(org.eclipse.mylyn.commons.repositories.core.RepositoryLocation) TaskData(org.eclipse.mylyn.tasks.core.data.TaskData) StringWriter(java.io.StringWriter) OSIORestClient(org.eclipse.linuxtools.internal.mylyn.osio.rest.core.OSIORestClient) TaskAttributeMapper(org.eclipse.mylyn.tasks.core.data.TaskAttributeMapper) NullOperationMonitor(org.eclipse.linuxtools.internal.mylyn.osio.rest.core.NullOperationMonitor) Test(org.junit.Test)
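
The adapter under test serializes the task attributes through a write(JsonWriter, T) callback in the style of a Gson TypeAdapter. A stripped-down, hypothetical adapter showing the same callback shape (Comment and CommentAdapter are illustrative names, not the OSIO types):

// Hypothetical TypeAdapter sketch; not the OSIO implementation.
class Comment {
    String body;
    String markup;
}

class CommentAdapter extends com.google.gson.TypeAdapter<Comment> {
    @Override
    public void write(JsonWriter out, Comment c) throws IOException {
        // Emit the same field-by-field structure the test asserts on
        out.beginObject();
        out.name("body").value(c.body);
        out.name("markup").value(c.markup);
        out.endObject();
    }

    @Override
    public Comment read(com.google.gson.stream.JsonReader in) throws IOException {
        throw new UnsupportedOperationException("read not needed for this sketch");
    }
}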

Aggregations

JsonWriter (com.google.gson.stream.JsonWriter) 46
StringWriter (java.io.StringWriter) 21
Test (org.junit.Test) 14
OutputStreamWriter (java.io.OutputStreamWriter) 11
Gson (com.google.gson.Gson) 10
JsonReader (com.google.gson.stream.JsonReader) 10
IOException (java.io.IOException) 10
Writer (java.io.Writer) 7
StringReader (java.io.StringReader) 5
Map (java.util.Map) 5
NullOperationMonitor (org.eclipse.linuxtools.internal.mylyn.osio.rest.core.NullOperationMonitor) 5
OSIORestClient (org.eclipse.linuxtools.internal.mylyn.osio.rest.core.OSIORestClient) 5
OSIORestConfiguration (org.eclipse.linuxtools.internal.mylyn.osio.rest.core.OSIORestConfiguration) 5
TestData (org.eclipse.linuxtools.mylyn.osio.rest.test.support.TestData) 5
RepositoryLocation (org.eclipse.mylyn.commons.repositories.core.RepositoryLocation) 5
AbstractTaskDataHandler (org.eclipse.mylyn.tasks.core.data.AbstractTaskDataHandler) 5
TaskAttributeMapper (org.eclipse.mylyn.tasks.core.data.TaskAttributeMapper) 5
TaskData (org.eclipse.mylyn.tasks.core.data.TaskData) 5
JsonObject (com.google.gson.JsonObject) 4
SuppressLint (android.annotation.SuppressLint) 3