use of com.google.gson.stream.JsonWriter in project cdap-ingest by caskdata.
the class FileTailerStateProcessorImpl method saveState.
@Override
public void saveState(FileTailerState state) throws FileTailerStateProcessorException {
    try {
        Preconditions.checkNotNull(state);
    } catch (NullPointerException e) {
        LOG.info("Cannot save null state");
        return;
    }
    createDirs(stateDir);
    LOG.debug("Start saving File Tailer state ...");
    try {
        JsonWriter jsonWriter = new JsonWriter(Files.newWriter(stateFile, UTF_8));
        try {
            GSON.toJson(state, FileTailerState.class, jsonWriter);
            LOG.debug("File Tailer state saved successfully");
        } finally {
            try {
                jsonWriter.close();
            } catch (IOException e) {
                LOG.error("Cannot close JSON Writer for file {}: {}", stateFile.getAbsolutePath(), e.getMessage(), e);
            }
        }
    } catch (IOException e) {
        // This catch handles a failure to open the state file, not to close the writer.
        LOG.error("Cannot open JSON Writer for file {}: {}", stateFile.getAbsolutePath(), e.getMessage(), e);
    }
}
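The core of the method is Gson's streaming overload GSON.toJson(Object, Type, JsonWriter), which serializes straight to the writer without building an intermediate string. Below is a minimal standalone sketch of the same pattern, assuming a trivial State class, a plain Gson instance, and a hard-coded file name (none of these are cdap-ingest code):

// Minimal sketch of the pattern above; State, GSON, and "state.json"
// are illustrative assumptions, not part of cdap-ingest.
import com.google.gson.Gson;
import com.google.gson.stream.JsonWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

public class SaveStateSketch {
    static class State {
        long position;
        State(long position) { this.position = position; }
    }

    private static final Gson GSON = new Gson();

    public static void main(String[] args) throws IOException {
        // try-with-resources closes the JsonWriter even on failure,
        // mirroring the try/finally around jsonWriter.close() above.
        try (JsonWriter jsonWriter = new JsonWriter(
                new OutputStreamWriter(new FileOutputStream("state.json"), StandardCharsets.UTF_8))) {
            GSON.toJson(new State(42L), State.class, jsonWriter);
        }
        // state.json now contains: {"position":42}
    }
}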
use of com.google.gson.stream.JsonWriter in project cdap by caskdata.
the class Configuration method dumpConfiguration.
/**
 * Writes out all the parameters and their properties (final and resource) to
 * the given {@link Writer}.
 * The format of the output is:
 * { "properties" : [ {key1,value1,key1.isFinal,key1.resource}, {key2,value2,
 * key2.isFinal,key2.resource}... ] }
 * It does not output the parameters of the configuration object which is
 * loaded from an input stream.
 * @param config the configuration to dump
 * @param out the Writer to write to
 * @throws IOException if writing to the output fails
 */
public static void dumpConfiguration(Configuration config, Writer out) throws IOException {
    JsonWriter dumpGenerator = new JsonWriter(out);
    dumpGenerator.beginObject();
    dumpGenerator.name("properties");
    dumpGenerator.beginArray();
    dumpGenerator.flush();
    synchronized (config) {
        for (Map.Entry<Object, Object> item : config.getProps().entrySet()) {
            dumpGenerator.beginObject();
            dumpGenerator.name("key").value((String) item.getKey());
            dumpGenerator.name("value").value(config.get((String) item.getKey()));
            dumpGenerator.name("isFinal").value(config.finalParameters.contains(item.getKey()));
            dumpGenerator.name("resource").value(config.updatingResource.get(item.getKey()));
            dumpGenerator.endObject();
        }
    }
    dumpGenerator.endArray();
    dumpGenerator.endObject();
    dumpGenerator.flush();
}
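The method leans on JsonWriter's strict nesting checks: every beginObject()/beginArray() must be balanced by a matching end call, and each name() must be followed by a value. A self-contained sketch of the same structure, with one hard-coded entry standing in for a real Configuration (the key and values are illustrative assumptions):

// Sketch of the dump format above with hard-coded data; not CDAP code.
import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.io.StringWriter;

public class DumpSketch {
    public static void main(String[] args) throws IOException {
        StringWriter out = new StringWriter();
        JsonWriter dump = new JsonWriter(out);
        dump.beginObject();
        dump.name("properties");
        dump.beginArray();
        dump.beginObject();
        dump.name("key").value("fs.defaultFS");          // hypothetical property
        dump.name("value").value("hdfs://localhost:9000");
        dump.name("isFinal").value(false);
        dump.name("resource").value("core-site.xml");
        dump.endObject();
        dump.endArray();
        dump.endObject();
        dump.flush();
        // Prints: {"properties":[{"key":"fs.defaultFS","value":"hdfs://localhost:9000","isFinal":false,"resource":"core-site.xml"}]}
        System.out.println(out);
    }
}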
use of com.google.gson.stream.JsonWriter in project hive by apache.
the class LineageLogger method run.
@Override
public void run(HookContext hookContext) {
    assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);
    QueryPlan plan = hookContext.getQueryPlan();
    Index index = hookContext.getIndex();
    SessionState ss = SessionState.get();
    if (ss != null && index != null && OPERATION_NAMES.contains(plan.getOperationName()) && !plan.isExplain()) {
        try {
            StringBuilderWriter out = new StringBuilderWriter(1024);
            JsonWriter writer = new JsonWriter(out);
            String queryStr = plan.getQueryStr().trim();
            writer.beginObject();
            writer.name("version").value(FORMAT_VERSION);
            HiveConf conf = ss.getConf();
            boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
            if (!testMode) {
                // Don't emit user/timestamp info in test mode,
                // so that the test golden output file is fixed.
                long queryTime = plan.getQueryStartTime().longValue();
                if (queryTime == 0) {
                    queryTime = System.currentTimeMillis();
                }
                long duration = System.currentTimeMillis() - queryTime;
                writer.name("user").value(hookContext.getUgi().getUserName());
                writer.name("timestamp").value(queryTime / 1000);
                writer.name("duration").value(duration);
                writer.name("jobIds");
                writer.beginArray();
                List<TaskRunner> tasks = hookContext.getCompleteTaskList();
                if (tasks != null && !tasks.isEmpty()) {
                    for (TaskRunner task : tasks) {
                        String jobId = task.getTask().getJobID();
                        if (jobId != null) {
                            writer.value(jobId);
                        }
                    }
                }
                writer.endArray();
            }
            writer.name("engine").value(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE));
            writer.name("database").value(ss.getCurrentDatabase());
            writer.name("hash").value(getQueryHash(queryStr));
            writer.name("queryText").value(queryStr);
            List<Edge> edges = getEdges(plan, index);
            Set<Vertex> vertices = getVertices(edges);
            writeEdges(writer, edges, hookContext.getConf());
            writeVertices(writer, vertices);
            writer.endObject();
            writer.close();
            // Log the lineage info
            String lineage = out.toString();
            if (testMode) {
                // Log to console
                log(lineage);
            } else {
                // In non-test mode, emit to a log file,
                // which can be different from the normal hive.log.
                // For example, using NoDeleteRollingFileAppender to
                // log to some file with a different rolling policy.
                LOG.info(lineage);
            }
        } catch (Throwable t) {
            // Don't fail the query just because of any lineage issue.
            log("Failed to log lineage graph, query is not affected\n" + org.apache.hadoop.util.StringUtils.stringifyException(t));
        }
    }
}
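The jobIds block above shows a common JsonWriter idiom: name() followed by beginArray()/endArray(), which cleanly emits an empty array when there is nothing to write. A standalone sketch of just that idiom, with a hard-coded list standing in for hookContext.getCompleteTaskList():

// Sketch of the jobIds idiom; the list contents are hypothetical, not Hive code.
import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.List;

public class JobIdsSketch {
    public static void main(String[] args) throws IOException {
        List<String> jobIds = Arrays.asList("job_1", null, "job_2");
        StringWriter out = new StringWriter();
        JsonWriter writer = new JsonWriter(out);
        writer.beginObject();
        writer.name("jobIds");
        writer.beginArray();
        for (String jobId : jobIds) {
            if (jobId != null) { // skip null IDs, as the hook does
                writer.value(jobId);
            }
        }
        writer.endArray();
        writer.endObject();
        writer.close();
        System.out.println(out); // {"jobIds":["job_1","job_2"]}
    }
}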
use of com.google.gson.stream.JsonWriter in project gradle by gradle.
the class ModuleMetadataFileGenerator method generateTo.
public void generateTo(PublicationInternal publication, Collection<? extends PublicationInternal> publications, Writer writer) throws IOException {
    // Collect a map from component to coordinates. This might be better to move to the component or some publications model
    Map<SoftwareComponent, ComponentData> coordinates = new HashMap<SoftwareComponent, ComponentData>();
    collectCoordinates(publications, coordinates);
    // Collect a map from component to its owning component. This might be better to move to the component or some publications model
    Map<SoftwareComponent, SoftwareComponent> owners = new HashMap<SoftwareComponent, SoftwareComponent>();
    collectOwners(publications, owners);
    // Write the output
    JsonWriter jsonWriter = new JsonWriter(writer);
    jsonWriter.setHtmlSafe(false);
    jsonWriter.setIndent(" ");
    writeComponentWithVariants(publication, publication.getComponent(), coordinates, owners, jsonWriter);
    jsonWriter.flush();
    writer.append('\n');
}
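Two writer settings do the formatting work here: setHtmlSafe(false) stops JsonWriter escaping characters like < and > into \u003c-style sequences, and a non-empty setIndent string switches it to pretty-printing. A short sketch of both effects, independent of the Gradle model types (the field name and indent width are illustrative choices, not Gradle metadata):

// Sketch of setHtmlSafe/setIndent behavior; not Gradle code.
import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.io.StringWriter;

public class IndentSketch {
    public static void main(String[] args) throws IOException {
        StringWriter out = new StringWriter();
        JsonWriter jsonWriter = new JsonWriter(out);
        jsonWriter.setHtmlSafe(false); // keep "<" and ">" literal instead of \u003c / \u003e
        jsonWriter.setIndent("  ");    // non-empty indent enables pretty-printing
        jsonWriter.beginObject();
        jsonWriter.name("greeting").value("<hello>"); // hypothetical field
        jsonWriter.endObject();
        jsonWriter.flush();
        out.append('\n'); // the generator appends a trailing newline the same way
        // Prints:
        // {
        //   "greeting": "<hello>"
        // }
        System.out.print(out);
    }
}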
use of com.google.gson.stream.JsonWriter in project linuxtools by eclipse.
the class TestOSIORestPostNewComment method testPostNewComment.
@Test
public void testPostNewComment() throws Exception {
    TestData testData = new TestData();
    TestUtils.initSpaces(requestProvider, testData);
    OSIORestClient client = connector.getClient(repository, requestProvider);
    OSIORestConfiguration config = client.getConfiguration(repository, new NullOperationMonitor());
    config.setSpaces(testData.spaceMap);
    connector.setConfiguration(config);
    RepositoryLocation location = client.getClient().getLocation();
    location.setProperty(IOSIORestConstants.REPOSITORY_AUTH_ID, "user");
    location.setProperty(IOSIORestConstants.REPOSITORY_AUTH_TOKEN, "xxxxxxTokenxxxxxx");
    AbstractTaskDataHandler taskDataHandler = connector.getTaskDataHandler();
    TaskAttributeMapper mapper = taskDataHandler.getAttributeMapper(repository);
    TaskData taskData = new TaskData(mapper, repository.getConnectorKind(), repository.getRepositoryUrl(), "");
    OSIORestTaskSchema.getDefault().initialize(taskData);
    Set<TaskAttribute> attributes = new LinkedHashSet<>();
    TaskAttribute newComment = taskData.getRoot().getAttribute(OSIORestTaskSchema.getDefault().NEW_COMMENT.getKey());
    newComment.setValue("This is a test comment");
    attributes.add(newComment);
    OSIORestPostNewCommentTask data = new OSIORestPostNewCommentTask(client.getClient(), taskData, attributes);
    OSIORestPostNewCommentTask.TaskAttributeTypeAdapter adapter = data.new TaskAttributeTypeAdapter(location);
    OSIORestPostNewCommentTask.OldAttributes oldAttributes = data.new OldAttributes(attributes);
    StringWriter s = new StringWriter();
    JsonWriter writer = new JsonWriter(s);
    adapter.write(writer, oldAttributes);
    assertEquals("{\"data\":{\"attributes\":{\"body\":\"This is a test comment\",\"markup\":\"Markdown\"},\"type\":\"comments\"},\"included\":[]}", s.getBuffer().toString());
}
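The adapter under test is a Gson TypeAdapter, whose write(JsonWriter, T) method receives the same streaming writer the other examples use directly. A minimal write-only adapter in the same spirit, with a hypothetical Comment type standing in for the Mylyn task attributes (this is a sketch, not the OSIO adapter):

// Hypothetical payload type and adapter; the real test serializes Mylyn TaskAttributes.
import com.google.gson.TypeAdapter;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.io.StringWriter;

public class AdapterSketch {
    static class Comment {
        final String body;
        Comment(String body) { this.body = body; }
    }

    static class CommentAdapter extends TypeAdapter<Comment> {
        @Override
        public void write(JsonWriter out, Comment value) throws IOException {
            out.beginObject();
            out.name("body").value(value.body);
            out.name("markup").value("Markdown"); // fixed value, as in the expected JSON above
            out.endObject();
        }

        @Override
        public Comment read(JsonReader in) throws IOException {
            throw new UnsupportedOperationException("write-only sketch");
        }
    }

    public static void main(String[] args) throws IOException {
        StringWriter s = new StringWriter();
        new CommentAdapter().write(new JsonWriter(s), new Comment("This is a test comment"));
        System.out.println(s); // {"body":"This is a test comment","markup":"Markdown"}
    }
}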