Use of com.google.gson.stream.JsonWriter in project hive by apache.
The class LineageLogger, method run.
@Override
public void run(HookContext hookContext) {
  assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);
  QueryPlan plan = hookContext.getQueryPlan();
  Index index = hookContext.getIndex();
  SessionState ss = SessionState.get();
  if (ss != null && index != null
      && OPERATION_NAMES.contains(plan.getOperationName())
      && !plan.isExplain()) {
    try {
      StringBuilderWriter out = new StringBuilderWriter(1024);
      JsonWriter writer = new JsonWriter(out);
      String queryStr = plan.getQueryStr().trim();
      writer.beginObject();
      writer.name("version").value(FORMAT_VERSION);
      HiveConf conf = ss.getConf();
      boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
      if (!testMode) {
        // Don't emit user/timestamp info in test mode,
        // so that the test golden output file is fixed.
        long queryTime = plan.getQueryStartTime().longValue();
        if (queryTime == 0) queryTime = System.currentTimeMillis();
        long duration = System.currentTimeMillis() - queryTime;
        writer.name("user").value(hookContext.getUgi().getUserName());
        writer.name("timestamp").value(queryTime / 1000);
        writer.name("duration").value(duration);
        writer.name("jobIds");
        writer.beginArray();
        List<TaskRunner> tasks = hookContext.getCompleteTaskList();
        if (tasks != null && !tasks.isEmpty()) {
          for (TaskRunner task : tasks) {
            String jobId = task.getTask().getJobID();
            if (jobId != null) {
              writer.value(jobId);
            }
          }
        }
        writer.endArray();
      }
      writer.name("engine").value(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE));
      writer.name("database").value(ss.getCurrentDatabase());
      writer.name("hash").value(getQueryHash(queryStr));
      writer.name("queryText").value(queryStr);
      List<Edge> edges = getEdges(plan, index);
      Set<Vertex> vertices = getVertices(edges);
      writeEdges(writer, edges);
      writeVertices(writer, vertices);
      writer.endObject();
      writer.close();
      // Log the lineage info
      String lineage = out.toString();
      if (testMode) {
        // Log to console
        log(lineage);
      } else {
        // In non-test mode, emit to a log file,
        // which can be different from the normal hive.log.
        // For example, using NoDeleteRollingFileAppender to
        // log to some file with a different rolling policy.
        LOG.info(lineage);
      }
    } catch (Throwable t) {
      // Don't fail the query just because of any lineage issue.
      log("Failed to log lineage graph, query is not affected\n"
          + org.apache.hadoop.util.StringUtils.stringifyException(t));
    }
  }
}
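The hook above writes its JSON incrementally through JsonWriter's streaming calls instead of building a DOM first. Below is a minimal, self-contained sketch of that same beginObject/name/value/beginArray pattern; the field names and values are placeholders for illustration, not Hive's actual output:

import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.io.StringWriter;

public class LineageJsonSketch {
  public static void main(String[] args) throws IOException {
    StringWriter out = new StringWriter();
    JsonWriter writer = new JsonWriter(out);
    writer.beginObject();                  // {
    writer.name("version").value("1.0");   //   "version":"1.0",
    writer.name("jobIds");
    writer.beginArray();                   //   "jobIds":[
    writer.value("job_0001");              //     "job_0001"
    writer.endArray();                     //   ]
    writer.endObject();                    // }
    writer.close();                        // also validates the document is complete
    System.out.println(out);               // {"version":"1.0","jobIds":["job_0001"]}
  }
}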
Use of com.google.gson.stream.JsonWriter in project buck by facebook.
The class WorkerProcessProtocolZeroTest, method testSendCommand.
@Test
public void testSendCommand() throws IOException {
  StringWriter jsonSentToWorkerProcess = new StringWriter();
  WorkerProcessProtocol protocol = new WorkerProcessProtocolZero(
      fakeProcessExecutor,
      fakeLaunchedProcess,
      new JsonWriter(jsonSentToWorkerProcess),
      dummyJsonReader,
      newTempFile());
  int messageID = 123;
  Path argsPath = Paths.get("args");
  Path stdoutPath = Paths.get("stdout");
  Path stderrPath = Paths.get("stderr");
  protocol.sendCommand(messageID, WorkerProcessCommand.of(argsPath, stdoutPath, stderrPath));
  String expectedJson = String.format(
      "{\"id\":%d,\"type\":\"command\","
          + "\"args_path\":\"%s\",\"stdout_path\":\"%s\",\"stderr_path\":\"%s\"}",
      messageID, argsPath.toString(), stdoutPath.toString(), stderrPath.toString());
  assertThat(jsonSentToWorkerProcess.toString(), Matchers.containsString(expectedJson));
}
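For reference, the payload the assertion expects can be produced with the same streaming calls. A hypothetical helper, assuming only Gson (writeCommand and its parameters are illustrative, not Buck's API):

import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.io.StringWriter;

public class CommandJsonSketch {
  // Hypothetical helper: emits the same shape as the test's expectedJson.
  static String writeCommand(int id, String argsPath, String stdoutPath, String stderrPath)
      throws IOException {
    StringWriter out = new StringWriter();
    JsonWriter writer = new JsonWriter(out);
    writer.beginObject();
    writer.name("id").value(id);           // JsonWriter.value(long) accepts the int id
    writer.name("type").value("command");
    writer.name("args_path").value(argsPath);
    writer.name("stdout_path").value(stdoutPath);
    writer.name("stderr_path").value(stderrPath);
    writer.endObject();
    writer.flush();
    return out.toString();                 // {"id":123,"type":"command",...}
  }

  public static void main(String[] args) throws IOException {
    System.out.println(writeCommand(123, "args", "stdout", "stderr"));
  }
}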
Use of com.google.gson.stream.JsonWriter in project buck by facebook.
The class WorkerProcessProtocolZeroTest, method setUp.
@Before
public void setUp() throws IOException {
  ProcessExecutorParams fakeParams = ProcessExecutorParams.ofCommand("");
  fakeProcess = new FakeProcess(0);
  fakeProcessExecutor = new FakeProcessExecutor(ImmutableMap.of(fakeParams, fakeProcess));
  fakeLaunchedProcess = fakeProcessExecutor.launchProcess(fakeParams);
  dummyJsonWriter = new JsonWriter(new StringWriter());
  dummyJsonReader = new JsonReader(new StringReader(""));
}
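The dummy reader and writer are safe to build over empty or throwaway buffers because JsonReader and JsonWriter perform no I/O at construction time. A small sketch of that property, using only Gson and the JDK:

import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import java.io.StringReader;
import java.io.StringWriter;

public class DummyStreamsSketch {
  public static void main(String[] args) {
    // Construction alone performs no parsing or writing, so an empty
    // source is fine as long as nothing ever pulls a token from it.
    JsonWriter dummyWriter = new JsonWriter(new StringWriter());
    JsonReader dummyReader = new JsonReader(new StringReader(""));
    System.out.println("constructed: " + dummyWriter + ", " + dummyReader);
  }
}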
Use of com.google.gson.stream.JsonWriter in project buck by facebook.
The class WorkerProcessProtocolZeroTest, method testClose.
@Test
public void testClose() throws IOException {
  StringWriter jsonSentToWorkerProcess = new StringWriter();
  JsonWriter writer = new JsonWriter(jsonSentToWorkerProcess);
  // Write an opening bracket now, so the writer doesn't throw due to
  // invalid JSON when it goes to write the closing bracket.
  writer.beginArray();
  // Add an opening bracket and consume it now, so that the reader doesn't
  // throw due to invalid JSON when it goes to read the closing bracket.
  JsonReader reader = new JsonReader(new StringReader("[]"));
  reader.beginArray();
  WorkerProcessProtocol protocol = new WorkerProcessProtocolZero(
      fakeProcessExecutor, fakeLaunchedProcess, writer, reader, newTempFile());
  protocol.close();
  String expectedJson = "]";
  assertThat(jsonSentToWorkerProcess.toString(), Matchers.endsWith(expectedJson));
  assertTrue(fakeProcess.isDestroyed());
}
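The pre-opened brackets matter because JsonWriter validates structure eagerly: close() throws if the document is incomplete. A standalone sketch of that behavior (the exception message wording is approximate):

import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.io.StringWriter;

public class CloseBehaviorSketch {
  public static void main(String[] args) throws IOException {
    // Balanced document: endArray() before close() succeeds.
    StringWriter ok = new StringWriter();
    JsonWriter w1 = new JsonWriter(ok);
    w1.beginArray();
    w1.endArray();
    w1.close();
    System.out.println(ok);  // []

    // Unbalanced document: closing mid-array throws IOException
    // ("Incomplete document"), which is why the test opens the
    // array up front before letting the protocol write "]".
    JsonWriter w2 = new JsonWriter(new StringWriter());
    w2.beginArray();
    try {
      w2.close();
    } catch (IOException e) {
      System.out.println("close() rejected the incomplete document: " + e.getMessage());
    }
  }
}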
Use of com.google.gson.stream.JsonWriter in project iosched by google.
The class APIUpdater, method run.
public void run(boolean force, boolean obfuscate, OutputStream optionalOutput) throws IOException {
  RemoteFilesEntityFetcherFactory.setBuilder(new RemoteFilesEntityFetcherFactory.FetcherBuilder() {
    String[] filenames;

    @Override
    public RemoteFilesEntityFetcherFactory.FetcherBuilder setSourceFiles(String... filenames) {
      this.filenames = filenames;
      return this;
    }

    @Override
    public EntityFetcher build() {
      return new CloudStorageRemoteFilesEntityFetcher(filenames);
    }
  });
  UpdateRunLogger logger = new UpdateRunLogger();
  CloudFileManager fileManager = new CloudFileManager();
  logger.startTimer();
  JsonDataSources sources = new ExtraInput().fetchAllDataSources();
  logger.stopTimer("fetchExtraAPI");
  logger.startTimer();
  sources.putAll(new VendorStaticInput().fetchAllDataSources());
  logger.stopTimer("fetchVendorStaticAPI");
  logger.startTimer();
  JsonObject newData = new DataExtractor(obfuscate).extractFromDataSources(sources);
  logger.stopTimer("extractOurData");
  logger.startTimer();
  byte[] newHash = CloudFileManager.calulateHash(newData);
  logger.stopTimer("calculateHash");
  // Compare the current data hash with the one from the previous run:
  logger.startTimer();
  if (!force && isUpToDate(newHash, logger)) {
    logger.logNoopRun();
    return;
  }
  logger.stopTimer("compareHash");
  logger.startTimer();
  ManifestData dataProduction = extractManifestData(fileManager.readProductionManifest(), null);
  //ManifestData dataStaging = extractManifestData(fileManager.readStagingManifest(), dataProduction);
  logger.stopTimer("readManifest");
  JsonWriter optionalOutputWriter = null;
  logger.startTimer();
  // Upload a new version of the sessions file.
  if (optionalOutput != null) {
    // Send data to the output stream.
    Writer writer = Channels.newWriter(Channels.newChannel(optionalOutput), "UTF-8");
    optionalOutputWriter = new JsonWriter(writer);
    optionalOutputWriter.setIndent(" ");
    new Gson().toJson(newData, optionalOutputWriter);
    optionalOutputWriter.flush();
  } else {
    // Save data to Cloud Storage.
    fileManager.createOrUpdate(dataProduction.sessionsFilename, newData, false);
  }
  logger.stopTimer("uploadNewSessionsFile");
  // Check data consistency.
  logger.startTimer();
  DataCheck checker = new DataCheck(fileManager);
  CheckResult result = checker.check(sources, newData, dataProduction);
  if (!result.failures.isEmpty()) {
    reportDataCheckFailures(result, optionalOutput);
  }
  logger.stopTimer("runDataCheck");
  if (optionalOutput == null) {
    // Only update the manifests and log if saving to persistent storage.
    logger.startTimer();
    // Create new manifests.
    JsonObject newProductionManifest = new JsonObject();
    newProductionManifest.add("format", new JsonPrimitive(Config.MANIFEST_FORMAT_VERSION));
    newProductionManifest.add("data_files", dataProduction.dataFiles);
    JsonObject newStagingManifest = new JsonObject();
    newStagingManifest.add("format", new JsonPrimitive(Config.MANIFEST_FORMAT_VERSION));
    // newStagingManifest.add("data_files", dataStaging.dataFiles);
    // Save the manifests to Cloud Storage.
    fileManager.createOrUpdateProductionManifest(newProductionManifest);
    fileManager.createOrUpdateStagingManifest(newStagingManifest);
    try {
      // Notify the production GCM server:
      new GCMPing().notifyGCMServer(Config.GCM_URL, Config.GCM_API_KEY);
    } catch (Throwable t) {
      Logger.getLogger(APIUpdater.class.getName()).log(Level.SEVERE, "Error while pinging GCM server", t);
    }
    logger.stopTimer("uploadManifest");
    logger.logUpdateRun(dataProduction.majorVersion, dataProduction.minorVersion,
        dataProduction.sessionsFilename, newHash, newData, force);
  }
}
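The optionalOutput branch shows the standard way to stream a Gson tree to an arbitrary OutputStream: wrap it in a Writer, hand that to a JsonWriter, and let Gson.toJson(JsonElement, JsonWriter) drive it. A minimal sketch under that assumption (writeTo and the "v1" version string are illustrative, not iosched's):

import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

public class StreamJsonSketch {
  // Streams a JsonObject to any OutputStream with pretty-printed indentation.
  static void writeTo(JsonObject data, OutputStream out) throws IOException {
    Writer writer = new OutputStreamWriter(out, StandardCharsets.UTF_8);
    JsonWriter jsonWriter = new JsonWriter(writer);
    jsonWriter.setIndent("  ");
    new Gson().toJson(data, jsonWriter);
    jsonWriter.flush();  // flush rather than close: the caller owns the stream
  }

  public static void main(String[] args) throws IOException {
    JsonObject manifest = new JsonObject();
    manifest.add("format", new JsonPrimitive("v1"));  // placeholder version string
    writeTo(manifest, System.out);
  }
}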