Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode in project zeppelin by apache.
The class SparkRInterpreter, method interpret:
@Override
public InterpreterResult interpret(String lines, InterpreterContext interpreterContext) {
  SparkInterpreter sparkInterpreter = getSparkInterpreter();
  sparkInterpreter.populateSparkWebUrl(interpreterContext);
  String jobGroup = Utils.buildJobGroupId(interpreterContext);
  sparkInterpreter.getSparkContext().setJobGroup(jobGroup, "Zeppelin", false);
  String imageWidth = getProperty("zeppelin.R.image.width");
  String[] sl = lines.split("\n");
  if (sl[0].contains("{") && sl[0].contains("}")) {
    String jsonConfig = sl[0].substring(sl[0].indexOf("{"), sl[0].indexOf("}") + 1);
    ObjectMapper m = new ObjectMapper();
    try {
      JsonNode rootNode = m.readTree(jsonConfig);
      JsonNode imageWidthNode = rootNode.path("imageWidth");
      if (!imageWidthNode.isMissingNode()) {
        imageWidth = imageWidthNode.textValue();
      }
    } catch (Exception e) {
      logger.warn("Can not parse json config: " + jsonConfig, e);
    } finally {
      lines = lines.replace(jsonConfig, "");
    }
  }
  String setJobGroup = "";
  // assign setJobGroup to dummy__, otherwise it would print NULL for this statement
  if (Utils.isSpark2()) {
    setJobGroup = "dummy__ <- setJobGroup(\"" + jobGroup + "\", \"zeppelin sparkR job group description\", TRUE)";
  } else if (getSparkInterpreter().getSparkVersion().newerThanEquals(SparkVersion.SPARK_1_5_0)) {
    setJobGroup = "dummy__ <- setJobGroup(sc, \"" + jobGroup + "\", \"zeppelin sparkR job group description\", TRUE)";
  }
  logger.debug("set JobGroup:" + setJobGroup);
  lines = setJobGroup + "\n" + lines;
  try {
    // render output with knitr
    if (useKnitr()) {
      zeppelinR.setInterpreterOutput(null);
      zeppelinR.set(".zcmd", "\n```{r " + renderOptions + "}\n" + lines + "\n```");
      zeppelinR.eval(".zres <- knit2html(text=.zcmd)");
      String html = zeppelinR.getS0(".zres");
      RDisplay rDisplay = render(html, imageWidth);
      return new InterpreterResult(rDisplay.code(), rDisplay.type(), rDisplay.content());
    } else {
      // alternatively, stream the output (without knitr)
      zeppelinR.setInterpreterOutput(interpreterContext.out);
      zeppelinR.eval(lines);
      return new InterpreterResult(InterpreterResult.Code.SUCCESS, "");
    }
  } catch (Exception e) {
    logger.error("Exception while connecting to R", e);
    return new InterpreterResult(InterpreterResult.Code.ERROR, e.getMessage());
  }
}
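The Jackson pattern worth isolating here is how an optional inline JSON config is pulled off the first line with readTree() and probed with path()/isMissingNode(), which avoids null checks for absent keys. Below is a minimal, self-contained sketch of that pattern against the plain com.fasterxml.jackson.databind artifact (the flink-shaded package is the same API, relocated); the class name and sample input are invented for illustration.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class InlineConfigDemo {
  public static void main(String[] args) throws Exception {
    // hypothetical first line of a paragraph: inline config followed by R code
    String firstLine = "{\"imageWidth\": \"600px\"} plot(cars)";
    String imageWidth = "100%"; // default
    int open = firstLine.indexOf('{');
    int close = firstLine.indexOf('}');
    if (open >= 0 && close > open) {
      String jsonConfig = firstLine.substring(open, close + 1);
      JsonNode rootNode = new ObjectMapper().readTree(jsonConfig);
      // path() never returns null; a missing key yields a MissingNode,
      // so isMissingNode() is the only check needed
      JsonNode imageWidthNode = rootNode.path("imageWidth");
      if (!imageWidthNode.isMissingNode()) {
        imageWidth = imageWidthNode.textValue();
      }
    }
    System.out.println(imageWidth); // prints 600px
  }
}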
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode in project hive by apache.
The class TestCodahaleMetrics, method testFileReporting:
@Test
public void testFileReporting() throws Exception {
  int runs = 5;
  for (int i = 0; i < runs; i++) {
    MetricsFactory.getInstance().incrementCounter("count2");
  }
  byte[] jsonData = MetricsTestUtils.getFileData(jsonReportFile.getAbsolutePath(), 2000, 3);
  ObjectMapper objectMapper = new ObjectMapper();
  JsonNode rootNode = objectMapper.readTree(jsonData);
  JsonNode countersNode = rootNode.path("counters");
  JsonNode methodCounterNode = countersNode.path("count2");
  JsonNode countNode = methodCounterNode.path("count");
  Assert.assertEquals(5, countNode.asInt());
}
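What the test relies on is that chained path() calls walk a nested JSON report without intermediate null checks, and that asInt() returns 0 for a missing node, so a wrong path fails the assertion rather than throwing a NullPointerException. A hedged sketch of that traversal, with a made-up JSON literal standing in for the Codahale file reporter's output:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class CounterReportDemo {
  public static void main(String[] args) throws Exception {
    // invented report shaped like {"counters": {"count2": {"count": 5}}}
    String report = "{\"counters\": {\"count2\": {\"count\": 5}}}";
    JsonNode rootNode = new ObjectMapper().readTree(report);
    // each path() step returns a MissingNode (never null) if the key is absent
    int count = rootNode.path("counters").path("count2").path("count").asInt();
    System.out.println(count); // prints 5
  }
}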
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode in project pinot by linkedin.
The class AutoLoadPinotMetricsService, method loadDatasets:
/**
 * Reads all table names in Pinot and loads their schemas.
 * @throws IOException
 */
private void loadDatasets() throws IOException {
  JsonNode tables = autoLoadPinotMetricsUtils.getAllTablesFromPinot();
  for (JsonNode table : tables) {
    String dataset = table.asText();
    Schema schema = autoLoadPinotMetricsUtils.getSchemaFromPinot(dataset);
    if (schema != null) {
      if (!autoLoadPinotMetricsUtils.verifySchemaCorrectness(schema)) {
        LOG.info("Skipping {} due to incorrect schema", dataset);
      } else {
        allDatasets.add(dataset);
        allSchemas.put(dataset, schema);
      }
    }
  }
}
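The for-each loop works because JsonNode implements Iterable&lt;JsonNode&gt;, so an array node can be iterated directly and each element converted with asText(). A small self-contained sketch of just that iteration (the table names are invented for the demo):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class TableListDemo {
  public static void main(String[] args) throws Exception {
    // a JSON array node, like the "tables" node returned from the Pinot controller
    JsonNode tables = new ObjectMapper().readTree("[\"pageviews\", \"clicks\"]");
    for (JsonNode table : tables) {
      String dataset = table.asText();
      System.out.println(dataset);
    }
  }
}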
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode in project pinot by linkedin.
The class AutoLoadPinotMetricsUtils, method getAllTablesFromPinot:
public JsonNode getAllTablesFromPinot() throws IOException {
  HttpGet tablesReq = new HttpGet(PINOT_TABLES_ENDPOINT);
  LOG.info("Retrieving datasets: {}", tablesReq);
  CloseableHttpResponse tablesRes = pinotControllerClient.execute(pinotControllerHost, tablesReq);
  JsonNode tables = null;
  try {
    if (tablesRes.getStatusLine().getStatusCode() != 200) {
      throw new IllegalStateException(tablesRes.getStatusLine().toString());
    }
    InputStream tablesContent = tablesRes.getEntity().getContent();
    tables = new ObjectMapper().readTree(tablesContent).get("tables");
  } catch (Exception e) {
    LOG.error("Exception in loading collections", e);
  } finally {
    if (tablesRes.getEntity() != null) {
      EntityUtils.consume(tablesRes.getEntity());
    }
    tablesRes.close();
  }
  return tables;
}
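One caveat in the method above is that the response is released manually in a finally block and the method can return null when parsing fails. The same fetch can be written with try-with-resources so the client and response are closed even on error; the sketch below assumes Apache HttpClient 4.x, uses a placeholder endpoint parameter, and swaps a default client for the Pinot controller wiring of the original.

import java.io.InputStream;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class TablesFetchDemo {
  public static JsonNode fetchTables(String endpoint) throws Exception {
    try (CloseableHttpClient client = HttpClients.createDefault();
         CloseableHttpResponse res = client.execute(new HttpGet(endpoint))) {
      if (res.getStatusLine().getStatusCode() != 200) {
        throw new IllegalStateException(res.getStatusLine().toString());
      }
      try (InputStream content = res.getEntity().getContent()) {
        // get() (unlike path()) returns null if the "tables" key is absent
        return new ObjectMapper().readTree(content).get("tables");
      }
    }
  }
}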
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode in project jmxtrans by jmxtrans.
The class JsonUtils, method parseProcess:
/**
 * Uses Jackson to load JSON configuration from a File into a full object-tree
 * representation of that JSON.
 */
public JmxProcess parseProcess(File file) throws IOException {
  JsonNode jsonNode = mapper.readTree(file);
  JmxProcess jmx = mapper.treeToValue(jsonNode, JmxProcess.class);
  jmx.setName(file.getName());
  return jmx;
}
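The tree-then-bind pattern shown here (readTree() first, treeToValue() second) lets a caller inspect or adjust the JsonNode before mapping it onto a POJO. A minimal sketch of the same two-step binding, with a hypothetical Config class standing in for JmxProcess:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class TreeBindDemo {
  // stand-in POJO; Jackson binds its public fields by name
  public static class Config {
    public String name;
    public int port;
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // read into a tree first, so it could be inspected or rewritten here
    JsonNode jsonNode = mapper.readTree("{\"name\": \"demo\", \"port\": 4004}");
    // then bind the tree onto the POJO
    Config config = mapper.treeToValue(jsonNode, Config.class);
    System.out.println(config.name + ":" + config.port); // prints demo:4004
  }
}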