Example usage of com.fasterxml.jackson.databind.JsonNode in the Buck project (Facebook):
class TargetsCommandIntegrationTest, method testJsonOutputWithShowCellPath.
@Test
public void testJsonOutputWithShowCellPath() throws IOException {
  ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "output_path", tmp);
  workspace.setUp();
  // Run `buck targets --json --show-cell-path //:test` and capture its stdout.
  ProcessResult result = workspace.runBuckCommand("targets", "--json", "--show-cell-path", "//:test");
  ObjectMapper objectMapper = ObjectMappers.newDefaultInstance();
  // Parse the observed JSON.
  JsonNode observed = objectMapper.readTree(objectMapper.getFactory().createParser(result.getStdout()));
  assertTrue(observed.isArray());
  JsonNode targetNode = observed.get(0);
  assertTrue(targetNode.isObject());
  JsonNode cellPath = targetNode.get("buck.cell_path");
  assertNotNull(cellPath);
  // JUnit convention is assertEquals(expected, actual); the original had the
  // arguments swapped, which produces misleading "expected X but was Y" messages.
  assertEquals(MorePaths.pathWithPlatformSeparators(tmp.getRoot().toRealPath()), cellPath.asText());
}
Example usage of com.fasterxml.jackson.databind.JsonNode in the Druid project (druid-io):
class JSONToLowerParser, method parse.
/**
 * Parses a single JSON row into a map whose keys are lower-cased field names.
 * Fields listed in {@code exclude} (already lower-cased) are skipped; array
 * values are flattened element-by-element through {@code valueFunction}.
 *
 * NOTE(review): {@code toLowerCase()} uses the default locale — presumably the
 * {@code exclude} set was built the same way; confirm if locale-sensitive
 * environments (e.g. Turkish) are a concern.
 *
 * @param input raw JSON text for one row
 * @return field-name (lower-cased) to parsed value
 * @throws ParseException if the input cannot be read as JSON
 */
@Override
public Map<String, Object> parse(String input) {
  try {
    final Map<String, Object> result = new LinkedHashMap<>();
    final JsonNode root = objectMapper.readTree(input);
    // Either iterate every field present in the document, or only the
    // explicitly configured field names.
    final Iterator<String> fields =
        fieldNames == null ? root.fieldNames() : fieldNames.iterator();
    while (fields.hasNext()) {
      final String fieldName = fields.next();
      final String lowerName = fieldName.toLowerCase();
      if (exclude.contains(lowerName)) {
        continue;
      }
      final JsonNode valueNode = root.path(fieldName);
      if (valueNode.isArray()) {
        // Flatten array elements, dropping any that map to null.
        final List<Object> elements = Lists.newArrayListWithExpectedSize(valueNode.size());
        for (final JsonNode element : valueNode) {
          final Object mapped = valueFunction.apply(element);
          if (mapped != null) {
            elements.add(mapped);
          }
        }
        // difference from JSONParser parse()
        result.put(lowerName, elements);
      } else {
        final Object mapped = valueFunction.apply(valueNode);
        if (mapped != null) {
          // difference from JSONParser parse()
          result.put(lowerName, mapped);
        }
      }
    }
    return result;
  } catch (Exception e) {
    throw new ParseException(e, "Unable to parse row [%s]", input);
  }
}
Example usage of com.fasterxml.jackson.databind.JsonNode in the Elasticsearch project (Elastic):
class GeoIpCacheTests, method testCachesAndEvictsResults.
/**
 * Verifies that a size-1 GeoIpCache returns the cached node for repeated
 * lookups of the same key, and evicts it once a second key is inserted.
 */
public void testCachesAndEvictsResults() throws Exception {
  final GeoIpCache cache = new GeoIpCache(1);
  final NodeCache.Loader loader = key -> new IntNode(key);
  // First lookup populates the cache; the second must hit it (same instance).
  final JsonNode firstLookup = cache.get(1, loader);
  assertSame(firstLookup, cache.get(1, loader));
  // Inserting a second key overflows the single-entry cache, evicting key 1,
  // so a fresh lookup of key 1 yields a newly loaded instance.
  cache.get(2, loader);
  assertNotSame(firstLookup, cache.get(1, loader));
}
Example usage of com.fasterxml.jackson.databind.JsonNode in the Zeppelin project (Apache):
class SparkRInterpreter, method interpret.
/**
 * Runs the given R code through SparkR, optionally rendering output with knitr.
 *
 * An optional JSON config object on the first line (e.g. {"imageWidth": "..."})
 * is stripped from the code and used to override the image width. A Spark job
 * group is set so the paragraph's jobs can be tracked/cancelled.
 *
 * @param lines R source to evaluate (may begin with a JSON config object)
 * @param interpreterContext current paragraph context
 * @return rendered result, or an ERROR result if evaluation fails
 */
@Override
public InterpreterResult interpret(String lines, InterpreterContext interpreterContext) {
  SparkInterpreter sparkInterpreter = getSparkInterpreter();
  sparkInterpreter.populateSparkWebUrl(interpreterContext);
  String jobGroup = Utils.buildJobGroupId(interpreterContext);
  sparkInterpreter.getSparkContext().setJobGroup(jobGroup, "Zeppelin", false);
  String imageWidth = getProperty("zeppelin.R.image.width");
  String[] sl = lines.split("\n");
  // If the first line carries an inline JSON config, parse it for overrides
  // and remove it from the code before evaluation.
  if (sl[0].contains("{") && sl[0].contains("}")) {
    String jsonConfig = sl[0].substring(sl[0].indexOf("{"), sl[0].indexOf("}") + 1);
    ObjectMapper m = new ObjectMapper();
    try {
      JsonNode rootNode = m.readTree(jsonConfig);
      JsonNode imageWidthNode = rootNode.path("imageWidth");
      if (!imageWidthNode.isMissingNode())
        imageWidth = imageWidthNode.textValue();
    } catch (Exception e) {
      // Best-effort: a malformed config is logged and ignored.
      logger.warn("Can not parse json config: " + jsonConfig, e);
    } finally {
      lines = lines.replace(jsonConfig, "");
    }
  }
  String setJobGroup = "";
  // assign setJobGroup to dummy__, otherwise it would print NULL for this statement
  if (Utils.isSpark2()) {
    setJobGroup = "dummy__ <- setJobGroup(\"" + jobGroup + "\", \"zeppelin sparkR job group description\", TRUE)";
  } else if (getSparkInterpreter().getSparkVersion().newerThanEquals(SparkVersion.SPARK_1_5_0)) {
    // Spark 1.5+ requires the SparkContext handle as the first argument.
    setJobGroup = "dummy__ <- setJobGroup(sc, \"" + jobGroup + "\", \"zeppelin sparkR job group description\", TRUE)";
  }
  logger.debug("set JobGroup:" + setJobGroup);
  lines = setJobGroup + "\n" + lines;
  try {
    // render output with knitr
    if (useKnitr()) {
      zeppelinR.setInterpreterOutput(null);
      zeppelinR.set(".zcmd", "\n```{r " + renderOptions + "}\n" + lines + "\n```");
      zeppelinR.eval(".zres <- knit2html(text=.zcmd)");
      String html = zeppelinR.getS0(".zres");
      RDisplay rDisplay = render(html, imageWidth);
      return new InterpreterResult(rDisplay.code(), rDisplay.type(), rDisplay.content());
    } else {
      // alternatively, stream the output (without knitr)
      zeppelinR.setInterpreterOutput(interpreterContext.out);
      zeppelinR.eval(lines);
      return new InterpreterResult(InterpreterResult.Code.SUCCESS, "");
    }
  } catch (Exception e) {
    logger.error("Exception while connecting to R", e);
    return new InterpreterResult(InterpreterResult.Code.ERROR, e.getMessage());
  }
  // NOTE: the original ended with `finally { try { } catch (Exception e) { } }` —
  // an empty try with an unreachable catch. That dead code has been removed;
  // behavior is unchanged.
}
Example usage of com.fasterxml.jackson.databind.JsonNode in the Hive project (Apache):
class TestCodahaleMetrics, method testFileReporting.
@Test
public void testFileReporting() throws Exception {
  // Increment the counter a known number of times, then verify the value
  // reported in the JSON metrics file matches.
  int runs = 5;
  for (int i = 0; i < runs; i++) {
    MetricsFactory.getInstance().incrementCounter("count2");
  }
  // Poll the report file (2000ms interval, 3 retries) for its contents.
  byte[] jsonData = MetricsTestUtils.getFileData(jsonReportFile.getAbsolutePath(), 2000, 3);
  ObjectMapper objectMapper = new ObjectMapper();
  JsonNode rootNode = objectMapper.readTree(jsonData);
  JsonNode countersNode = rootNode.path("counters");
  JsonNode methodCounterNode = countersNode.path("count2");
  JsonNode countNode = methodCounterNode.path("count");
  // JUnit convention is assertEquals(expected, actual); the original had the
  // arguments swapped. Also compare against `runs` rather than a second
  // hard-coded 5 so the two stay in sync.
  Assert.assertEquals(runs, countNode.asInt());
}
Aggregations