Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper in project druid by druid-io, in the class DimensionTopNMetricSpecTest, method testSerdeLexicographicDimensionTopNMetricSpec:
@Test
public void testSerdeLexicographicDimensionTopNMetricSpec() throws IOException {
    DimensionTopNMetricSpec expectedMetricSpec = new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC);
    DimensionTopNMetricSpec expectedMetricSpec1 = new DimensionTopNMetricSpec("test", StringComparators.LEXICOGRAPHIC);
    String jsonSpec = "{\n" + " \"type\": \"dimension\"," + " \"ordering\": \"lexicographic\"\n" + "}";
    String jsonSpec1 = "{\n" + " \"type\": \"dimension\"," + " \"ordering\": \"lexicographic\",\n" + " \"previousStop\": \"test\"\n" + "}";
    ObjectMapper jsonMapper = new DefaultObjectMapper();
    TopNMetricSpec actualMetricSpec = jsonMapper.readValue(jsonMapper.writeValueAsString(jsonMapper.readValue(jsonSpec, TopNMetricSpec.class)), DimensionTopNMetricSpec.class);
    TopNMetricSpec actualMetricSpec1 = jsonMapper.readValue(jsonMapper.writeValueAsString(jsonMapper.readValue(jsonSpec1, TopNMetricSpec.class)), DimensionTopNMetricSpec.class);
    Assert.assertEquals(expectedMetricSpec, actualMetricSpec);
    Assert.assertEquals(expectedMetricSpec1, actualMetricSpec1);
}
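The pattern worth noting here is the readValue -> writeValueAsString -> readValue round trip, which checks that serialization and deserialization are inverses of each other. A minimal, self-contained sketch of the same pattern with plain (unshaded) Jackson, using a hypothetical Spec value class that is not part of Druid:

import com.fasterxml.jackson.databind.ObjectMapper;

public class RoundTripSketch {
    // Hypothetical value class, for illustration only.
    public static class Spec {
        public String type;
        public String ordering;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        String json = "{\"type\": \"dimension\", \"ordering\": \"lexicographic\"}";
        // Parse, serialize, and parse again; the two parsed objects should match.
        Spec first = mapper.readValue(json, Spec.class);
        Spec second = mapper.readValue(mapper.writeValueAsString(first), Spec.class);
        System.out.println(first.type.equals(second.type)
            && first.ordering.equals(second.ordering)); // true
    }
}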
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper in project vert.x by eclipse, in the class JsonMapperTest, method testGetSetMapper:
@Test
public void testGetSetMapper() {
    ObjectMapper mapper = Json.mapper;
    assertNotNull(mapper);
    ObjectMapper newMapper = new ObjectMapper();
    Json.mapper = newMapper;
    assertSame(newMapper, Json.mapper);
    Json.mapper = mapper;
}
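The test follows a save/swap/restore pattern for mutable global state: the original mapper is captured first so it can be put back at the end, leaving later tests unaffected. A rough sketch of the same pattern against a hypothetical static holder (Holder stands in for Vert.x's Json class, which is not reproduced here):

import com.fasterxml.jackson.databind.ObjectMapper;

public class SwapRestoreSketch {
    // Hypothetical stand-in for a class exposing a mutable static mapper.
    static class Holder {
        static ObjectMapper mapper = new ObjectMapper();
    }

    public static void main(String[] args) {
        ObjectMapper original = Holder.mapper;      // save the global instance
        ObjectMapper replacement = new ObjectMapper();
        Holder.mapper = replacement;                // swap it out
        System.out.println(Holder.mapper == replacement); // true
        Holder.mapper = original;                   // restore for later tests
    }
}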
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper in project zeppelin by apache, in the class SparkRInterpreter, method interpret:
@Override
public InterpreterResult interpret(String lines, InterpreterContext interpreterContext) {
    SparkInterpreter sparkInterpreter = getSparkInterpreter();
    sparkInterpreter.populateSparkWebUrl(interpreterContext);
    String jobGroup = Utils.buildJobGroupId(interpreterContext);
    sparkInterpreter.getSparkContext().setJobGroup(jobGroup, "Zeppelin", false);
    String imageWidth = getProperty("zeppelin.R.image.width");
    String[] sl = lines.split("\n");
    if (sl[0].contains("{") && sl[0].contains("}")) {
        String jsonConfig = sl[0].substring(sl[0].indexOf("{"), sl[0].indexOf("}") + 1);
        ObjectMapper m = new ObjectMapper();
        try {
            JsonNode rootNode = m.readTree(jsonConfig);
            JsonNode imageWidthNode = rootNode.path("imageWidth");
            if (!imageWidthNode.isMissingNode()) {
                imageWidth = imageWidthNode.textValue();
            }
        } catch (Exception e) {
            logger.warn("Cannot parse JSON config: " + jsonConfig, e);
        } finally {
            lines = lines.replace(jsonConfig, "");
        }
    }
    String setJobGroup = "";
    // assign setJobGroup to dummy__, otherwise it would print NULL for this statement
    if (Utils.isSpark2()) {
        setJobGroup = "dummy__ <- setJobGroup(\"" + jobGroup + "\", \"zeppelin sparkR job group description\", TRUE)";
    } else if (getSparkInterpreter().getSparkVersion().newerThanEquals(SparkVersion.SPARK_1_5_0)) {
        setJobGroup = "dummy__ <- setJobGroup(sc, \"" + jobGroup + "\", \"zeppelin sparkR job group description\", TRUE)";
    }
    logger.debug("set JobGroup:" + setJobGroup);
    lines = setJobGroup + "\n" + lines;
    try {
        // render output with knitr
        if (useKnitr()) {
            zeppelinR.setInterpreterOutput(null);
            zeppelinR.set(".zcmd", "\n```{r " + renderOptions + "}\n" + lines + "\n```");
            zeppelinR.eval(".zres <- knit2html(text=.zcmd)");
            String html = zeppelinR.getS0(".zres");
            RDisplay rDisplay = render(html, imageWidth);
            return new InterpreterResult(rDisplay.code(), rDisplay.type(), rDisplay.content());
        } else {
            // alternatively, stream the output (without knitr)
            zeppelinR.setInterpreterOutput(interpreterContext.out);
            zeppelinR.eval(lines);
            return new InterpreterResult(InterpreterResult.Code.SUCCESS, "");
        }
    } catch (Exception e) {
        logger.error("Exception while connecting to R", e);
        return new InterpreterResult(InterpreterResult.Code.ERROR, e.getMessage());
    }
}
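The interesting part is how the interpreter probes the first line of the paragraph for an inline JSON config: readTree parses it into a tree, and path(...) returns a "missing node" rather than null when the field is absent, so no null check is needed before textValue(). A standalone sketch of that probing logic, with made-up sample input and a hypothetical default width:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class FirstLineConfigSketch {
    public static void main(String[] args) throws Exception {
        String lines = "{\"imageWidth\": \"400px\"}\nplot(cars)"; // made-up paragraph
        String imageWidth = "100%"; // hypothetical default
        String firstLine = lines.split("\n")[0];
        if (firstLine.contains("{") && firstLine.contains("}")) {
            String jsonConfig = firstLine.substring(firstLine.indexOf("{"), firstLine.indexOf("}") + 1);
            JsonNode widthNode = new ObjectMapper().readTree(jsonConfig).path("imageWidth");
            // path() yields a missing node instead of null for absent fields
            if (!widthNode.isMissingNode()) {
                imageWidth = widthNode.textValue();
            }
            lines = lines.replace(jsonConfig, ""); // strip the config from the code sent to R
        }
        System.out.println(imageWidth); // 400px
    }
}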
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper in project hadoop by apache, in the class EntityGroupFSTimelineStore, method serviceStart:
@Override
protected void serviceStart() throws Exception {
    LOG.info("Starting {}", getName());
    summaryStore.start();
    Configuration conf = getConfig();
    aclManager = new TimelineACLsManager(conf);
    aclManager.setTimelineStore(summaryStore);
    summaryTdm = new TimelineDataManager(summaryStore, aclManager);
    summaryTdm.init(conf);
    addService(summaryTdm);
    // start child services that aren't already started
    super.serviceStart();
    if (!fs.exists(activeRootPath)) {
        fs.mkdirs(activeRootPath);
        fs.setPermission(activeRootPath, ACTIVE_DIR_PERMISSION);
    }
    if (!fs.exists(doneRootPath)) {
        fs.mkdirs(doneRootPath);
        fs.setPermission(doneRootPath, DONE_DIR_PERMISSION);
    }
    objMapper = new ObjectMapper();
    objMapper.setAnnotationIntrospector(new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
    jsonFactory = new MappingJsonFactory(objMapper);
    final long scanIntervalSecs = conf.getLong(YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_SCAN_INTERVAL_SECONDS, YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_SCAN_INTERVAL_SECONDS_DEFAULT);
    final long cleanerIntervalSecs = conf.getLong(YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_CLEANER_INTERVAL_SECONDS, YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_CLEANER_INTERVAL_SECONDS_DEFAULT);
    final int numThreads = conf.getInt(YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_THREADS, YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_THREADS_DEFAULT);
    LOG.info("Scanning active directory {} every {} seconds", activeRootPath, scanIntervalSecs);
    LOG.info("Cleaning logs every {} seconds", cleanerIntervalSecs);
    executor = new ScheduledThreadPoolExecutor(numThreads, new ThreadFactoryBuilder().setNameFormat("EntityLogPluginWorker #%d").build());
    executor.scheduleAtFixedRate(new EntityLogScanner(), 0, scanIntervalSecs, TimeUnit.SECONDS);
    executor.scheduleAtFixedRate(new EntityLogCleaner(), cleanerIntervalSecs, cleanerIntervalSecs, TimeUnit.SECONDS);
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper in project hive by apache, in the class TestCodahaleMetrics, method testFileReporting:
@Test
public void testFileReporting() throws Exception {
    int runs = 5;
    for (int i = 0; i < runs; i++) {
        MetricsFactory.getInstance().incrementCounter("count2");
    }
    byte[] jsonData = MetricsTestUtils.getFileData(jsonReportFile.getAbsolutePath(), 2000, 3);
    ObjectMapper objectMapper = new ObjectMapper();
    JsonNode rootNode = objectMapper.readTree(jsonData);
    JsonNode countersNode = rootNode.path("counters");
    JsonNode methodCounterNode = countersNode.path("count2");
    JsonNode countNode = methodCounterNode.path("count");
    Assert.assertEquals(5, countNode.asInt());
}
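The assertion relies on the shape of a Codahale (Dropwizard Metrics) JSON report, where counters live under a top-level "counters" object keyed by name, each holding a "count" field. A self-contained sketch of the same path(...) traversal over a hand-written report:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class CounterReportSketch {
    public static void main(String[] args) throws Exception {
        // Hand-written report mirroring the structure the test walks.
        String report = "{\"counters\": {\"count2\": {\"count\": 5}}}";
        JsonNode root = new ObjectMapper().readTree(report);
        int count = root.path("counters").path("count2").path("count").asInt();
        System.out.println(count); // 5
    }
}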