Use of org.apache.druid.jackson.DefaultObjectMapper in project druid by druid-io.
From the class DataSourceMetadataQueryTest, method testContextSerde.
@Test
public void testContextSerde() throws Exception {
  final DataSourceMetadataQuery query = Druids.newDataSourceMetadataQueryBuilder()
      .dataSource("foo")
      .intervals("2013/2014")
      .context(ImmutableMap.of("priority", 1, "useCache", true, "populateCache", "true", "finalize", true))
      .build();
  final ObjectMapper mapper = new DefaultObjectMapper();
  final Query<?> serdeQuery = mapper.readValue(
      mapper.writeValueAsBytes(mapper.readValue(mapper.writeValueAsString(query), Query.class)),
      Query.class
  );
  Assert.assertEquals((Integer) 1, serdeQuery.getContextValue(QueryContexts.PRIORITY_KEY));
  Assert.assertEquals(true, serdeQuery.getContextValue("useCache"));
  Assert.assertEquals("true", serdeQuery.getContextValue("populateCache"));
  Assert.assertEquals(true, serdeQuery.getContextValue("finalize"));
  Assert.assertEquals(true, serdeQuery.getContextBoolean("useCache", false));
  Assert.assertEquals(true, serdeQuery.getContextBoolean("populateCache", false));
  Assert.assertEquals(true, serdeQuery.getContextBoolean("finalize", false));
}
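The assertions above hinge on Jackson preserving the Java types of the context values through a JSON round trip: the numeric priority comes back as an Integer, the boolean flags come back as Booleans, and the string "true" stays a String, which is why getContextValue("populateCache") returns a String while getContextBoolean still reads it as true. A minimal, self-contained sketch of that round-trip behavior with DefaultObjectMapper (the map contents here are illustrative, not taken from the test):

import java.util.Map;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.jackson.DefaultObjectMapper;

public class ContextRoundTripSketch {
  public static void main(String[] args) throws Exception {
    final ObjectMapper mapper = new DefaultObjectMapper();
    final Map<String, Object> context = ImmutableMap.of("priority", 1, "useCache", true, "populateCache", "true");

    // Serialize to JSON and read back into a generic map.
    final String json = mapper.writeValueAsString(context);
    final Map<String, Object> roundTripped = mapper.readValue(json, new TypeReference<Map<String, Object>>() {});

    // 1 comes back as an Integer, true as a Boolean, and "true" stays a String.
    System.out.println(roundTripped);
  }
}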
Use of org.apache.druid.jackson.DefaultObjectMapper in project druid by druid-io.
From the class DataSourceMetadataQueryTest, method testResultDeserialization.
@Test
public void testResultDeserialization() {
  final Map<String, Object> resultValueMap = ImmutableMap.of("maxIngestedEventTime", "2000-01-01T00:00:00.000Z");
  final DataSourceMetadataResultValue resultValue =
      new DefaultObjectMapper().convertValue(resultValueMap, DataSourceMetadataResultValue.class);
  Assert.assertEquals(DateTimes.of("2000"), resultValue.getMaxIngestedEventTime());
}
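Here convertValue() performs the same data binding as readValue(), but takes an in-memory object (the Map) instead of a JSON string, so no serialization to text is needed. A minimal sketch of the same pattern; the Stamp bean is hypothetical and merely stands in for DataSourceMetadataResultValue:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.jackson.DefaultObjectMapper;

public class ConvertValueSketch {
  // Hypothetical target bean; DataSourceMetadataResultValue plays this role in the test above.
  static class Stamp {
    public String maxIngestedEventTime;
  }

  public static void main(String[] args) {
    final ObjectMapper mapper = new DefaultObjectMapper();
    // Bind the map directly onto the bean, with no intermediate JSON string.
    final Stamp stamp = mapper.convertValue(
        ImmutableMap.of("maxIngestedEventTime", "2000-01-01T00:00:00.000Z"),
        Stamp.class
    );
    System.out.println(stamp.maxIngestedEventTime);
  }
}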
Use of org.apache.druid.jackson.DefaultObjectMapper in project druid by druid-io.
From the class BroadcastSegmentIndexedTableTest, method setup.
@Before
public void setup() throws IOException, SegmentLoadingException {
  final ObjectMapper mapper = new DefaultObjectMapper();
  mapper.registerModule(new SegmentizerModule());
  final IndexIO indexIO = new IndexIO(mapper, () -> 0);
  mapper.setInjectableValues(
      new InjectableValues.Std()
          .addValue(ExprMacroTable.class.getName(), TestExprMacroTable.INSTANCE)
          .addValue(ObjectMapper.class.getName(), mapper)
          .addValue(IndexIO.class, indexIO)
          .addValue(DataSegment.PruneSpecsHolder.class, DataSegment.PruneSpecsHolder.DEFAULT)
  );
  final IndexMerger indexMerger =
      new IndexMergerV9(mapper, indexIO, OffHeapMemorySegmentWriteOutMediumFactory.instance());
  Interval testInterval = Intervals.of("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z");
  IncrementalIndex data = TestIndex.makeRealtimeIndex("druid.sample.numeric.tsv");
  File segment = new File(temporaryFolder.newFolder(), "segment");
  File persisted = indexMerger.persist(data, testInterval, segment, new IndexSpec(), null);
  File factoryJson = new File(persisted, "factory.json");
  Assert.assertTrue(factoryJson.exists());
  SegmentizerFactory factory = mapper.readValue(factoryJson, SegmentizerFactory.class);
  Assert.assertTrue(factory instanceof MMappedQueryableSegmentizerFactory);
  DataSegment dataSegment = new DataSegment(
      DATASOURCE, testInterval, DateTimes.nowUtc().toString(), ImmutableMap.of(),
      columnNames, ImmutableList.of(), null, null, segment.getTotalSpace()
  );
  backingSegment = (QueryableIndexSegment) factory.factorize(dataSegment, segment, false, SegmentLazyLoadFailCallback.NOOP);
  columnNames = ImmutableList.<String>builder()
      .add(ColumnHolder.TIME_COLUMN_NAME)
      .addAll(backingSegment.asQueryableIndex().getColumnNames())
      .build();
  broadcastTable = new BroadcastSegmentIndexedTable(backingSegment, keyColumns, dataSegment.getVersion());
}
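The setInjectableValues() call is what lets the later mapper.readValue() reconstruct objects whose constructors declare @JacksonInject parameters: each addValue() entry is keyed either by a string id (here the fully qualified class name, as for ExprMacroTable) or by a class, and Jackson looks the value up by that key during deserialization. A minimal, self-contained sketch of the string-keyed variant; the TimedEvent bean and the "clock" key are hypothetical:

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;

public class InjectableValuesSketch {
  // Hypothetical bean: "clock" is absent from the JSON and is supplied via InjectableValues.
  static class TimedEvent {
    final String name;
    final Long clock;

    @JsonCreator
    TimedEvent(@JsonProperty("name") String name, @JacksonInject("clock") Long clock) {
      this.name = name;
      this.clock = clock;
    }
  }

  public static void main(String[] args) throws Exception {
    final ObjectMapper mapper = new DefaultObjectMapper();
    // Register the value Jackson should inject wherever it sees @JacksonInject("clock").
    mapper.setInjectableValues(new InjectableValues.Std().addValue("clock", 42L));
    TimedEvent event = mapper.readValue("{\"name\":\"foo\"}", TimedEvent.class);
    System.out.println(event.name + " @ " + event.clock);
  }
}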
Use of org.apache.druid.jackson.DefaultObjectMapper in project druid by druid-io.
From the class EventReceiverFirehoseTest, method setUp.
@Before
public void setUp() {
  req = EasyMock.createMock(HttpServletRequest.class);
  eventReceiverFirehoseFactory = new EventReceiverFirehoseFactory(
      SERVICE_NAME, CAPACITY, MAX_IDLE_TIME_MILLIS, null,
      new DefaultObjectMapper(), new DefaultObjectMapper(),
      register, AuthTestUtils.TEST_AUTHORIZER_MAPPER
  );
  firehose = (EventReceiverFirehoseFactory.EventReceiverFirehose) eventReceiverFirehoseFactory.connect(
      new MapInputRowParser(new JSONParseSpec(
          new TimestampSpec("timestamp", "auto", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1"))),
          null, null, null
      )),
      null
  );
}
Use of org.apache.druid.jackson.DefaultObjectMapper in project druid by druid-io.
From the class FireDepartmentTest, method testSerde.
@Test
public void testSerde() throws Exception {
  ObjectMapper jsonMapper = new DefaultObjectMapper();
  jsonMapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, jsonMapper));
  FireDepartment schema = new FireDepartment(
      new DataSchema(
          "foo",
          jsonMapper.convertValue(
              new StringInputRowParser(
                  new JSONParseSpec(
                      new TimestampSpec("timestamp", "auto", null),
                      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("dim1", "dim2"))),
                      null, null, null
                  ),
                  null
              ),
              Map.class
          ),
          new AggregatorFactory[]{new CountAggregatorFactory("count")},
          new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null),
          null,
          jsonMapper
      ),
      new RealtimeIOConfig(
          null,
          new RealtimePlumberSchool(
              null, null, null, null, null, null, null, NoopJoinableFactory.INSTANCE,
              TestHelper.getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance()),
              TestHelper.getTestIndexIO(), MapCache.create(0), NO_CACHE_CONFIG,
              new CachePopulatorStats(), TestHelper.makeJsonMapper()
          )
      ),
      RealtimeTuningConfig.makeDefaultTuningConfig(new File("/tmp/nonexistent"))
  );
  String json = jsonMapper.writeValueAsString(schema);
  FireDepartment newSchema = jsonMapper.readValue(json, FireDepartment.class);
  Assert.assertEquals(schema.getDataSchema().getDataSource(), newSchema.getDataSchema().getDataSource());
  Assert.assertEquals("/tmp/nonexistent", schema.getTuningConfig().getBasePersistDirectory().toString());
}
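Registering the mapper itself under ObjectMapper.class is the same @JacksonInject mechanism as in the sketch after the BroadcastSegmentIndexedTableTest setup, only keyed by the class rather than a string id: components such as DataSchema declare an @JacksonInject ObjectMapper parameter, so the readValue() call above hands the very same jsonMapper back to the objects it reconstructs. A minimal sketch of that self-injection, with a hypothetical NeedsMapper bean standing in for DataSchema:

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;

public class SelfInjectionSketch {
  // Hypothetical bean that needs the mapper at deserialization time, as DataSchema does.
  static class NeedsMapper {
    final String name;
    final ObjectMapper mapper;

    @JsonCreator
    NeedsMapper(@JsonProperty("name") String name, @JacksonInject ObjectMapper mapper) {
      this.name = name;
      this.mapper = mapper;
    }
  }

  public static void main(String[] args) throws Exception {
    final ObjectMapper jsonMapper = new DefaultObjectMapper();
    // The mapper registers itself as the injectable value keyed by the ObjectMapper class.
    jsonMapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, jsonMapper));
    NeedsMapper bean = jsonMapper.readValue("{\"name\":\"foo\"}", NeedsMapper.class);
    System.out.println(bean.name + " " + (bean.mapper == jsonMapper));
  }
}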