Use of org.apache.druid.jackson.DefaultObjectMapper in project druid by druid-io: class StandardDeviationPostAggregatorTest, method testSerde.
@Test
public void testSerde() throws Exception {
  StandardDeviationPostAggregator there =
      new StandardDeviationPostAggregator("post", "test_field", "population");
  DefaultObjectMapper mapper = new DefaultObjectMapper();
  StandardDeviationPostAggregator andBackAgain =
      mapper.readValue(mapper.writeValueAsString(there), StandardDeviationPostAggregator.class);
  Assert.assertEquals(there, andBackAgain);
  Assert.assertArrayEquals(there.getCacheKey(), andBackAgain.getCacheKey());
  Assert.assertEquals(there.getDependentFields(), andBackAgain.getDependentFields());
}
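The same write-then-read round trip recurs in the serde tests below. A minimal generic helper capturing the pattern might look like the following sketch; the SerdeTestUtil class and roundTrip method are hypothetical, not part of Druid:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.junit.Assert;

// Hypothetical helper capturing the round-trip pattern used in these tests.
public class SerdeTestUtil {
  public static <T> T roundTrip(T value, Class<T> clazz) throws java.io.IOException {
    ObjectMapper mapper = new DefaultObjectMapper();
    // Serialize to JSON, then read it back as the same concrete class.
    T andBackAgain = mapper.readValue(mapper.writeValueAsString(value), clazz);
    Assert.assertEquals(value, andBackAgain);
    return andBackAgain;
  }
}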
Use of org.apache.druid.jackson.DefaultObjectMapper in project druid by druid-io: class MultiplePathSpecTest, method testSerde.
@Test
public void testSerde() throws Exception {
  PathSpec expected = new MultiplePathSpec(
      Lists.newArrayList(
          new StaticPathSpec("/tmp/path1", null),
          new StaticPathSpec("/tmp/path2", TextInputFormat.class)
      )
  );
  ObjectMapper jsonMapper = new DefaultObjectMapper();
  PathSpec actual = jsonMapper.readValue(jsonMapper.writeValueAsString(expected), PathSpec.class);
  Assert.assertEquals(expected, actual);
}
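This round trip deserializes against the PathSpec base type, which works because Jackson's polymorphic typing writes a type discriminator into the JSON. A sketch of the general mechanism follows; the annotation values are illustrative, and the names Druid actually registers may differ:

import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;

// Illustrative only; the actual annotations and type names on Druid's
// PathSpec may differ.
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonSubTypes({
    @JsonSubTypes.Type(name = "static", value = StaticPathSpec.class),
    @JsonSubTypes.Type(name = "multi", value = MultiplePathSpec.class)
})
public interface PathSpec {
  // Jackson serializes {"type": "multi", ...} and uses that field to
  // choose the concrete class when reading back as PathSpec.
}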
Use of org.apache.druid.jackson.DefaultObjectMapper in project druid by druid-io: class SurrogateActionTest, method testSerde.
@Test
public void testSerde() throws IOException {
  final ObjectMapper objectMapper = new DefaultObjectMapper();
  final SurrogateAction<TaskLock, TimeChunkLockTryAcquireAction> surrogateAction = new SurrogateAction<>(
      "testId",
      new TimeChunkLockTryAcquireAction(TaskLockType.EXCLUSIVE, Intervals.of("2018-01-01/2019-01-01"))
  );
  final String json = objectMapper.writeValueAsString(surrogateAction);
  Assert.assertEquals(surrogateAction.toString(), objectMapper.readValue(json, TaskAction.class).toString());
}
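The assertion compares toString() output because readValue targets the TaskAction base type rather than the concrete class. For that to work, the serialized form must carry a type discriminator; a hypothetical spot check, where the field name "type" and value "surrogate" are assumptions:

// Hypothetical: inspect the discriminator Jackson wrote for the action.
JsonNode node = objectMapper.readTree(json);
Assert.assertEquals("surrogate", node.get("type").asText());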
Use of org.apache.druid.jackson.DefaultObjectMapper in project druid by druid-io: class AppenderatorDriverRealtimeIndexTaskTest, method makeToolboxFactory.
private void makeToolboxFactory(final File directory) {
  taskStorage = new HeapMemoryTaskStorage(new TaskStorageConfig(null));
  publishedSegments = new CopyOnWriteArrayList<>();
  ObjectMapper mapper = new DefaultObjectMapper();
  mapper.registerSubtypes(LinearShardSpec.class);
  mapper.registerSubtypes(NumberedShardSpec.class);
  IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator(
      mapper,
      derbyConnectorRule.metadataTablesConfigSupplier().get(),
      derbyConnectorRule.getConnector()
  ) {
    @Override
    public Set<DataSegment> announceHistoricalSegments(Set<DataSegment> segments) throws IOException {
      Set<DataSegment> result = super.announceHistoricalSegments(segments);
      Assert.assertFalse("Segment latch not initialized, did you forget to call expectPublishSegments?", segmentLatch == null);
      publishedSegments.addAll(result);
      segments.forEach(s -> segmentLatch.countDown());
      return result;
    }
    @Override
    public SegmentPublishResult announceHistoricalSegments(
        Set<DataSegment> segments,
        Set<DataSegment> segmentsToDrop,
        DataSourceMetadata startMetadata,
        DataSourceMetadata endMetadata
    ) throws IOException {
      SegmentPublishResult result = super.announceHistoricalSegments(segments, segmentsToDrop, startMetadata, endMetadata);
      Assert.assertFalse("Segment latch not initialized, did you forget to call expectPublishSegments?", segmentLatch == null);
      publishedSegments.addAll(result.getSegments());
      result.getSegments().forEach(s -> segmentLatch.countDown());
      return result;
    }
  };
  taskLockbox = new TaskLockbox(taskStorage, mdc);
  final TaskConfig taskConfig = new TaskConfig(
      directory.getPath(), null, null, 50000, null, true, null, null, null,
      false, false, TaskConfig.BATCH_PROCESSING_MODE_DEFAULT.name()
  );
  final TaskActionToolbox taskActionToolbox = new TaskActionToolbox(
      taskLockbox, taskStorage, mdc, EMITTER, EasyMock.createMock(SupervisorManager.class)
  );
  final TaskActionClientFactory taskActionClientFactory =
      new LocalTaskActionClientFactory(taskStorage, taskActionToolbox, new TaskAuditLogConfig(false));
  final QueryRunnerFactoryConglomerate conglomerate = new DefaultQueryRunnerFactoryConglomerate(ImmutableMap.of(
      TimeseriesQuery.class,
      new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), (query, future) -> {
        // do nothing
      })
  ));
  handOffCallbacks = new ConcurrentHashMap<>();
  final SegmentHandoffNotifierFactory handoffNotifierFactory = dataSource -> new SegmentHandoffNotifier() {
    @Override
    public boolean registerSegmentHandoffCallback(SegmentDescriptor descriptor, Executor exec, Runnable handOffRunnable) {
      handOffCallbacks.put(descriptor, new Pair<>(exec, handOffRunnable));
      handoffLatch.countDown();
      return true;
    }
    @Override
    public void start() {
      // Noop
    }
    @Override
    public void close() {
      // Noop
    }
  };
  final TestUtils testUtils = new TestUtils();
  taskToolboxFactory = new TaskToolboxFactory(
      taskConfig,
      new DruidNode("druid/middlemanager", "localhost", false, 8091, null, true, false),
      taskActionClientFactory,
      EMITTER,
      new TestDataSegmentPusher(),
      new TestDataSegmentKiller(),
      null, // DataSegmentMover
      null, // DataSegmentArchiver
      new TestDataSegmentAnnouncer(),
      EasyMock.createNiceMock(DataSegmentServerAnnouncer.class),
      handoffNotifierFactory,
      () -> conglomerate,
      DirectQueryProcessingPool.INSTANCE, // queryExecutorService
      NoopJoinableFactory.INSTANCE,
      () -> EasyMock.createMock(MonitorScheduler.class),
      new SegmentCacheManagerFactory(testUtils.getTestObjectMapper()),
      testUtils.getTestObjectMapper(),
      testUtils.getTestIndexIO(),
      MapCache.create(1024),
      new CacheConfig(),
      new CachePopulatorStats(),
      testUtils.getTestIndexMergerV9(),
      EasyMock.createNiceMock(DruidNodeAnnouncer.class),
      EasyMock.createNiceMock(DruidNode.class),
      new LookupNodeService("tier"),
      new DataNodeService("tier", 1000, ServerType.INDEXER_EXECUTOR, 0),
      new SingleFileTaskReportFileWriter(reportsFile),
      null,
      AuthTestUtils.TEST_AUTHORIZER_MAPPER,
      new NoopChatHandlerProvider(),
      testUtils.getRowIngestionMetersFactory(),
      new TestAppenderatorsManager(),
      new NoopIndexingServiceClient(),
      null,
      null,
      null
  );
}
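The overridden announceHistoricalSegments methods count down segmentLatch so the test can block until publishing happens. The assertion message references an expectPublishSegments helper; a minimal sketch of how that latch pairing might look, where only the name expectPublishSegments comes from the assertion message and both method bodies are assumptions:

// Hypothetical companions to the latch used above: arm the latch before
// the task runs, then block until all expected segments are published.
private void expectPublishSegments(int count) {
  segmentLatch = new CountDownLatch(count);
}

private List<DataSegment> awaitPublishedSegments() throws InterruptedException {
  Assert.assertTrue(
      "Timed out waiting for segment publishes",
      segmentLatch.await(60, TimeUnit.SECONDS)
  );
  return publishedSegments;
}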
Use of org.apache.druid.jackson.DefaultObjectMapper in project druid by druid-io: class CompactionTaskTest, method testSerdeWithUnknownTuningConfigThrowingError.
@Test
public void testSerdeWithUnknownTuningConfigThrowingError() throws IOException {
  final OldCompactionTaskWithAnyTuningConfigType taskWithUnknownTuningConfig = new OldCompactionTaskWithAnyTuningConfigType(
      null, null, DATA_SOURCE, null, SEGMENTS, null, null, null, null, null,
      RealtimeTuningConfig.makeDefaultTuningConfig(null), null, OBJECT_MAPPER,
      AuthTestUtils.TEST_AUTHORIZER_MAPPER, null, toolbox.getRowIngestionMetersFactory(),
      COORDINATOR_CLIENT, segmentCacheManagerFactory, RETRY_POLICY_FACTORY, toolbox.getAppenderatorsManager()
  );
  final ObjectMapper mapper = new DefaultObjectMapper((DefaultObjectMapper) OBJECT_MAPPER);
  mapper.registerSubtypes(
      new NamedType(OldCompactionTaskWithAnyTuningConfigType.class, "compact"),
      new NamedType(RealtimeTuningConfig.class, "realtime")
  );
  final byte[] bytes = mapper.writeValueAsBytes(taskWithUnknownTuningConfig);
  expectedException.expect(ValueInstantiationException.class);
  expectedException.expectCause(CoreMatchers.instanceOf(IllegalStateException.class));
  expectedException.expectMessage("Unknown tuningConfig type: [org.apache.druid.segment.indexing.RealtimeTuningConfig]");
  mapper.readValue(bytes, CompactionTask.class);
}
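The expected IllegalStateException, which Jackson wraps in a ValueInstantiationException, comes from CompactionTask rejecting a tuningConfig type it does not recognize. A hedged sketch of such a guard, assuming Druid's ISE helper (which extends IllegalStateException); this is illustrative, not the actual CompactionTask code, and the accepted type below is an assumption:

// Illustrative guard: reject a tuningConfig of an unexpected concrete
// type. Jackson reports the thrown ISE (an IllegalStateException) as the
// cause of the ValueInstantiationException the test expects.
private static void validateTuningConfig(TuningConfig tuningConfig) {
  if (!(tuningConfig instanceof ParallelIndexTuningConfig)) {
    throw new ISE("Unknown tuningConfig type: [%s]", tuningConfig.getClass().getName());
  }
}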