use of org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider in project druid by druid-io.
In the class AppenderatorDriverRealtimeIndexTaskTest, the method makeToolboxFactory:
private void makeToolboxFactory(final File directory) {
taskStorage = new HeapMemoryTaskStorage(new TaskStorageConfig(null));
publishedSegments = new CopyOnWriteArrayList<>();
ObjectMapper mapper = new DefaultObjectMapper();
mapper.registerSubtypes(LinearShardSpec.class);
mapper.registerSubtypes(NumberedShardSpec.class);
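// Test-only metadata coordinator: every announced segment is recorded in publishedSegments and counts down segmentLatch so the test can wait for publishes.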
IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator(mapper, derbyConnectorRule.metadataTablesConfigSupplier().get(), derbyConnectorRule.getConnector()) {
@Override
public Set<DataSegment> announceHistoricalSegments(Set<DataSegment> segments) throws IOException {
Set<DataSegment> result = super.announceHistoricalSegments(segments);
Assert.assertFalse("Segment latch not initialized, did you forget to call expectPublishSegments?", segmentLatch == null);
publishedSegments.addAll(result);
segments.forEach(s -> segmentLatch.countDown());
return result;
}
@Override
public SegmentPublishResult announceHistoricalSegments(Set<DataSegment> segments, Set<DataSegment> segmentsToDrop, DataSourceMetadata startMetadata, DataSourceMetadata endMetadata) throws IOException {
SegmentPublishResult result = super.announceHistoricalSegments(segments, segmentsToDrop, startMetadata, endMetadata);
Assert.assertFalse("Segment latch not initialized, did you forget to call expectPublishSegments?", segmentLatch == null);
publishedSegments.addAll(result.getSegments());
result.getSegments().forEach(s -> segmentLatch.countDown());
return result;
}
};
taskLockbox = new TaskLockbox(taskStorage, mdc);
final TaskConfig taskConfig = new TaskConfig(directory.getPath(), null, null, 50000, null, true, null, null, null, false, false, TaskConfig.BATCH_PROCESSING_MODE_DEFAULT.name());
final TaskActionToolbox taskActionToolbox = new TaskActionToolbox(taskLockbox, taskStorage, mdc, EMITTER, EasyMock.createMock(SupervisorManager.class));
final TaskActionClientFactory taskActionClientFactory = new LocalTaskActionClientFactory(taskStorage, taskActionToolbox, new TaskAuditLogConfig(false));
final QueryRunnerFactoryConglomerate conglomerate = new DefaultQueryRunnerFactoryConglomerate(ImmutableMap.of(TimeseriesQuery.class, new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), (query, future) -> {
// do nothing
})));
handOffCallbacks = new ConcurrentHashMap<>();
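// Handoff notifier stub: instead of waiting for real segment handoff, it just records the callback and counts down handoffLatch.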
final SegmentHandoffNotifierFactory handoffNotifierFactory = dataSource -> new SegmentHandoffNotifier() {
@Override
public boolean registerSegmentHandoffCallback(SegmentDescriptor descriptor, Executor exec, Runnable handOffRunnable) {
handOffCallbacks.put(descriptor, new Pair<>(exec, handOffRunnable));
handoffLatch.countDown();
return true;
}
@Override
public void start() {
// Noop
}
@Override
public void close() {
// Noop
}
};
final TestUtils testUtils = new TestUtils();
taskToolboxFactory = new TaskToolboxFactory(
    taskConfig,
    new DruidNode("druid/middlemanager", "localhost", false, 8091, null, true, false),
    taskActionClientFactory,
    EMITTER,
    new TestDataSegmentPusher(),
    new TestDataSegmentKiller(),
    null, // DataSegmentMover
    null, // DataSegmentArchiver
    new TestDataSegmentAnnouncer(),
    EasyMock.createNiceMock(DataSegmentServerAnnouncer.class),
    handoffNotifierFactory,
    () -> conglomerate,
    DirectQueryProcessingPool.INSTANCE, // queryExecutorService
    NoopJoinableFactory.INSTANCE,
    () -> EasyMock.createMock(MonitorScheduler.class),
    new SegmentCacheManagerFactory(testUtils.getTestObjectMapper()),
    testUtils.getTestObjectMapper(),
    testUtils.getTestIndexIO(),
    MapCache.create(1024),
    new CacheConfig(),
    new CachePopulatorStats(),
    testUtils.getTestIndexMergerV9(),
    EasyMock.createNiceMock(DruidNodeAnnouncer.class),
    EasyMock.createNiceMock(DruidNode.class),
    new LookupNodeService("tier"),
    new DataNodeService("tier", 1000, ServerType.INDEXER_EXECUTOR, 0),
    new SingleFileTaskReportFileWriter(reportsFile),
    null,
    AuthTestUtils.TEST_AUTHORIZER_MAPPER,
    new NoopChatHandlerProvider(),
    testUtils.getRowIngestionMetersFactory(),
    new TestAppenderatorsManager(),
    new NoopIndexingServiceClient(),
    null,
    null,
    null
);
}
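For context, NoopChatHandlerProvider is the do-nothing ChatHandlerProvider implementation: it accepts registrations without keeping them and returns an absent handler on lookup, which is why tests can hand it to TaskToolboxFactory without exposing any task HTTP endpoints. A minimal sketch of that assumed behavior (the service name and anonymous handler are made up for illustration):
ChatHandlerProvider chatHandlerProvider = new NoopChatHandlerProvider();
chatHandlerProvider.register("example-task", new ChatHandler() {}); // registration is silently ignored
Assert.assertFalse(chatHandlerProvider.get("example-task").isPresent()); // lookup comes back absent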
use of org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider in project druid by druid-io.
In the class TaskLifecycleTest, the method setUpTaskToolboxFactory:
private TaskToolboxFactory setUpTaskToolboxFactory(DataSegmentPusher dataSegmentPusher, SegmentHandoffNotifierFactory handoffNotifierFactory, TestIndexerMetadataStorageCoordinator mdc, AppenderatorsManager appenderatorsManager) throws IOException {
Preconditions.checkNotNull(queryRunnerFactoryConglomerate);
Preconditions.checkNotNull(monitorScheduler);
Preconditions.checkNotNull(taskStorage);
Preconditions.checkNotNull(emitter);
taskLockbox = new TaskLockbox(taskStorage, mdc);
tac = new LocalTaskActionClientFactory(taskStorage, new TaskActionToolbox(taskLockbox, taskStorage, mdc, emitter, EasyMock.createMock(SupervisorManager.class)), new TaskAuditLogConfig(true));
File tmpDir = temporaryFolder.newFolder();
taskConfig = new TaskConfig(tmpDir.toString(), null, null, 50000, null, false, null, null, null, false, false, TaskConfig.BATCH_PROCESSING_MODE_DEFAULT.name());
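// Assemble the toolbox from local, no-op, or mocked implementations of every external dependency the lifecycle tests touch.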
return new TaskToolboxFactory(taskConfig, new DruidNode("druid/middlemanager", "localhost", false, 8091, null, true, false), tac, emitter, dataSegmentPusher, new LocalDataSegmentKiller(new LocalDataSegmentPusherConfig()), new DataSegmentMover() {
@Override
public DataSegment move(DataSegment dataSegment, Map<String, Object> targetLoadSpec) {
return dataSegment;
}
}, new DataSegmentArchiver() {
@Override
public DataSegment archive(DataSegment segment) {
return segment;
}
@Override
public DataSegment restore(DataSegment segment) {
return segment;
}
}, new DataSegmentAnnouncer() {
@Override
public void announceSegment(DataSegment segment) {
announcedSinks++;
}
@Override
public void unannounceSegment(DataSegment segment) {
}
@Override
public void announceSegments(Iterable<DataSegment> segments) {
}
@Override
public void unannounceSegments(Iterable<DataSegment> segments) {
}
}, // segment announcer
EasyMock.createNiceMock(DataSegmentServerAnnouncer.class),
handoffNotifierFactory,
() -> queryRunnerFactoryConglomerate, // query runner factory conglomerate corporation unionized collective
DirectQueryProcessingPool.INSTANCE, // query executor service
NoopJoinableFactory.INSTANCE,
() -> monitorScheduler, // monitor scheduler
new SegmentCacheManagerFactory(new DefaultObjectMapper()),
MAPPER,
INDEX_IO,
MapCache.create(0),
FireDepartmentTest.NO_CACHE_CONFIG,
new CachePopulatorStats(),
INDEX_MERGER_V9,
EasyMock.createNiceMock(DruidNodeAnnouncer.class),
EasyMock.createNiceMock(DruidNode.class),
new LookupNodeService("tier"),
new DataNodeService("tier", 1000, ServerType.INDEXER_EXECUTOR, 0),
new NoopTestTaskReportFileWriter(),
null,
AuthTestUtils.TEST_AUTHORIZER_MAPPER,
new NoopChatHandlerProvider(),
TEST_UTILS.getRowIngestionMetersFactory(),
appenderatorsManager,
new NoopIndexingServiceClient(),
null,
null,
null);
}
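A hypothetical usage sketch, not part of the test class above: once the factory is assembled with these stand-ins, each test can build a per-task toolbox from it (NoopTask is used here only as a placeholder for whatever task a given test actually runs):
TaskToolboxFactory factory = setUpTaskToolboxFactory(dataSegmentPusher, handoffNotifierFactory, mdc, appenderatorsManager);
Task task = NoopTask.create();
TaskToolbox toolbox = factory.build(task);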
use of org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider in project druid by druid-io.
In the class WorkerTaskMonitorTest, the method createTaskMonitor:
private WorkerTaskMonitor createTaskMonitor() {
final TaskConfig taskConfig = new TaskConfig(FileUtils.createTempDir().toString(), null, null, 0, null, false, null, null, null, false, false, TaskConfig.BATCH_PROCESSING_MODE_DEFAULT.name());
TaskActionClientFactory taskActionClientFactory = EasyMock.createNiceMock(TaskActionClientFactory.class);
TaskActionClient taskActionClient = EasyMock.createNiceMock(TaskActionClient.class);
EasyMock.expect(taskActionClientFactory.create(EasyMock.anyObject())).andReturn(taskActionClient).anyTimes();
SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
EasyMock.replay(taskActionClientFactory, taskActionClient, notifierFactory);
return new WorkerTaskMonitor(
    jsonMapper,
    new SingleTaskBackgroundRunner(
        new TaskToolboxFactory(taskConfig, null, taskActionClientFactory, null, null, null, null, null, null, null, notifierFactory, null, null, NoopJoinableFactory.INSTANCE, null, new SegmentCacheManagerFactory(jsonMapper), jsonMapper, indexIO, null, null, null, indexMergerV9, null, null, null, null, new NoopTestTaskReportFileWriter(), null, AuthTestUtils.TEST_AUTHORIZER_MAPPER, new NoopChatHandlerProvider(), testUtils.getRowIngestionMetersFactory(), new TestAppenderatorsManager(), new NoopIndexingServiceClient(), null, null, null),
        taskConfig,
        new NoopServiceEmitter(),
        DUMMY_NODE,
        new ServerConfig()
    ),
    taskConfig,
    cf,
    workerCuratorCoordinator,
    EasyMock.createNiceMock(DruidLeaderClient.class)
);
}
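A sketch of how the monitor built above is presumably driven by the surrounding test (the start/stop lifecycle calls are assumed; run them from a test method declared to throw Exception to cover any checked exceptions):
WorkerTaskMonitor workerTaskMonitor = createTaskMonitor();
workerTaskMonitor.start(); // begin watching for task assignments
// ... assign a task via the worker curator coordinator and assert on the resulting status ...
workerTaskMonitor.stop();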
use of org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider in project druid by druid-io.
In the class MaterializedViewSupervisorSpecTest, the method testEmptyBaseDataSource:
@Test
public void testEmptyBaseDataSource() {
expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class));
expectedException.expectMessage("baseDataSource cannot be null or empty. Please provide a baseDataSource.");
// noinspection ResultOfObjectAllocationIgnored (this method call will trigger the expected exception)
new MaterializedViewSupervisorSpec(
    "",
    new DimensionsSpec(Lists.newArrayList(
        new StringDimensionSchema("isUnpatrolled"),
        new StringDimensionSchema("metroCode"),
        new StringDimensionSchema("namespace"),
        new StringDimensionSchema("page"),
        new StringDimensionSchema("regionIsoCode"),
        new StringDimensionSchema("regionName"),
        new StringDimensionSchema("user")
    )),
    new AggregatorFactory[] {
        new CountAggregatorFactory("count"),
        new LongSumAggregatorFactory("added", "added")
    },
    HadoopTuningConfig.makeDefaultTuningConfig(),
    null, null, null, null, null,
    false,
    objectMapper,
    null, null, null, null, null,
    new MaterializedViewTaskConfig(),
    EasyMock.createMock(AuthorizerMapper.class),
    new NoopChatHandlerProvider(),
    new SupervisorStateManagerConfig()
);
}
use of org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider in project druid by druid-io.
In the class MaterializedViewSupervisorSpecTest, the method testSupervisorSerialization:
@Test
public void testSupervisorSerialization() throws IOException {
String supervisorStr = "{\n"
    + " \"type\" : \"derivativeDataSource\",\n"
    + " \"baseDataSource\": \"wikiticker\",\n"
    + " \"dimensionsSpec\":{\n"
    + " \"dimensions\" : [\n"
    + " \"isUnpatrolled\",\n"
    + " \"metroCode\",\n"
    + " \"namespace\",\n"
    + " \"page\",\n"
    + " \"regionIsoCode\",\n"
    + " \"regionName\",\n"
    + " \"user\"\n"
    + " ]\n"
    + " },\n"
    + " \"metricsSpec\" : [\n"
    + " {\n"
    + " \"name\" : \"count\",\n"
    + " \"type\" : \"count\"\n"
    + " },\n"
    + " {\n"
    + " \"name\" : \"added\",\n"
    + " \"type\" : \"longSum\",\n"
    + " \"fieldName\" : \"added\"\n"
    + " }\n"
    + " ],\n"
    + " \"tuningConfig\": {\n"
    + " \"type\" : \"hadoop\"\n"
    + " }\n"
    + "}";
MaterializedViewSupervisorSpec expected = new MaterializedViewSupervisorSpec(
    "wikiticker",
    new DimensionsSpec(Lists.newArrayList(
        new StringDimensionSchema("isUnpatrolled"),
        new StringDimensionSchema("metroCode"),
        new StringDimensionSchema("namespace"),
        new StringDimensionSchema("page"),
        new StringDimensionSchema("regionIsoCode"),
        new StringDimensionSchema("regionName"),
        new StringDimensionSchema("user")
    )),
    new AggregatorFactory[] {
        new CountAggregatorFactory("count"),
        new LongSumAggregatorFactory("added", "added")
    },
    HadoopTuningConfig.makeDefaultTuningConfig(),
    null, null, null, null, null,
    false,
    objectMapper,
    null, null, null, null, null,
    new MaterializedViewTaskConfig(),
    EasyMock.createMock(AuthorizerMapper.class),
    new NoopChatHandlerProvider(),
    new SupervisorStateManagerConfig()
);
MaterializedViewSupervisorSpec spec = objectMapper.readValue(supervisorStr, MaterializedViewSupervisorSpec.class);
Assert.assertEquals(expected.getBaseDataSource(), spec.getBaseDataSource());
Assert.assertEquals(expected.getId(), spec.getId());
Assert.assertEquals(expected.getDataSourceName(), spec.getDataSourceName());
Assert.assertEquals(expected.getDimensions(), spec.getDimensions());
Assert.assertEquals(expected.getMetrics(), spec.getMetrics());
}
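As a possible follow-on check, not in the original test: round-trip the parsed spec through the same mapper and confirm the identifying fields survive a second pass (this assumes the mapper is configured with the same injectable values used for the first readValue):
String roundTripped = objectMapper.writeValueAsString(spec);
MaterializedViewSupervisorSpec reparsed = objectMapper.readValue(roundTripped, MaterializedViewSupervisorSpec.class);
Assert.assertEquals(spec.getBaseDataSource(), reparsed.getBaseDataSource());
Assert.assertEquals(spec.getId(), reparsed.getId());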