
Example 1 with SegmentHandoffNotifierFactory

Use of org.apache.druid.segment.handoff.SegmentHandoffNotifierFactory in project druid by druid-io, in the class AppenderatorDriverRealtimeIndexTaskTest, method makeToolboxFactory.

private void makeToolboxFactory(final File directory) {
    taskStorage = new HeapMemoryTaskStorage(new TaskStorageConfig(null));
    publishedSegments = new CopyOnWriteArrayList<>();
    ObjectMapper mapper = new DefaultObjectMapper();
    mapper.registerSubtypes(LinearShardSpec.class);
    mapper.registerSubtypes(NumberedShardSpec.class);
    IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator(mapper, derbyConnectorRule.metadataTablesConfigSupplier().get(), derbyConnectorRule.getConnector()) {

        @Override
        public Set<DataSegment> announceHistoricalSegments(Set<DataSegment> segments) throws IOException {
            Set<DataSegment> result = super.announceHistoricalSegments(segments);
            Assert.assertFalse("Segment latch not initialized, did you forget to call expectPublishSegments?", segmentLatch == null);
            publishedSegments.addAll(result);
            segments.forEach(s -> segmentLatch.countDown());
            return result;
        }

        @Override
        public SegmentPublishResult announceHistoricalSegments(Set<DataSegment> segments, Set<DataSegment> segmentsToDrop, DataSourceMetadata startMetadata, DataSourceMetadata endMetadata) throws IOException {
            SegmentPublishResult result = super.announceHistoricalSegments(segments, segmentsToDrop, startMetadata, endMetadata);
            Assert.assertFalse("Segment latch not initialized, did you forget to call expectPublishSegments?", segmentLatch == null);
            publishedSegments.addAll(result.getSegments());
            result.getSegments().forEach(s -> segmentLatch.countDown());
            return result;
        }
    };
    taskLockbox = new TaskLockbox(taskStorage, mdc);
    final TaskConfig taskConfig = new TaskConfig(directory.getPath(), null, null, 50000, null, true, null, null, null, false, false, TaskConfig.BATCH_PROCESSING_MODE_DEFAULT.name());
    final TaskActionToolbox taskActionToolbox = new TaskActionToolbox(taskLockbox, taskStorage, mdc, EMITTER, EasyMock.createMock(SupervisorManager.class));
    final TaskActionClientFactory taskActionClientFactory = new LocalTaskActionClientFactory(taskStorage, taskActionToolbox, new TaskAuditLogConfig(false));
    final QueryRunnerFactoryConglomerate conglomerate = new DefaultQueryRunnerFactoryConglomerate(ImmutableMap.of(TimeseriesQuery.class, new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), (query, future) -> {
    // do nothing
    })));
    handOffCallbacks = new ConcurrentHashMap<>();
    final SegmentHandoffNotifierFactory handoffNotifierFactory = dataSource -> new SegmentHandoffNotifier() {

        @Override
        public boolean registerSegmentHandoffCallback(SegmentDescriptor descriptor, Executor exec, Runnable handOffRunnable) {
            handOffCallbacks.put(descriptor, new Pair<>(exec, handOffRunnable));
            handoffLatch.countDown();
            return true;
        }

        @Override
        public void start() {
        // Noop
        }

        @Override
        public void close() {
        // Noop
        }
    };
    final TestUtils testUtils = new TestUtils();
    taskToolboxFactory = new TaskToolboxFactory(
        taskConfig,
        new DruidNode("druid/middlemanager", "localhost", false, 8091, null, true, false),
        taskActionClientFactory, EMITTER,
        new TestDataSegmentPusher(), new TestDataSegmentKiller(),
        /* DataSegmentMover */ null, /* DataSegmentArchiver */ null,
        new TestDataSegmentAnnouncer(), EasyMock.createNiceMock(DataSegmentServerAnnouncer.class),
        handoffNotifierFactory, () -> conglomerate,
        /* queryExecutorService */ DirectQueryProcessingPool.INSTANCE,
        NoopJoinableFactory.INSTANCE, () -> EasyMock.createMock(MonitorScheduler.class),
        new SegmentCacheManagerFactory(testUtils.getTestObjectMapper()),
        testUtils.getTestObjectMapper(), testUtils.getTestIndexIO(), MapCache.create(1024),
        new CacheConfig(), new CachePopulatorStats(), testUtils.getTestIndexMergerV9(),
        EasyMock.createNiceMock(DruidNodeAnnouncer.class), EasyMock.createNiceMock(DruidNode.class),
        new LookupNodeService("tier"), new DataNodeService("tier", 1000, ServerType.INDEXER_EXECUTOR, 0),
        new SingleFileTaskReportFileWriter(reportsFile), null,
        AuthTestUtils.TEST_AUTHORIZER_MAPPER, new NoopChatHandlerProvider(),
        testUtils.getRowIngestionMetersFactory(), new TestAppenderatorsManager(),
        new NoopIndexingServiceClient(), null, null, null);
}
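The factory above only records each callback in handOffCallbacks; a test that wants to simulate handoff completing has to replay those callbacks itself. Below is a minimal sketch of such a helper, not part of the original test (the method name simulateHandoff is hypothetical, and it assumes Druid's Pair exposes its two elements as lhs and rhs):

private void simulateHandoff() {
    // Run every registered handoff callback on the executor it was registered with,
    // then clear the map so the helper can be called again safely.
    handOffCallbacks.forEach((descriptor, execAndCallback) -> {
        Executor exec = execAndCallback.lhs;      // executor supplied by the task
        Runnable callback = execAndCallback.rhs;  // handoff notification callback
        exec.execute(callback);
    });
    handOffCallbacks.clear();
}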

Example 2 with SegmentHandoffNotifierFactory

Use of org.apache.druid.segment.handoff.SegmentHandoffNotifierFactory in project druid by druid-io, in the class IndexTaskTest, method testWaitForSegmentAvailabilityMultipleSegmentsTimeout.

@Test
public void testWaitForSegmentAvailabilityMultipleSegmentsTimeout() throws IOException {
    final File tmpDir = temporaryFolder.newFolder();
    TaskToolbox mockToolbox = EasyMock.createMock(TaskToolbox.class);
    SegmentHandoffNotifierFactory mockFactory = EasyMock.createMock(SegmentHandoffNotifierFactory.class);
    SegmentHandoffNotifier mockNotifier = EasyMock.createMock(SegmentHandoffNotifier.class);
    DataSegment mockDataSegment1 = EasyMock.createMock(DataSegment.class);
    DataSegment mockDataSegment2 = EasyMock.createMock(DataSegment.class);
    List<DataSegment> segmentsToWaitFor = new ArrayList<>();
    segmentsToWaitFor.add(mockDataSegment1);
    segmentsToWaitFor.add(mockDataSegment2);
    IndexTask indexTask = new IndexTask(null, null, createDefaultIngestionSpec(jsonMapper, tmpDir, new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null), null, createTuningConfigWithMaxRowsPerSegment(2, true), false, false), null);
    EasyMock.expect(mockDataSegment1.getInterval()).andReturn(Intervals.of("1970-01-01/2100-01-01")).once();
    EasyMock.expect(mockDataSegment1.getVersion()).andReturn("dummyString").once();
    EasyMock.expect(mockDataSegment1.getShardSpec()).andReturn(EasyMock.createMock(ShardSpec.class)).once();
    EasyMock.expect(mockDataSegment2.getInterval()).andReturn(Intervals.of("1970-01-01/2100-01-01")).once();
    EasyMock.expect(mockDataSegment2.getVersion()).andReturn("dummyString").once();
    EasyMock.expect(mockDataSegment2.getShardSpec()).andReturn(EasyMock.createMock(ShardSpec.class)).once();
    EasyMock.expect(mockToolbox.getSegmentHandoffNotifierFactory()).andReturn(mockFactory).once();
    EasyMock.expect(mockToolbox.getEmitter()).andReturn(new NoopServiceEmitter()).anyTimes();
    EasyMock.expect(mockDataSegment1.getDataSource()).andReturn("MockDataSource").once();
    EasyMock.expect(mockFactory.createSegmentHandoffNotifier("MockDataSource")).andReturn(mockNotifier).once();
    mockNotifier.start();
    EasyMock.expectLastCall().once();
    mockNotifier.registerSegmentHandoffCallback(EasyMock.anyObject(), EasyMock.anyObject(), EasyMock.anyObject());
    EasyMock.expectLastCall().andReturn(true).times(2);
    mockNotifier.close();
    EasyMock.expectLastCall().once();
    EasyMock.replay(mockToolbox);
    EasyMock.replay(mockDataSegment1, mockDataSegment2);
    EasyMock.replay(mockFactory, mockNotifier);
    Assert.assertFalse(indexTask.waitForSegmentAvailability(mockToolbox, segmentsToWaitFor, 1000));
    EasyMock.verify(mockToolbox);
    EasyMock.verify(mockDataSegment1, mockDataSegment2);
    EasyMock.verify(mockFactory, mockNotifier);
}
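Stripped of the segment mocks, the handoff wiring in this test follows a fixed shape: the task asks the factory for a notifier for its datasource, starts it, registers one callback per segment, and closes it when it finishes or gives up. A condensed sketch of that EasyMock setup (the datasource name and the times(2) count are illustrative):

SegmentHandoffNotifierFactory factory = EasyMock.createMock(SegmentHandoffNotifierFactory.class);
SegmentHandoffNotifier notifier = EasyMock.createMock(SegmentHandoffNotifier.class);

EasyMock.expect(factory.createSegmentHandoffNotifier("someDataSource")).andReturn(notifier).once();
notifier.start();
EasyMock.expectLastCall().once();
EasyMock.expect(notifier.registerSegmentHandoffCallback(
    EasyMock.anyObject(), EasyMock.anyObject(), EasyMock.anyObject()
)).andReturn(true).times(2);
notifier.close();
EasyMock.expectLastCall().once();

EasyMock.replay(factory, notifier);
// ... exercise the code under test, then ...
EasyMock.verify(factory, notifier);

Because the mocked notifier never actually runs the registered callbacks, the wait in the test above can only end by reaching its timeout, which is why the assertion expects waitForSegmentAvailability to return false.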

Example 3 with SegmentHandoffNotifierFactory

Use of org.apache.druid.segment.handoff.SegmentHandoffNotifierFactory in project druid by druid-io, in the class IngestSegmentFirehoseFactoryTimelineTest, method constructorFeeder.

@Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> constructorFeeder() {
    final List<TestCase> testCases = ImmutableList.of(tc("2000/2000T02", 3, 7, ds("2000/2000T01", "v1", 0, ir("2000", 1), ir("2000T00:01", 2)), ds("2000T01/2000T02", "v1", 0, ir("2000T01", 4))), /* Adjacent segments */
    tc("2000/2000T02", 3, 7, ds("2000/2000T02", "v1", 0, ir("2000", 1), ir("2000T00:01", 2), ir("2000T01", 8)), ds("2000T01/2000T02", "v2", 0, ir("2000T01:01", 4))), /* 1H segment overlaid on top of 2H segment */
    tc("2000/2000-01-02", 4, 23, ds("2000/2000-01-02", "v1", 0, ir("2000", 1), ir("2000T00:01", 2), ir("2000T01", 8), ir("2000T02", 16)), ds("2000T01/2000T02", "v2", 0, ir("2000T01:01", 4))), /* 1H segment overlaid on top of 1D segment */
    tc("2000/2000T02", 4, 15, ds("2000/2000T02", "v1", 0, ir("2000", 1), ir("2000T00:01", 2), ir("2000T01", 8)), ds("2000/2000T02", "v1", 1, ir("2000T01:01", 4))), /* Segment set with two segments for the same interval */
    tc("2000T01/2000T02", 1, 2, ds("2000/2000T03", "v1", 0, ir("2000", 1), ir("2000T01", 2), ir("2000T02", 4))), /* Segment wider than desired interval */
    tc("2000T02/2000T04", 2, 12, ds("2000/2000T03", "v1", 0, ir("2000", 1), ir("2000T01", 2), ir("2000T02", 4)), ds("2000T03/2000T04", "v1", 0, ir("2000T03", 8))));
    final List<Object[]> constructors = new ArrayList<>();
    for (final TestCase testCase : testCases) {
        SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
        EasyMock.replay(notifierFactory);
        final SegmentCacheManagerFactory slf = new SegmentCacheManagerFactory(MAPPER);
        final RetryPolicyFactory retryPolicyFactory = new RetryPolicyFactory(new RetryPolicyConfig());
        final CoordinatorClient cc = new CoordinatorClient(null, null) {

            @Override
            public Collection<DataSegment> fetchUsedSegmentsInDataSourceForIntervals(String dataSource, List<Interval> intervals) {
                // Expect the interval we asked for
                if (intervals.equals(ImmutableList.of(testCase.interval))) {
                    return ImmutableSet.copyOf(testCase.segments);
                } else {
                    throw new IllegalArgumentException("BAD");
                }
            }

            @Override
            public DataSegment fetchUsedSegment(String dataSource, String segmentId) {
                return testCase.segments.stream().filter(s -> s.getId().toString().equals(segmentId)).findAny().get();
            }
        };
        final IngestSegmentFirehoseFactory factory = new IngestSegmentFirehoseFactory(
            DATA_SOURCE, testCase.interval, null, TrueDimFilter.instance(),
            Arrays.asList(DIMENSIONS), Arrays.asList(METRICS),
            /* Split as much as possible */ 1L,
            INDEX_IO, cc, slf, retryPolicyFactory);
        constructors.add(new Object[] { testCase.toString(), factory, testCase.tmpDir, testCase.expectedCount, testCase.expectedSum, testCase.segments.size() });
    }
    return constructors;
}
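The anonymous CoordinatorClient is the only collaborator here with real behaviour: it answers segment lookups from the fixed per-test segment set. The same stub could be pulled out into a reusable helper, sketched below (the method name fixedSegmentCoordinator is hypothetical; the behaviour mirrors the overrides above):

static CoordinatorClient fixedSegmentCoordinator(Interval expectedInterval, Collection<DataSegment> segments) {
    // Constructor args are null because the stub never issues real HTTP calls.
    return new CoordinatorClient(null, null) {
        @Override
        public Collection<DataSegment> fetchUsedSegmentsInDataSourceForIntervals(String dataSource, List<Interval> intervals) {
            if (!intervals.equals(ImmutableList.of(expectedInterval))) {
                throw new IllegalArgumentException("Unexpected intervals: " + intervals);
            }
            return ImmutableSet.copyOf(segments);
        }

        @Override
        public DataSegment fetchUsedSegment(String dataSource, String segmentId) {
            return segments.stream()
                           .filter(s -> s.getId().toString().equals(segmentId))
                           .findAny()
                           .orElseThrow(() -> new IllegalArgumentException("Unknown segment: " + segmentId));
        }
    };
}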

Example 4 with SegmentHandoffNotifierFactory

Use of org.apache.druid.segment.handoff.SegmentHandoffNotifierFactory in project druid by druid-io, in the class IngestSegmentFirehoseFactoryTest, method constructorFeeder.

@Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> constructorFeeder() throws IOException {
    final IndexSpec indexSpec = new IndexSpec();
    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder().withMinTimestamp(JodaUtils.MIN_INSTANT).withDimensionsSpec(ROW_PARSER).withMetrics(new LongSumAggregatorFactory(METRIC_LONG_NAME, DIM_LONG_NAME), new DoubleSumAggregatorFactory(METRIC_FLOAT_NAME, DIM_FLOAT_NAME)).build();
    final IncrementalIndex index = new OnheapIncrementalIndex.Builder().setIndexSchema(schema).setMaxRowCount(MAX_ROWS * MAX_SHARD_NUMBER).build();
    for (Integer i = 0; i < MAX_ROWS; ++i) {
        index.add(ROW_PARSER.parseBatch(buildRow(i.longValue())).get(0));
    }
    FileUtils.mkdirp(PERSIST_DIR);
    INDEX_MERGER_V9.persist(index, PERSIST_DIR, indexSpec, null);
    final CoordinatorClient cc = new CoordinatorClient(null, null) {

        @Override
        public Collection<DataSegment> fetchUsedSegmentsInDataSourceForIntervals(String dataSource, List<Interval> intervals) {
            return ImmutableSet.copyOf(SEGMENT_SET);
        }
    };
    SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
    EasyMock.replay(notifierFactory);
    final SegmentCacheManagerFactory slf = new SegmentCacheManagerFactory(MAPPER);
    final RetryPolicyFactory retryPolicyFactory = new RetryPolicyFactory(new RetryPolicyConfig());
    Collection<Object[]> values = new ArrayList<>();
    for (InputRowParser parser : Arrays.<InputRowParser>asList(ROW_PARSER, new MapInputRowParser(new JSONParseSpec(new TimestampSpec(TIME_COLUMN, "auto", null), DimensionsSpec.builder().setDimensionExclusions(ImmutableList.of(DIM_FLOAT_NAME, DIM_LONG_NAME)).build(), null, null, null)))) {
        for (List<String> dim_names : Arrays.<List<String>>asList(null, ImmutableList.of(DIM_NAME))) {
            for (List<String> metric_names : Arrays.<List<String>>asList(null, ImmutableList.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME))) {
                for (Boolean wrapInCombining : Arrays.asList(false, true)) {
                    final IngestSegmentFirehoseFactory isfFactory = new IngestSegmentFirehoseFactory(TASK.getDataSource(), Intervals.ETERNITY, null, new SelectorDimFilter(DIM_NAME, DIM_VALUE, null), dim_names, metric_names, null, INDEX_IO, cc, slf, retryPolicyFactory);
                    final FirehoseFactory factory = wrapInCombining ? new CombiningFirehoseFactory(ImmutableList.of(isfFactory)) : isfFactory;
                    values.add(new Object[] { StringUtils.format("DimNames[%s]MetricNames[%s]ParserDimNames[%s]WrapInCombining[%s]", dim_names == null ? "null" : "dims", metric_names == null ? "null" : "metrics", parser == ROW_PARSER ? "dims" : "null", wrapInCombining), factory, parser });
                }
            }
        }
    }
    return values;
}

Example 5 with SegmentHandoffNotifierFactory

Use of org.apache.druid.segment.handoff.SegmentHandoffNotifierFactory in project druid by druid-io, in the class WorkerTaskMonitorTest, method createTaskMonitor.

private WorkerTaskMonitor createTaskMonitor() {
    final TaskConfig taskConfig = new TaskConfig(FileUtils.createTempDir().toString(), null, null, 0, null, false, null, null, null, false, false, TaskConfig.BATCH_PROCESSING_MODE_DEFAULT.name());
    TaskActionClientFactory taskActionClientFactory = EasyMock.createNiceMock(TaskActionClientFactory.class);
    TaskActionClient taskActionClient = EasyMock.createNiceMock(TaskActionClient.class);
    EasyMock.expect(taskActionClientFactory.create(EasyMock.anyObject())).andReturn(taskActionClient).anyTimes();
    SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
    EasyMock.replay(taskActionClientFactory, taskActionClient, notifierFactory);
    return new WorkerTaskMonitor(
        jsonMapper,
        new SingleTaskBackgroundRunner(
            new TaskToolboxFactory(
                taskConfig, null, taskActionClientFactory, null, null, null, null, null, null, null,
                notifierFactory, null, null, NoopJoinableFactory.INSTANCE, null,
                new SegmentCacheManagerFactory(jsonMapper), jsonMapper, indexIO, null, null, null,
                indexMergerV9, null, null, null, null,
                new NoopTestTaskReportFileWriter(), null,
                AuthTestUtils.TEST_AUTHORIZER_MAPPER, new NoopChatHandlerProvider(),
                testUtils.getRowIngestionMetersFactory(), new TestAppenderatorsManager(),
                new NoopIndexingServiceClient(), null, null, null),
            taskConfig, new NoopServiceEmitter(), DUMMY_NODE, new ServerConfig()),
        taskConfig, cf, workerCuratorCoordinator, EasyMock.createNiceMock(DruidLeaderClient.class));
}
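Here the nice mock is enough because the worker test never exercises handoff. When a test does need deterministic behaviour in that slot, the factory can also be hand-rolled as a lambda over the interface shown in Example 1; a no-op sketch (returning true from every registration while never firing the callback is an assumption of this sketch, not something WorkerTaskMonitor requires):

SegmentHandoffNotifierFactory noopNotifierFactory = dataSource -> new SegmentHandoffNotifier() {
    @Override
    public boolean registerSegmentHandoffCallback(SegmentDescriptor descriptor, Executor exec, Runnable handOffRunnable) {
        // Pretend the callback was registered; handoff completion is never signalled.
        return true;
    }

    @Override
    public void start() {
        // no-op
    }

    @Override
    public void close() {
        // no-op
    }
};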
