
Example 11 with TimelineObjectHolder

Use of org.apache.druid.timeline.TimelineObjectHolder in project druid by druid-io.

From the class IngestSegmentFirehoseFactoryTest, method testGetUniqueDimensionsAndMetrics:

@Test
public void testGetUniqueDimensionsAndMetrics() {
    final int numSegmentsPerPartitionChunk = 5;
    final int numPartitionChunksPerTimelineObject = 10;
    final int numSegments = numSegmentsPerPartitionChunk * numPartitionChunksPerTimelineObject;
    final Interval interval = Intervals.of("2017-01-01/2017-01-02");
    final String version = "1";
    final List<TimelineObjectHolder<String, DataSegment>> timelineSegments = new ArrayList<>();
    for (int i = 0; i < numPartitionChunksPerTimelineObject; i++) {
        final List<PartitionChunk<DataSegment>> chunks = new ArrayList<>();
        for (int j = 0; j < numSegmentsPerPartitionChunk; j++) {
            final List<String> dims = IntStream.range(i, i + numSegmentsPerPartitionChunk)
                                               .mapToObj(suffix -> "dim" + suffix)
                                               .collect(Collectors.toList());
            final List<String> metrics = IntStream.range(i, i + numSegmentsPerPartitionChunk)
                                                  .mapToObj(suffix -> "met" + suffix)
                                                  .collect(Collectors.toList());
            final DataSegment segment = new DataSegment(
                "ds",
                interval,
                version,
                ImmutableMap.of(), // loadSpec
                dims,
                metrics,
                new NumberedShardSpec(numPartitionChunksPerTimelineObject, i),
                1, // binaryVersion
                1  // size
            );
            final PartitionChunk<DataSegment> partitionChunk =
                new NumberedPartitionChunk<>(i, numPartitionChunksPerTimelineObject, segment);
            chunks.add(partitionChunk);
        }
        final TimelineObjectHolder<String, DataSegment> timelineHolder = new TimelineObjectHolder<>(interval, version, new PartitionHolder<>(chunks));
        timelineSegments.add(timelineHolder);
    }
    final String[] expectedDims = new String[] { "dim9", "dim10", "dim11", "dim12", "dim13", "dim8", "dim7", "dim6", "dim5", "dim4", "dim3", "dim2", "dim1", "dim0" };
    final String[] expectedMetrics = new String[] { "met9", "met10", "met11", "met12", "met13", "met8", "met7", "met6", "met5", "met4", "met3", "met2", "met1", "met0" };
    Assert.assertEquals(Arrays.asList(expectedDims), ReingestionTimelineUtils.getUniqueDimensions(timelineSegments, null));
    Assert.assertEquals(Arrays.asList(expectedMetrics), ReingestionTimelineUtils.getUniqueMetrics(timelineSegments));
}
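
The expected ordering above (dim9 through dim13 first, then dim8 down to dim0) comes from scanning the timeline holders newest-first and keeping only the first occurrence of each name. As a rough sketch, and not the actual ReingestionTimelineUtils implementation, a helper that reproduces the asserted ordering could look like this, reusing the Druid types the test already imports:

static List<String> uniqueDimensionsNewestFirst(List<TimelineObjectHolder<String, DataSegment>> holders) {
    // LinkedHashSet keeps insertion order while deduplicating names.
    final Set<String> uniqueDims = new LinkedHashSet<>();
    // Walk the holders in reverse so dimensions from the newest holder are seen first.
    for (int i = holders.size() - 1; i >= 0; i--) {
        for (PartitionChunk<DataSegment> chunk : holders.get(i).getObject()) {
            uniqueDims.addAll(chunk.getObject().getDimensions());
        }
    }
    return new ArrayList<>(uniqueDims);
}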

Example 12 with TimelineObjectHolder

Use of org.apache.druid.timeline.TimelineObjectHolder in project druid by druid-io.

From the class AbstractITBatchIndexTest, method submitTaskAndWait:

protected void submitTaskAndWait(String taskSpec, String dataSourceName, boolean waitForNewVersion, boolean waitForSegmentsToLoad, Pair<Boolean, Boolean> segmentAvailabilityConfirmationPair) {
    final List<DataSegment> oldVersions = waitForNewVersion ? coordinator.getAvailableSegments(dataSourceName) : null;
    long startSubTaskCount = -1;
    final boolean assertRunsSubTasks = taskSpec.contains("index_parallel");
    if (assertRunsSubTasks) {
        startSubTaskCount = countCompleteSubTasks(dataSourceName, !taskSpec.contains("dynamic"));
    }
    final String taskID = indexer.submitTask(taskSpec);
    LOG.info("TaskID for loading index task %s", taskID);
    indexer.waitUntilTaskCompletes(taskID);
    if (assertRunsSubTasks) {
        final boolean perfectRollup = !taskSpec.contains("dynamic");
        final long newSubTasks = countCompleteSubTasks(dataSourceName, perfectRollup) - startSubTaskCount;
        Assert.assertTrue(newSubTasks > 0, StringUtils.format("The supervisor task[%s] didn't create any sub tasks. Was it executed in the parallel mode?", taskID));
    }
    if (segmentAvailabilityConfirmationPair.lhs != null && segmentAvailabilityConfirmationPair.lhs) {
        TaskReport reportRaw = indexer.getTaskReport(taskID).get("ingestionStatsAndErrors");
        IngestionStatsAndErrorsTaskReport report = (IngestionStatsAndErrorsTaskReport) reportRaw;
        IngestionStatsAndErrorsTaskReportData reportData = (IngestionStatsAndErrorsTaskReportData) report.getPayload();
        // Confirm that the task spent more than 0 ms waiting for segment availability.
        Assert.assertTrue(reportData.getSegmentAvailabilityWaitTimeMs() > 0);
        // Make sure that the result of waiting for segments to load matches the expected result
        if (segmentAvailabilityConfirmationPair.rhs != null) {
            Assert.assertEquals(Boolean.valueOf(reportData.isSegmentAvailabilityConfirmed()), segmentAvailabilityConfirmationPair.rhs);
        }
    }
    // When overwriting existing data, wait until the coordinator actually sees a new version
    // that overshadows the pre-task segments; otherwise the load check below could pass merely
    // because the original segments have loaded.
    if (waitForNewVersion) {
        ITRetryUtil.retryUntilTrue(() -> {
            final VersionedIntervalTimeline<String, DataSegment> timeline = VersionedIntervalTimeline.forSegments(coordinator.getAvailableSegments(dataSourceName));
            final List<TimelineObjectHolder<String, DataSegment>> holders = timeline.lookup(Intervals.ETERNITY);
            return FluentIterable.from(holders).transformAndConcat(TimelineObjectHolder::getObject).anyMatch(chunk -> FluentIterable.from(oldVersions).anyMatch(oldSegment -> chunk.getObject().overshadows(oldSegment)));
        }, "See a new version");
    }
    if (waitForSegmentsToLoad) {
        ITRetryUtil.retryUntilTrue(() -> coordinator.areSegmentsLoaded(dataSourceName), "Segment Load");
    }
}
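
The retry predicate is the core of the overwrite check: it rebuilds a VersionedIntervalTimeline from the segments the coordinator currently serves and succeeds once any chunk overshadows a segment that existed before the task was submitted. A minimal standalone sketch of that check, using only calls that already appear in the method body:

static boolean newVersionVisible(Collection<DataSegment> currentSegments, List<DataSegment> oldVersions) {
    final VersionedIntervalTimeline<String, DataSegment> timeline =
        VersionedIntervalTimeline.forSegments(currentSegments);
    for (TimelineObjectHolder<String, DataSegment> holder : timeline.lookup(Intervals.ETERNITY)) {
        for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
            for (DataSegment oldSegment : oldVersions) {
                if (chunk.getObject().overshadows(oldSegment)) {
                    return true; // a newer version now replaces at least one pre-task segment
                }
            }
        }
    }
    return false;
}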

Example 13 with TimelineObjectHolder

Use of org.apache.druid.timeline.TimelineObjectHolder in project druid by druid-io.

From the class QueryRunnerTestHelper, method makeFilteringQueryRunner:

public static <T> QueryRunner<T> makeFilteringQueryRunner(final VersionedIntervalTimeline<String, ReferenceCountingSegment> timeline, final QueryRunnerFactory<T, Query<T>> factory) {
    final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
    return new FluentQueryRunnerBuilder<T>(toolChest).create(new QueryRunner<T>() {

        @Override
        public Sequence<T> run(QueryPlus<T> queryPlus, ResponseContext responseContext) {
            Query<T> query = queryPlus.getQuery();
            List<TimelineObjectHolder<String, ReferenceCountingSegment>> segments = new ArrayList<>();
            for (Interval interval : query.getIntervals()) {
                segments.addAll(timeline.lookup(interval));
            }
            List<Sequence<T>> sequences = new ArrayList<>();
            for (TimelineObjectHolder<String, ReferenceCountingSegment> holder : toolChest.filterSegments(query, segments)) {
                Segment segment = holder.getObject().getChunk(0).getObject();
                QueryPlus<T> queryPlusRunning = queryPlus.withQuery(
                    queryPlus.getQuery().withQuerySegmentSpec(
                        new SpecificSegmentSpec(new SegmentDescriptor(holder.getInterval(), holder.getVersion(), 0))
                    )
                );
                sequences.add(factory.createRunner(segment).run(queryPlusRunning, responseContext));
            }
            return new MergeSequence<>(query.getResultOrdering(), Sequences.simple(sequences));
        }
    }).applyPreMergeDecoration().mergeResults().applyPostMergeDecoration();
}
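
The runner resolves each query interval against the timeline and rewrites the query to target one specific (interval, version, partition) triple at a time through SpecificSegmentSpec; partition 0 is hard-coded, presumably because these test timelines hold a single chunk per holder. A hedged sketch of the resolution step in isolation, assuming PartitionChunk.getChunkNumber() supplies the partition number:

static List<SegmentDescriptor> segmentDescriptorsFor(
        VersionedIntervalTimeline<String, ReferenceCountingSegment> timeline,
        Iterable<Interval> intervals) {
    final List<SegmentDescriptor> descriptors = new ArrayList<>();
    for (Interval interval : intervals) {
        for (TimelineObjectHolder<String, ReferenceCountingSegment> holder : timeline.lookup(interval)) {
            // One descriptor per partition chunk held for this interval and version.
            for (PartitionChunk<ReferenceCountingSegment> chunk : holder.getObject()) {
                descriptors.add(new SegmentDescriptor(holder.getInterval(), holder.getVersion(), chunk.getChunkNumber()));
            }
        }
    }
    return descriptors;
}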

Example 14 with TimelineObjectHolder

Use of org.apache.druid.timeline.TimelineObjectHolder in project druid by druid-io.

From the class HadoopIngestionSpec, method updateSegmentListIfDatasourcePathSpecIsUsed:

public static void updateSegmentListIfDatasourcePathSpecIsUsed(HadoopIngestionSpec spec, ObjectMapper jsonMapper, UsedSegmentsRetriever segmentsRetriever) throws IOException {
    String dataSource = "dataSource";
    String type = "type";
    String multi = "multi";
    String children = "children";
    String segments = "segments";
    String ingestionSpec = "ingestionSpec";
    Map<String, Object> pathSpec = spec.getIOConfig().getPathSpec();
    List<Map<String, Object>> datasourcePathSpecs = new ArrayList<>();
    if (pathSpec.get(type).equals(dataSource)) {
        datasourcePathSpecs.add(pathSpec);
    } else if (pathSpec.get(type).equals(multi)) {
        List<Map<String, Object>> childPathSpecs = (List<Map<String, Object>>) pathSpec.get(children);
        for (Map<String, Object> childPathSpec : childPathSpecs) {
            if (childPathSpec.get(type).equals(dataSource)) {
                datasourcePathSpecs.add(childPathSpec);
            }
        }
    }
    for (Map<String, Object> datasourcePathSpec : datasourcePathSpecs) {
        Map<String, Object> ingestionSpecMap = (Map<String, Object>) datasourcePathSpec.get(ingestionSpec);
        DatasourceIngestionSpec ingestionSpecObj = jsonMapper.convertValue(ingestionSpecMap, DatasourceIngestionSpec.class);
        Collection<DataSegment> usedVisibleSegments = segmentsRetriever.retrieveUsedSegmentsForIntervals(ingestionSpecObj.getDataSource(), ingestionSpecObj.getIntervals(), Segments.ONLY_VISIBLE);
        if (ingestionSpecObj.getSegments() != null) {
            // Ensure that the user-supplied segment list matches the used, visible segments obtained from the DB.
            // This safety check lets users do test-and-set style batch delta ingestion, where the delta
            // ingestion task only runs if the current state of the system is the same as when they
            // submitted the task.
            List<DataSegment> userSuppliedSegmentsList = ingestionSpecObj.getSegments();
            if (usedVisibleSegments.size() == userSuppliedSegmentsList.size()) {
                Set<DataSegment> segmentsSet = new HashSet<>(usedVisibleSegments);
                for (DataSegment userSegment : userSuppliedSegmentsList) {
                    if (!segmentsSet.contains(userSegment)) {
                        throw new IOException("user supplied segments list did not match with segments list obtained from db");
                    }
                }
            } else {
                throw new IOException("user supplied segments list did not match with segments list obtained from db");
            }
        }
        final VersionedIntervalTimeline<String, DataSegment> timeline = VersionedIntervalTimeline.forSegments(usedVisibleSegments);
        final List<WindowedDataSegment> windowedSegments = new ArrayList<>();
        for (Interval interval : ingestionSpecObj.getIntervals()) {
            final List<TimelineObjectHolder<String, DataSegment>> timeLineSegments = timeline.lookup(interval);
            for (TimelineObjectHolder<String, DataSegment> holder : timeLineSegments) {
                for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
                    windowedSegments.add(new WindowedDataSegment(chunk.getObject(), holder.getInterval()));
                }
            }
            datasourcePathSpec.put(segments, windowedSegments);
        }
    }
}
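
The size-plus-containment check in the middle of this method amounts to a set-equality test between the user-supplied segments and the used segments read from the metadata store. Assuming neither collection contains duplicate segments, an equivalent sketch:

static void validateUserSuppliedSegments(Collection<DataSegment> usedVisibleSegments,
                                         List<DataSegment> userSuppliedSegments) throws IOException {
    // Equal sizes plus full containment is set equality when there are no duplicates.
    if (!new HashSet<>(usedVisibleSegments).equals(new HashSet<>(userSuppliedSegments))) {
        throw new IOException("user supplied segments list did not match with segments list obtained from db");
    }
}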

Example 15 with TimelineObjectHolder

Use of org.apache.druid.timeline.TimelineObjectHolder in project druid by druid-io.

From the class SinglePhaseSubTask, method runTask:

@Override
public TaskStatus runTask(final TaskToolbox toolbox) {
    try {
        if (missingIntervalsInOverwriteMode) {
            LOG.warn("Intervals are missing in granularitySpec while this task is potentially overwriting existing segments. " + "Forced to use timeChunk lock.");
        }
        this.authorizerMapper = toolbox.getAuthorizerMapper();
        toolbox.getChatHandlerProvider().register(getId(), this, false);
        rowIngestionMeters = toolbox.getRowIngestionMetersFactory().createRowIngestionMeters();
        parseExceptionHandler = new ParseExceptionHandler(rowIngestionMeters, ingestionSchema.getTuningConfig().isLogParseExceptions(), ingestionSchema.getTuningConfig().getMaxParseExceptions(), ingestionSchema.getTuningConfig().getMaxSavedParseExceptions());
        final InputSource inputSource = ingestionSchema.getIOConfig().getNonNullInputSource(ingestionSchema.getDataSchema().getParser());
        final ParallelIndexSupervisorTaskClient taskClient = toolbox.getSupervisorTaskClientFactory().build(
            new ClientBasedTaskInfoProvider(toolbox.getIndexingServiceClient()),
            getId(),
            1, // always use a single http thread
            ingestionSchema.getTuningConfig().getChatHandlerTimeout(),
            ingestionSchema.getTuningConfig().getChatHandlerNumRetries()
        );
        ingestionState = IngestionState.BUILD_SEGMENTS;
        final Set<DataSegment> pushedSegments = generateAndPushSegments(toolbox, taskClient, inputSource, toolbox.getIndexingTmpDir());
        // Find inputSegments overshadowed by pushedSegments
        final Set<DataSegment> allSegments = new HashSet<>(getTaskLockHelper().getLockedExistingSegments());
        allSegments.addAll(pushedSegments);
        final VersionedIntervalTimeline<String, DataSegment> timeline = VersionedIntervalTimeline.forSegments(allSegments);
        final Set<DataSegment> oldSegments = FluentIterable.from(timeline.findFullyOvershadowed()).transformAndConcat(TimelineObjectHolder::getObject).transform(PartitionChunk::getObject).toSet();
        Map<String, TaskReport> taskReport = getTaskCompletionReports();
        taskClient.report(supervisorTaskId, new PushedSegmentsReport(getId(), oldSegments, pushedSegments, taskReport));
        toolbox.getTaskReportFileWriter().write(getId(), taskReport);
        return TaskStatus.success(getId());
    } catch (Exception e) {
        LOG.error(e, "Encountered exception in parallel sub task.");
        errorMsg = Throwables.getStackTraceAsString(e);
        toolbox.getTaskReportFileWriter().write(getId(), getTaskCompletionReports());
        return TaskStatus.failure(getId(), errorMsg);
    } finally {
        toolbox.getChatHandlerProvider().unregister(getId());
    }
}
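
The overshadow computation in the middle of runTask is the part that touches TimelineObjectHolder: after merging the locked existing segments with the freshly pushed ones, findFullyOvershadowed() yields the timeline entries that the new data replaces. As a sketch, the same FluentIterable chain written with plain loops:

static Set<DataSegment> fullyOvershadowedSegments(Set<DataSegment> allSegments) {
    final VersionedIntervalTimeline<String, DataSegment> timeline =
        VersionedIntervalTimeline.forSegments(allSegments);
    final Set<DataSegment> overshadowed = new HashSet<>();
    for (TimelineObjectHolder<String, DataSegment> holder : timeline.findFullyOvershadowed()) {
        for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
            overshadowed.add(chunk.getObject()); // segments fully replaced by newer data
        }
    }
    return overshadowed;
}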

Aggregations

TimelineObjectHolder (org.apache.druid.timeline.TimelineObjectHolder) 28
DataSegment (org.apache.druid.timeline.DataSegment) 23
Interval (org.joda.time.Interval) 18
ArrayList (java.util.ArrayList) 14
Test (org.junit.Test) 12
List (java.util.List) 11
TableDataSource (org.apache.druid.query.TableDataSource) 10
Map (java.util.Map) 9
IOException (java.io.IOException) 8
HashSet (java.util.HashSet) 8
CountDownLatch (java.util.concurrent.CountDownLatch) 8
VersionedIntervalTimeline (org.apache.druid.timeline.VersionedIntervalTimeline) 8
ImmutableList (com.google.common.collect.ImmutableList) 7
ISE (org.apache.druid.java.util.common.ISE) 7
PartitionChunk (org.apache.druid.timeline.partition.PartitionChunk) 7
Preconditions (com.google.common.base.Preconditions) 6
Collections (java.util.Collections) 6
HashMap (java.util.HashMap) 6
Logger (org.apache.druid.java.util.common.logger.Logger) 6
File (java.io.File) 5