Example 11 with IngestionStatsAndErrorsTaskReportData

Use of org.apache.druid.indexing.common.IngestionStatsAndErrorsTaskReportData in the project druid by druid-io.

From the class IndexTaskTest, the method testMultipleParseExceptionsFailureAtDeterminePartitions:

@Test
public void testMultipleParseExceptionsFailureAtDeterminePartitions() throws Exception {
    final File tmpDir = temporaryFolder.newFolder();
    final File tmpFile = File.createTempFile("druid", "index", tmpDir);
    try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
        writer.write("time,dim,dimLong,dimFloat,val\n");
        // unparseable
        writer.write("unparseable,a,2,3.0,1\n");
        // valid row
        writer.write("2014-01-01T00:00:10Z,a,2,3.0,1\n");
        // unparseable
        writer.write("9.0,a,2,3.0,1\n");
        // thrown away
        writer.write("3014-03-01T00:00:10Z,outsideofinterval,2,3.0,1\n");
        // unparseable
        writer.write("99999999999-01-01T00:00:10Z,b,2,3.0,1\n");
    }
    // Allow up to 2 parse exceptions (the task fails on the third) and save up to 5 for the report
    final IndexTuningConfig tuningConfig = new IndexTuningConfig(
        null, null, null, null, null, null, null, null, null, null,
        new HashedPartitionsSpec(2, null, null),
        INDEX_SPEC,
        null, null,
        true,  // forceGuaranteedRollup
        false, // reportParseExceptions
        null, null, null,
        true,  // logParseExceptions
        2,     // maxParseExceptions
        5,     // maxSavedParseExceptions
        null, null
    );
    final TimestampSpec timestampSpec = new TimestampSpec("time", "auto", null);
    final DimensionsSpec dimensionsSpec = new DimensionsSpec(Arrays.asList(new StringDimensionSchema("dim"), new LongDimensionSchema("dimLong"), new FloatDimensionSchema("dimFloat")));
    final List<String> columns = Arrays.asList("time", "dim", "dimLong", "dimFloat", "val");
    final IndexIngestionSpec ingestionSpec;
    List<String> expectedMessages;
    if (useInputFormatApi) {
        ingestionSpec = createIngestionSpec(
            jsonMapper,
            tmpDir,
            timestampSpec,
            dimensionsSpec,
            new CsvInputFormat(columns, null, null, true, 0),
            null,
            null,
            tuningConfig,
            false,
            false
        );
        expectedMessages = Arrays.asList(
            StringUtils.format("Timestamp[99999999999-01-01T00:00:10Z] is unparseable! Event: {time=99999999999-01-01T00:00:10Z, dim=b, dimLong=2, dimFloat=3.0, val=1} (Path: %s, Record: 3, Line: 6)", tmpFile.toURI()),
            StringUtils.format("Timestamp[9.0] is unparseable! Event: {time=9.0, dim=a, dimLong=2, dimFloat=3.0, val=1} (Path: %s, Record: 2, Line: 4)", tmpFile.toURI()),
            StringUtils.format("Timestamp[unparseable] is unparseable! Event: {time=unparseable, dim=a, dimLong=2, dimFloat=3.0, val=1} (Path: %s, Record: 1, Line: 2)", tmpFile.toURI())
        );
    } else {
        ingestionSpec = createIngestionSpec(
            jsonMapper,
            tmpDir,
            new CSVParseSpec(timestampSpec, dimensionsSpec, null, columns, true, 0),
            null,
            null,
            tuningConfig,
            false,
            false
        );
        expectedMessages = Arrays.asList(
            "Timestamp[99999999999-01-01T00:00:10Z] is unparseable! Event: {time=99999999999-01-01T00:00:10Z, dim=b, dimLong=2, dimFloat=3.0, val=1}",
            "Timestamp[9.0] is unparseable! Event: {time=9.0, dim=a, dimLong=2, dimFloat=3.0, val=1}",
            "Timestamp[unparseable] is unparseable! Event: {time=unparseable, dim=a, dimLong=2, dimFloat=3.0, val=1}"
        );
    }
    IndexTask indexTask = new IndexTask(null, null, ingestionSpec, null);
    TaskStatus status = runTask(indexTask).lhs;
    Assert.assertEquals(TaskState.FAILED, status.getStatusCode());
    checkTaskStatusErrorMsgForParseExceptionsExceeded(status);
    IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData();
    Map<String, Object> expectedMetrics = ImmutableMap.of(
        RowIngestionMeters.DETERMINE_PARTITIONS,
        ImmutableMap.of(
            RowIngestionMeters.PROCESSED_WITH_ERROR, 0,
            RowIngestionMeters.PROCESSED, 1,
            RowIngestionMeters.UNPARSEABLE, 3,
            RowIngestionMeters.THROWN_AWAY, useInputFormatApi ? 1 : 2
        ),
        RowIngestionMeters.BUILD_SEGMENTS,
        ImmutableMap.of(
            RowIngestionMeters.PROCESSED_WITH_ERROR, 0,
            RowIngestionMeters.PROCESSED, 0,
            RowIngestionMeters.UNPARSEABLE, 0,
            RowIngestionMeters.THROWN_AWAY, 0
        )
    );
    Assert.assertEquals(expectedMetrics, reportData.getRowStats());
    List<LinkedHashMap> parseExceptionReports = (List<LinkedHashMap>) reportData.getUnparseableEvents().get(RowIngestionMeters.DETERMINE_PARTITIONS);
    List<String> actualMessages = parseExceptionReports
        .stream()
        .map(r -> ((List<String>) r.get("details")).get(0))
        .collect(Collectors.toList());
    Assert.assertEquals(expectedMessages, actualMessages);
    List<String> expectedInputs = Arrays.asList(
        "{time=99999999999-01-01T00:00:10Z, dim=b, dimLong=2, dimFloat=3.0, val=1}",
        "{time=9.0, dim=a, dimLong=2, dimFloat=3.0, val=1}",
        "{time=unparseable, dim=a, dimLong=2, dimFloat=3.0, val=1}"
    );
    List<String> actualInputs = parseExceptionReports
        .stream()
        .map(r -> (String) r.get("input"))
        .collect(Collectors.toList());
    Assert.assertEquals(expectedInputs, actualInputs);
}
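
This test and the Kinesis example below pull the saved parse exceptions back out of the report with the same stream pipeline (note that saved exceptions come back newest-first, which is why Record 3 is expected before Record 1). A hypothetical helper could capture the pattern once; extractParseExceptionField below is an illustration, not part of the Druid test suite:

// Hypothetical helper (not in the Druid codebase): pulls one field out of every
// saved parse-exception report for a given ingestion phase.
@SuppressWarnings("unchecked")
private static List<String> extractParseExceptionField(
    IngestionStatsAndErrorsTaskReportData reportData,
    String phase, // e.g. RowIngestionMeters.DETERMINE_PARTITIONS
    String field  // "details" holds the message list, "input" the raw row
)
{
    List<LinkedHashMap> reports = (List<LinkedHashMap>) reportData.getUnparseableEvents().get(phase);
    return reports.stream()
                  .map(r -> {
                      Object value = r.get(field);
                      // "details" is a list of messages; "input" is a plain string.
                      return value instanceof List ? ((List<String>) value).get(0) : (String) value;
                  })
                  .collect(Collectors.toList());
}

With such a helper, the two pipelines above would reduce to extractParseExceptionField(reportData, RowIngestionMeters.DETERMINE_PARTITIONS, "details") and the matching "input" call.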

Example 12 with IngestionStatsAndErrorsTaskReportData

Use of org.apache.druid.indexing.common.IngestionStatsAndErrorsTaskReportData in the project druid by druid-io.

From the class TaskReportSerdeTest, the method testSerde:

@Test
public void testSerde() throws Exception {
    IngestionStatsAndErrorsTaskReport report1 = new IngestionStatsAndErrorsTaskReport(
        "testID",
        new IngestionStatsAndErrorsTaskReportData(
            IngestionState.BUILD_SEGMENTS,
            ImmutableMap.of("hello", "world"),
            ImmutableMap.of("number", 1234),
            "an error message",
            true,
            1000L
        )
    );
    String report1serialized = jsonMapper.writeValueAsString(report1);
    IngestionStatsAndErrorsTaskReport report2 = jsonMapper.readValue(report1serialized, IngestionStatsAndErrorsTaskReport.class);
    Assert.assertEquals(report1, report2);
    Assert.assertEquals(report1.hashCode(), report2.hashCode());
    Map<String, TaskReport> reportMap1 = TaskReport.buildTaskReports(report1);
    String reportMapSerialized = jsonMapper.writeValueAsString(reportMap1);
    Map<String, TaskReport> reportMap2 = jsonMapper.readValue(reportMapSerialized, new TypeReference<Map<String, TaskReport>>() {
    });
    Assert.assertEquals(reportMap1, reportMap2);
}
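
For orientation, the serialized map is keyed by the report type and nests the payload fields. Its rough shape, with field names inferred from the constructor arguments above (an approximation, not verbatim Druid output):

// Approximate JSON produced for reportMap1 (field names assumed, not verbatim):
// {
//   "ingestionStatsAndErrors" : {
//     "type" : "ingestionStatsAndErrors",
//     "taskId" : "testID",
//     "payload" : {
//       "ingestionState" : "BUILD_SEGMENTS",
//       "unparseableEvents" : { "hello" : "world" },
//       "rowStats" : { "number" : 1234 },
//       "errorMsg" : "an error message",
//       "segmentAvailabilityConfirmed" : true,
//       "segmentAvailabilityWaitTimeMs" : 1000
//     }
//   }
// }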

Example 13 with IngestionStatsAndErrorsTaskReportData

Use of org.apache.druid.indexing.common.IngestionStatsAndErrorsTaskReportData in the project druid by druid-io.

From the class KinesisIndexTaskTest, the method testMultipleParseExceptionsSuccess:

@Test(timeout = 120_000L)
public void testMultipleParseExceptionsSuccess() throws Exception {
    reportParseExceptions = false;
    maxParseExceptions = 7;
    maxSavedParseExceptions = 7;
    recordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes();
    recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(generateRecords(2, 13)).once();
    recordSupplier.close();
    EasyMock.expectLastCall().once();
    replayAll();
    final KinesisIndexTask task = createTask(
        null,
        new KinesisIndexTaskIOConfig(
            0,
            "sequence0",
            new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()),
            new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "12")),
            true,
            null,
            null,
            INPUT_FORMAT,
            "awsEndpoint",
            null,
            null,
            null,
            null,
            false
        )
    );
    final ListenableFuture<TaskStatus> future = runTask(task);
    TaskStatus status = future.get();
    // Wait for task to exit
    Assert.assertEquals(TaskState.SUCCESS, status.getStatusCode());
    verifyAll();
    Assert.assertNull(status.getErrorMsg());
    // Check metrics
    Assert.assertEquals(4, task.getRunner().getRowIngestionMeters().getProcessed());
    Assert.assertEquals(3, task.getRunner().getRowIngestionMeters().getProcessedWithError());
    Assert.assertEquals(4, task.getRunner().getRowIngestionMeters().getUnparseable());
    // Check published metadata
    assertEqualsExceptVersion(ImmutableList.of(sdd("2010/P1D", 0), sdd("2011/P1D", 0), sdd("2013/P1D", 0), sdd("2049/P1D", 0)), publishedDescriptors());
    Assert.assertEquals(new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "12"))), newDataSchemaMetadata());
    IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData();
    Map<String, Object> expectedMetrics = ImmutableMap.of(
        RowIngestionMeters.BUILD_SEGMENTS,
        ImmutableMap.of(
            RowIngestionMeters.PROCESSED, 4,
            RowIngestionMeters.PROCESSED_WITH_ERROR, 3,
            RowIngestionMeters.UNPARSEABLE, 4,
            RowIngestionMeters.THROWN_AWAY, 0
        )
    );
    Assert.assertEquals(expectedMetrics, reportData.getRowStats());
    List<LinkedHashMap> parseExceptionReports = (List<LinkedHashMap>) reportData.getUnparseableEvents().get(RowIngestionMeters.BUILD_SEGMENTS);
    List<String> expectedMessages = Arrays.asList(
        "Unable to parse value[notanumber] for field[met1]",
        "could not convert value [notanumber] to float",
        "could not convert value [notanumber] to long",
        "Timestamp[null] is unparseable! Event: {} (Record: 1)",
        "Unable to parse [] as the intermediateRow resulted in empty input row (Record: 1)",
        "Unable to parse row [unparseable] (Record: 1)",
        "Encountered row with timestamp[246140482-04-24T15:36:27.903Z] that cannot be represented as a long: [{timestamp=246140482-04-24T15:36:27.903Z, dim1=x, dim2=z, dimLong=10, dimFloat=20.0, met1=1.0}] (Record: 1)"
    );
    List<String> actualMessages = parseExceptionReports
        .stream()
        .map(r -> ((List<String>) r.get("details")).get(0))
        .collect(Collectors.toList());
    Assert.assertEquals(expectedMessages, actualMessages);
    List<String> expectedInputs = Arrays.asList(
        "{timestamp=2049, dim1=f, dim2=y, dimLong=10, dimFloat=20.0, met1=notanumber}",
        "{timestamp=2049, dim1=f, dim2=y, dimLong=10, dimFloat=notanumber, met1=1.0}",
        "{timestamp=2049, dim1=f, dim2=y, dimLong=notanumber, dimFloat=20.0, met1=1.0}",
        "{}",
        "",
        "unparseable",
        "{timestamp=246140482-04-24T15:36:27.903Z, dim1=x, dim2=z, dimLong=10, dimFloat=20.0, met1=1.0}"
    );
    List<String> actualInputs = parseExceptionReports
        .stream()
        .map(r -> (String) r.get("input"))
        .collect(Collectors.toList());
    Assert.assertEquals(expectedInputs, actualInputs);
}
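
The limits set at the top drive the outcome: with maxParseExceptions = 7 the task tolerates all seven bad rows, and with maxSavedParseExceptions = 7 every one of them is retained for the report, newest-first. A minimal sketch of that bookkeeping, simplified from (and only assumed to match) Druid's actual ParseExceptionHandler:

import java.util.ArrayDeque;
import java.util.Deque;

// Simplified sketch of the two-limit bookkeeping; Druid's real handler differs in detail.
class ParseExceptionTracker
{
    private final int maxParseExceptions;
    private final int maxSavedParseExceptions;
    private final Deque<String> savedMessages = new ArrayDeque<>();
    private int count = 0;

    ParseExceptionTracker(int maxParseExceptions, int maxSavedParseExceptions)
    {
        this.maxParseExceptions = maxParseExceptions;
        this.maxSavedParseExceptions = maxSavedParseExceptions;
    }

    void handle(RuntimeException parseException)
    {
        count++;
        // Keep only the most recent N messages for the task report.
        if (savedMessages.size() == maxSavedParseExceptions) {
            savedMessages.pollLast();
        }
        savedMessages.addFirst(parseException.getMessage()); // newest-first
        if (count > maxParseExceptions) {
            throw new RuntimeException("Max parse exceptions exceeded", parseException);
        }
    }
}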

Example 14 with IngestionStatsAndErrorsTaskReportData

Use of org.apache.druid.indexing.common.IngestionStatsAndErrorsTaskReportData in the project druid by druid-io.

From the class AppenderatorDriverRealtimeIndexTaskTest, the method testNoReportParseExceptions:

@Test(timeout = 60_000L)
public void testNoReportParseExceptions() throws Exception {
    expectPublishedSegments(1);
    final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null, TransformSpec.NONE, false, 0, true, null, 1);
    final ListenableFuture<TaskStatus> statusFuture = runTask(task);
    // Wait for firehose to show up, it starts off null.
    while (task.getFirehose() == null) {
        Thread.sleep(50);
    }
    final TestFirehose firehose = (TestFirehose) task.getFirehose();
    firehose.addRows(
        Arrays.asList(
            // Good row - will be processed.
            ImmutableMap.of("t", now.getMillis(), "dim1", "foo", "met1", "1"),
            // Null row - counted as unparseable (see the unparseable count below).
            null,
            // Bad metric - counts as processed with error, but that particular metric won't update.
            ImmutableMap.of("t", now.getMillis(), "dim1", "foo", "met1", "foo"),
            // Bad row - will be unparseable.
            ImmutableMap.of("dim1", "foo", "met1", 2.0, FAIL_DIM, "x"),
            // Good row - will be processed.
            ImmutableMap.of("t", now.getMillis(), "dim2", "bar", "met1", 2.0)
        )
    );
    // Stop the firehose, this will drain out existing events.
    firehose.close();
    // Wait for publish.
    Collection<DataSegment> publishedSegments = awaitSegments();
    DataSegment publishedSegment = Iterables.getOnlyElement(publishedSegments);
    // Check metrics.
    Assert.assertEquals(2, task.getRowIngestionMeters().getProcessed());
    Assert.assertEquals(1, task.getRowIngestionMeters().getProcessedWithError());
    Assert.assertEquals(0, task.getRowIngestionMeters().getThrownAway());
    Assert.assertEquals(2, task.getRowIngestionMeters().getUnparseable());
    // Do some queries.
    Assert.assertEquals(3, sumMetric(task, null, "rows").longValue());
    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
    awaitHandoffs();
    // Simulate handoff.
    for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) {
        final Pair<Executor, Runnable> executorRunnablePair = entry.getValue();
        Assert.assertEquals(new SegmentDescriptor(publishedSegment.getInterval(), publishedSegment.getVersion(), publishedSegment.getShardSpec().getPartitionNum()), entry.getKey());
        executorRunnablePair.lhs.execute(executorRunnablePair.rhs);
    }
    handOffCallbacks.clear();
    Map<String, Object> expectedMetrics = ImmutableMap.of(
        RowIngestionMeters.BUILD_SEGMENTS,
        ImmutableMap.of(
            RowIngestionMeters.PROCESSED, 2,
            RowIngestionMeters.PROCESSED_WITH_ERROR, 1,
            RowIngestionMeters.UNPARSEABLE, 2,
            RowIngestionMeters.THROWN_AWAY, 0
        )
    );
    // Wait for the task to finish.
    final TaskStatus taskStatus = statusFuture.get();
    Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode());
    IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData();
    Assert.assertEquals(expectedMetrics, reportData.getRowStats());
}
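
The handoff loop is identical in the next example: the mocked handoff notifier stores one Executor/Runnable pair per published segment in handOffCallbacks, and the test runs each pair to mimic a historical picking the segment up. Factored into a helper, the simulation might look like this (a hypothetical refactor, not Druid code):

// Hypothetical extraction of the handoff-simulation loop shared by these tests.
private static void simulateHandoff(Map<SegmentDescriptor, Pair<Executor, Runnable>> handOffCallbacks)
{
    for (Pair<Executor, Runnable> callback : handOffCallbacks.values()) {
        // Run the task's handoff callback on the executor the task supplied.
        callback.lhs.execute(callback.rhs);
    }
    handOffCallbacks.clear();
}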

Example 15 with IngestionStatsAndErrorsTaskReportData

Use of org.apache.druid.indexing.common.IngestionStatsAndErrorsTaskReportData in the project druid by druid-io.

From the class AppenderatorDriverRealtimeIndexTaskTest, the method testMultipleParseExceptionsSuccess:

@Test(timeout = 60_000L)
public void testMultipleParseExceptionsSuccess() throws Exception {
    expectPublishedSegments(1);
    final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null, TransformSpec.NONE, false, 0, true, 10, 10);
    final ListenableFuture<TaskStatus> statusFuture = runTask(task);
    // Wait for firehose to show up, it starts off null.
    while (task.getFirehose() == null) {
        Thread.sleep(50);
    }
    final TestFirehose firehose = (TestFirehose) task.getFirehose();
    firehose.addRows(
        Arrays.asList(
            // Good row - will be processed.
            ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "met1", "1"),
            // Null row - counted as unparseable (see the unparseable count below).
            null,
            // Bad metric - counts as processed with error, but that particular metric won't update.
            ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "met1", "foo"),
            // Bad long/float dims - counts as processed with error; bad dims get default values.
            ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "dimLong", "notnumber", "dimFloat", "notnumber", "met1", "foo"),
            // Bad row - will be unparseable.
            ImmutableMap.of("dim1", "foo", "met1", 2.0, FAIL_DIM, "x"),
            // Good row - will be processed.
            ImmutableMap.of("t", 1521251960729L, "dim2", "bar", "met1", 2.0)
        )
    );
    // Stop the firehose, this will drain out existing events.
    firehose.close();
    // Wait for publish.
    Collection<DataSegment> publishedSegments = awaitSegments();
    DataSegment publishedSegment = Iterables.getOnlyElement(publishedSegments);
    // Check metrics.
    Assert.assertEquals(2, task.getRowIngestionMeters().getProcessed());
    Assert.assertEquals(2, task.getRowIngestionMeters().getProcessedWithError());
    Assert.assertEquals(0, task.getRowIngestionMeters().getThrownAway());
    Assert.assertEquals(2, task.getRowIngestionMeters().getUnparseable());
    // Do some queries.
    Assert.assertEquals(4, sumMetric(task, null, "rows").longValue());
    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
    awaitHandoffs();
    // Simulate handoff.
    for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) {
        final Pair<Executor, Runnable> executorRunnablePair = entry.getValue();
        Assert.assertEquals(new SegmentDescriptor(publishedSegment.getInterval(), publishedSegment.getVersion(), publishedSegment.getShardSpec().getPartitionNum()), entry.getKey());
        executorRunnablePair.lhs.execute(executorRunnablePair.rhs);
    }
    handOffCallbacks.clear();
    Map<String, Object> expectedMetrics = ImmutableMap.of(
        RowIngestionMeters.BUILD_SEGMENTS,
        ImmutableMap.of(
            RowIngestionMeters.PROCESSED, 2,
            RowIngestionMeters.PROCESSED_WITH_ERROR, 2,
            RowIngestionMeters.UNPARSEABLE, 2,
            RowIngestionMeters.THROWN_AWAY, 0
        )
    );
    // Wait for the task to finish.
    final TaskStatus taskStatus = statusFuture.get();
    Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode());
    IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData();
    Assert.assertEquals(expectedMetrics, reportData.getRowStats());
    List<LinkedHashMap> parseExceptionReports = (List<LinkedHashMap>) reportData.getUnparseableEvents().get(RowIngestionMeters.BUILD_SEGMENTS);
    List<String> expectedMessages = Arrays.asList(
        "Timestamp[null] is unparseable! Event: {dim1=foo, met1=2.0, __fail__=x}",
        "could not convert value [notnumber] to long",
        "Unable to parse value[foo] for field[met1]",
        "Timestamp[null] is unparseable! Event: null"
    );
    List<String> actualMessages = parseExceptionReports
        .stream()
        .map(r -> ((List<String>) r.get("details")).get(0))
        .collect(Collectors.toList());
    Assert.assertEquals(expectedMessages, actualMessages);
    List<String> expectedInputs = Arrays.asList(
        "{dim1=foo, met1=2.0, __fail__=x}",
        "{t=1521251960729, dim1=foo, dimLong=notnumber, dimFloat=notnumber, met1=foo}",
        "{t=1521251960729, dim1=foo, met1=foo}",
        null
    );
    List<String> actualInputs = parseExceptionReports
        .stream()
        .map(r -> (String) r.get("input"))
        .collect(Collectors.toList());
    Assert.assertEquals(expectedInputs, actualInputs);
    Assert.assertEquals(IngestionState.COMPLETED, reportData.getIngestionState());
}
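
Every example builds the same four-key row-stats map by hand. A small hypothetical builder (not part of Druid) would keep the assertions compact:

// Hypothetical convenience builder for the map compared against
// reportData.getRowStats() in the buildSegments phase.
private static Map<String, Object> buildSegmentsRowStats(
    int processed,
    int processedWithError,
    int unparseable,
    int thrownAway
)
{
    return ImmutableMap.of(
        RowIngestionMeters.BUILD_SEGMENTS,
        ImmutableMap.of(
            RowIngestionMeters.PROCESSED, processed,
            RowIngestionMeters.PROCESSED_WITH_ERROR, processedWithError,
            RowIngestionMeters.UNPARSEABLE, unparseable,
            RowIngestionMeters.THROWN_AWAY, thrownAway
        )
    );
}

This test's expectation would then read Assert.assertEquals(buildSegmentsRowStats(2, 2, 2, 0), reportData.getRowStats()).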
