
Example 1 with IndexTask

Use of org.apache.druid.indexing.common.task.IndexTask in project druid by druid-io.

From class ParallelIndexSupervisorTask, method runSequential:

private TaskStatus runSequential(TaskToolbox toolbox) throws Exception {
    IndexTask sequentialIndexTask = new IndexTask(
        getId(), getGroupId(), getTaskResource(), getDataSource(), baseSubtaskSpecName,
        new IndexIngestionSpec(
            getIngestionSchema().getDataSchema(),
            getIngestionSchema().getIOConfig(),
            convertToIndexTuningConfig(getIngestionSchema().getTuningConfig())),
        getContext(),
        getIngestionSchema().getTuningConfig().getMaxAllowedLockCount());
    if (currentSubTaskHolder.setTask(sequentialIndexTask) && sequentialIndexTask.isReady(toolbox.getTaskActionClient())) {
        return sequentialIndexTask.run(toolbox);
    } else {
        String msg = "Task was asked to stop. Finish as failed";
        LOG.info(msg);
        return TaskStatus.failure(getId(), msg);
    }
}
Also used: IndexIngestionSpec(org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec) AbstractBatchIndexTask(org.apache.druid.indexing.common.task.AbstractBatchIndexTask) IndexTask(org.apache.druid.indexing.common.task.IndexTask)
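
The point of the guard above is that the sequential IndexTask is registered in currentSubTaskHolder before it runs, so a stop request can reach it; if registration fails (the supervisor was already asked to stop) or the task is not ready, runSequential reports a failure instead of running. A minimal sketch of that register-then-run pattern, using only the JDK and a hypothetical AtomicReference holder in place of Druid's CurrentSubTaskHolder:

import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

class RegisterThenRunSketch {
    // Hypothetical stand-in for the sub-task holder: registration fails once a stop was requested.
    private final AtomicReference<Object> currentSubTask = new AtomicReference<>();
    private volatile boolean stopRequested = false;

    void requestStop() {
        stopRequested = true;
    }

    String runGuarded(Supplier<String> sequentialTask) {
        // Run only if the task could be registered for later cancellation and no stop arrived first.
        if (!stopRequested && currentSubTask.compareAndSet(null, sequentialTask)) {
            return sequentialTask.get();
        }
        return "Task was asked to stop. Finish as failed";
    }
}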

Example 2 with IndexTask

Use of org.apache.druid.indexing.common.task.IndexTask in project druid by druid-io.

From class ParallelIndexSupervisorTask, method doGetLiveReports:

@VisibleForTesting
public Map<String, Object> doGetLiveReports(String full) {
    Map<String, Object> returnMap = new HashMap<>();
    Map<String, Object> ingestionStatsAndErrors = new HashMap<>();
    Map<String, Object> payload = new HashMap<>();
    Pair<Map<String, Object>, Map<String, Object>> rowStatsAndUnparsebleEvents = doGetRowStatsAndUnparseableEvents(full, true);
    // use the sequential task's ingestion state if we were running that mode
    IngestionState ingestionStateForReport;
    if (isParallelMode()) {
        ingestionStateForReport = ingestionState;
    } else {
        IndexTask currentSequentialTask = (IndexTask) currentSubTaskHolder.getTask();
        ingestionStateForReport = currentSequentialTask == null ? ingestionState : currentSequentialTask.getIngestionState();
    }
    payload.put("ingestionState", ingestionStateForReport);
    payload.put("unparseableEvents", rowStatsAndUnparsebleEvents.rhs);
    payload.put("rowStats", rowStatsAndUnparsebleEvents.lhs);
    ingestionStatsAndErrors.put("taskId", getId());
    ingestionStatsAndErrors.put("payload", payload);
    ingestionStatsAndErrors.put("type", "ingestionStatsAndErrors");
    returnMap.put("ingestionStatsAndErrors", ingestionStatsAndErrors);
    return returnMap;
}
Also used: HashMap(java.util.HashMap) IngestionState(org.apache.druid.indexer.IngestionState) AbstractBatchIndexTask(org.apache.druid.indexing.common.task.AbstractBatchIndexTask) IndexTask(org.apache.druid.indexing.common.task.IndexTask) Map(java.util.Map) TreeMap(java.util.TreeMap) ImmutableMap(com.google.common.collect.ImmutableMap) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
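
The report built above nests everything of interest under ingestionStatsAndErrors.payload, using plain maps. As a small illustration, a sketch of reading that structure back (for example in a test); the keys are exactly the ones put into the maps by doGetLiveReports, and only java.util.Map is used:

import java.util.Map;

class LiveReportReaderSketch {
    // Extracts the reported ingestion state from the map returned by doGetLiveReports(full).
    @SuppressWarnings("unchecked")
    static Object ingestionStateOf(Map<String, Object> liveReports) {
        Map<String, Object> statsAndErrors = (Map<String, Object>) liveReports.get("ingestionStatsAndErrors");
        Map<String, Object> payload = (Map<String, Object>) statsAndErrors.get("payload");
        // "rowStats" and "unparseableEvents" sit next to "ingestionState" in the same payload map.
        return payload.get("ingestionState");
    }
}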

Example 3 with IndexTask

Use of org.apache.druid.indexing.common.task.IndexTask in project druid by druid-io.

From class TaskLifecycleTest, method testIndexTask:

@Test
public void testIndexTask() throws Exception {
    final Task indexTask = new IndexTask(
        null,
        null,
        new IndexIngestionSpec(
            new DataSchema("foo", new TimestampSpec(null, null, null), DimensionsSpec.EMPTY,
                new AggregatorFactory[] { new DoubleSumAggregatorFactory("met", "met") },
                new UniformGranularitySpec(Granularities.DAY, null, ImmutableList.of(Intervals.of("2010-01-01/P2D"))), null),
            new IndexIOConfig(null, new MockInputSource(), new NoopInputFormat(), false, false),
            new IndexTuningConfig(null, 10000, null, 10, null, null, null, null, null, null, null, indexSpec, null, 3,
                false, null, null, null, null, null, null, null, null, null)),
        null);
    final Optional<TaskStatus> preRunTaskStatus = tsqa.getStatus(indexTask.getId());
    Assert.assertTrue("pre run task status not present", !preRunTaskStatus.isPresent());
    final TaskStatus mergedStatus = runTask(indexTask);
    final TaskStatus status = taskStorage.getStatus(indexTask.getId()).get();
    final List<DataSegment> publishedSegments = BY_INTERVAL_ORDERING.sortedCopy(mdc.getPublished());
    final List<DataSegment> loggedSegments = BY_INTERVAL_ORDERING.sortedCopy(tsqa.getInsertedSegments(indexTask.getId()));
    Assert.assertEquals("statusCode", TaskState.SUCCESS, status.getStatusCode());
    Assert.assertEquals(taskLocation, status.getLocation());
    Assert.assertEquals("merged statusCode", TaskState.SUCCESS, mergedStatus.getStatusCode());
    Assert.assertEquals("segments logged vs published", loggedSegments, publishedSegments);
    Assert.assertEquals("num segments published", 2, mdc.getPublished().size());
    Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
    Assert.assertEquals("segment1 datasource", "foo", publishedSegments.get(0).getDataSource());
    Assert.assertEquals("segment1 interval", Intervals.of("2010-01-01/P1D"), publishedSegments.get(0).getInterval());
    Assert.assertEquals("segment1 dimensions", ImmutableList.of("dim1", "dim2"), publishedSegments.get(0).getDimensions());
    Assert.assertEquals("segment1 metrics", ImmutableList.of("met"), publishedSegments.get(0).getMetrics());
    Assert.assertEquals("segment2 datasource", "foo", publishedSegments.get(1).getDataSource());
    Assert.assertEquals("segment2 interval", Intervals.of("2010-01-02/P1D"), publishedSegments.get(1).getInterval());
    Assert.assertEquals("segment2 dimensions", ImmutableList.of("dim1", "dim2"), publishedSegments.get(1).getDimensions());
    Assert.assertEquals("segment2 metrics", ImmutableList.of("met"), publishedSegments.get(1).getMetrics());
}
Also used: IndexIOConfig(org.apache.druid.indexing.common.task.IndexTask.IndexIOConfig) IndexTask(org.apache.druid.indexing.common.task.IndexTask) KillUnusedSegmentsTask(org.apache.druid.indexing.common.task.KillUnusedSegmentsTask) Task(org.apache.druid.indexing.common.task.Task) AbstractFixedIntervalTask(org.apache.druid.indexing.common.task.AbstractFixedIntervalTask) RealtimeIndexTask(org.apache.druid.indexing.common.task.RealtimeIndexTask) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) TaskStatus(org.apache.druid.indexer.TaskStatus) DataSegment(org.apache.druid.timeline.DataSegment) DataSchema(org.apache.druid.segment.indexing.DataSchema) IndexIngestionSpec(org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec) UniformGranularitySpec(org.apache.druid.segment.indexing.granularity.UniformGranularitySpec) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) NoopInputFormat(org.apache.druid.data.input.impl.NoopInputFormat) IndexTuningConfig(org.apache.druid.indexing.common.task.IndexTask.IndexTuningConfig) FireDepartmentTest(org.apache.druid.segment.realtime.FireDepartmentTest) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
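
The assertions above expect exactly two published segments because the ingestion spec uses DAY segment granularity over the two-day input interval 2010-01-01/P2D, which is bucketed into one segment per day. A short sketch of that expectation, reusing ImmutableList and Intervals as in the test (assuming Intervals is org.apache.druid.java.util.common.Intervals, as elsewhere in Druid); this is illustrative and not part of the test:

import com.google.common.collect.ImmutableList;
import java.util.List;
import org.apache.druid.java.util.common.Intervals;
import org.joda.time.Interval;

class ExpectedSegmentIntervalsSketch {
    // DAY granularity over 2010-01-01/P2D yields these two one-day segment intervals, in order.
    static final List<Interval> EXPECTED = ImmutableList.of(
        Intervals.of("2010-01-01/P1D"),
        Intervals.of("2010-01-02/P1D"));
}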

Example 4 with IndexTask

Use of org.apache.druid.indexing.common.task.IndexTask in project druid by druid-io.

From class TaskLifecycleTest, method testUnifiedAppenderatorsManagerCleanup:

@Test
public void testUnifiedAppenderatorsManagerCleanup() throws Exception {
    final ExecutorService exec = Executors.newFixedThreadPool(8);
    UnifiedIndexerAppenderatorsManager unifiedIndexerAppenderatorsManager = new UnifiedIndexerAppenderatorsManager(
        new ForwardingQueryProcessingPool(exec), NoopJoinableFactory.INSTANCE, new WorkerConfig(), MapCache.create(2048),
        new CacheConfig(), new CachePopulatorStats(), MAPPER, new NoopServiceEmitter(),
        () -> queryRunnerFactoryConglomerate);
    tb = setUpTaskToolboxFactory(dataSegmentPusher, handoffNotifierFactory, mdc, unifiedIndexerAppenderatorsManager);
    taskRunner = setUpThreadPoolTaskRunner(tb);
    taskQueue = setUpTaskQueue(taskStorage, taskRunner);
    final Task indexTask = new IndexTask(
        null,
        null,
        new IndexIngestionSpec(
            new DataSchema("foo", new TimestampSpec(null, null, null), DimensionsSpec.EMPTY,
                new AggregatorFactory[] { new DoubleSumAggregatorFactory("met", "met") },
                new UniformGranularitySpec(Granularities.DAY, null, ImmutableList.of(Intervals.of("2010-01-01/P2D"))), null),
            new IndexIOConfig(null, new MockInputSource(), new NoopInputFormat(), false, false),
            new IndexTuningConfig(null, 10000, null, 10, null, null, null, null, null, null, null, indexSpec, null, 3,
                false, null, null, null, null, null, null, null, null, null)),
        null);
    final Optional<TaskStatus> preRunTaskStatus = tsqa.getStatus(indexTask.getId());
    Assert.assertTrue("pre run task status not present", !preRunTaskStatus.isPresent());
    final TaskStatus mergedStatus = runTask(indexTask);
    final TaskStatus status = taskStorage.getStatus(indexTask.getId()).get();
    Assert.assertEquals("statusCode", TaskState.SUCCESS, status.getStatusCode());
    Map<String, UnifiedIndexerAppenderatorsManager.DatasourceBundle> bundleMap = unifiedIndexerAppenderatorsManager.getDatasourceBundles();
    Assert.assertEquals(1, bundleMap.size());
    unifiedIndexerAppenderatorsManager.removeAppenderatorsForTask(indexTask.getId(), "foo");
    Assert.assertTrue(bundleMap.isEmpty());
}
Also used: IndexIOConfig(org.apache.druid.indexing.common.task.IndexTask.IndexIOConfig) IndexTask(org.apache.druid.indexing.common.task.IndexTask) KillUnusedSegmentsTask(org.apache.druid.indexing.common.task.KillUnusedSegmentsTask) Task(org.apache.druid.indexing.common.task.Task) AbstractFixedIntervalTask(org.apache.druid.indexing.common.task.AbstractFixedIntervalTask) RealtimeIndexTask(org.apache.druid.indexing.common.task.RealtimeIndexTask) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) ForwardingQueryProcessingPool(org.apache.druid.query.ForwardingQueryProcessingPool) UnifiedIndexerAppenderatorsManager(org.apache.druid.segment.realtime.appenderator.UnifiedIndexerAppenderatorsManager) NoopServiceEmitter(org.apache.druid.server.metrics.NoopServiceEmitter) TaskStatus(org.apache.druid.indexer.TaskStatus) DataSchema(org.apache.druid.segment.indexing.DataSchema) IndexIngestionSpec(org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec) UniformGranularitySpec(org.apache.druid.segment.indexing.granularity.UniformGranularitySpec) CachePopulatorStats(org.apache.druid.client.cache.CachePopulatorStats) ExecutorService(java.util.concurrent.ExecutorService) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) WorkerConfig(org.apache.druid.indexing.worker.config.WorkerConfig) NoopInputFormat(org.apache.druid.data.input.impl.NoopInputFormat) CacheConfig(org.apache.druid.client.cache.CacheConfig) IndexTuningConfig(org.apache.druid.indexing.common.task.IndexTask.IndexTuningConfig) FireDepartmentTest(org.apache.druid.segment.realtime.FireDepartmentTest) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)

Example 5 with IndexTask

Use of org.apache.druid.indexing.common.task.IndexTask in project druid by druid-io.

From class TaskLifecycleTest, method testIndexTaskFailure:

@Test
public void testIndexTaskFailure() throws Exception {
    final Task indexTask = new IndexTask(
        null, null,
        new IndexIngestionSpec(
            new DataSchema("foo", null, new AggregatorFactory[] { new DoubleSumAggregatorFactory("met", "met") },
                new UniformGranularitySpec(Granularities.DAY, null, ImmutableList.of(Intervals.of("2010-01-01/P1D"))), null, mapper),
            new IndexIOConfig(null, new MockExceptionInputSource(), new NoopInputFormat(), false, false),
            new IndexTuningConfig(null, 10000, null, 10, null, null, null, null, null, null, null, indexSpec, null, 3,
                false, null, null, null, null, null, null, null, null, null)),
        null);
    final TaskStatus status = runTask(indexTask);
    Assert.assertEquals("statusCode", TaskState.FAILED, status.getStatusCode());
    Assert.assertEquals(taskLocation, status.getLocation());
    Assert.assertEquals("num segments published", 0, mdc.getPublished().size());
    Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
}
Also used: DataSchema(org.apache.druid.segment.indexing.DataSchema) IndexIngestionSpec(org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec) UniformGranularitySpec(org.apache.druid.segment.indexing.granularity.UniformGranularitySpec) IndexIOConfig(org.apache.druid.indexing.common.task.IndexTask.IndexIOConfig) IndexTask(org.apache.druid.indexing.common.task.IndexTask) KillUnusedSegmentsTask(org.apache.druid.indexing.common.task.KillUnusedSegmentsTask) Task(org.apache.druid.indexing.common.task.Task) AbstractFixedIntervalTask(org.apache.druid.indexing.common.task.AbstractFixedIntervalTask) RealtimeIndexTask(org.apache.druid.indexing.common.task.RealtimeIndexTask) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) NoopInputFormat(org.apache.druid.data.input.impl.NoopInputFormat) TaskStatus(org.apache.druid.indexer.TaskStatus) IndexTuningConfig(org.apache.druid.indexing.common.task.IndexTask.IndexTuningConfig) FireDepartmentTest(org.apache.druid.segment.realtime.FireDepartmentTest) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
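
For contrast with the success checks in Example 3, the failure path is reported through the same TaskStatus type: the status code is TaskState.FAILED and no segments are published or nuked. A brief sketch of that outcome check, reusing TaskStatus.failure(id, message) as it appears in Example 1 (illustrative only, not part of the test):

import org.apache.druid.indexer.TaskState;
import org.apache.druid.indexer.TaskStatus;

class FailureStatusSketch {
    // Mirrors the assertion in the test: a failed ingestion reports TaskState.FAILED.
    static boolean failed(TaskStatus status) {
        return status.getStatusCode() == TaskState.FAILED;
    }

    // Same factory used by runSequential in Example 1 when the task is asked to stop.
    static TaskStatus stoppedBeforeRunning(String taskId) {
        return TaskStatus.failure(taskId, "Task was asked to stop. Finish as failed");
    }
}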

Aggregations

IndexTask (org.apache.druid.indexing.common.task.IndexTask): 6
IndexIngestionSpec (org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec): 5
NoopInputFormat (org.apache.druid.data.input.impl.NoopInputFormat): 4
TaskStatus (org.apache.druid.indexer.TaskStatus): 4
AbstractFixedIntervalTask (org.apache.druid.indexing.common.task.AbstractFixedIntervalTask): 4
IndexIOConfig (org.apache.druid.indexing.common.task.IndexTask.IndexIOConfig): 4
IndexTuningConfig (org.apache.druid.indexing.common.task.IndexTask.IndexTuningConfig): 4
KillUnusedSegmentsTask (org.apache.druid.indexing.common.task.KillUnusedSegmentsTask): 4
RealtimeIndexTask (org.apache.druid.indexing.common.task.RealtimeIndexTask): 4
Task (org.apache.druid.indexing.common.task.Task): 4
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 4
DataSchema (org.apache.druid.segment.indexing.DataSchema): 4
UniformGranularitySpec (org.apache.druid.segment.indexing.granularity.UniformGranularitySpec): 4
FireDepartmentTest (org.apache.druid.segment.realtime.FireDepartmentTest): 4
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 4
Test (org.junit.Test): 4
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec): 3
AbstractBatchIndexTask (org.apache.druid.indexing.common.task.AbstractBatchIndexTask): 2
DataSegment (org.apache.druid.timeline.DataSegment): 2
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 1