Example 1 with AbstractTask

Use of org.apache.druid.indexing.common.task.AbstractTask in project druid by druid-io.

The class ThreadingTaskRunnerTest, method testTaskStatusWhenTaskThrowsExceptionWhileRunning:

@Test
public void testTaskStatusWhenTaskThrowsExceptionWhileRunning() throws ExecutionException, InterruptedException {
    ThreadingTaskRunner runner = new ThreadingTaskRunner(
        mockTaskToolboxFactory(),
        new TaskConfig(
            null, null, null, null, ImmutableList.of(), false,
            new Period("PT0S"), new Period("PT10S"),
            ImmutableList.of(), false, false,
            TaskConfig.BATCH_PROCESSING_MODE_DEFAULT.name()
        ),
        new WorkerConfig(),
        new NoopTaskLogs(),
        new DefaultObjectMapper(),
        new TestAppenderatorsManager(),
        new MultipleFileTaskReportFileWriter(),
        new DruidNode("middleManager", "host", false, 8091, null, true, false)
    );
    Future<TaskStatus> statusFuture = runner.run(new AbstractTask("id", "datasource", null) {

        @Override
        public String getType() {
            return "test";
        }

        @Override
        public boolean isReady(TaskActionClient taskActionClient) {
            return true;
        }

        @Override
        public void stopGracefully(TaskConfig taskConfig) {
        }

        @Override
        public TaskStatus run(TaskToolbox toolbox) {
            throw new RuntimeException("Task failure test");
        }
    });
    TaskStatus status = statusFuture.get();
    Assert.assertEquals(TaskState.FAILED, status.getStatusCode());
    Assert.assertEquals("Failed with an exception. See indexer logs for more details.", status.getErrorMsg());
}
Also used: NoopTaskLogs(org.apache.druid.tasklogs.NoopTaskLogs) AbstractTask(org.apache.druid.indexing.common.task.AbstractTask) Period(org.joda.time.Period) TaskConfig(org.apache.druid.indexing.common.config.TaskConfig) TaskStatus(org.apache.druid.indexer.TaskStatus) MultipleFileTaskReportFileWriter(org.apache.druid.indexing.common.MultipleFileTaskReportFileWriter) TaskToolbox(org.apache.druid.indexing.common.TaskToolbox) TaskActionClient(org.apache.druid.indexing.common.actions.TaskActionClient) WorkerConfig(org.apache.druid.indexing.worker.config.WorkerConfig) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) DruidNode(org.apache.druid.server.DruidNode) TestAppenderatorsManager(org.apache.druid.indexing.common.task.TestAppenderatorsManager) Test(org.junit.Test)
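
The same anonymous-AbstractTask pattern also covers the success path. The sketch below is not part of the druid-io sources shown on this page: it is a hypothetical companion test that assumes it lives in the same ThreadingTaskRunnerTest class, where createRunner() is an assumed helper wrapping the ThreadingTaskRunner construction from the test above; the only real change is that run(TaskToolbox) returns TaskStatus.success(getId()) instead of throwing.

@Test
public void testTaskStatusWhenTaskCompletesNormally() throws ExecutionException, InterruptedException {
    // Hypothetical helper: same ThreadingTaskRunner wiring as in the test above.
    ThreadingTaskRunner runner = createRunner();
    Future<TaskStatus> statusFuture = runner.run(new AbstractTask("id", "datasource", null) {

        @Override
        public String getType() {
            return "test";
        }

        @Override
        public boolean isReady(TaskActionClient taskActionClient) {
            return true;
        }

        @Override
        public void stopGracefully(TaskConfig taskConfig) {
        }

        @Override
        public TaskStatus run(TaskToolbox toolbox) {
            // Return success instead of throwing, so the runner should report SUCCESS.
            return TaskStatus.success(getId());
        }
    });
    Assert.assertEquals(TaskState.SUCCESS, statusFuture.get().getStatusCode());
}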

Example 2 with AbstractTask

Use of org.apache.druid.indexing.common.task.AbstractTask in project druid by druid-io.

The class ParallelIndexSupervisorTaskResourceTest, method checkState:

/**
 * Test all endpoints of {@link ParallelIndexSupervisorTask}.
 */
private void checkState(
    int expectedSucceededTasks,
    int expectedFailedTask,
    // subTaskSpecId -> response
    Map<String, SubTaskSpecStatus> expectedSubTaskStateResponses
) {
    Response response = task.getProgress(newRequest());
    Assert.assertEquals(200, response.getStatus());
    final ParallelIndexingPhaseProgress monitorStatus = (ParallelIndexingPhaseProgress) response.getEntity();
    // numRunningTasks
    Assert.assertEquals(runningTasks.size(), monitorStatus.getRunning());
    // numSucceededTasks
    Assert.assertEquals(expectedSucceededTasks, monitorStatus.getSucceeded());
    // numFailedTasks
    Assert.assertEquals(expectedFailedTask, monitorStatus.getFailed());
    // numCompleteTasks
    Assert.assertEquals(expectedSucceededTasks + expectedFailedTask, monitorStatus.getComplete());
    // numTotalTasks
    Assert.assertEquals(runningTasks.size() + expectedSucceededTasks + expectedFailedTask, monitorStatus.getTotal());
    // runningSubTasks
    response = task.getRunningTasks(newRequest());
    Assert.assertEquals(200, response.getStatus());
    Assert.assertEquals(runningTasks.stream().map(AbstractTask::getId).collect(Collectors.toSet()), new HashSet<>((Collection<String>) response.getEntity()));
    // subTaskSpecs
    response = task.getSubTaskSpecs(newRequest());
    Assert.assertEquals(200, response.getStatus());
    List<SubTaskSpec<SinglePhaseSubTask>> actualSubTaskSpecMap = (List<SubTaskSpec<SinglePhaseSubTask>>) response.getEntity();
    Assert.assertEquals(subTaskSpecs.keySet(), actualSubTaskSpecMap.stream().map(SubTaskSpec::getId).collect(Collectors.toSet()));
    // runningSubTaskSpecs
    response = task.getRunningSubTaskSpecs(newRequest());
    Assert.assertEquals(200, response.getStatus());
    actualSubTaskSpecMap = (List<SubTaskSpec<SinglePhaseSubTask>>) response.getEntity();
    Assert.assertEquals(runningSpecs.keySet(), actualSubTaskSpecMap.stream().map(SubTaskSpec::getId).collect(Collectors.toSet()));
    // completeSubTaskSpecs
    final List<SubTaskSpec<SinglePhaseSubTask>> completeSubTaskSpecs = expectedSubTaskStateResponses.entrySet().stream().filter(entry -> !runningSpecs.containsKey(entry.getKey())).map(entry -> entry.getValue().getSpec()).collect(Collectors.toList());
    response = task.getCompleteSubTaskSpecs(newRequest());
    Assert.assertEquals(200, response.getStatus());
    Assert.assertEquals(completeSubTaskSpecs, response.getEntity());
    // subTaskSpec
    final String subTaskId = runningSpecs.keySet().iterator().next();
    response = task.getSubTaskSpec(subTaskId, newRequest());
    Assert.assertEquals(200, response.getStatus());
    final SubTaskSpec<SinglePhaseSubTask> subTaskSpec = (SubTaskSpec<SinglePhaseSubTask>) response.getEntity();
    Assert.assertEquals(subTaskId, subTaskSpec.getId());
    // subTaskState
    response = task.getSubTaskState(subTaskId, newRequest());
    Assert.assertEquals(200, response.getStatus());
    final SubTaskSpecStatus expectedResponse = Preconditions.checkNotNull(expectedSubTaskStateResponses.get(subTaskId), "response for task[%s]", subTaskId);
    final SubTaskSpecStatus actualResponse = (SubTaskSpecStatus) response.getEntity();
    Assert.assertEquals(expectedResponse.getSpec().getId(), actualResponse.getSpec().getId());
    Assert.assertEquals(expectedResponse.getCurrentStatus(), actualResponse.getCurrentStatus());
    Assert.assertEquals(expectedResponse.getTaskHistory(), actualResponse.getTaskHistory());
    // completeSubTaskSpecAttemptHistory
    final String completeSubTaskSpecId = expectedSubTaskStateResponses.entrySet().stream().filter(entry -> {
        final TaskStatusPlus currentStatus = entry.getValue().getCurrentStatus();
        return currentStatus != null && (currentStatus.getStatusCode() == TaskState.SUCCESS || currentStatus.getStatusCode() == TaskState.FAILED);
    }).map(Entry::getKey).findFirst().orElse(null);
    if (completeSubTaskSpecId != null) {
        response = task.getCompleteSubTaskSpecAttemptHistory(completeSubTaskSpecId, newRequest());
        Assert.assertEquals(200, response.getStatus());
        Assert.assertEquals(expectedSubTaskStateResponses.get(completeSubTaskSpecId).getTaskHistory(), response.getEntity());
    }
}
Also used: TaskToolbox(org.apache.druid.indexing.common.TaskToolbox) AbstractTask(org.apache.druid.indexing.common.task.AbstractTask) SegmentAllocators(org.apache.druid.indexing.common.task.SegmentAllocators) SubTaskSpecStatus(org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTaskRunner.SubTaskSpecStatus) AbstractInputSource(org.apache.druid.data.input.AbstractInputSource) TaskResource(org.apache.druid.indexing.common.task.TaskResource) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) After(org.junit.After) Map(java.util.Map) DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) AuthConfig(org.apache.druid.server.security.AuthConfig) DateTimes(org.apache.druid.java.util.common.DateTimes) Function(com.google.common.base.Function) ImmutableMap(com.google.common.collect.ImmutableMap) InputFormat(org.apache.druid.data.input.InputFormat) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) Collection(java.util.Collection) SplitHintSpec(org.apache.druid.data.input.SplitHintSpec) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) SplittableInputSource(org.apache.druid.data.input.impl.SplittableInputSource) ISE(org.apache.druid.java.util.common.ISE) Collectors(java.util.stream.Collectors) TaskStatusPlus(org.apache.druid.indexer.TaskStatusPlus) SegmentAllocator(org.apache.druid.segment.realtime.appenderator.SegmentAllocator) NoopInputFormat(org.apache.druid.data.input.impl.NoopInputFormat) TaskState(org.apache.druid.indexer.TaskState) List(java.util.List) Stream(java.util.stream.Stream) UniformGranularitySpec(org.apache.druid.segment.indexing.granularity.UniformGranularitySpec) Response(javax.ws.rs.core.Response) DataSegment(org.apache.druid.timeline.DataSegment) Entry(java.util.Map.Entry) CopyOnWriteArrayList(java.util.concurrent.CopyOnWriteArrayList) IntStream(java.util.stream.IntStream) Intervals(org.apache.druid.java.util.common.Intervals) InputSplit(org.apache.druid.data.input.InputSplit) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) HashMap(java.util.HashMap) TaskStatus(org.apache.druid.indexer.TaskStatus) ArrayList(java.util.ArrayList) ConcurrentMap(java.util.concurrent.ConcurrentMap) AuthenticationResult(org.apache.druid.server.security.AuthenticationResult) HashSet(java.util.HashSet) Interval(org.joda.time.Interval) HttpServletRequest(javax.servlet.http.HttpServletRequest) Nullable(javax.annotation.Nullable) TaskLocation(org.apache.druid.indexer.TaskLocation) SegmentIdWithShardSpec(org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec) Test(org.junit.Test) EasyMock(org.easymock.EasyMock) Granularities(org.apache.druid.java.util.common.granularity.Granularities) RunnerTaskState(org.apache.druid.indexer.RunnerTaskState) TimeUnit(java.util.concurrent.TimeUnit) Preconditions(com.google.common.base.Preconditions) Assert(org.junit.Assert) DataSchema(org.apache.druid.segment.indexing.DataSchema) Collections(java.util.Collections)
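
checkState drives the supervisor task's JAX-RS resource methods directly, so every call needs an HttpServletRequest that carries an authentication result; the newRequest() helper that supplies it is not shown in this excerpt. A minimal hypothetical sketch, built with EasyMock and the AuthConfig / AuthenticationResult classes from the import list above, could look like this (the real helper in ParallelIndexSupervisorTaskResourceTest may differ):

// Hypothetical reconstruction of the newRequest() helper; not part of the excerpt above.
private static HttpServletRequest newRequest() {
    // A lenient mock whose only job is to hand the resource an authentication result,
    // which the endpoints read from the request attributes before authorizing the call.
    final HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
    EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
            .andReturn(new AuthenticationResult("test", "allowAll", null, null))
            .anyTimes();
    EasyMock.replay(request);
    return request;
}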

Aggregations

TaskStatus (org.apache.druid.indexer.TaskStatus)2 TaskToolbox (org.apache.druid.indexing.common.TaskToolbox)2 AbstractTask (org.apache.druid.indexing.common.task.AbstractTask)2 Function (com.google.common.base.Function)1 Preconditions (com.google.common.base.Preconditions)1 ImmutableMap (com.google.common.collect.ImmutableMap)1 ArrayList (java.util.ArrayList)1 Collection (java.util.Collection)1 Collections (java.util.Collections)1 HashMap (java.util.HashMap)1 HashSet (java.util.HashSet)1 List (java.util.List)1 Map (java.util.Map)1 Entry (java.util.Map.Entry)1 ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap)1 ConcurrentMap (java.util.concurrent.ConcurrentMap)1 CopyOnWriteArrayList (java.util.concurrent.CopyOnWriteArrayList)1 TimeUnit (java.util.concurrent.TimeUnit)1 Collectors (java.util.stream.Collectors)1 IntStream (java.util.stream.IntStream)1