
Example 1 with SubTaskSpecStatus

Use of org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTaskRunner.SubTaskSpecStatus in project druid by druid-io.

From the class ParallelIndexSupervisorTaskResourceTest, method buildStateMap:

private Map<String, SubTaskSpecStatus> buildStateMap() {
    final Map<String, SubTaskSpecStatus> stateMap = new HashMap<>();
    // For each sub task spec, record its currently running attempt (null if none)
    // and its recorded attempt history (empty if nothing has been recorded yet).
    subTaskSpecs.forEach((specId, spec) -> {
        final List<TaskStatusPlus> taskHistory = taskHistories.get(specId);
        final TaskStatusPlus runningTaskStatus = runningSpecs.get(specId);
        stateMap.put(specId, new SubTaskSpecStatus(spec, runningTaskStatus, taskHistory == null ? Collections.emptyList() : taskHistory));
    });
    return stateMap;
}
Also used : ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) TaskStatusPlus(org.apache.druid.indexer.TaskStatusPlus) SubTaskSpecStatus(org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTaskRunner.SubTaskSpecStatus)
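
To show how a map like this might be consumed, here is a minimal sketch that relies only on the getters visible in these snippets (getCurrentStatus and getTaskHistory); SubTaskStateSummary and its print method are hypothetical names, not part of Druid.

import java.util.List;
import java.util.Map;
import org.apache.druid.indexer.TaskStatusPlus;
import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTaskRunner.SubTaskSpecStatus;

// Hypothetical helper: summarize a specId -> SubTaskSpecStatus map such as the one
// buildStateMap() returns, using only the getters shown in these examples.
public class SubTaskStateSummary {
    public static void print(Map<String, SubTaskSpecStatus> stateMap) {
        stateMap.forEach((specId, status) -> {
            final TaskStatusPlus current = status.getCurrentStatus();     // null when no attempt is running
            final List<TaskStatusPlus> history = status.getTaskHistory(); // earlier attempts, possibly empty
            System.out.printf("spec[%s]: running=%s, recordedAttempts=%d%n", specId, current != null, history.size());
        });
    }
}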

Example 2 with SubTaskSpecStatus

Use of org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTaskRunner.SubTaskSpecStatus in project druid by druid-io.

From the class ParallelIndexSupervisorTask, method getSubTaskState:

@GET
@Path("/subtaskspec/{id}/state")
@Produces(MediaType.APPLICATION_JSON)
public Response getSubTaskState(@PathParam("id") String id, @Context final HttpServletRequest req) {
    IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
    final ParallelIndexTaskRunner currentRunner = getCurrentRunner();
    if (currentRunner == null) {
        // 503: the supervisor task has not created its runner yet.
        return Response.status(Response.Status.SERVICE_UNAVAILABLE).entity("task is not running yet").build();
    } else {
        final SubTaskSpecStatus subTaskSpecStatus = currentRunner.getSubTaskState(id);
        if (subTaskSpecStatus == null) {
            // 404: the runner does not know a sub task spec with this ID.
            return Response.status(Response.Status.NOT_FOUND).build();
        } else {
            // 200: return the spec, its running status, and its attempt history.
            return Response.ok(subTaskSpecStatus).build();
        }
    }
}
Also used : SubTaskSpecStatus(org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTaskRunner.SubTaskSpecStatus) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)
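
The three status codes above (503 while the runner has not started, 404 for an unknown spec, 200 with the status entity) suggest how a caller can branch on the result. Below is a minimal sketch that invokes the resource method directly, as the test in Example 3 does; SubTaskStateClient and fetchOrNull are hypothetical names, not Druid APIs, and the cast assumes an in-process call where the entity is the original object.

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.Response;
import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexSupervisorTask;
import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTaskRunner.SubTaskSpecStatus;

// Hypothetical caller that interprets the status codes produced by getSubTaskState.
public class SubTaskStateClient {
    public static SubTaskSpecStatus fetchOrNull(ParallelIndexSupervisorTask task, String subTaskSpecId, HttpServletRequest request) {
        final Response response = task.getSubTaskState(subTaskSpecId, request);
        switch (response.getStatus()) {
            case 200:
                // Entity is the SubTaskSpecStatus passed to Response.ok(...)
                return (SubTaskSpecStatus) response.getEntity();
            case 404:
                // The runner does not know this sub task spec ID
                return null;
            case 503:
                // "task is not running yet": the runner has not been created
                return null;
            default:
                throw new IllegalStateException("Unexpected status: " + response.getStatus());
        }
    }
}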

Example 3 with SubTaskSpecStatus

Use of org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTaskRunner.SubTaskSpecStatus in project druid by druid-io.

From the class ParallelIndexSupervisorTaskResourceTest, method checkState:

/**
 * Test all endpoints of {@link ParallelIndexSupervisorTask}.
 */
private void checkState(int expectedSucceededTasks, int expectedFailedTask, // subTaskSpecId -> response
Map<String, SubTaskSpecStatus> expectedSubTaskStateResponses) {
    Response response = task.getProgress(newRequest());
    Assert.assertEquals(200, response.getStatus());
    final ParallelIndexingPhaseProgress monitorStatus = (ParallelIndexingPhaseProgress) response.getEntity();
    // numRunningTasks
    Assert.assertEquals(runningTasks.size(), monitorStatus.getRunning());
    // numSucceededTasks
    Assert.assertEquals(expectedSucceededTasks, monitorStatus.getSucceeded());
    // numFailedTasks
    Assert.assertEquals(expectedFailedTask, monitorStatus.getFailed());
    // numCompleteTasks
    Assert.assertEquals(expectedSucceededTasks + expectedFailedTask, monitorStatus.getComplete());
    // numTotalTasks
    Assert.assertEquals(runningTasks.size() + expectedSucceededTasks + expectedFailedTask, monitorStatus.getTotal());
    // runningSubTasks
    response = task.getRunningTasks(newRequest());
    Assert.assertEquals(200, response.getStatus());
    Assert.assertEquals(runningTasks.stream().map(AbstractTask::getId).collect(Collectors.toSet()), new HashSet<>((Collection<String>) response.getEntity()));
    // subTaskSpecs
    response = task.getSubTaskSpecs(newRequest());
    Assert.assertEquals(200, response.getStatus());
    List<SubTaskSpec<SinglePhaseSubTask>> actualSubTaskSpecMap = (List<SubTaskSpec<SinglePhaseSubTask>>) response.getEntity();
    Assert.assertEquals(subTaskSpecs.keySet(), actualSubTaskSpecMap.stream().map(SubTaskSpec::getId).collect(Collectors.toSet()));
    // runningSubTaskSpecs
    response = task.getRunningSubTaskSpecs(newRequest());
    Assert.assertEquals(200, response.getStatus());
    actualSubTaskSpecMap = (List<SubTaskSpec<SinglePhaseSubTask>>) response.getEntity();
    Assert.assertEquals(runningSpecs.keySet(), actualSubTaskSpecMap.stream().map(SubTaskSpec::getId).collect(Collectors.toSet()));
    // completeSubTaskSpecs
    final List<SubTaskSpec<SinglePhaseSubTask>> completeSubTaskSpecs = expectedSubTaskStateResponses.entrySet().stream().filter(entry -> !runningSpecs.containsKey(entry.getKey())).map(entry -> entry.getValue().getSpec()).collect(Collectors.toList());
    response = task.getCompleteSubTaskSpecs(newRequest());
    Assert.assertEquals(200, response.getStatus());
    Assert.assertEquals(completeSubTaskSpecs, response.getEntity());
    // subTaskSpec
    final String subTaskId = runningSpecs.keySet().iterator().next();
    response = task.getSubTaskSpec(subTaskId, newRequest());
    Assert.assertEquals(200, response.getStatus());
    final SubTaskSpec<SinglePhaseSubTask> subTaskSpec = (SubTaskSpec<SinglePhaseSubTask>) response.getEntity();
    Assert.assertEquals(subTaskId, subTaskSpec.getId());
    // subTaskState
    response = task.getSubTaskState(subTaskId, newRequest());
    Assert.assertEquals(200, response.getStatus());
    final SubTaskSpecStatus expectedResponse = Preconditions.checkNotNull(expectedSubTaskStateResponses.get(subTaskId), "response for task[%s]", subTaskId);
    final SubTaskSpecStatus actualResponse = (SubTaskSpecStatus) response.getEntity();
    Assert.assertEquals(expectedResponse.getSpec().getId(), actualResponse.getSpec().getId());
    Assert.assertEquals(expectedResponse.getCurrentStatus(), actualResponse.getCurrentStatus());
    Assert.assertEquals(expectedResponse.getTaskHistory(), actualResponse.getTaskHistory());
    // completeSubTaskSpecAttemptHistory
    final String completeSubTaskSpecId = expectedSubTaskStateResponses.entrySet().stream().filter(entry -> {
        final TaskStatusPlus currentStatus = entry.getValue().getCurrentStatus();
        return currentStatus != null && (currentStatus.getStatusCode() == TaskState.SUCCESS || currentStatus.getStatusCode() == TaskState.FAILED);
    }).map(Entry::getKey).findFirst().orElse(null);
    if (completeSubTaskSpecId != null) {
        response = task.getCompleteSubTaskSpecAttemptHistory(completeSubTaskSpecId, newRequest());
        Assert.assertEquals(200, response.getStatus());
        Assert.assertEquals(expectedSubTaskStateResponses.get(completeSubTaskSpecId).getTaskHistory(), response.getEntity());
    }
}
Also used : TaskToolbox(org.apache.druid.indexing.common.TaskToolbox) AbstractTask(org.apache.druid.indexing.common.task.AbstractTask) SegmentAllocators(org.apache.druid.indexing.common.task.SegmentAllocators) SubTaskSpecStatus(org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTaskRunner.SubTaskSpecStatus) AbstractInputSource(org.apache.druid.data.input.AbstractInputSource) TaskResource(org.apache.druid.indexing.common.task.TaskResource) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) After(org.junit.After) Map(java.util.Map) DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) AuthConfig(org.apache.druid.server.security.AuthConfig) DateTimes(org.apache.druid.java.util.common.DateTimes) Function(com.google.common.base.Function) ImmutableMap(com.google.common.collect.ImmutableMap) InputFormat(org.apache.druid.data.input.InputFormat) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) Collection(java.util.Collection) SplitHintSpec(org.apache.druid.data.input.SplitHintSpec) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) SplittableInputSource(org.apache.druid.data.input.impl.SplittableInputSource) ISE(org.apache.druid.java.util.common.ISE) Collectors(java.util.stream.Collectors) TaskStatusPlus(org.apache.druid.indexer.TaskStatusPlus) SegmentAllocator(org.apache.druid.segment.realtime.appenderator.SegmentAllocator) NoopInputFormat(org.apache.druid.data.input.impl.NoopInputFormat) TaskState(org.apache.druid.indexer.TaskState) List(java.util.List) Stream(java.util.stream.Stream) UniformGranularitySpec(org.apache.druid.segment.indexing.granularity.UniformGranularitySpec) Response(javax.ws.rs.core.Response) DataSegment(org.apache.druid.timeline.DataSegment) Entry(java.util.Map.Entry) CopyOnWriteArrayList(java.util.concurrent.CopyOnWriteArrayList) IntStream(java.util.stream.IntStream) Intervals(org.apache.druid.java.util.common.Intervals) InputSplit(org.apache.druid.data.input.InputSplit) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) HashMap(java.util.HashMap) TaskStatus(org.apache.druid.indexer.TaskStatus) ArrayList(java.util.ArrayList) ConcurrentMap(java.util.concurrent.ConcurrentMap) AuthenticationResult(org.apache.druid.server.security.AuthenticationResult) HashSet(java.util.HashSet) Interval(org.joda.time.Interval) HttpServletRequest(javax.servlet.http.HttpServletRequest) Nullable(javax.annotation.Nullable) TaskLocation(org.apache.druid.indexer.TaskLocation) SegmentIdWithShardSpec(org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec) Test(org.junit.Test) EasyMock(org.easymock.EasyMock) Granularities(org.apache.druid.java.util.common.granularity.Granularities) RunnerTaskState(org.apache.druid.indexer.RunnerTaskState) TimeUnit(java.util.concurrent.TimeUnit) Preconditions(com.google.common.base.Preconditions) Assert(org.junit.Assert) DataSchema(org.apache.druid.segment.indexing.DataSchema) Collections(java.util.Collections)
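
The filter used to pick completeSubTaskSpecId above can be read as a standalone rule: a spec counts as complete once its current status reaches a terminal TaskState (SUCCESS or FAILED). A minimal sketch of that rule as a hypothetical helper follows; CompleteSpecFilter is not a Druid class.

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.druid.indexer.TaskState;
import org.apache.druid.indexer.TaskStatusPlus;
import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTaskRunner.SubTaskSpecStatus;

// Hypothetical helper extracted from the completeSubTaskSpecId filter in checkState.
public class CompleteSpecFilter {
    public static List<String> completeSpecIds(Map<String, SubTaskSpecStatus> stateMap) {
        return stateMap.entrySet().stream()
            .filter(entry -> {
                final TaskStatusPlus currentStatus = entry.getValue().getCurrentStatus();
                return currentStatus != null
                       && (currentStatus.getStatusCode() == TaskState.SUCCESS
                           || currentStatus.getStatusCode() == TaskState.FAILED);
            })
            .map(Map.Entry::getKey)
            .collect(Collectors.toList());
    }
}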

Aggregations

SubTaskSpecStatus (org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTaskRunner.SubTaskSpecStatus): 3
HashMap (java.util.HashMap): 2
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 2
TaskStatusPlus (org.apache.druid.indexer.TaskStatusPlus): 2
Function (com.google.common.base.Function): 1
Preconditions (com.google.common.base.Preconditions): 1
ImmutableMap (com.google.common.collect.ImmutableMap): 1
ArrayList (java.util.ArrayList): 1
Collection (java.util.Collection): 1
Collections (java.util.Collections): 1
HashSet (java.util.HashSet): 1
List (java.util.List): 1
Map (java.util.Map): 1
Entry (java.util.Map.Entry): 1
ConcurrentMap (java.util.concurrent.ConcurrentMap): 1
CopyOnWriteArrayList (java.util.concurrent.CopyOnWriteArrayList): 1
TimeUnit (java.util.concurrent.TimeUnit): 1
Collectors (java.util.stream.Collectors): 1
IntStream (java.util.stream.IntStream): 1
Stream (java.util.stream.Stream): 1