Use of org.apache.druid.indexing.common.task.Task in project druid (druid-io):
class OverlordResourceTest, method testTaskPostDeniesDatasourceReadUser.
/**
 * Verifies that {@code taskPost} is denied for a user who holds only READ
 * access on the datasource: submitting a task requires WRITE, so the call
 * must throw {@link ForbiddenException}.
 */
@Test
public void testTaskPostDeniesDatasourceReadUser() {
    expectAuthorizationTokenCheck(Users.WIKI_READER);
    EasyMock.replay(taskRunner, taskMaster, taskStorageQueryAdapter, indexerMetadataStorageAdapter, req, workerTaskRunnerQueryAdapter);
    // Verify that taskPost fails for a user who has only datasource read access.
    Task task = NoopTask.create(Datasources.WIKIPEDIA);
    // Fixed: the expect(ForbiddenException.class) call was duplicated; a single
    // expectation is sufficient for ExpectedException.
    expectedException.expect(ForbiddenException.class);
    overlordResource.taskPost(task, req);
}
Use of org.apache.druid.indexing.common.task.Task in project druid (druid-io):
class OverlordResourceTest, method testSecuredTaskPost.
/**
 * Verifies that posting a task without sufficient authorization is rejected
 * with a {@link ForbiddenException}.
 */
@Test
public void testSecuredTaskPost() {
    expectedException.expect(ForbiddenException.class);
    expectAuthorizationTokenCheck();
    EasyMock.replay(
        taskRunner,
        taskMaster,
        taskStorageQueryAdapter,
        indexerMetadataStorageAdapter,
        req,
        workerTaskRunnerQueryAdapter
    );
    final Task noopTask = NoopTask.create();
    overlordResource.taskPost(noopTask, req);
}
Use of org.apache.druid.indexing.common.task.Task in project druid (druid-io):
class JavaScriptWorkerSelectStrategyTest, method createMockTask.
/**
 * Builds an EasyMock {@link Task} stub whose {@code getType()} always
 * reports the supplied type string.
 *
 * @param type the task type to report from {@code getType()}
 * @return a replayed mock ready for use in worker-selection tests
 */
private Task createMockTask(String type) {
    final Task taskMock = EasyMock.createMock(Task.class);
    EasyMock.expect(taskMock.getType()).andReturn(type).anyTimes();
    EasyMock.replay(taskMock);
    return taskMock;
}
Use of org.apache.druid.indexing.common.task.Task in project druid (druid-io):
class WorkerTaskManagerTest, method testTaskRun.
// End-to-end lifecycle test for WorkerTaskManager: tasks arriving via the
// assigned-task directory, via the completed-task directory, and via explicit
// assignTask() must all end up in the completed-tasks map, and the change
// history must announce each state transition in order.
@Test(timeout = 60_000L)
public void testTaskRun() throws Exception {
// Three noop tasks, one per ingestion path exercised below.
Task task1 = createNoopTask("task1-assigned-via-assign-dir");
Task task2 = createNoopTask("task2-completed-already");
Task task3 = createNoopTask("task3-assigned-explicitly");
FileUtils.mkdirp(workerTaskManager.getAssignedTaskDir());
FileUtils.mkdirp(workerTaskManager.getCompletedTaskDir());
// create a task in assigned task directory, to simulate MM shutdown right after a task was assigned.
jsonMapper.writeValue(new File(workerTaskManager.getAssignedTaskDir(), task1.getId()), task1);
// simulate an already completed task
jsonMapper.writeValue(new File(workerTaskManager.getCompletedTaskDir(), task2.getId()), TaskAnnouncement.create(task2, TaskStatus.success(task2.getId()), location));
workerTaskManager.start();
// task2 was pre-seeded as completed, so it must be visible immediately after start().
Assert.assertTrue(workerTaskManager.getCompletedTasks().get(task2.getId()).getTaskStatus().isSuccess());
// task1 runs asynchronously after being picked up from the assigned dir; poll until done.
while (!workerTaskManager.getCompletedTasks().containsKey(task1.getId())) {
Thread.sleep(100);
}
Assert.assertTrue(workerTaskManager.getCompletedTasks().get(task1.getId()).getTaskStatus().isSuccess());
// On completion the task file moves from the assigned dir to the completed dir.
Assert.assertTrue(new File(workerTaskManager.getCompletedTaskDir(), task1.getId()).exists());
Assert.assertFalse(new File(workerTaskManager.getAssignedTaskDir(), task1.getId()).exists());
// Full history from the beginning (counter -1): expect metadata + one terminal update per task.
ChangeRequestsSnapshot<WorkerHistoryItem> baseHistory = workerTaskManager.getChangesSince(new ChangeRequestHistory.Counter(-1, 0)).get();
Assert.assertFalse(baseHistory.isResetCounter());
Assert.assertEquals(3, baseHistory.getRequests().size());
Assert.assertFalse(((WorkerHistoryItem.Metadata) baseHistory.getRequests().get(0)).isDisabled());
WorkerHistoryItem.TaskUpdate baseUpdate1 = (WorkerHistoryItem.TaskUpdate) baseHistory.getRequests().get(1);
WorkerHistoryItem.TaskUpdate baseUpdate2 = (WorkerHistoryItem.TaskUpdate) baseHistory.getRequests().get(2);
Assert.assertTrue(baseUpdate1.getTaskAnnouncement().getTaskStatus().isSuccess());
Assert.assertTrue(baseUpdate2.getTaskAnnouncement().getTaskStatus().isSuccess());
// Order of the two success updates is not deterministic; compare as a set.
Assert.assertEquals(ImmutableSet.of(task1.getId(), task2.getId()), ImmutableSet.of(baseUpdate1.getTaskAnnouncement().getTaskStatus().getId(), baseUpdate2.getTaskAnnouncement().getTaskStatus().getId()));
// assign another task
workerTaskManager.assignTask(task3);
while (!workerTaskManager.getCompletedTasks().containsKey(task3.getId())) {
Thread.sleep(100);
}
Assert.assertTrue(workerTaskManager.getCompletedTasks().get(task3.getId()).getTaskStatus().isSuccess());
Assert.assertTrue(new File(workerTaskManager.getCompletedTaskDir(), task3.getId()).exists());
Assert.assertFalse(new File(workerTaskManager.getAssignedTaskDir(), task3.getId()).exists());
// Incremental history since the base snapshot: task3 should produce exactly four updates.
ChangeRequestsSnapshot<WorkerHistoryItem> changes = workerTaskManager.getChangesSince(baseHistory.getCounter()).get();
Assert.assertFalse(changes.isResetCounter());
Assert.assertEquals(4, changes.getRequests().size());
// Update 1: runnable, no host yet (task accepted but not located).
WorkerHistoryItem.TaskUpdate update1 = (WorkerHistoryItem.TaskUpdate) changes.getRequests().get(0);
Assert.assertEquals(task3.getId(), update1.getTaskAnnouncement().getTaskStatus().getId());
Assert.assertTrue(update1.getTaskAnnouncement().getTaskStatus().isRunnable());
Assert.assertNull(update1.getTaskAnnouncement().getTaskLocation().getHost());
// Update 2: still runnable, still no host.
WorkerHistoryItem.TaskUpdate update2 = (WorkerHistoryItem.TaskUpdate) changes.getRequests().get(1);
Assert.assertEquals(task3.getId(), update2.getTaskAnnouncement().getTaskStatus().getId());
Assert.assertTrue(update2.getTaskAnnouncement().getTaskStatus().isRunnable());
Assert.assertNull(update2.getTaskAnnouncement().getTaskLocation().getHost());
// Update 3: runnable with a host assigned (task location became known).
WorkerHistoryItem.TaskUpdate update3 = (WorkerHistoryItem.TaskUpdate) changes.getRequests().get(2);
Assert.assertEquals(task3.getId(), update3.getTaskAnnouncement().getTaskStatus().getId());
Assert.assertTrue(update3.getTaskAnnouncement().getTaskStatus().isRunnable());
Assert.assertNotNull(update3.getTaskAnnouncement().getTaskLocation().getHost());
// Update 4: terminal success, host retained.
WorkerHistoryItem.TaskUpdate update4 = (WorkerHistoryItem.TaskUpdate) changes.getRequests().get(3);
Assert.assertEquals(task3.getId(), update4.getTaskAnnouncement().getTaskStatus().getId());
Assert.assertTrue(update4.getTaskAnnouncement().getTaskStatus().isSuccess());
Assert.assertNotNull(update4.getTaskAnnouncement().getTaskLocation().getHost());
}
Use of org.apache.druid.indexing.common.task.Task in project druid (druid-io):
class KafkaIndexTaskTest, method testMultipleParseExceptionsFailure.
/**
 * Verifies that a Kafka indexing task fails once the number of parse
 * exceptions exceeds {@code maxParseExceptions}, and that the ingestion
 * report carries the expected row metrics and the saved unparseable events
 * (capped at {@code maxSavedParseExceptions}).
 */
@Test(timeout = 60_000L)
public void testMultipleParseExceptionsFailure() throws Exception {
    // Tolerate up to 2 parse exceptions and save up to 2 for the report;
    // the inserted data contains more, so the task must fail.
    reportParseExceptions = false;
    maxParseExceptions = 2;
    maxSavedParseExceptions = 2;
    // Insert data
    insertData();
    // Read partition 0 from offset 2 (inclusive) to offset 10 (exclusive).
    final KafkaIndexTask task = createTask(null, new KafkaIndexTaskIOConfig(0, "sequence0", new SeekableStreamStartSequenceNumbers<>(topic, ImmutableMap.of(0, 2L), ImmutableSet.of()), new SeekableStreamEndSequenceNumbers<>(topic, ImmutableMap.of(0, 10L)), kafkaServer.consumerProperties(), KafkaSupervisorIOConfig.DEFAULT_POLL_TIMEOUT_MILLIS, true, null, null, INPUT_FORMAT));
    final ListenableFuture<TaskStatus> future = runTask(task);
    // Wait for task to exit (future resolves on task completion).
    TaskStatus status = future.get();
    Assert.assertEquals(TaskState.FAILED, status.getStatusCode());
    IndexTaskTest.checkTaskStatusErrorMsgForParseExceptionsExceeded(status);
    // Check metrics
    Assert.assertEquals(3, task.getRunner().getRowIngestionMeters().getProcessed());
    Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getProcessedWithError());
    Assert.assertEquals(3, task.getRunner().getRowIngestionMeters().getUnparseable());
    Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway());
    // Check published metadata: a failed task must publish nothing.
    Assert.assertEquals(ImmutableList.of(), publishedDescriptors());
    Assert.assertNull(newDataSchemaMetadata());
    IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData();
    Map<String, Object> expectedMetrics = ImmutableMap.of(RowIngestionMeters.BUILD_SEGMENTS, ImmutableMap.of(RowIngestionMeters.PROCESSED, 3, RowIngestionMeters.PROCESSED_WITH_ERROR, 0, RowIngestionMeters.UNPARSEABLE, 3, RowIngestionMeters.THROWN_AWAY, 0));
    Assert.assertEquals(expectedMetrics, reportData.getRowStats());
    // Fixed: parameterize LinkedHashMap instead of using the raw type.
    List<LinkedHashMap<String, Object>> parseExceptionReports = (List<LinkedHashMap<String, Object>>) reportData.getUnparseableEvents().get(RowIngestionMeters.BUILD_SEGMENTS);
    // Only the last maxSavedParseExceptions (2) errors are retained, newest first.
    List<String> expectedMessages = Arrays.asList("Unable to parse [] as the intermediateRow resulted in empty input row (Record: 1)", "Unable to parse row [unparseable] (Record: 1)");
    List<String> actualMessages = parseExceptionReports.stream().map(r -> ((List<String>) r.get("details")).get(0)).collect(Collectors.toList());
    Assert.assertEquals(expectedMessages, actualMessages);
    List<String> expectedInputs = Arrays.asList("", "unparseable");
    List<String> actualInputs = parseExceptionReports.stream().map(r -> (String) r.get("input")).collect(Collectors.toList());
    Assert.assertEquals(expectedInputs, actualInputs);
}
Aggregations