Use of org.apache.druid.indexer.TaskStatus in project druid by druid-io.
In the class RealtimeIndexTaskTest, the method testReportParseExceptionsOnBadMetric:
@Test(timeout = 60_000L)
public void testReportParseExceptionsOnBadMetric() throws Exception {
  final TestIndexerMetadataStorageCoordinator mdc = new TestIndexerMetadataStorageCoordinator();
  final RealtimeIndexTask task = makeRealtimeTask(null, true);
  final TaskToolbox taskToolbox = makeToolbox(task, mdc, tempFolder.newFolder());
  final ListenableFuture<TaskStatus> statusFuture = runTask(task, taskToolbox);

  // Wait for the firehose to show up; it starts off null.
  while (task.getFirehose() == null) {
    Thread.sleep(50);
  }
  final TestFirehose firehose = (TestFirehose) task.getFirehose();
  firehose.addRows(
      ImmutableList.of(
          ImmutableMap.of("t", now.getMillis(), "dim1", "foo", "met1", "1"),
          ImmutableMap.of("t", now.getMillis(), "dim1", "foo", "met1", "foo"),
          ImmutableMap.of("t", now.minus(new Period("P1D")).getMillis(), "dim1", "foo", "met1", "foo"),
          ImmutableMap.of("t", now.getMillis(), "dim2", "bar", "met1", 2.0)
      )
  );

  // Stop the firehose; this will drain out the existing events.
  firehose.close();

  // Wait for the task to finish; it should fail on the unparseable met1 values.
  expectedException.expect(ExecutionException.class);
  expectedException.expectCause(CoreMatchers.instanceOf(ParseException.class));
  expectedException.expectCause(
      ThrowableMessageMatcher.hasMessage(CoreMatchers.containsString("[Unable to parse value[foo] for field[met1]"))
  );
  statusFuture.get();
}
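The ExpectedException rule above matches the ExecutionException that statusFuture.get() throws once the task fails. The same check can be written by unwrapping the cause manually; a minimal sketch, assuming the same statusFuture and plain JUnit 4 assertions, not a change to the original test:

// Sketch only: unwrap the failure instead of declaring it up front with the ExpectedException rule.
try {
  statusFuture.get();
  Assert.fail("Expected the task to fail on the unparseable met1 values");
} catch (ExecutionException e) {
  Assert.assertTrue(e.getCause() instanceof ParseException);
  Assert.assertTrue(e.getCause().getMessage().contains("Unable to parse value[foo] for field[met1]"));
}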
Use of org.apache.druid.indexer.TaskStatus in project druid by druid-io.
In the class RealtimeIndexTaskTest, the method testStopBeforeStarting:
@Test(timeout = 60_000L)
public void testStopBeforeStarting() throws Exception {
  final File directory = tempFolder.newFolder();
  final RealtimeIndexTask task1 = makeRealtimeTask(null);
  final TestIndexerMetadataStorageCoordinator mdc = new TestIndexerMetadataStorageCoordinator();
  final TaskToolbox taskToolbox = makeToolbox(task1, mdc, directory);
  task1.stopGracefully(taskToolbox.getConfig());
  final ListenableFuture<TaskStatus> statusFuture = runTask(task1, taskToolbox);

  // Wait for the task to finish.
  final TaskStatus taskStatus = statusFuture.get();
  Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode());
}
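Because the task is stopped gracefully before runTask is called, the future still resolves to a successful status. The same outcome can be asserted with the TaskStatus#isSuccess convenience used in the compaction tests below; a one-line sketch against the same statusFuture, not part of the original test:

// Equivalent assertion via the convenience accessor (sketch only).
Assert.assertTrue(statusFuture.get().isSuccess());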
Use of org.apache.druid.indexer.TaskStatus in project druid by druid-io.
In the class CompactionTaskRunTest, the method testWithGranularitySpecNonNullSegmentGranularityAndNullQueryGranularity:
@Test
public void testWithGranularitySpecNonNullSegmentGranularityAndNullQueryGranularity() throws Exception {
  runIndexTask();
  final Builder builder = new Builder(DATA_SOURCE, segmentCacheManagerFactory, RETRY_POLICY_FACTORY);

  // day segmentGranularity
  final CompactionTask compactionTask1 = builder
      .interval(Intervals.of("2014-01-01/2014-01-02"))
      .granularitySpec(new ClientCompactionTaskGranularitySpec(Granularities.DAY, null, null))
      .build();
  Pair<TaskStatus, List<DataSegment>> resultPair = runTask(compactionTask1);
  Assert.assertTrue(resultPair.lhs.isSuccess());

  List<DataSegment> segments = resultPair.rhs;
  Assert.assertEquals(1, segments.size());
  Assert.assertEquals(Intervals.of("2014-01-01/2014-01-02"), segments.get(0).getInterval());
  Assert.assertEquals(new NumberedShardSpec(0, 1), segments.get(0).getShardSpec());
  Assert.assertEquals(
      getDefaultCompactionState(Granularities.DAY, Granularities.MINUTE, ImmutableList.of(Intervals.of("2014-01-01T00:00:00/2014-01-01T03:00:00"))),
      segments.get(0).getLastCompactionState()
  );

  // hour segmentGranularity
  final CompactionTask compactionTask2 = builder
      .interval(Intervals.of("2014-01-01/2014-01-02"))
      .granularitySpec(new ClientCompactionTaskGranularitySpec(Granularities.HOUR, null, null))
      .build();
  resultPair = runTask(compactionTask2);
  Assert.assertTrue(resultPair.lhs.isSuccess());

  segments = resultPair.rhs;
  Assert.assertEquals(3, segments.size());
  for (int i = 0; i < 3; i++) {
    Assert.assertEquals(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i + 1), segments.get(i).getInterval());
    Assert.assertEquals(new NumberedShardSpec(0, 1), segments.get(i).getShardSpec());
    Assert.assertEquals(
        getDefaultCompactionState(Granularities.HOUR, Granularities.MINUTE, ImmutableList.of(Intervals.of("2014-01-01/2014-01-02"))),
        segments.get(i).getLastCompactionState()
    );
  }
}
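The three-iteration loop above is repeated verbatim in testWithSegmentGranularity below, so it could be factored into a small helper. A hypothetical sketch using only the assertions already shown; the helper name and signature are assumptions for illustration, not part of CompactionTaskRunTest:

// Hypothetical helper capturing the repeated hourly-segment checks (interval and shard spec only).
private static void assertHourlySegments(List<DataSegment> segments) {
  Assert.assertEquals(3, segments.size());
  for (int i = 0; i < 3; i++) {
    Assert.assertEquals(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i + 1), segments.get(i).getInterval());
    Assert.assertEquals(new NumberedShardSpec(0, 1), segments.get(i).getShardSpec());
  }
}

Usage would be a single call, for example assertHourlySegments(resultPair.rhs), with the compaction-state assertion kept inline.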
Use of org.apache.druid.indexer.TaskStatus in project druid by druid-io.
In the class CompactionTaskRunTest, the method testWithSegmentGranularity:
@Test
public void testWithSegmentGranularity() throws Exception {
  runIndexTask();
  final Builder builder = new Builder(DATA_SOURCE, segmentCacheManagerFactory, RETRY_POLICY_FACTORY);

  // day segmentGranularity
  final CompactionTask compactionTask1 = builder
      .interval(Intervals.of("2014-01-01/2014-01-02"))
      .segmentGranularity(Granularities.DAY)
      .build();
  Pair<TaskStatus, List<DataSegment>> resultPair = runTask(compactionTask1);
  Assert.assertTrue(resultPair.lhs.isSuccess());

  List<DataSegment> segments = resultPair.rhs;
  Assert.assertEquals(1, segments.size());
  Assert.assertEquals(Intervals.of("2014-01-01/2014-01-02"), segments.get(0).getInterval());
  Assert.assertEquals(new NumberedShardSpec(0, 1), segments.get(0).getShardSpec());
  Assert.assertEquals(
      getDefaultCompactionState(Granularities.DAY, Granularities.MINUTE, ImmutableList.of(Intervals.of("2014-01-01T00:00:00/2014-01-01T03:00:00"))),
      segments.get(0).getLastCompactionState()
  );

  // hour segmentGranularity
  final CompactionTask compactionTask2 = builder
      .interval(Intervals.of("2014-01-01/2014-01-02"))
      .segmentGranularity(Granularities.HOUR)
      .build();
  resultPair = runTask(compactionTask2);
  Assert.assertTrue(resultPair.lhs.isSuccess());

  segments = resultPair.rhs;
  Assert.assertEquals(3, segments.size());
  for (int i = 0; i < 3; i++) {
    Assert.assertEquals(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i + 1), segments.get(i).getInterval());
    Assert.assertEquals(new NumberedShardSpec(0, 1), segments.get(i).getShardSpec());
    Assert.assertEquals(
        getDefaultCompactionState(Granularities.HOUR, Granularities.MINUTE, ImmutableList.of(Intervals.of("2014-01-01/2014-01-02"))),
        segments.get(i).getLastCompactionState()
    );
  }
}
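This test asserts exactly the same segments and compaction state as the granularitySpec variant above, which suggests that, for these cases at least, the two builder forms are interchangeable. A side-by-side sketch of the calls as they appear on this page, not a claim about CompactionTask.Builder internals:

// Form used in testWithSegmentGranularity:
builder.segmentGranularity(Granularities.HOUR);
// Form used in testWithGranularitySpecNonNullSegmentGranularityAndNullQueryGranularity:
builder.granularitySpec(new ClientCompactionTaskGranularitySpec(Granularities.HOUR, null, null));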
Use of org.apache.druid.indexer.TaskStatus in project druid by druid-io.
In the class CompactionTaskRunTest, the method testWithGranularitySpecNonNullQueryGranularityAndNonNullSegmentGranularity:
@Test
public void testWithGranularitySpecNonNullQueryGranularityAndNonNullSegmentGranularity() throws Exception {
  runIndexTask();
  final Builder builder = new Builder(DATA_SOURCE, segmentCacheManagerFactory, RETRY_POLICY_FACTORY);

  // day segmentGranularity and day queryGranularity
  final CompactionTask compactionTask1 = builder
      .interval(Intervals.of("2014-01-01/2014-01-02"))
      .granularitySpec(new ClientCompactionTaskGranularitySpec(Granularities.DAY, Granularities.DAY, null))
      .build();
  Pair<TaskStatus, List<DataSegment>> resultPair = runTask(compactionTask1);
  Assert.assertTrue(resultPair.lhs.isSuccess());

  List<DataSegment> segments = resultPair.rhs;
  Assert.assertEquals(1, segments.size());
  Assert.assertEquals(Intervals.of("2014-01-01/2014-01-02"), segments.get(0).getInterval());
  Assert.assertEquals(new NumberedShardSpec(0, 1), segments.get(0).getShardSpec());
  Assert.assertEquals(
      getDefaultCompactionState(Granularities.DAY, Granularities.DAY, ImmutableList.of(Intervals.of("2014-01-01T00:00:00/2014-01-01T03:00:00"))),
      segments.get(0).getLastCompactionState()
  );
}
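Compared with the null-queryGranularity tests above, the only change in the expected state is the second argument to getDefaultCompactionState, which moves from Granularities.MINUTE (presumably carried over from the original index task) to the explicitly requested Granularities.DAY. A summary sketch of the three granularity-spec variants exercised on this page, using only constructors and builder calls already shown:

// segment=DAY, queryGranularity left null (expected state keeps MINUTE)
new ClientCompactionTaskGranularitySpec(Granularities.DAY, null, null);
// segment=DAY, queryGranularity=DAY (expected state shows DAY)
new ClientCompactionTaskGranularitySpec(Granularities.DAY, Granularities.DAY, null);
// segmentGranularity-only builder form from testWithSegmentGranularity
builder.segmentGranularity(Granularities.DAY);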