Example 6 with RowIngestionMetersTotals

Use of org.apache.druid.segment.incremental.RowIngestionMetersTotals in project druid by druid-io.

From the class SinglePhaseParallelIndexingTest, method getExpectedTaskReportSequential:

private Map<String, Object> getExpectedTaskReportSequential(
    String taskId,
    List<ParseExceptionReport> expectedUnparseableEvents,
    RowIngestionMetersTotals expectedTotals
) {
    Map<String, Object> returnMap = new HashMap<>();
    Map<String, Object> ingestionStatsAndErrors = new HashMap<>();
    Map<String, Object> payload = new HashMap<>();
    payload.put("ingestionState", IngestionState.COMPLETED);
    // Only the buildSegments phase is expected to report unparseable events.
    payload.put("unparseableEvents",
        ImmutableMap.of("determinePartitions", ImmutableList.of(), "buildSegments", expectedUnparseableEvents));
    // Moving averages are all zero once the task has completed.
    Map<String, Object> emptyAverageMinuteMap = ImmutableMap.of(
        "processed", 0.0, "unparseable", 0.0, "thrownAway", 0.0, "processedWithError", 0.0);
    Map<String, Object> emptyAverages = ImmutableMap.of(
        "1m", emptyAverageMinuteMap, "5m", emptyAverageMinuteMap, "15m", emptyAverageMinuteMap);
    payload.put("rowStats", ImmutableMap.of(
        "movingAverages", ImmutableMap.of("determinePartitions", emptyAverages, "buildSegments", emptyAverages),
        "totals", ImmutableMap.of(
            "determinePartitions", new RowIngestionMetersTotals(0, 0, 0, 0),
            "buildSegments", expectedTotals)));
    ingestionStatsAndErrors.put("taskId", taskId);
    ingestionStatsAndErrors.put("payload", payload);
    ingestionStatsAndErrors.put("type", "ingestionStatsAndErrors");
    returnMap.put("ingestionStatsAndErrors", ingestionStatsAndErrors);
    return returnMap;
}
Also used : HashMap (java.util.HashMap) RowIngestionMetersTotals (org.apache.druid.segment.incremental.RowIngestionMetersTotals)
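
As a usage sketch (hypothetical, not taken from the Druid tests): a caller builds the expected totals, asks the helper for the full report map, and can then drill into it the way a report consumer would. The task id and row counts below are made up, and the four-argument RowIngestionMetersTotals constructor order (processed, processedWithError, thrownAway, unparseable) is an assumption inferred from the zero-filled call in the helper.

// Hypothetical usage of the helper above; the task id and counts are made up.
// Constructor order (processed, processedWithError, thrownAway, unparseable)
// is assumed from the zero-filled RowIngestionMetersTotals(0, 0, 0, 0) call.
RowIngestionMetersTotals expectedTotals = new RowIngestionMetersTotals(10, 0, 0, 1);
Map<String, Object> expectedReports = getExpectedTaskReportSequential(
    "hypothetical_task_id",
    ImmutableList.of(),   // expecting no unparseable events
    expectedTotals
);
// Drill into the nested payload the same way a report consumer would.
Map<String, Object> ingestionStatsAndErrors =
    (Map<String, Object>) expectedReports.get("ingestionStatsAndErrors");
Map<String, Object> payload = (Map<String, Object>) ingestionStatsAndErrors.get("payload");
Assert.assertEquals(IngestionState.COMPLETED, payload.get("ingestionState"));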

Example 7 with RowIngestionMetersTotals

Use of org.apache.druid.segment.incremental.RowIngestionMetersTotals in project druid by druid-io.

From the class KafkaIndexTaskTest, method testRunAfterDataInsertedLiveReport:

@Test(timeout = 60_000L)
public void testRunAfterDataInsertedLiveReport() throws Exception {
    // Insert data into Kafka before the task starts reading.
    insertData();
    final KafkaIndexTask task = createTask(
        null,
        new KafkaIndexTaskIOConfig(
            0,
            "sequence0",
            new SeekableStreamStartSequenceNumbers<>(topic, ImmutableMap.of(0, 2L), ImmutableSet.of()),
            new SeekableStreamEndSequenceNumbers<>(topic, ImmutableMap.of(0, 12L)),
            kafkaServer.consumerProperties(),
            KafkaSupervisorIOConfig.DEFAULT_POLL_TIMEOUT_MILLIS,
            true,
            null,
            null,
            INPUT_FORMAT
        )
    );
    final ListenableFuture<TaskStatus> future = runTask(task);
    SeekableStreamIndexTaskRunner runner = task.getRunner();
    // Poll until the runner reaches PUBLISHING; the @Test timeout bounds the wait.
    while (runner.getStatus() != Status.PUBLISHING) {
        Thread.sleep(1000);
    }
    // Snapshot the live row stats before resuming the runner.
    Map rowStats = runner.doGetRowStats();
    Map totals = (Map) rowStats.get("totals");
    RowIngestionMetersTotals buildSegments = (RowIngestionMetersTotals) totals.get("buildSegments");
    Map movingAverages = (Map) rowStats.get("movingAverages");
    Map buildSegmentsAverages = (Map) movingAverages.get("buildSegments");
    HashMap avg1Min = (HashMap) buildSegmentsAverages.get("1m");
    HashMap avg5Min = (HashMap) buildSegmentsAverages.get("5m");
    HashMap avg15Min = (HashMap) buildSegmentsAverages.get("15m");
    runner.resume();
    // The live totals must match the runner's meters, and the moving
    // averages should still be zero at this point.
    Assert.assertEquals(buildSegments.getProcessed(), task.getRunner().getRowIngestionMeters().getProcessed());
    Assert.assertEquals(buildSegments.getUnparseable(), task.getRunner().getRowIngestionMeters().getUnparseable());
    Assert.assertEquals(buildSegments.getThrownAway(), task.getRunner().getRowIngestionMeters().getThrownAway());
    Assert.assertEquals(0.0, avg1Min.get("processed"));
    Assert.assertEquals(0.0, avg5Min.get("processed"));
    Assert.assertEquals(0.0, avg15Min.get("processed"));
    // Wait for the task to exit and verify it succeeded.
    Assert.assertEquals(TaskState.SUCCESS, future.get().getStatusCode());
}
Also used : HashMap (java.util.HashMap) LinkedHashMap (java.util.LinkedHashMap) Map (java.util.Map) TreeMap (java.util.TreeMap) ImmutableMap (com.google.common.collect.ImmutableMap) SeekableStreamStartSequenceNumbers (org.apache.druid.indexing.seekablestream.SeekableStreamStartSequenceNumbers) SeekableStreamEndSequenceNumbers (org.apache.druid.indexing.seekablestream.SeekableStreamEndSequenceNumbers) SeekableStreamIndexTaskRunner (org.apache.druid.indexing.seekablestream.SeekableStreamIndexTaskRunner) RowIngestionMetersTotals (org.apache.druid.segment.incremental.RowIngestionMetersTotals) TaskStatus (org.apache.druid.indexer.TaskStatus) Test (org.junit.Test) IndexTaskTest (org.apache.druid.indexing.common.task.IndexTaskTest)
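
The polling loop above is unbounded and relies on the @Test timeout to catch a task that never reaches PUBLISHING. A bounded wait, sketched below, fails faster with a clearer message. The helper is hypothetical (it is not part of KafkaIndexTaskTest); it assumes Status is the SeekableStreamIndexTaskRunner.Status enum the test references and reuses the raw runner type from the snippet.

// Hypothetical helper, not in the Druid source: poll a runner until it
// reaches the expected status, or fail with a descriptive error once the
// deadline passes.
private static void awaitStatus(
    SeekableStreamIndexTaskRunner runner,
    SeekableStreamIndexTaskRunner.Status expected,
    long timeoutMillis
) throws InterruptedException {
    final long deadline = System.currentTimeMillis() + timeoutMillis;
    while (runner.getStatus() != expected) {
        if (System.currentTimeMillis() >= deadline) {
            throw new AssertionError("runner did not reach " + expected + " within " + timeoutMillis + " ms");
        }
        Thread.sleep(100);
    }
}

With this in place, the loop in the test body could read awaitStatus(runner, Status.PUBLISHING, 30_000L); with the 30-second bound chosen to stay well inside the 60-second test timeout.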

Aggregations

RowIngestionMetersTotals (org.apache.druid.segment.incremental.RowIngestionMetersTotals) 7
HashMap (java.util.HashMap) 3
ParseExceptionReport (org.apache.druid.segment.incremental.ParseExceptionReport) 3
Test (org.junit.Test) 3
ImmutableMap (com.google.common.collect.ImmutableMap) 2
IOException (java.io.IOException) 2
Map (java.util.Map) 2
TreeMap (java.util.TreeMap) 2
Closeable (java.io.Closeable) 1
ArrayList (java.util.ArrayList) 1
LinkedHashMap (java.util.LinkedHashMap) 1
List (java.util.List) 1
TaskStatus (org.apache.druid.indexer.TaskStatus) 1
IngestionStatsAndErrorsTaskReport (org.apache.druid.indexing.common.IngestionStatsAndErrorsTaskReport) 1
IngestionStatsAndErrorsTaskReportData (org.apache.druid.indexing.common.IngestionStatsAndErrorsTaskReportData) 1
TaskReport (org.apache.druid.indexing.common.TaskReport) 1
IndexTaskTest (org.apache.druid.indexing.common.task.IndexTaskTest) 1
MaxAllowedLocksExceededException (org.apache.druid.indexing.common.task.batch.MaxAllowedLocksExceededException) 1
SeekableStreamEndSequenceNumbers (org.apache.druid.indexing.seekablestream.SeekableStreamEndSequenceNumbers) 1
SeekableStreamIndexTaskRunner (org.apache.druid.indexing.seekablestream.SeekableStreamIndexTaskRunner) 1