
Example 1 with SimpleRowIngestionMeters

Use of org.apache.druid.segment.incremental.SimpleRowIngestionMeters in project druid by druid-io.

From the class ClosedSegmentsSinksBatchAppenderatorTest, method testVerifyRowIngestionMetrics.

@Test
public void testVerifyRowIngestionMetrics() throws Exception {
    final RowIngestionMeters rowIngestionMeters = new SimpleRowIngestionMeters();
    try (final ClosedSegmensSinksBatchAppenderatorTester tester = new ClosedSegmensSinksBatchAppenderatorTester(5, 10000L, null, false, rowIngestionMeters)) {
        final Appenderator appenderator = tester.getAppenderator();
        appenderator.startJob();
        appenderator.add(IDENTIFIERS.get(0), createInputRow("2000", "foo", "invalid_met"), null);
        appenderator.add(IDENTIFIERS.get(0), createInputRow("2000", "foo", 1), null);
        Assert.assertEquals(1, rowIngestionMeters.getProcessed());
        Assert.assertEquals(1, rowIngestionMeters.getProcessedWithError());
        Assert.assertEquals(0, rowIngestionMeters.getUnparseable());
        Assert.assertEquals(0, rowIngestionMeters.getThrownAway());
    }
}
Also used: SimpleRowIngestionMeters (org.apache.druid.segment.incremental.SimpleRowIngestionMeters), RowIngestionMeters (org.apache.druid.segment.incremental.RowIngestionMeters), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
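
The createInputRow helper used in this test is defined elsewhere in the test class and is not part of the snippet. A minimal sketch of what an equivalent helper could look like is shown below; the dimension name "dim" and metric field name "met" are assumptions made for illustration, not taken from the snippet.

import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.java.util.common.DateTimes;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

// Hypothetical reconstruction of the createInputRow(ts, dim, met) helper, for illustration only.
// It builds a MapBasedInputRow with a single dimension ("dim") and a metric field ("met");
// a non-numeric metric value such as "invalid_met" is what makes a row count toward
// getProcessedWithError() in the assertions above.
class InputRowSketch {
    static InputRow createInputRow(String ts, String dimValue, Object metValue) {
        return new MapBasedInputRow(
            DateTimes.of(ts).getMillis(),
            ImmutableList.of("dim"),
            ImmutableMap.of("dim", dimValue, "met", metValue)
        );
    }
}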

Example 2 with SimpleRowIngestionMeters

Use of org.apache.druid.segment.incremental.SimpleRowIngestionMeters in project druid by druid-io.

From the class ClosedSegmentsSinksBatchAppenderatorTest, method testCloseContract.

@Test(timeout = 5000L)
public void testCloseContract() throws Exception {
    final RowIngestionMeters rowIngestionMeters = new SimpleRowIngestionMeters();
    try (final ClosedSegmensSinksBatchAppenderatorTester tester = new ClosedSegmensSinksBatchAppenderatorTester(1, 50000L, null, false, rowIngestionMeters)) {
        final Appenderator appenderator = tester.getAppenderator();
        appenderator.startJob();
        // each one of these adds will trigger a persist since maxRowsInMemory is set to one above
        appenderator.add(IDENTIFIERS.get(0), createInputRow("2000", "bar", 1), null);
        appenderator.add(IDENTIFIERS.get(0), createInputRow("2000", "bar2", 1), null);
        // push only a single segment
        ListenableFuture<SegmentsAndCommitMetadata> firstFuture = appenderator.push(Collections.singletonList(IDENTIFIERS.get(0)), null, false);
        // push remaining segments:
        appenderator.add(IDENTIFIERS.get(1), createInputRow("2000", "bar3", 1), null);
        ListenableFuture<SegmentsAndCommitMetadata> secondFuture = appenderator.push(Collections.singletonList(IDENTIFIERS.get(1)), null, false);
        // close should wait for all pushes and persists to end:
        appenderator.close();
        Assert.assertFalse(firstFuture.isCancelled());
        Assert.assertFalse(secondFuture.isCancelled());
        Assert.assertTrue(firstFuture.isDone());
        Assert.assertTrue(secondFuture.isDone());
        final SegmentsAndCommitMetadata segmentsAndCommitMetadataForFirstFuture = firstFuture.get();
        final SegmentsAndCommitMetadata segmentsAndCommitMetadataForSecondFuture = secondFuture.get();
        // all segments must have been pushed:
        Assert.assertEquals(1, segmentsAndCommitMetadataForFirstFuture.getSegments().size());
        Assert.assertEquals(1, segmentsAndCommitMetadataForSecondFuture.getSegments().size());
    }
}
Also used: SimpleRowIngestionMeters (org.apache.druid.segment.incremental.SimpleRowIngestionMeters), RowIngestionMeters (org.apache.druid.segment.incremental.RowIngestionMeters), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)

Example 3 with SimpleRowIngestionMeters

Use of org.apache.druid.segment.incremental.SimpleRowIngestionMeters in project druid by druid-io.

From the class ClosedSegmentsSinksBatchAppenderatorTest, method testMaxBytesInMemoryWithSkipBytesInMemoryOverheadCheckConfig.

@Test
public void testMaxBytesInMemoryWithSkipBytesInMemoryOverheadCheckConfig() throws Exception {
    try (final ClosedSegmensSinksBatchAppenderatorTester tester = new ClosedSegmensSinksBatchAppenderatorTester(100, 1024, null, true, new SimpleRowIngestionMeters(), true)) {
        final Appenderator appenderator = tester.getAppenderator();
        appenderator.startJob();
        appenderator.add(IDENTIFIERS.get(0), createInputRow("2000", "foo", 1), null);
        // expectedSizeInBytes =
        // 44(map overhead) + 28 (TimeAndDims overhead) + 56 (aggregator metrics) + 54 (dimsKeySize) =
        // 182 + 1 byte when null handling is enabled
        int nullHandlingOverhead = NullHandling.sqlCompatible() ? 1 : 0;
        Assert.assertEquals(182 + nullHandlingOverhead, ((BatchAppenderator) appenderator).getBytesInMemory(IDENTIFIERS.get(0)));
        appenderator.add(IDENTIFIERS.get(1), createInputRow("2000", "bar", 1), null);
        Assert.assertEquals(182 + nullHandlingOverhead, ((BatchAppenderator) appenderator).getBytesInMemory(IDENTIFIERS.get(1)));
        appenderator.close();
        Assert.assertEquals(0, ((BatchAppenderator) appenderator).getRowsInMemory());
    }
}
Also used: SimpleRowIngestionMeters (org.apache.druid.segment.incremental.SimpleRowIngestionMeters), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)

Example 4 with SimpleRowIngestionMeters

Use of org.apache.druid.segment.incremental.SimpleRowIngestionMeters in project druid by druid-io.

From the class StreamAppenderatorTest, method testVerifyRowIngestionMetrics.

@Test
public void testVerifyRowIngestionMetrics() throws Exception {
    final RowIngestionMeters rowIngestionMeters = new SimpleRowIngestionMeters();
    try (final StreamAppenderatorTester tester = new StreamAppenderatorTester(5, 10000L, null, false, rowIngestionMeters)) {
        final Appenderator appenderator = tester.getAppenderator();
        appenderator.startJob();
        appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", "invalid_met"), Committers.nilSupplier());
        appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), Committers.nilSupplier());
        Assert.assertEquals(1, rowIngestionMeters.getProcessed());
        Assert.assertEquals(1, rowIngestionMeters.getProcessedWithError());
        Assert.assertEquals(0, rowIngestionMeters.getUnparseable());
        Assert.assertEquals(0, rowIngestionMeters.getThrownAway());
    }
}
Also used: SimpleRowIngestionMeters (org.apache.druid.segment.incremental.SimpleRowIngestionMeters), RowIngestionMeters (org.apache.druid.segment.incremental.RowIngestionMeters), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)

Example 5 with SimpleRowIngestionMeters

Use of org.apache.druid.segment.incremental.SimpleRowIngestionMeters in project druid by druid-io.

From the class StreamAppenderatorTest, method testMaxBytesInMemoryInMultipleSinksWithSkipBytesInMemoryOverheadCheckConfig.

@Test
public void testMaxBytesInMemoryInMultipleSinksWithSkipBytesInMemoryOverheadCheckConfig() throws Exception {
    try (final StreamAppenderatorTester tester = new StreamAppenderatorTester(100, 1024, null, true, new SimpleRowIngestionMeters(), true)) {
        final Appenderator appenderator = tester.getAppenderator();
        final AtomicInteger eventCount = new AtomicInteger(0);
        final Supplier<Committer> committerSupplier = () -> {
            final Object metadata = ImmutableMap.of(eventCount, eventCount.get());
            return new Committer() {

                @Override
                public Object getMetadata() {
                    return metadata;
                }

                @Override
                public void run() {
                    // Do nothing
                }
            };
        };
        appenderator.startJob();
        appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), committerSupplier);
        // expectedSizeInBytes = 44(map overhead) + 28 (TimeAndDims overhead) + 56 (aggregator metrics) + 54 (dimsKeySize) = 182
        int nullHandlingOverhead = NullHandling.sqlCompatible() ? 1 : 0;
        Assert.assertEquals(182 + nullHandlingOverhead, ((StreamAppenderator) appenderator).getBytesCurrentlyInMemory());
        appenderator.add(IDENTIFIERS.get(1), ir("2000", "bar", 1), committerSupplier);
        Assert.assertEquals(364 + 2 * nullHandlingOverhead, ((StreamAppenderator) appenderator).getBytesCurrentlyInMemory());
        appenderator.close();
        Assert.assertEquals(0, ((StreamAppenderator) appenderator).getRowsInMemory());
    }
}
Also used: AtomicInteger (java.util.concurrent.atomic.AtomicInteger), SimpleRowIngestionMeters (org.apache.druid.segment.incremental.SimpleRowIngestionMeters), Committer (org.apache.druid.data.input.Committer), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)

Aggregations

SimpleRowIngestionMeters (org.apache.druid.segment.incremental.SimpleRowIngestionMeters): 11 uses
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 10 uses
Test (org.junit.Test): 10 uses
RowIngestionMeters (org.apache.druid.segment.incremental.RowIngestionMeters): 4 uses
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 3 uses
Committer (org.apache.druid.data.input.Committer): 3 uses
ParseExceptionHandler (org.apache.druid.segment.incremental.ParseExceptionHandler): 1 use
Before (org.junit.Before): 1 use
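
The four counters asserted throughout these examples (processed, processedWithError, unparseable, thrownAway) can also be driven directly on a SimpleRowIngestionMeters instance, without an Appenderator. The sketch below is illustrative only: the increment* methods are assumed to be the counterparts of the getters shown above on the RowIngestionMeters interface, since the snippets themselves only call the getters.

import org.apache.druid.segment.incremental.RowIngestionMeters;
import org.apache.druid.segment.incremental.SimpleRowIngestionMeters;

// Minimal sketch: exercise the meters by hand instead of through an Appenderator.
// Assumes the increment* methods exist on RowIngestionMeters (they are not shown in the snippets above).
public class RowIngestionMetersSketch {
    public static void main(String[] args) {
        final RowIngestionMeters meters = new SimpleRowIngestionMeters();

        meters.incrementProcessed();          // a row ingested cleanly
        meters.incrementProcessedWithError(); // a row ingested despite a bad metric value
        meters.incrementUnparseable();        // a row that could not be parsed at all
        meters.incrementThrownAway();         // a row filtered out, e.g. outside the target interval

        // Same getters the tests above assert against.
        System.out.println(meters.getProcessed());          // 1
        System.out.println(meters.getProcessedWithError()); // 1
        System.out.println(meters.getUnparseable());        // 1
        System.out.println(meters.getThrownAway());         // 1
    }
}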