
Example 11 with InputRow

Use of io.druid.data.input.InputRow in project druid by druid-io.

From the class RealtimePlumberSchoolTest, method testPersistFails, which verifies that a commit failure during persist is counted in the plumber's failedPersists metric:

@Test(timeout = 60000)
public void testPersistFails() throws Exception {
    // Register a sink covering the first hour so the plumber has something to persist.
    plumber.getSinks().put(0L, new Sink(new Interval(0, TimeUnit.HOURS.toMillis(1)), schema, tuningConfig.getShardSpec(), new DateTime("2014-12-01T12:34:56.789").toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions()));
    plumber.startJob();
    // Stub a minimal InputRow: only the timestamp and the (empty) dimension list are read.
    final InputRow row = EasyMock.createNiceMock(InputRow.class);
    EasyMock.expect(row.getTimestampFromEpoch()).andReturn(0L);
    EasyMock.expect(row.getDimensions()).andReturn(new ArrayList<String>());
    EasyMock.replay(row);
    plumber.add(row, Suppliers.ofInstance(Committers.nil()));
    final CountDownLatch doneSignal = new CountDownLatch(1);
    // Persist with a committer whose commit runnable throws, simulating a failed persist.
    plumber.persist(Committers.supplierFromRunnable(new Runnable() {

        @Override
        public void run() {
            doneSignal.countDown();
            throw new RuntimeException();
        }
    }).get());
    doneSignal.await();
    // The exception may need time to propagate to the metrics.
    while (metrics.failedPersists() < 1) {
        Thread.sleep(100);
    }
    Assert.assertEquals(1, metrics.failedPersists());
}
Also used: InputRow (io.druid.data.input.InputRow), CountDownLatch (java.util.concurrent.CountDownLatch), DateTime (org.joda.time.DateTime), Interval (org.joda.time.Interval), FireDepartmentTest (io.druid.segment.realtime.FireDepartmentTest), Test (org.junit.Test)
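
If the EasyMock stub is not essential, an equivalent row can be built with the concrete MapBasedInputRow class. The snippet below is a sketch, not part of the test; it assumes that only the epoch timestamp and the (empty) dimension list are read by the plumber in this scenario, and the variable name directRow is illustrative.

// Sketch only: the same minimal row without EasyMock, using
// io.druid.data.input.MapBasedInputRow and Guava's ImmutableMap.
final InputRow directRow = new MapBasedInputRow(
        0L,                               // matches the mocked getTimestampFromEpoch()
        new ArrayList<String>(),          // matches the mocked (empty) getDimensions()
        ImmutableMap.<String, Object>of() // no metrics are read in this test
);
plumber.add(directRow, Suppliers.ofInstance(Committers.nil()));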

Example 12 with InputRow

Use of io.druid.data.input.InputRow in project druid by druid-io.

From the class FiniteAppenderatorDriverTest, method testMaxRowsPerSegment, which verifies that the driver starts a new segment once MAX_ROWS_PER_SEGMENT rows have been added:

@Test
public void testMaxRowsPerSegment() throws Exception {
    final int numSegments = 3;
    final TestCommitterSupplier<Integer> committerSupplier = new TestCommitterSupplier<>();
    Assert.assertNull(driver.startJob());
    // Add exactly enough rows to fill numSegments segments; the driver is expected to
    // roll over to a new segment each time MAX_ROWS_PER_SEGMENT rows have been added.
    for (int i = 0; i < numSegments * MAX_ROWS_PER_SEGMENT; i++) {
        committerSupplier.setMetadata(i + 1);
        InputRow row = new MapBasedInputRow(new DateTime("2000T01"), ImmutableList.of("dim2"), ImmutableMap.<String, Object>of("dim2", String.format("bar-%d", i), "met1", 2.0));
        Assert.assertNotNull(driver.add(row, "dummy", committerSupplier));
    }
    final SegmentsAndMetadata segmentsAndMetadata = driver.finish(makeOkPublisher(), committerSupplier.get());
    Assert.assertEquals(numSegments, segmentsAndMetadata.getSegments().size());
    // The committed metadata should reflect the total number of rows added.
    Assert.assertEquals(numSegments * MAX_ROWS_PER_SEGMENT, segmentsAndMetadata.getCommitMetadata());
}
Also used: AtomicInteger (java.util.concurrent.atomic.AtomicInteger), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), InputRow (io.druid.data.input.InputRow), DateTime (org.joda.time.DateTime), Test (org.junit.Test)
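
The TestCommitterSupplier helper is defined elsewhere in FiniteAppenderatorDriverTest and is not shown here. The sketch below illustrates what such a helper could look like, assuming the driver accepts a Guava Supplier<Committer> and that io.druid.data.input.Committer exposes getMetadata() and run(); it is not necessarily the project's exact implementation.

// Sketch of a committer supplier that hands out Committers snapshotting the most
// recently set metadata value. Assumed imports: com.google.common.base.Supplier,
// io.druid.data.input.Committer, java.util.concurrent.atomic.AtomicReference.
static class TestCommitterSupplier<T> implements Supplier<Committer>
{
    private final AtomicReference<T> metadata = new AtomicReference<>();

    public void setMetadata(T newMetadata)
    {
        metadata.set(newMetadata);
    }

    @Override
    public Committer get()
    {
        // Capture the metadata at the time the committer is created, so a later
        // setMetadata() call does not affect an already-issued committer.
        final T snapshot = metadata.get();
        return new Committer()
        {
            @Override
            public Object getMetadata()
            {
                return snapshot;
            }

            @Override
            public void run()
            {
                // nothing to commit in the test
            }
        };
    }
}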

Example 13 with InputRow

Use of io.druid.data.input.InputRow in project druid by druid-io.

From the class CombiningFirehoseFactoryTest, method testCombiningfirehose, which verifies that CombiningFirehoseFactory yields the rows of its delegate firehoses in sequence:

@Test
public void testCombiningfirehose() throws IOException {
    // Two delegate firehoses holding rows with timestamps/metric values 1-2 and 3-5.
    List<InputRow> list1 = Arrays.asList(makeRow(1, 1), makeRow(2, 2));
    List<InputRow> list2 = Arrays.asList(makeRow(3, 3), makeRow(4, 4), makeRow(5, 5));
    FirehoseFactory combiningFactory = new CombiningFirehoseFactory(Arrays.<FirehoseFactory>asList(new ListFirehoseFactory(list1), new ListFirehoseFactory(list2)));
    final Firehose firehose = combiningFactory.connect(null);
    // The combined firehose should yield all five rows in order, then report no more rows.
    for (int i = 1; i < 6; i++) {
        Assert.assertTrue(firehose.hasMore());
        final InputRow inputRow = firehose.nextRow();
        Assert.assertEquals(i, inputRow.getTimestampFromEpoch());
        Assert.assertEquals(i, inputRow.getFloatMetric("test"), 0);
    }
    Assert.assertFalse(firehose.hasMore());
}
Also used: CombiningFirehoseFactory (io.druid.segment.realtime.firehose.CombiningFirehoseFactory), FirehoseFactory (io.druid.data.input.FirehoseFactory), Firehose (io.druid.data.input.Firehose), InputRow (io.druid.data.input.InputRow), Test (org.junit.Test)
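
The makeRow helper used above is defined elsewhere in CombiningFirehoseFactoryTest. A minimal sketch of such a helper, assuming a MapBasedInputRow carrying a single metric named "test" is enough to satisfy the assertions (the dimension name "dim1" is arbitrary and the real helper may differ):

// Sketch only: builds a row whose getTimestampFromEpoch() returns the given
// timestamp and whose getFloatMetric("test") returns the given value.
private InputRow makeRow(final long timestamp, final float metricValue)
{
    return new MapBasedInputRow(
            timestamp,
            ImmutableList.of("dim1"),
            ImmutableMap.<String, Object>of("test", metricValue)
    );
}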

Example 14 with InputRow

Use of io.druid.data.input.InputRow in project druid by druid-io.

From the class ReplayableFirehoseFactoryTest, method testReplayableFirehoseWithConnectRetries, which verifies that the factory retries when the delegate factory's connect() throws an IOException:

@Test
public void testReplayableFirehoseWithConnectRetries() throws Exception {
    final boolean[] hasMore = { true };
    // The first connect attempt on the delegate factory fails with an IOException;
    // the replayable factory is expected to retry and succeed on the second attempt.
    expect(delegateFactory.connect(parser)).andThrow(new IOException()).andReturn(delegateFirehose);
    expect(delegateFirehose.hasMore()).andAnswer(new IAnswer<Boolean>() {

        @Override
        public Boolean answer() throws Throwable {
            return hasMore[0];
        }
    }).anyTimes();
    // Serve three rows; the final answer flips hasMore so the delegate firehose ends.
    expect(delegateFirehose.nextRow()).andReturn(testRows.get(0)).andReturn(testRows.get(1)).andAnswer(new IAnswer<InputRow>() {

        @Override
        public InputRow answer() throws Throwable {
            hasMore[0] = false;
            return testRows.get(2);
        }
    });
    delegateFirehose.close();
    replayAll();
    // Despite the failed first connect, every row should be read back intact.
    List<InputRow> rows = Lists.newArrayList();
    try (Firehose firehose = replayableFirehoseFactory.connect(parser)) {
        while (firehose.hasMore()) {
            rows.add(firehose.nextRow());
        }
    }
    Assert.assertEquals(testRows, rows);
    verifyAll();
}
Also used: IAnswer (org.easymock.IAnswer), Firehose (io.druid.data.input.Firehose), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), InputRow (io.druid.data.input.InputRow), IOException (java.io.IOException), Test (org.junit.Test)
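
This test and the next one both read from a testRows fixture created in the test's setup, which is not shown in these snippets. A sketch of what such a fixture might look like, assuming three MapBasedInputRow instances (the dimension names and values here are purely illustrative):

// Sketch only: a three-row fixture. Note that Assert.assertEquals(testRows, rows)
// in the tests relies on value-based equality of the rows after replay.
private final List<InputRow> testRows = Lists.<InputRow>newArrayList(
        new MapBasedInputRow(DateTime.now(), Lists.newArrayList("dim1", "dim2"),
                ImmutableMap.<String, Object>of("dim1", "val1", "dim2", "val2", "met1", 1)),
        new MapBasedInputRow(DateTime.now(), Lists.newArrayList("dim1", "dim2"),
                ImmutableMap.<String, Object>of("dim1", "val3", "dim2", "val2", "met1", 2)),
        new MapBasedInputRow(DateTime.now(), Lists.newArrayList("dim1", "dim2"),
                ImmutableMap.<String, Object>of("dim1", "val5", "dim2", "val2", "met1", 3))
);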

Example 15 with InputRow

Use of io.druid.data.input.InputRow in project druid by druid-io.

From the class ReplayableFirehoseFactoryTest, method testReplayableFirehoseWithNextRowRetries, which verifies that rows are re-read from a fresh connection when the delegate's nextRow() throws:

@Test
public void testReplayableFirehoseWithNextRowRetries() throws Exception {
    final boolean[] hasMore = { true };
    // The delegate is connected twice: once for the initial read, which fails mid-stream,
    // and once more when the factory retries after nextRow() throws.
    expect(delegateFactory.connect(parser)).andReturn(delegateFirehose).times(2);
    expect(delegateFirehose.hasMore()).andAnswer(new IAnswer<Boolean>() {

        @Override
        public Boolean answer() throws Throwable {
            return hasMore[0];
        }
    }).anyTimes();
    // nextRow() throws on the second call; after the retry it yields all three rows.
    expect(delegateFirehose.nextRow()).andReturn(testRows.get(0)).andThrow(new RuntimeException()).andReturn(testRows.get(0)).andReturn(testRows.get(1)).andAnswer(new IAnswer<InputRow>() {

        @Override
        public InputRow answer() throws Throwable {
            hasMore[0] = false;
            return testRows.get(2);
        }
    });
    delegateFirehose.close();
    // One close per connect.
    expectLastCall().times(2);
    replayAll();
    List<InputRow> rows = Lists.newArrayList();
    try (Firehose firehose = replayableFirehoseFactory.connect(parser)) {
        while (firehose.hasMore()) {
            rows.add(firehose.nextRow());
        }
    }
    Assert.assertEquals(testRows, rows);
    verifyAll();
}
Also used: IAnswer (org.easymock.IAnswer), Firehose (io.druid.data.input.Firehose), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), InputRow (io.druid.data.input.InputRow), Test (org.junit.Test)

Aggregations

InputRow (io.druid.data.input.InputRow): 81
Test (org.junit.Test): 35
MapBasedInputRow (io.druid.data.input.MapBasedInputRow): 24
BenchmarkDataGenerator (io.druid.benchmark.datagen.BenchmarkDataGenerator): 22
File (java.io.File): 18
Setup (org.openjdk.jmh.annotations.Setup): 15
HyperUniquesSerde (io.druid.query.aggregation.hyperloglog.HyperUniquesSerde): 14
Firehose (io.druid.data.input.Firehose): 12
OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex): 12
IndexSpec (io.druid.segment.IndexSpec): 11
ArrayList (java.util.ArrayList): 11
IncrementalIndex (io.druid.segment.incremental.IncrementalIndex): 10
DateTime (org.joda.time.DateTime): 10
QueryableIndex (io.druid.segment.QueryableIndex): 9
IOException (java.io.IOException): 9
BenchmarkColumnSchema (io.druid.benchmark.datagen.BenchmarkColumnSchema): 8
Interval (org.joda.time.Interval): 8
ParseException (io.druid.java.util.common.parsers.ParseException): 7
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 6
DataSegment (io.druid.timeline.DataSegment): 5