Search in sources:

Example 21 with InputRow

Use of org.apache.druid.data.input.InputRow in project druid by druid-io.

From the class ClosedSegmentsSinksBatchAppenderatorDriverTest, method testSimple:

@Test
public void testSimple() throws Exception {
    // Starting a fresh job should yield no previously-persisted commit metadata.
    Assert.assertNull(driver.startJob(null));
    // Every fixture row must be accepted into the "dummy" sequence.
    for (InputRow row : ROWS) {
        Assert.assertTrue(driver.add(row, "dummy").isOk());
    }
    // Two segments should be open and appending (the publish assertion below
    // shows the fixture rows land in two hourly intervals: 2000T00 and 2000T01).
    checkSegmentStates(2, SegmentState.APPENDING);
    // Pushing persists the in-memory data; for closed-sinks batch ingestion the
    // sinks are then dropped, hence PUSHED_AND_DROPPED rather than just PUSHED.
    driver.pushAllAndClear(TIMEOUT);
    checkSegmentStates(2, SegmentState.PUSHED_AND_DROPPED);
    // Publish everything and verify exactly the two expected hourly segments
    // (both with partition 0 of a single-partition NumberedShardSpec) came out.
    final SegmentsAndCommitMetadata published = driver.publishAll(null, null, makeOkPublisher(), Function.identity()).get(TIMEOUT, TimeUnit.MILLISECONDS);
    Assert.assertEquals(ImmutableSet.of(new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)), new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0))), published.getSegments().stream().map(SegmentIdWithShardSpec::fromDataSegment).collect(Collectors.toSet()));
    // No committer was supplied (nulls above), so no commit metadata is published.
    Assert.assertNull(published.getCommitMetadata());
}
Also used : MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) InputRow(org.apache.druid.data.input.InputRow) NumberedShardSpec(org.apache.druid.timeline.partition.NumberedShardSpec) Test(org.junit.Test)

Example 22 with InputRow

Use of org.apache.druid.data.input.InputRow in project druid by druid-io.

From the class StreamAppenderatorDriverTest, method testMaxRowsPerSegment:

@Test
public void testMaxRowsPerSegment() throws Exception {
    final int numSegments = 3;
    final TestCommitterSupplier<Integer> committerSupplier = new TestCommitterSupplier<>();
    // A fresh job has no prior commit metadata to restore.
    Assert.assertNull(driver.startJob(null));
    // Add exactly enough rows (all in the same hour, 2000T01) to fill numSegments segments.
    for (int i = 0; i < numSegments * MAX_ROWS_PER_SEGMENT; i++) {
        // The commit metadata tracks the count of rows added so far (i + 1).
        committerSupplier.setMetadata(i + 1);
        InputRow row = new MapBasedInputRow(DateTimes.of("2000T01"), ImmutableList.of("dim2"), ImmutableMap.of("dim2", StringUtils.format("bar-%d", i), "met1", 2.0));
        final AppenderatorDriverAddResult addResult = driver.add(row, "dummy", committerSupplier, false, true);
        Assert.assertTrue(addResult.isOk());
        // Once a segment exceeds the per-segment row cap, rotate it out of the
        // active set so subsequent rows open a new segment for the same interval.
        if (addResult.getNumRowsInSegment() > MAX_ROWS_PER_SEGMENT) {
            driver.moveSegmentOut("dummy", ImmutableList.of(addResult.getSegmentIdentifier()));
        }
    }
    final SegmentsAndCommitMetadata published = driver.publish(makeOkPublisher(), committerSupplier.get(), ImmutableList.of("dummy")).get(PUBLISH_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);
    // Poll until the driver has cleared the published segments from the "dummy"
    // sequence; publishing completes asynchronously with respect to this map.
    while (driver.getSegments().containsKey("dummy")) {
        Thread.sleep(100);
    }
    final SegmentsAndCommitMetadata segmentsAndCommitMetadata = driver.registerHandoff(published).get(HANDOFF_CONDITION_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);
    // The row cap must have produced exactly numSegments segments, and the final
    // commit metadata must equal the total number of rows added.
    Assert.assertEquals(numSegments, segmentsAndCommitMetadata.getSegments().size());
    Assert.assertEquals(numSegments * MAX_ROWS_PER_SEGMENT, segmentsAndCommitMetadata.getCommitMetadata());
}
Also used : AtomicInteger(java.util.concurrent.atomic.AtomicInteger) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) InputRow(org.apache.druid.data.input.InputRow) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) Test(org.junit.Test)

Example 23 with InputRow

Use of org.apache.druid.data.input.InputRow in project druid by druid-io.

From the class TransformerTest, method testTransformWithStringTransformOnDoubleColumnTransformAfterCasting:

@Test
public void testTransformWithStringTransformOnDoubleColumnTransformAfterCasting() {
    // strlen() is a string function, so the double value of "dim" must be cast
    // to its string form before its length is measured.
    final ExpressionTransform lengthOfDim = new ExpressionTransform("dim", "strlen(dim)", TestExprMacroTable.INSTANCE);
    final Transformer transformer = new Transformer(new TransformSpec(null, ImmutableList.of(lengthOfDim)));

    final InputRow input = new MapBasedInputRow(DateTimes.nowUtc(), ImmutableList.of("dim"), ImmutableMap.of("dim", 200.5d));
    final InputRow transformed = transformer.transform(input);
    Assert.assertNotNull(transformed);

    // 200.5 stringifies to "200.5" — five characters — while the dimension list
    // and the timestamp pass through untouched.
    Assert.assertEquals(ImmutableList.of("dim"), transformed.getDimensions());
    Assert.assertEquals(5L, transformed.getRaw("dim"));
    Assert.assertEquals(input.getTimestamp(), transformed.getTimestamp());
}
Also used : MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) InputRow(org.apache.druid.data.input.InputRow) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)

Example 24 with InputRow

Use of org.apache.druid.data.input.InputRow in project druid by druid-io.

From the class TransformerTest, method testTransformWithStringTransformOnLongColumnTransformAfterCasting:

@Test
public void testTransformWithStringTransformOnLongColumnTransformAfterCasting() {
    // Applying the string function strlen() to a long column: the value is cast
    // to its string form first, then measured.
    final TransformSpec spec = new TransformSpec(
        null,
        ImmutableList.of(new ExpressionTransform("dim", "strlen(dim)", TestExprMacroTable.INSTANCE))
    );
    final Transformer transformer = new Transformer(spec);

    final InputRow original = new MapBasedInputRow(DateTimes.nowUtc(), ImmutableList.of("dim"), ImmutableMap.of("dim", 10L));
    final InputRow result = transformer.transform(original);
    Assert.assertNotNull(result);

    // 10L stringifies to "10", whose length is 2; dimensions and timestamp are preserved.
    Assert.assertEquals(ImmutableList.of("dim"), result.getDimensions());
    Assert.assertEquals(2L, result.getRaw("dim"));
    Assert.assertEquals(original.getTimestamp(), result.getTimestamp());
}
Also used : MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) InputRow(org.apache.druid.data.input.InputRow) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)

Example 25 with InputRow

Use of org.apache.druid.data.input.InputRow in project druid by druid-io.

From the class TransformerTest, method testTransformWithSelectorFilterWithStringBooleanValueOnStringColumn:

@Test
public void testTransformWithSelectorFilterWithStringBooleanValueOnStringColumn() {
    // A selector filter keeping only rows whose "dim" equals the literal string "false".
    final SelectorDimFilter keepFalse = new SelectorDimFilter("dim", "false", null);
    final Transformer transformer = new Transformer(new TransformSpec(keepFalse, null));

    // A row matching the filter passes through unchanged.
    final InputRow matching = new MapBasedInputRow(DateTimes.nowUtc(), ImmutableList.of("dim"), ImmutableMap.of("dim", "false"));
    Assert.assertEquals(matching, transformer.transform(matching));

    // A non-matching row is filtered out entirely (transform returns null).
    final InputRow rejected = new MapBasedInputRow(DateTimes.nowUtc(), ImmutableList.of("dim"), ImmutableMap.of("dim", "true"));
    Assert.assertNull(transformer.transform(rejected));
}
Also used : SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) InputRow(org.apache.druid.data.input.InputRow) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)

Aggregations

InputRow (org.apache.druid.data.input.InputRow)266 Test (org.junit.Test)193 MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow)57 InputEntityReader (org.apache.druid.data.input.InputEntityReader)54 InputRowSchema (org.apache.druid.data.input.InputRowSchema)52 DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec)52 TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec)49 ArrayList (java.util.ArrayList)46 List (java.util.List)37 ImmutableList (com.google.common.collect.ImmutableList)33 JSONPathSpec (org.apache.druid.java.util.common.parsers.JSONPathSpec)33 InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest)33 InputRowListPlusRawValues (org.apache.druid.data.input.InputRowListPlusRawValues)29 File (java.io.File)27 HadoopDruidIndexerConfig (org.apache.druid.indexer.HadoopDruidIndexerConfig)27 JSONPathFieldSpec (org.apache.druid.java.util.common.parsers.JSONPathFieldSpec)27 DateTime (org.joda.time.DateTime)24 Map (java.util.Map)23 IOException (java.io.IOException)18 Interval (org.joda.time.Interval)18