Example usage of org.apache.druid.data.input.InputRow in the druid project by druid-io, taken from the class ClosedSegmentsSinksBatchAppenderatorDriverTest, method testSimple.
@Test
public void testSimple() throws Exception {
  // A fresh job has no previously-committed metadata, so startJob returns null.
  Assert.assertNull(driver.startJob(null));

  // Feed every fixture row into the driver under a single sequence name.
  for (InputRow row : ROWS) {
    final boolean added = driver.add(row, "dummy").isOk();
    Assert.assertTrue(added);
  }

  // The fixture rows span two hourly segments, both still open for appends.
  checkSegmentStates(2, SegmentState.APPENDING);

  // Pushing flushes both segments to deep storage and drops them from memory.
  driver.pushAllAndClear(TIMEOUT);
  checkSegmentStates(2, SegmentState.PUSHED_AND_DROPPED);

  // Publish everything that was pushed; no commit metadata was supplied.
  final SegmentsAndCommitMetadata published = driver
      .publishAll(null, null, makeOkPublisher(), Function.identity())
      .get(TIMEOUT, TimeUnit.MILLISECONDS);

  // Both hourly segments should appear, each as partition 0 of its interval.
  final ImmutableSet<SegmentIdWithShardSpec> expectedIdentifiers = ImmutableSet.of(
      new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)),
      new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0))
  );
  Assert.assertEquals(
      expectedIdentifiers,
      published.getSegments()
               .stream()
               .map(SegmentIdWithShardSpec::fromDataSegment)
               .collect(Collectors.toSet())
  );

  // No committer was used, so the published metadata must be null as well.
  Assert.assertNull(published.getCommitMetadata());
}
Example usage of org.apache.druid.data.input.InputRow in the druid project by druid-io, taken from the class StreamAppenderatorDriverTest, method testMaxRowsPerSegment.
@Test
public void testMaxRowsPerSegment() throws Exception {
  final int numSegments = 3;
  final TestCommitterSupplier<Integer> committerSupplier = new TestCommitterSupplier<>();

  // No prior commit metadata exists for a brand-new job.
  Assert.assertNull(driver.startJob(null));

  // Add exactly enough rows to fill numSegments segments, rolling the active
  // segment out whenever it exceeds the per-segment row limit.
  final int totalRows = numSegments * MAX_ROWS_PER_SEGMENT;
  for (int rowIndex = 0; rowIndex < totalRows; rowIndex++) {
    // Commit metadata tracks the count of rows added so far (1-based).
    committerSupplier.setMetadata(rowIndex + 1);

    final InputRow row = new MapBasedInputRow(
        DateTimes.of("2000T01"),
        ImmutableList.of("dim2"),
        ImmutableMap.of("dim2", StringUtils.format("bar-%d", rowIndex), "met1", 2.0)
    );

    final AppenderatorDriverAddResult addResult = driver.add(row, "dummy", committerSupplier, false, true);
    Assert.assertTrue(addResult.isOk());

    if (addResult.getNumRowsInSegment() > MAX_ROWS_PER_SEGMENT) {
      // The active segment is full; move it out so the next add opens a new one.
      driver.moveSegmentOut("dummy", ImmutableList.of(addResult.getSegmentIdentifier()));
    }
  }

  // Publish everything under the "dummy" sequence with the final metadata.
  final SegmentsAndCommitMetadata published = driver
      .publish(makeOkPublisher(), committerSupplier.get(), ImmutableList.of("dummy"))
      .get(PUBLISH_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);

  // Poll until the driver has dropped the published sequence from its registry.
  while (driver.getSegments().containsKey("dummy")) {
    Thread.sleep(100);
  }

  // Hand off and verify: one segment per batch, and the commit metadata
  // reflects the total number of rows added.
  final SegmentsAndCommitMetadata segmentsAndCommitMetadata = driver
      .registerHandoff(published)
      .get(HANDOFF_CONDITION_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);
  Assert.assertEquals(numSegments, segmentsAndCommitMetadata.getSegments().size());
  Assert.assertEquals(totalRows, segmentsAndCommitMetadata.getCommitMetadata());
}
Example usage of org.apache.druid.data.input.InputRow in the druid project by druid-io, taken from the class TransformerTest, method testTransformWithStringTransformOnDoubleColumnTransformAfterCasting.
@Test
public void testTransformWithStringTransformOnDoubleColumnTransformAfterCasting() {
  // strlen() is a string function, so the double value is cast to its string
  // form before the transform is applied.
  final TransformSpec spec = new TransformSpec(
      null,
      ImmutableList.of(new ExpressionTransform("dim", "strlen(dim)", TestExprMacroTable.INSTANCE))
  );
  final Transformer transformer = new Transformer(spec);

  final InputRow input = new MapBasedInputRow(
      DateTimes.nowUtc(),
      ImmutableList.of("dim"),
      ImmutableMap.of("dim", 200.5d)
  );
  final InputRow transformed = transformer.transform(input);

  Assert.assertNotNull(transformed);
  Assert.assertEquals(ImmutableList.of("dim"), transformed.getDimensions());
  // 200.5 stringifies to "200.5", which has length 5.
  Assert.assertEquals(5L, transformed.getRaw("dim"));
  // Transformation must leave the row timestamp untouched.
  Assert.assertEquals(input.getTimestamp(), transformed.getTimestamp());
}
Example usage of org.apache.druid.data.input.InputRow in the druid project by druid-io, taken from the class TransformerTest, method testTransformWithStringTransformOnLongColumnTransformAfterCasting.
@Test
public void testTransformWithStringTransformOnLongColumnTransformAfterCasting() {
  // strlen() forces the long value to be cast to a string before evaluation.
  final TransformSpec spec = new TransformSpec(
      null,
      ImmutableList.of(new ExpressionTransform("dim", "strlen(dim)", TestExprMacroTable.INSTANCE))
  );
  final Transformer transformer = new Transformer(spec);

  final InputRow input = new MapBasedInputRow(
      DateTimes.nowUtc(),
      ImmutableList.of("dim"),
      ImmutableMap.of("dim", 10L)
  );
  final InputRow transformed = transformer.transform(input);

  Assert.assertNotNull(transformed);
  Assert.assertEquals(ImmutableList.of("dim"), transformed.getDimensions());
  // 10 stringifies to "10", which has length 2.
  Assert.assertEquals(2L, transformed.getRaw("dim"));
  // Transformation must leave the row timestamp untouched.
  Assert.assertEquals(input.getTimestamp(), transformed.getTimestamp());
}
Example usage of org.apache.druid.data.input.InputRow in the druid project by druid-io, taken from the class TransformerTest, method testTransformWithSelectorFilterWithStringBooleanValueOnStringColumn.
@Test
public void testTransformWithSelectorFilterWithStringBooleanValueOnStringColumn() {
  // A selector filter on dim == "false": matching rows pass through unchanged,
  // non-matching rows are filtered out (transform returns null).
  final Transformer transformer = new Transformer(
      new TransformSpec(new SelectorDimFilter("dim", "false", null), null)
  );

  final InputRow matchingRow = new MapBasedInputRow(
      DateTimes.nowUtc(),
      ImmutableList.of("dim"),
      ImmutableMap.of("dim", "false")
  );
  Assert.assertEquals(matchingRow, transformer.transform(matchingRow));

  final InputRow nonMatchingRow = new MapBasedInputRow(
      DateTimes.nowUtc(),
      ImmutableList.of("dim"),
      ImmutableMap.of("dim", "true")
  );
  Assert.assertNull(transformer.transform(nonMatchingRow));
}
Aggregations