Example usage of org.apache.druid.data.input.InputRow in the druid project (druid-io):
class DatasourceRecordReaderTest, method testSanity.
@Test
public void testSanity() throws Exception
{
  // Path to the pre-built test segment bundled with the test resources.
  final String segmentZipPath =
      this.getClass().getClassLoader().getResource("test-segment/index.zip").getPath();

  // Descriptor for a local-storage segment covering a single day, with one
  // dimension ("host") and two metrics.
  final DataSegment segment = new DataSegment(
      "testds",
      Intervals.of("2014-10-22T00:00:00.000Z/2014-10-23T00:00:00.000Z"),
      "2015-07-15T22:02:40.171Z",
      ImmutableMap.of("type", "local", "path", segmentZipPath),
      ImmutableList.of("host"),
      ImmutableList.of("visited_sum", "unique_hosts"),
      new NumberedShardSpec(0, 1),
      9,
      4096
  );

  final InputSplit split =
      new DatasourceInputSplit(Collections.singletonList(WindowedDataSegment.of(segment)), null);

  final Configuration config = new Configuration();
  DatasourceInputFormat.addDataSource(
      config,
      new DatasourceIngestionSpec(
          segment.getDataSource(),
          segment.getInterval(),
          null,
          null,
          null,
          segment.getDimensions(),
          segment.getMetrics(),
          false,
          null
      ),
      Collections.emptyList(),
      0
  );

  // Mock the Hadoop task context so the reader picks up our configuration.
  final TaskAttemptContext context = EasyMock.createNiceMock(TaskAttemptContext.class);
  EasyMock.expect(context.getConfiguration()).andReturn(config).anyTimes();
  EasyMock.replay(context);

  final DatasourceRecordReader reader = new DatasourceRecordReader();
  reader.initialize(split, context);

  // Progress must report 0 before any rows have been read.
  Assert.assertEquals(0, reader.getProgress(), 0.0001);

  final List<InputRow> rows = new ArrayList<>();
  while (reader.nextKeyValue()) {
    rows.add(reader.getCurrentValue());
  }
  verifyRows(rows);

  // After draining the reader, progress must report completion.
  Assert.assertEquals(1, reader.getProgress(), 0.0001);
  reader.close();
}
Example usage of org.apache.druid.data.input.InputRow in the druid project (druid-io):
class FilteringCloseableInputRowIterator, method next.
/**
 * Returns the row buffered by {@code hasNext()} and clears the buffer so the
 * next call advances the underlying iterator.
 *
 * @throws NoSuchElementException if no further row is available
 */
@Override
public InputRow next()
{
  if (hasNext()) {
    final InputRow current = next;
    next = null;
    return current;
  }
  throw new NoSuchElementException();
}
Example usage of org.apache.druid.data.input.InputRow in the druid project (druid-io):
class HashPartitionCachingLocalSegmentAllocatorTest, method getSequenceName.
@Test
public void getSequenceName()
{
  // getSequenceName_forIntervalAndRow_shouldUseISOFormatAndPartitionNumForRow
  final InputRow row = createInputRow();
  final String expected = StringUtils.format("%s_%s_%d", TASKID, INTERVAL, PARTITION_NUM);
  final String actual = sequenceNameFunction.getSequenceName(INTERVAL, row);
  Assert.assertEquals(expected, actual);
}
Example usage of org.apache.druid.data.input.InputRow in the druid project (druid-io):
class HashPartitionCachingLocalSegmentAllocatorTest, method allocatesCorrectShardSpec.
@Test
public void allocatesCorrectShardSpec() throws IOException
{
  final InputRow row = createInputRow();
  final String sequenceName = sequenceNameFunction.getSequenceName(INTERVAL, row);
  final SegmentIdWithShardSpec allocated = target.allocate(row, sequenceName, null, false);

  // The allocated segment id must match the expected datasource/interval/version/partition.
  Assert.assertEquals(
      SegmentId.of(DATASOURCE, INTERVAL, VERSION, PARTITION_NUM),
      allocated.asSegmentId()
  );

  final HashBucketShardSpec shardSpec = (HashBucketShardSpec) allocated.getShardSpec();
  Assert.assertEquals(PARTITION_DIMENSIONS, shardSpec.getPartitionDimensions());
  // NOTE(review): NUM_PARTITONS is misspelled, but it is a constant declared
  // elsewhere in this test class — renaming here would break compilation.
  Assert.assertEquals(NUM_PARTITONS, shardSpec.getNumBuckets());
  Assert.assertEquals(PARTITION_NUM, shardSpec.getBucketId());
}
Example usage of org.apache.druid.data.input.InputRow in the druid project (druid-io):
class RangePartitionIndexTaskInputRowIteratorBuilderTest, method doesNotInvokeHandlersIfRowValid.
@Test
public void doesNotInvokeHandlersIfRowValid()
{
  final DateTime timestamp = IndexTaskInputRowIteratorBuilderTestingFactory.TIMESTAMP;
  // A single null dimension value — still a valid row for range partitioning.
  final List<String> dimensionValues = Collections.singletonList(null);
  final InputRow row =
      IndexTaskInputRowIteratorBuilderTestingFactory.createInputRow(timestamp, dimensionValues);
  final CloseableIterator<InputRow> rowIterator =
      IndexTaskInputRowIteratorBuilderTestingFactory.createInputRowIterator(row);
  final GranularitySpec granularitySpec = IndexTaskInputRowIteratorBuilderTestingFactory.createGranularitySpec(
      timestamp,
      IndexTaskInputRowIteratorBuilderTestingFactory.PRESENT_BUCKET_INTERVAL_OPT
  );

  final List<IndexTaskInputRowIteratorBuilderTestingFactory.HandlerTester.Handler> invokedHandlers =
      HANDLER_TESTER.invokeHandlers(rowIterator, granularitySpec, row);

  // A valid row must pass straight through without triggering any handler.
  Assert.assertEquals(Collections.emptyList(), invokedHandlers);
}
Aggregations