Use of org.apache.beam.sdk.io.hadoop.format.HadoopFormatIO.HadoopInputFormatBoundedSource in project beam by apache.
From the class HadoopFormatIOReadTest, the method testReadersStartWhenZeroRecords:
/**
 * This test validates the behavior of the {@link
 * HadoopInputFormatBoundedSource.HadoopInputFormatReader#start() start()} method when the
 * InputFormat's {@link InputFormat#getSplits(JobContext) getSplits(JobContext)} returns a list of
 * InputSplits containing zero records.
 */
@Test
public void testReadersStartWhenZeroRecords() throws Exception {
  InputFormat mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
  EmployeeRecordReader mockReader = Mockito.mock(EmployeeRecordReader.class);
  // Simulate an InputFormat whose reader has no records: nextKeyValue() is false immediately.
  Mockito.when(mockInputFormat.createRecordReader(Mockito.any(), Mockito.any()))
      .thenReturn(mockReader);
  Mockito.when(mockReader.nextKeyValue()).thenReturn(false);
  InputSplit mockInputSplit = Mockito.mock(NewObjectsEmployeeInputSplit.class);
  HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
      new HadoopInputFormatBoundedSource<>(
          serConf,
          WritableCoder.of(Text.class),
          AvroCoder.of(Employee.class),
          null, // No key translation required.
          null, // No value translation required.
          new SerializableSplit(mockInputSplit),
          false,
          false);
boundedSource.setInputFormatObj(mockInputFormat);
BoundedReader<KV<Text, Employee>> reader = boundedSource.createReader(p.getOptions());
  // With zero records, start() returns false and the reader reports full consumption.
  assertFalse(reader.start());
  assertEquals(Double.valueOf(1), reader.getFractionConsumed());
reader.close();
}
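
For comparison, the zero-record collaborators mocked above can also be stubbed by hand. The sketch below is a minimal RecordReader built directly on Hadoop's mapreduce API whose nextKeyValue() is immediately false; the class name EmptyRecordReader and the Text value type are illustrative assumptions, not part of Beam or of the test above.

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

/** Hypothetical zero-record reader; a source backed by it has nothing to emit. */
class EmptyRecordReader extends RecordReader<Text, Text> {
  @Override
  public void initialize(InputSplit split, TaskAttemptContext context) {}

  @Override
  public boolean nextKeyValue() {
    return false; // No records, so the very first call already reports exhaustion.
  }

  @Override
  public Text getCurrentKey() {
    return null; // Never reached when nextKeyValue() is always false.
  }

  @Override
  public Text getCurrentValue() {
    return null;
  }

  @Override
  public float getProgress() {
    return 1.0f; // Nothing to consume, so progress is complete from the start.
  }

  @Override
  public void close() {}
}

Wiring a reader like this into a custom InputFormat and reading it through HadoopFormatIO should exercise the same contract the test asserts: BoundedReader.start() returns false and getFractionConsumed() reports 1.0.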