Use of org.apache.hadoop.mapreduce.JobContext in project hbase by apache.
The class TestRoundRobinTableInputFormat, method testConfigureUnconfigure.
/**
* Test that configure/unconfigure set and properly undo the HBASE_REGIONSIZECALCULATOR_ENABLE
* configuration.
*/
@Test
public void testConfigureUnconfigure() {
  Configuration configuration = HBaseConfiguration.create();
  RoundRobinTableInputFormat rrtif = new RoundRobinTableInputFormat();
  rrtif.setConf(configuration);
  JobContext jobContext = Mockito.mock(JobContext.class);
  Mockito.when(jobContext.getConfiguration()).thenReturn(configuration);
  // Assert when done, HBASE_REGIONSIZECALCULATOR_ENABLE is still unset.
  configuration.unset(RoundRobinTableInputFormat.HBASE_REGIONSIZECALCULATOR_ENABLE);
  rrtif.configure();
  rrtif.unconfigure();
  String value = configuration.get(RoundRobinTableInputFormat.HBASE_REGIONSIZECALCULATOR_ENABLE);
  assertNull(value);
  // Assert HBASE_REGIONSIZECALCULATOR_ENABLE is still false when done.
  checkRetainsBooleanValue(jobContext, rrtif, false);
  // Assert HBASE_REGIONSIZECALCULATOR_ENABLE is still true when done.
  checkRetainsBooleanValue(jobContext, rrtif, true);
}
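For context on what the assertions pin down: configure() stashes the prior value of HBASE_REGIONSIZECALCULATOR_ENABLE before forcing it to false, and unconfigure() restores the old value, including the unset state. A minimal sketch of that save/restore pattern follows; the class name, field, and method bodies are illustrative assumptions, not HBase's actual implementation.
/** Illustrative sketch of the save/restore pattern under test; not HBase source. */
class RegionSizeCalculatorToggle {
  static final String HBASE_REGIONSIZECALCULATOR_ENABLE = "hbase.regionsizecalculator.enable";
  private final Configuration conf;
  private String previousValue; // null means the key was unset before configure()

  RegionSizeCalculatorToggle(Configuration conf) {
    this.conf = conf;
  }

  void configure() {
    previousValue = conf.get(HBASE_REGIONSIZECALCULATOR_ENABLE); // remember the prior setting
    conf.setBoolean(HBASE_REGIONSIZECALCULATOR_ENABLE, false); // disable while computing splits
  }

  void unconfigure() {
    if (previousValue == null) {
      conf.unset(HBASE_REGIONSIZECALCULATOR_ENABLE); // key was absent: restore the unset state
    } else {
      conf.set(HBASE_REGIONSIZECALCULATOR_ENABLE, previousValue); // restore the prior value
    }
  }
}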
Use of org.apache.hadoop.mapreduce.JobContext in project beam by apache.
The class HadoopFormatIOReadTest, method testComputeSplitsIfGetSplitsReturnsNullValue.
/**
 * This test validates behavior of {@link
 * HadoopInputFormatBoundedSource#computeSplitsIfNecessary() computeSplits()} when Hadoop
 * InputFormat's {@link InputFormat#getSplits(JobContext) getSplits(JobContext)} returns a null
 * value.
 */
@Test
public void testComputeSplitsIfGetSplitsReturnsNullValue() throws Exception {
  InputFormat<Text, Employee> mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
  SerializableSplit mockInputSplit = Mockito.mock(SerializableSplit.class);
  Mockito.when(mockInputFormat.getSplits(Mockito.any(JobContext.class))).thenReturn(null);
  HadoopInputFormatBoundedSource<Text, Employee> hifSource =
      new HadoopInputFormatBoundedSource<>(
          serConf,
          WritableCoder.of(Text.class),
          AvroCoder.of(Employee.class),
          null, // No key translation required.
          null, // No value translation required.
          mockInputSplit,
          false,
          false);
  thrown.expect(IOException.class);
  thrown.expectMessage("Error in computing splits, getSplits() returns null.");
  hifSource.setInputFormatObj(mockInputFormat);
  hifSource.computeSplitsIfNecessary();
}
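The expected message implies computeSplitsIfNecessary() checks the return value of getSplits() before using it. A hedged sketch of such a guard, written as a standalone helper rather than Beam's actual code:
static List<InputSplit> fetchSplits(InputFormat<?, ?> inputFormat, JobContext context)
    throws IOException, InterruptedException {
  // A misbehaving InputFormat may return null instead of a list of splits.
  List<InputSplit> splits = inputFormat.getSplits(context);
  if (splits == null) {
    throw new IOException("Error in computing splits, getSplits() returns null.");
  }
  return splits;
}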
Use of org.apache.hadoop.mapreduce.JobContext in project beam by apache.
The class HadoopFormatIOReadTest, method testComputeSplitsIfGetSplitsReturnsListHavingNullValues.
/**
 * This test validates behavior of {@link
 * HadoopInputFormatBoundedSource#computeSplitsIfNecessary() computeSplits()} if Hadoop
 * InputFormat's {@link InputFormat#getSplits(JobContext) getSplits(JobContext)} returns an
 * InputSplit list containing some null values.
 */
@Test
public void testComputeSplitsIfGetSplitsReturnsListHavingNullValues() throws Exception {
  // InputSplit list having null value.
  InputSplit mockInputSplit =
      Mockito.mock(InputSplit.class, Mockito.withSettings().extraInterfaces(Writable.class));
  List<InputSplit> inputSplitList = new ArrayList<>();
  inputSplitList.add(mockInputSplit);
  inputSplitList.add(null);
  InputFormat<Text, Employee> mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
  Mockito.when(mockInputFormat.getSplits(Mockito.any(JobContext.class)))
      .thenReturn(inputSplitList);
  HadoopInputFormatBoundedSource<Text, Employee> hifSource =
      new HadoopInputFormatBoundedSource<>(
          serConf,
          WritableCoder.of(Text.class),
          AvroCoder.of(Employee.class),
          null, // No key translation required.
          null, // No value translation required.
          new SerializableSplit(),
          false,
          false);
  thrown.expect(IOException.class);
  thrown.expectMessage(
      "Error in computing splits, split is null in InputSplits list populated by getSplits() : ");
  hifSource.setInputFormatObj(mockInputFormat);
  hifSource.computeSplitsIfNecessary();
}
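Here the validation runs per element of the returned list. Note that ExpectedException.expectMessage() matches substrings, so the test only pins the message prefix up to the colon. A sketch of the element check, again an assumption rather than Beam's source:
for (InputSplit split : splits) {
  if (split == null) {
    // Only this prefix is asserted; the real message may append detail after the colon.
    throw new IOException(
        "Error in computing splits, split is null in InputSplits list populated by getSplits() : ");
  }
}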
Use of org.apache.hadoop.mapreduce.JobContext in project beam by apache.
The class HadoopFormatIOReadTest, method testComputeSplitsIfGetSplitsReturnsEmptyList.
/**
 * This test validates behavior of {@link
 * HadoopInputFormatBoundedSource#computeSplitsIfNecessary() computeSplits()} when Hadoop
 * InputFormat's {@link InputFormat#getSplits(JobContext)} returns an empty list.
 */
@Test
public void testComputeSplitsIfGetSplitsReturnsEmptyList() throws Exception {
  InputFormat<?, ?> mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
  SerializableSplit mockInputSplit = Mockito.mock(SerializableSplit.class);
  Mockito.when(mockInputFormat.getSplits(Mockito.any(JobContext.class)))
      .thenReturn(new ArrayList<>());
  HadoopInputFormatBoundedSource<Text, Employee> hifSource =
      new HadoopInputFormatBoundedSource<>(
          serConf,
          WritableCoder.of(Text.class),
          AvroCoder.of(Employee.class),
          null, // No key translation required.
          null, // No value translation required.
          mockInputSplit,
          false,
          false);
  thrown.expect(IOException.class);
  thrown.expectMessage("Error in computing splits, getSplits() returns a empty list");
  hifSource.setInputFormatObj(mockInputFormat);
  hifSource.computeSplitsIfNecessary();
}
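The third failure mode is an empty list. A sketch of that guard under the same caveat; "a empty list" is kept verbatim because the assertion must match the exact substring the implementation emits:
if (splits.isEmpty()) {
  // The message text, grammatical slip included, mirrors what the test expects.
  throw new IOException("Error in computing splits, getSplits() returns a empty list");
}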
Use of org.apache.hadoop.mapreduce.JobContext in project druid by druid-io.
The class DatasourceInputFormatTest, method testGetSplitsUsingDefaultSupplier.
@Test
public void testGetSplitsUsingDefaultSupplier() throws Exception {
  // Use the builtin supplier, reading from the local filesystem, rather than testFormatter.
  final File tmpFile = temporaryFolder.newFile("something:with:colons");
  Files.write("dummy", tmpFile, StandardCharsets.UTF_8);
  final ImmutableList<WindowedDataSegment> mySegments =
      ImmutableList.of(
          WindowedDataSegment.of(
              new DataSegment(
                  "test1",
                  Intervals.of("2000/3000"),
                  "ver",
                  ImmutableMap.of("type", "local", "path", tmpFile.getPath()),
                  ImmutableList.of("host"),
                  ImmutableList.of("visited_sum", "unique_hosts"),
                  NoneShardSpec.instance(),
                  9,
                  2)));
  final JobConf myConfig = populateConfiguration(new JobConf(), mySegments, 0L);
  final JobContext myContext = EasyMock.createMock(JobContext.class);
  EasyMock.expect(myContext.getConfiguration()).andReturn(myConfig);
  EasyMock.replay(myContext);
  final List<InputSplit> splits = new DatasourceInputFormat().getSplits(myContext);
  Assert.assertEquals(1, splits.size());
  final DatasourceInputSplit theSplit = (DatasourceInputSplit) Iterables.getOnlyElement(splits);
  Assert.assertEquals(mySegments.get(0).getSegment().getSize(), theSplit.getLength());
  Assert.assertEquals(mySegments, theSplit.getSegments());
  Assert.assertArrayEquals(new String[] { "localhost" }, theSplit.getLocations());
}
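Across all of these examples the pattern is the same: the code under test only calls getConfiguration() on the JobContext, so a stub is sufficient. A side-by-side sketch of the two mocking styles used above, Mockito in the hbase and beam tests and EasyMock in the druid test:
Configuration conf = new Configuration();
// Mockito: stubbing is lazy; no replay step is needed before use.
JobContext mockitoContext = Mockito.mock(JobContext.class);
Mockito.when(mockitoContext.getConfiguration()).thenReturn(conf);
// EasyMock: record expectations first, then switch the mock into replay mode.
JobContext easyMockContext = EasyMock.createMock(JobContext.class);
EasyMock.expect(easyMockContext.getConfiguration()).andReturn(conf);
EasyMock.replay(easyMockContext);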