Use of org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat.DataDrivenDBInputSplit in project Hadoop by Apache.
The class TestDbClasses, method testDataDrivenDBInputFormat.
@Test(timeout = 10000)
public void testDataDrivenDBInputFormat() throws Exception {
  JobContext jobContext = mock(JobContext.class);
  Configuration configuration = new Configuration();
  configuration.setInt(MRJobConfig.NUM_MAPS, 1);
  when(jobContext.getConfiguration()).thenReturn(configuration);
  DataDrivenDBInputFormat<NullDBWritable> format =
      new DataDrivenDBInputFormat<NullDBWritable>();
  List<InputSplit> splits = format.getSplits(jobContext);
  assertEquals(1, splits.size());
  DataDrivenDBInputSplit split = (DataDrivenDBInputSplit) splits.get(0);
  assertEquals("1=1", split.getLowerClause());
  assertEquals("1=1", split.getUpperClause());
  // 2
  configuration.setInt(MRJobConfig.NUM_MAPS, 2);
  DataDrivenDBInputFormat.setBoundingQuery(configuration, "query");
  assertEquals("query",
      configuration.get(DBConfiguration.INPUT_BOUNDING_QUERY));
  Job job = mock(Job.class);
  when(job.getConfiguration()).thenReturn(configuration);
  DataDrivenDBInputFormat.setInput(job, NullDBWritable.class, "query",
      "Bounding Query");
  assertEquals("Bounding Query",
      configuration.get(DBConfiguration.INPUT_BOUNDING_QUERY));
}
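For context, the sketch below shows how the same entry points are typically wired into a real (non-mocked) job rather than a test. It is not taken from the Hadoop sources: the HSQLDB driver, the in-memory connection URL, and the employees table with an id column are hypothetical placeholders. setInput registers DataDrivenDBInputFormat as the job's input format and stores both queries; the bounding query drives split generation (min/max of the split column divided across NUM_MAPS ranges), and each split substitutes its lower/upper clause for the $CONDITIONS token in the input query, which is what the getLowerClause/getUpperClause assertions above inspect in the degenerate one-map case.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat;

public class DataDrivenDBInputExample {
  public static Job configureJob() throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical JDBC connection settings; replace with a real driver and URL.
    DBConfiguration.configureDB(conf, "org.hsqldb.jdbcDriver",
        "jdbc:hsqldb:mem:exampledb", "sa", "");
    Job job = Job.getInstance(conf, "data-driven-db-import");
    // setInput registers DataDrivenDBInputFormat as the input format and stores
    // both queries; each split later replaces $CONDITIONS in the input query
    // with its own lower/upper bound clause.
    DataDrivenDBInputFormat.setInput(job, NullDBWritable.class,
        "SELECT id, name FROM employees WHERE $CONDITIONS",
        "SELECT MIN(id), MAX(id) FROM employees");
    return job;
  }
}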
Use of org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat.DataDrivenDBInputSplit in project Hadoop by Apache.
The class TestSplitters, method assertSplits.
private void assertSplits(String[] expectedSplitRE,
    List<InputSplit> splits) throws IOException {
  assertEquals(expectedSplitRE.length, splits.size());
  for (int i = 0; i < expectedSplitRE.length; i++) {
    DataDrivenDBInputSplit split = (DataDrivenDBInputSplit) splits.get(i);
    String actualExpr = split.getLowerClause() + " " + split.getUpperClause();
    assertTrue("Split #" + (i + 1) + " expression is wrong."
        + " Expected " + expectedSplitRE[i]
        + " Actual " + actualExpr,
        Pattern.matches(expectedSplitRE[i], actualExpr));
  }
}
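As an illustration of what this helper checks, the following standalone sketch (not part of the Hadoop tests) builds two DataDrivenDBInputSplit instances by hand over a hypothetical numeric id column and compares the concatenated lower/upper clauses against expected expressions with Pattern.matches, the same comparison the helper performs; Pattern.quote is used here only to treat the expected strings as literal patterns.

import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.lib.db.DataDrivenDBInputFormat.DataDrivenDBInputSplit;

public class SplitClauseCheck {
  public static void main(String[] args) {
    // Two hand-built splits over a hypothetical numeric column "id".
    List<InputSplit> splits = Arrays.<InputSplit>asList(
        new DataDrivenDBInputSplit("id >= 1", "id < 50"),
        new DataDrivenDBInputSplit("id >= 50", "id <= 100"));
    String[] expected = {"id >= 1 id < 50", "id >= 50 id <= 100"};
    for (int i = 0; i < expected.length; i++) {
      DataDrivenDBInputSplit split = (DataDrivenDBInputSplit) splits.get(i);
      // Same concatenation the helper uses: lower clause, space, upper clause.
      String actualExpr = split.getLowerClause() + " " + split.getUpperClause();
      // Pattern.quote turns the expected string into a literal regex.
      System.out.println(Pattern.matches(Pattern.quote(expected[i]), actualExpr));
    }
  }
}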