Usage example of org.apache.hadoop.mapred.JobConf from the Apache Whirr project,
taken from the class HadoopServiceTest, method test.
/**
 * End-to-end smoke test: waits for the task trackers to come up, writes a tiny
 * input file to HDFS, runs a token-count MapReduce job, and checks the output.
 *
 * <p>Fix over the original: the {@code Writer} and {@code BufferedReader} were
 * closed unconditionally at the end of the straight-line flow, so any exception
 * thrown mid-test leaked the stream. try-with-resources now guarantees both are
 * closed on every path.
 *
 * @throws Exception if the cluster, filesystem, or job submission fails
 */
@Test
public void test() throws Exception {
    Configuration conf = getConfiguration();
    JobConf job = new JobConf(conf, HadoopServiceTest.class);
    JobClient client = new JobClient(job);
    // Block until at least one task tracker has registered, otherwise the job
    // submission below would hang or fail spuriously.
    waitForTaskTrackers(client);
    FileSystem fs = FileSystem.get(conf);
    // Write the job input ("b a") to HDFS; the writer (and the underlying
    // stream) is closed even if write() throws.
    try (Writer wr = new OutputStreamWriter(fs.create(new Path("input")))) {
        wr.write("b a\n");
    }
    job.setMapperClass(TokenCountMapper.class);
    job.setReducerClass(LongSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);
    FileInputFormat.setInputPaths(job, new Path("input"));
    FileOutputFormat.setOutputPath(job, new Path("output"));
    JobClient.runJob(job);
    // Verify the single reducer output: each token counted once, sorted order,
    // TextOutputFormat's default tab separator between key and value.
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(fs.open(new Path("output/part-00000"))))) {
        assertEquals("a\t1", reader.readLine());
        assertEquals("b\t1", reader.readLine());
        assertNull(reader.readLine());
    }
}
Usage example of org.apache.hadoop.mapred.JobConf from the Apache HBase project,
taken from the class TestTableSnapshotInputFormat, method testWithMockedMapReduce.
/**
 * Creates a table plus snapshot, initializes a snapshot-backed map job, and
 * verifies the splits produced by the mocked MapReduce run. Cleanup (snapshot,
 * table, cluster) always happens in the finally block.
 */
@Override
protected void testWithMockedMapReduce(HBaseTestingUtility util, String snapshotName,
        int numRegions, int expectedNumSplits) throws Exception {
    setupCluster();
    final TableName tableName = TableName.valueOf(name.getMethodName());
    try {
        createTableAndSnapshot(util, tableName, snapshotName, getStartRow(), getEndRow(),
            numRegions);
        // Directory on the test FS where the snapshot is restored for the job.
        Path restoreDir = util.getDataTestDirOnTestFS(snapshotName);
        JobConf jobConf = new JobConf(util.getConfiguration());
        TableMapReduceUtil.initTableSnapshotMapJob(snapshotName, COLUMNS,
            TestTableSnapshotMapper.class, ImmutableBytesWritable.class, NullWritable.class,
            jobConf, false, restoreDir);
        // mapred doesn't support start and end keys? o.O
        verifyWithMockedMapReduce(jobConf, numRegions, expectedNumSplits, getStartRow(),
            getEndRow());
    } finally {
        util.getAdmin().deleteSnapshot(snapshotName);
        util.deleteTable(tableName);
        tearDownCluster();
    }
}
Usage example of org.apache.hadoop.mapred.JobConf from the Apache HBase project,
taken from the class TestTableSnapshotInputFormat, method doTestWithMapReduce.
// this is also called by the IntegrationTestTableSnapshotInputFormat
/**
 * Runs a full MapReduce job over a table snapshot and asserts it succeeds.
 * Optionally shuts the mini HBase cluster down first to prove the snapshot can
 * be read without a live cluster; in that case cleanup is skipped since the
 * cluster is no longer available.
 *
 * <p>Also invoked by IntegrationTestTableSnapshotInputFormat.
 */
public static void doTestWithMapReduce(HBaseTestingUtility util, TableName tableName,
        String snapshotName, byte[] startRow, byte[] endRow, Path tableDir, int numRegions,
        int expectedNumSplits, boolean shutdownCluster) throws Exception {
    // Prepare the table and take the snapshot the job will read from.
    createTableAndSnapshot(util, tableName, snapshotName, startRow, endRow, numRegions);
    if (shutdownCluster) {
        util.shutdownMiniHBaseCluster();
    }
    try {
        // Assemble the mapred job: snapshot-backed mapper, single reducer,
        // output discarded via NullOutputFormat.
        JobConf job = new JobConf(util.getConfiguration());
        job.setJarByClass(util.getClass());
        org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJarsForClasses(job,
            TestTableSnapshotInputFormat.class);
        TableMapReduceUtil.initTableSnapshotMapJob(snapshotName, COLUMNS,
            TestTableSnapshotMapper.class, ImmutableBytesWritable.class, NullWritable.class,
            job, true, tableDir);
        job.setReducerClass(TestTableSnapshotInputFormat.TestTableSnapshotReducer.class);
        job.setNumReduceTasks(1);
        job.setOutputFormat(NullOutputFormat.class);
        RunningJob submitted = JobClient.runJob(job);
        Assert.assertTrue(submitted.isSuccessful());
    } finally {
        // Only clean up when the cluster is still running.
        if (!shutdownCluster) {
            util.getAdmin().deleteSnapshot(snapshotName);
            util.deleteTable(tableName);
        }
    }
}
Usage example of org.apache.hadoop.mapred.JobConf from the Apache Hive project,
taken from the class TestHiveAccumuloHelper, method testISEIsPropagated.
/**
 * Verifies that an IllegalStateException thrown by the internal
 * setZooKeeperInstanceWithReflection method propagates unchanged out of the
 * public setZooKeeperInstance entry point.
 */
@Test(expected = IllegalStateException.class)
public void testISEIsPropagated() throws Exception {
    final JobConf jobConf = Mockito.mock(JobConf.class);
    final HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
    final Class<?> inputOrOutputFormatClass = AccumuloInputFormat.class;
    final boolean useSasl = false;
    final String instanceName = "accumulo_instance";
    final String zookeepers = "localhost:2181";
    // The public entry point runs its real implementation...
    Mockito.doCallRealMethod().when(helper).setZooKeeperInstance(jobConf,
        inputOrOutputFormatClass, zookeepers, instanceName, useSasl);
    // ...while the internal reflective call is stubbed to throw the ISE.
    Mockito.doThrow(new IllegalStateException()).when(helper)
        .setZooKeeperInstanceWithReflection(jobConf, inputOrOutputFormatClass, zookeepers,
            instanceName, useSasl);
    // Expect the IllegalStateException to surface from the public method.
    helper.setZooKeeperInstance(jobConf, inputOrOutputFormatClass, zookeepers, instanceName,
        useSasl);
}
Usage example of org.apache.hadoop.mapred.JobConf from the Apache Hive project,
taken from the class TestHiveAccumuloHelper, method testISEIsPropagatedWithReflection.
// Verifies that an IllegalStateException raised inside the reflective helper --
// and therefore delivered wrapped in an InvocationTargetException -- is
// unwrapped and rethrown as an IllegalStateException by the public
// setZooKeeperInstance method.
@Test(expected = IllegalStateException.class)
public void testISEIsPropagatedWithReflection() throws Exception {
final HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
final JobConf jobConf = Mockito.mock(JobConf.class);
final Class<?> inputOrOutputFormatClass = AccumuloInputFormat.class;
final String zookeepers = "localhost:2181";
final String instanceName = "accumulo_instance";
final boolean useSasl = false;
// Call the real "public" method
Mockito.doCallRealMethod().when(helper).setZooKeeperInstance(jobConf, inputOrOutputFormatClass, zookeepers, instanceName, useSasl);
// Stub the reflective helper to throw an ISE wrapped in an
// InvocationTargetException (the original comment said "IAE", which was wrong)
Mockito.doThrow(new InvocationTargetException(new IllegalStateException())).when(helper).setZooKeeperInstanceWithReflection(jobConf, inputOrOutputFormatClass, zookeepers, instanceName, useSasl);
// Should throw an IllegalStateException
helper.setZooKeeperInstance(jobConf, inputOrOutputFormatClass, zookeepers, instanceName, useSasl);
}
Aggregations