Use of uk.gov.gchq.gaffer.hdfs.operation.MapReduce in project Gaffer by gchq.
From the class HBaseAddElementsFromHdfsJobFactoryTest, the method shouldSetupJob:
@Test
public void shouldSetupJob() throws IOException, StoreException {
    // Given
    final JobConf localConf = createLocalConf();
    final FileSystem fs = FileSystem.getLocal(localConf);
    fs.mkdirs(new Path(outputDir));

    final JobFactory factory = getJobFactory();
    final Job job = mock(Job.class);
    final MapReduce operation = getMapReduceOperation();

    final HBaseStore store = new SingleUseMiniHBaseStore();
    final Schema schema = Schema.fromJson(StreamUtil.schemas(getClass()));
    final HBaseProperties properties = HBaseProperties.loadStoreProperties(StreamUtil.storeProps(getClass()));
    store.initialise("graphId", schema, properties);

    // The mocked Job hands back the local configuration so setupJob's changes can be inspected.
    given(job.getConfiguration()).willReturn(localConf);

    // When
    factory.setupJob(job, operation, TextMapperGeneratorImpl.class.getName(), store);

    // Then
    verify(job).setJarByClass(factory.getClass());
    verify(job).setJobName("Ingest HDFS data: Generator=" + TextMapperGeneratorImpl.class.getName() + ", output=" + outputDir);

    // Mapper and reducer must emit ImmutableBytesWritable/KeyValue pairs,
    // written out as HFiles for HBase bulk import.
    verify(job).setMapperClass(AddElementsFromHdfsMapper.class);
    verify(job).setMapOutputKeyClass(ImmutableBytesWritable.class);
    verify(job).setMapOutputValueClass(KeyValue.class);
    verify(job).setReducerClass(AddElementsFromHdfsReducer.class);
    verify(job).setOutputKeyClass(ImmutableBytesWritable.class);
    verify(job).setOutputValueClass(KeyValue.class);
    verify(job).setOutputFormatClass(HFileOutputFormat2.class);

    assertEquals(fs.makeQualified(new Path(outputDir)).toString(),
            job.getConfiguration().get("mapreduce.output.fileoutputformat.outputdir"));
    verify(job).setNumReduceTasks(1);
}
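
The test relies on helpers inherited from the abstract job-factory test and on the TextMapperGeneratorImpl generator, none of which appear in this snippet. A minimal sketch of what they might look like follows; the exact builder calls, the ExampleGenerator class, and the inputDir/failureDir fields are assumptions for illustration, not Gaffer's actual test code.

// Sketch only: a JobConf forced into local mode so the test runs against the
// local filesystem and an in-process MapReduce runner, not a real cluster.
private JobConf createLocalConf() {
    final JobConf conf = new JobConf();
    conf.set("fs.defaultFS", "file:///");
    conf.set("mapreduce.jobtracker.address", "local");
    return conf;
}

// Sketch only: AddElementsFromHdfs implements MapReduce. It pairs each input
// path with a mapper generator class and names the output directory that the
// test later asserts on. inputDir and failureDir are assumed fields.
private MapReduce getMapReduceOperation() {
    return new AddElementsFromHdfs.Builder()
            .addInputMapperPair(inputDir, TextMapperGeneratorImpl.class.getName())
            .outputPath(outputDir)
            .failurePath(failureDir)
            .build();
}

// Sketch only: a concrete TextMapperGenerator turning each line of input text
// into a Gaffer Element via an element generator.
public static final class TextMapperGeneratorImpl extends TextMapperGenerator {
    public TextMapperGeneratorImpl() {
        super(new ExampleGenerator()); // ExampleGenerator is hypothetical
    }
}

With helpers along these lines, the verifications in the Then block describe a complete HFile bulk-import pipeline: the mapper and reducer emit ImmutableBytesWritable/KeyValue pairs, HFileOutputFormat2 writes them as HFiles, and the output directory in the job configuration matches the qualified path the test created up front.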