Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.
The class TestMRCJCReflectionUtils, method testSetConf.
/**
 * Tests backward compatibility of {@link ReflectionUtils} for
 * JobConfigurable objects.
 * This should be deprecated along with the mapred package (HADOOP-1230)
 * and removed when the mapred package is removed.
 */
@Test
public void testSetConf() {
  JobConfigurableOb ob = new JobConfigurableOb();
  ReflectionUtils.setConf(ob, new Configuration());
  assertFalse(ob.configured);
  ReflectionUtils.setConf(ob, new JobConf());
  assertTrue(ob.configured);
}
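The JobConfigurableOb fixture is not shown on this page. A minimal sketch of what it likely looks like, assuming it implements the old-API JobConfigurable interface and simply records whether configure() was invoked; ReflectionUtils.setConf() only calls configure() when the passed Configuration is actually a JobConf, which is exactly what the two assertions check:

private static class JobConfigurableOb implements JobConfigurable {
  // flipped to true the first time the framework calls configure()
  boolean configured;

  @Override
  public void configure(JobConf job) {
    configured = true;
  }
}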
Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.
The class OldAPICombinerTest, method getOldAPIJobconf.
private static JobConf getOldAPIJobconf(Configuration configuration, String name,
    String input, String output) throws Exception {
  final JobConf jobConf = new JobConf(configuration);
  final FileSystem fs = FileSystem.get(configuration);
  if (fs.exists(new Path(output))) {
    fs.delete(new Path(output), true);
  }
  fs.close();
  jobConf.setJobName(name);
  jobConf.setOutputKeyClass(Text.class);
  jobConf.setOutputValueClass(IntWritable.class);
  jobConf.setMapperClass(WordCountWithOldAPI.TokenizerMapperWithOldAPI.class);
  jobConf.setCombinerClass(WordCountWithOldAPI.IntSumReducerWithOldAPI.class);
  jobConf.setReducerClass(WordCountWithOldAPI.IntSumReducerWithOldAPI.class);
  jobConf.setInputFormat(SequenceFileInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);
  FileInputFormat.setInputPaths(jobConf, new Path(input));
  FileOutputFormat.setOutputPath(jobConf, new Path(output));
  return jobConf;
}
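This helper only builds the configuration; the caller still has to submit the job. A hedged usage sketch (the job name and paths here are illustrative, not from the original test), using the old-API JobClient, which blocks until the job completes:

// Build the JobConf, then run the job synchronously with the old API.
JobConf job = getOldAPIJobconf(new Configuration(), "wordcount-old-api",
    "/tmp/wc/input", "/tmp/wc/output");
RunningJob running = JobClient.runJob(job); // blocks until completion
assertTrue(running.isSuccessful());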
Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.
The class DistCh, method createJobConf.
private static JobConf createJobConf(Configuration conf) {
  JobConf jobconf = new JobConf(conf, DistCh.class);
  jobconf.setJobName(NAME);
  jobconf.setMapSpeculativeExecution(false);
  jobconf.setInputFormat(ChangeInputFormat.class);
  jobconf.setOutputKeyClass(Text.class);
  jobconf.setOutputValueClass(Text.class);
  jobconf.setMapperClass(ChangeFilesMapper.class);
  jobconf.setNumReduceTasks(0);
  return jobconf;
}
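The JobConf setters above are thin wrappers that write ordinary Configuration keys. A hedged sketch of the raw equivalents of two of the calls, using the MRJobConfig constants as named in Hadoop 2.x (exact key names vary across Hadoop versions, so treat these as assumptions):

Configuration raw = new Configuration();
// setMapSpeculativeExecution(false) writes "mapreduce.map.speculative"
raw.setBoolean(MRJobConfig.MAP_SPECULATIVE, false);
// setNumReduceTasks(0) writes "mapreduce.job.reduces"; zero reducers makes
// DistCh a map-only job, so mapper output goes straight to the output format
raw.setInt(MRJobConfig.NUM_REDUCES, 0);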
Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.
The class TestCompressionEmulationUtils, method testExtractCompressionConfigs.
/**
 * Test if
 * {@link CompressionEmulationUtil#configureCompressionEmulation(
 * org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.JobConf)}
 * can extract compression-related configuration parameters.
 */
@Test
public void testExtractCompressionConfigs() {
  JobConf source = new JobConf();
  JobConf target = new JobConf();
  // set the default values
  source.setBoolean(FileOutputFormat.COMPRESS, false);
  source.set(FileOutputFormat.COMPRESS_CODEC, "MyDefaultCodec");
  source.set(FileOutputFormat.COMPRESS_TYPE, "MyDefaultType");
  source.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, false);
  source.set(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC, "MyDefaultCodec2");
  CompressionEmulationUtil.configureCompressionEmulation(source, target);
  // check default values
  assertFalse(target.getBoolean(FileOutputFormat.COMPRESS, true));
  assertEquals("MyDefaultCodec", target.get(FileOutputFormat.COMPRESS_CODEC));
  assertEquals("MyDefaultType", target.get(FileOutputFormat.COMPRESS_TYPE));
  assertFalse(target.getBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true));
  assertEquals("MyDefaultCodec2", target.get(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC));
  assertFalse(CompressionEmulationUtil.isInputCompressionEmulationEnabled(target));
  // set new values
  source.setBoolean(FileOutputFormat.COMPRESS, true);
  source.set(FileOutputFormat.COMPRESS_CODEC, "MyCodec");
  source.set(FileOutputFormat.COMPRESS_TYPE, "MyType");
  source.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);
  source.set(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC, "MyCodec2");
  org.apache.hadoop.mapred.FileInputFormat.setInputPaths(source, "file.gz");
  // reset
  target = new JobConf();
  CompressionEmulationUtil.configureCompressionEmulation(source, target);
  // check new values
  assertTrue(target.getBoolean(FileOutputFormat.COMPRESS, false));
  assertEquals("MyCodec", target.get(FileOutputFormat.COMPRESS_CODEC));
  assertEquals("MyType", target.get(FileOutputFormat.COMPRESS_TYPE));
  assertTrue(target.getBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, false));
  assertEquals("MyCodec2", target.get(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC));
  assertTrue(CompressionEmulationUtil.isInputCompressionEmulationEnabled(target));
}
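The body of configureCompressionEmulation() is not shown on this page. A sketch inferred purely from the assertions above (an assumption, not the actual Gridmix code): the method copies the five compression keys from source to target, and separately enables input-compression emulation when an input path carries a known compressed suffix such as .gz:

// Hedged reconstruction of the copy step this test exercises; the real
// CompressionEmulationUtil.configureCompressionEmulation may differ.
static void copyCompressionConfigs(JobConf source, JobConf target) {
  target.setBoolean(FileOutputFormat.COMPRESS,
      source.getBoolean(FileOutputFormat.COMPRESS, false));
  target.set(FileOutputFormat.COMPRESS_CODEC,
      source.get(FileOutputFormat.COMPRESS_CODEC));
  target.set(FileOutputFormat.COMPRESS_TYPE,
      source.get(FileOutputFormat.COMPRESS_TYPE));
  target.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS,
      source.getBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, false));
  target.set(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC,
      source.get(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC));
}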
Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.
The class TestCompressionEmulationUtils, method testFileQueueDecompression.
/**
 * Test if {@link FileQueue} can identify compressed files and provide
 * readers that extract uncompressed data, but only when input-compression
 * emulation is enabled.
 */
@Test
public void testFileQueueDecompression() throws IOException {
  JobConf conf = new JobConf();
  FileSystem lfs = FileSystem.getLocal(conf);
  String inputLine = "Hi Hello!";
  CompressionEmulationUtil.setCompressionEmulationEnabled(conf, true);
  CompressionEmulationUtil.setInputCompressionEmulationEnabled(conf, true);
  org.apache.hadoop.mapred.FileOutputFormat.setCompressOutput(conf, true);
  org.apache.hadoop.mapred.FileOutputFormat.setOutputCompressorClass(conf, GzipCodec.class);
  // define the test's root temp directory
  Path rootTempDir = new Path(System.getProperty("test.build.data", "/tmp"))
      .makeQualified(lfs.getUri(), lfs.getWorkingDirectory());
  Path tempDir = new Path(rootTempDir, "TestFileQueueDecompression");
  lfs.delete(tempDir, true);
  // create a compressed file
  Path compressedFile = new Path(tempDir, "test");
  OutputStream out =
      CompressionEmulationUtil.getPossiblyCompressedOutputStream(compressedFile, conf);
  BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(out));
  writer.write(inputLine);
  writer.close();
  compressedFile = compressedFile.suffix(".gz");
  // now read back the data from the compressed stream using FileQueue
  long fileSize = lfs.listStatus(compressedFile)[0].getLen();
  CombineFileSplit split =
      new CombineFileSplit(new Path[] { compressedFile }, new long[] { fileSize });
  FileQueue queue = new FileQueue(split, conf);
  byte[] bytes = new byte[inputLine.getBytes().length];
  queue.read(bytes);
  queue.close();
  String readLine = new String(bytes);
  assertEquals("Compression/Decompression error", inputLine, readLine);
}
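As a cross-check that does not depend on FileQueue (not part of the original test), the same file could be read back through Hadoop's codec factory, which resolves GzipCodec from the .gz suffix:

CompressionCodecFactory factory = new CompressionCodecFactory(conf);
CompressionCodec codec = factory.getCodec(compressedFile); // GzipCodec, from the ".gz" suffix
try (BufferedReader reader = new BufferedReader(
    new InputStreamReader(codec.createInputStream(lfs.open(compressedFile))))) {
  // the single line written above should round-trip unchanged
  assertEquals(inputLine, reader.readLine());
}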