Example usage of org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration in the Apache Hadoop project.
From class CombinerTest, method startUp:
@Before
public void startUp() throws Exception {
  // Skip the whole test unless the native task library was actually loaded.
  Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
  Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());

  final ScenarioConfiguration scenarioConf = new ScenarioConfiguration();
  scenarioConf.addcombinerConf();

  this.fs = FileSystem.get(scenarioConf);
  this.inputpath = TestConstants.NATIVETASK_COMBINER_TEST_INPUTDIR + "/wordcount";

  if (!fs.exists(new Path(inputpath))) {
    // Generate the word-count input lazily, the first time this suite runs.
    final int filesize =
        scenarioConf.getInt(TestConstants.NATIVETASK_COMBINER_WORDCOUNT_FILESIZE, 1000000);
    new TestInputFile(filesize, Text.class.getName(), Text.class.getName(), scenarioConf)
        .createSequenceTestFile(inputpath, 1, (byte) 'a');
  }

  this.nativeoutputpath =
      TestConstants.NATIVETASK_COMBINER_TEST_NATIVE_OUTPUTDIR + "/nativewordcount";
  this.hadoopoutputpath =
      TestConstants.NATIVETASK_COMBINER_TEST_NORMAL_OUTPUTDIR + "/normalwordcount";
}
Example usage of org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration in the Apache Hadoop project.
From class TestInputFile, method createSequenceTestFile:
/**
 * Creates a sequence file of roughly {@code filesize} bytes filled with
 * pseudo-random key/value records of the configured key/value classes.
 *
 * @param filepath destination path of the sequence file
 * @param base     base value passed through to the random byte generator
 * @param start    starting byte value used when generating record data
 * @throws Exception if the key or value class cannot be resolved, or the
 *                   sequence file writer cannot be created
 */
public void createSequenceTestFile(String filepath, int base, byte start) throws Exception {
  LOG.info("creating file " + filepath + "(" + filesize + " bytes)");
  LOG.info(keyClsName + " " + valueClsName);
  Class<?> tmpkeycls, tmpvaluecls;
  try {
    tmpkeycls = Class.forName(keyClsName);
  } catch (final ClassNotFoundException e) {
    throw new Exception("key class not found: ", e);
  }
  try {
    tmpvaluecls = Class.forName(valueClsName);
  } catch (final ClassNotFoundException e) {
    // BUGFIX: this branch resolves the *value* class; the old message said "key".
    throw new Exception("value class not found: ", e);
  }
  try {
    final Path outputfilepath = new Path(filepath);
    final ScenarioConfiguration conf = new ScenarioConfiguration();
    writer = SequenceFile.createWriter(conf,
        SequenceFile.Writer.file(outputfilepath),
        SequenceFile.Writer.keyClass(tmpkeycls),
        SequenceFile.Writer.valueClass(tmpvaluecls));
  } catch (final Exception e) {
    // BUGFIX: previously only printStackTrace()'d and fell through with a null
    // writer, risking an NPE in flushBuf; fail fast and preserve the cause.
    throw new Exception("failed to create sequence file writer for " + filepath, e);
  }
  // Fill the file buffer-by-buffer; the final flush writes the remainder.
  int remaining = this.filesize;
  while (remaining > DATABUFSIZE) {
    nextRandomBytes(databuf, base, start);
    final int size = flushBuf(DATABUFSIZE);
    remaining -= size;
  }
  nextRandomBytes(databuf, base, start);
  flushBuf(remaining);
  if (writer != null) {
    IOUtils.closeStream(writer);
  } else {
    throw new Exception("no writer to create sequenceTestFile!");
  }
}
Example usage of org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration in the Apache Hadoop project.
From class NonSortTest, method startUp:
@Before
public void startUp() throws Exception {
  // Native-task tests are only meaningful when the native library is present.
  Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
  Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());

  final ScenarioConfiguration conf = new ScenarioConfiguration();
  conf.addNonSortTestConf();

  final FileSystem fs = FileSystem.get(conf);
  final Path inputDir = new Path(TestConstants.NATIVETASK_NONSORT_TEST_INPUTDIR);
  if (!fs.exists(inputDir)) {
    // Create the test input the first time this suite runs.
    final int filesize = conf.getInt(TestConstants.NATIVETASK_NONSORTTEST_FILESIZE, 10000000);
    new TestInputFile(filesize, Text.class.getName(), Text.class.getName(), conf)
        .createSequenceTestFile(inputDir.toString());
  }
  fs.close();
}
Example usage of org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration in the Apache Hadoop project.
From class CompressMapper, method getCompressJob:
/**
 * Builds (but does not submit) a compression test job reading sequence files
 * from {@code inputpath} and writing to {@code outputpath}, clearing any
 * stale output directory first.
 */
public static Job getCompressJob(String jobname, Configuration conf, String inputpath,
    String outputpath) throws Exception {
  final Job job = Job.getInstance(conf, jobname + "-CompressMapperJob");
  job.setJarByClass(CompressMapper.class);
  job.setMapperClass(TextCompressMapper.class);
  job.setOutputKeyClass(Text.class);
  // NOTE(review): key uses setOutputKeyClass while value uses
  // setMapOutputValueClass — asymmetric; confirm this mix is intentional.
  job.setMapOutputValueClass(Text.class);

  // Remove leftover output from a previous run so the job can write fresh results.
  final Path output = new Path(outputpath);
  final FileSystem hdfs = FileSystem.get(new ScenarioConfiguration());
  if (hdfs.exists(output)) {
    hdfs.delete(output, true);
  }
  hdfs.close();

  job.setInputFormatClass(SequenceFileInputFormat.class);
  FileInputFormat.addInputPath(job, new Path(inputpath));
  FileOutputFormat.setOutputPath(job, output);
  return job;
}
Example usage of org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration in the Apache Hadoop project.
From class CombinerTest, method cleanUp:
/**
 * Recursively removes everything the combiner tests wrote under the test
 * directory, once for the whole test class.
 *
 * @throws IOException if the filesystem cannot be reached or the delete fails
 */
@AfterClass
public static void cleanUp() throws IOException {
  final FileSystem fs = FileSystem.get(new ScenarioConfiguration());
  try {
    fs.delete(new Path(TestConstants.NATIVETASK_COMBINER_TEST_DIR), true);
  } finally {
    // BUGFIX: previously fs leaked when delete() threw; always close it.
    fs.close();
  }
}
Aggregations