Use of org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile in project hadoop by apache.
The class CombinerTest, method startUp.
@Before
public void startUp() throws Exception {
  Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
  Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
  final ScenarioConfiguration conf = new ScenarioConfiguration();
  conf.addcombinerConf();
  this.fs = FileSystem.get(conf);
  this.inputpath = TestConstants.NATIVETASK_COMBINER_TEST_INPUTDIR + "/wordcount";
  if (!fs.exists(new Path(inputpath))) {
    // Generate a ~1 MB sequence file of Text/Text pairs filled with 'a' bytes.
    new TestInputFile(conf.getInt(TestConstants.NATIVETASK_COMBINER_WORDCOUNT_FILESIZE, 1000000),
        Text.class.getName(), Text.class.getName(), conf)
        .createSequenceTestFile(inputpath, 1, (byte) 'a');
  }
  this.nativeoutputpath = TestConstants.NATIVETASK_COMBINER_TEST_NATIVE_OUTPUTDIR + "/nativewordcount";
  this.hadoopoutputpath = TestConstants.NATIVETASK_COMBINER_TEST_NORMAL_OUTPUTDIR + "/normalwordcount";
}
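For context, a minimal sketch of the kind of test body that consumes these paths, mirroring the LargeKVCombinerTest pattern below: run one job with the normal configuration and one with the native configuration against the same input, then compare the outputs. CombinerTest.getJob and ResultVerifier.verify appear elsewhere in these excerpts; the test name and exact body here are an assumption, not the Hadoop source.

@Test
public void testWordCountCombiner() throws Exception {
  // Hypothetical body, assembled only from APIs used in the excerpts on this page.
  final Configuration normalConf = ScenarioConfiguration.getNormalConfiguration();
  final Configuration nativeConf = ScenarioConfiguration.getNativeConfiguration();
  final Job normaljob = CombinerTest.getJob("normalwordcount", normalConf, inputpath, hadoopoutputpath);
  final Job nativejob = CombinerTest.getJob("nativewordcount", nativeConf, inputpath, nativeoutputpath);
  assertTrue(normaljob.waitForCompletion(true));
  assertTrue(nativejob.waitForCompletion(true));
  // The native and normal runs must produce identical output.
  assertTrue(ResultVerifier.verify(nativeoutputpath, hadoopoutputpath));
}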
Use of org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile in project hadoop by apache.
The class LargeKVCombinerTest, method testLargeValueCombiner.
@Test
public void testLargeValueCombiner() throws Exception {
  final Configuration normalConf = ScenarioConfiguration.getNormalConfiguration();
  final Configuration nativeConf = ScenarioConfiguration.getNativeConfiguration();
  normalConf.addResource(TestConstants.COMBINER_CONF_PATH);
  nativeConf.addResource(TestConstants.COMBINER_CONF_PATH);
  // Default maximum KV size: 4 MiB.
  final int default_KVSize_Maximum = 1 << 22;
  final int KVSize_Maximum = normalConf.getInt(TestConstants.NATIVETASK_KVSIZE_MAX_LARGEKV_TEST,
      default_KVSize_Maximum);
  final String inputPath = TestConstants.NATIVETASK_COMBINER_TEST_INPUTDIR + "/largeKV";
  final String nativeOutputPath = TestConstants.NATIVETASK_COMBINER_TEST_NATIVE_OUTPUTDIR + "/nativeLargeKV";
  final String hadoopOutputPath = TestConstants.NATIVETASK_COMBINER_TEST_NORMAL_OUTPUTDIR + "/normalLargeKV";
  final FileSystem fs = FileSystem.get(normalConf);
  // Sweep KV sizes from 64 KiB up to the maximum, multiplying by 4 each pass.
  for (int i = 65536; i <= KVSize_Maximum; i *= 4) {
    int max = i;
    int min = Math.max(i / 4, max - 10);
    LOG.info("===KV Size Test: min size: " + min + ", max size: " + max);
    normalConf.set(TestConstants.NATIVETASK_KVSIZE_MIN, String.valueOf(min));
    normalConf.set(TestConstants.NATIVETASK_KVSIZE_MAX, String.valueOf(max));
    nativeConf.set(TestConstants.NATIVETASK_KVSIZE_MIN, String.valueOf(min));
    nativeConf.set(TestConstants.NATIVETASK_KVSIZE_MAX, String.valueOf(max));
    fs.delete(new Path(inputPath), true);
    new TestInputFile(normalConf.getInt(TestConstants.NATIVETASK_COMBINER_WORDCOUNT_FILESIZE, 1000000),
        IntWritable.class.getName(), Text.class.getName(), normalConf)
        .createSequenceTestFile(inputPath, 1);
    final Job normaljob = CombinerTest.getJob("normalwordcount", normalConf, inputPath, hadoopOutputPath);
    final Job nativejob = CombinerTest.getJob("nativewordcount", nativeConf, inputPath, nativeOutputPath);
    assertTrue(nativejob.waitForCompletion(true));
    assertTrue(normaljob.waitForCompletion(true));
    // Compare native and normal outputs; they must match.
    final boolean compareRet = ResultVerifier.verify(nativeOutputPath, hadoopOutputPath);
    final String reason = "LargeKVCombinerTest failed with min size: " + min
        + ", max size: " + max + ", normal out: " + hadoopOutputPath
        + ", native out: " + nativeOutputPath;
    assertEquals(reason, true, compareRet);
    ResultVerifier.verifyCounters(normaljob, nativejob, true);
  }
  fs.close();
}
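To make the sweep concrete: since i / 4 is always smaller than max - 10 at these sizes, the loop exercises four narrow KV-size bands just below each power-of-four step. A standalone illustration (not part of the test):

// Prints the bands exercised with the 4 MiB default maximum:
// [65526, 65536], [262134, 262144], [1048566, 1048576], [4194294, 4194304]
for (int i = 65536; i <= (1 << 22); i *= 4) {
  int max = i;
  int min = Math.max(i / 4, max - 10);
  System.out.println("band: [" + min + ", " + max + "]");
}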
Use of org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile in project hadoop by apache.
The class NonSortTest, method startUp.
@Before
public void startUp() throws Exception {
  Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
  Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
  final ScenarioConfiguration conf = new ScenarioConfiguration();
  conf.addNonSortTestConf();
  final FileSystem fs = FileSystem.get(conf);
  final Path path = new Path(TestConstants.NATIVETASK_NONSORT_TEST_INPUTDIR);
  if (!fs.exists(path)) {
    int filesize = conf.getInt(TestConstants.NATIVETASK_NONSORTTEST_FILESIZE, 10000000);
    new TestInputFile(filesize, Text.class.getName(), Text.class.getName(), conf)
        .createSequenceTestFile(path.toString());
  }
  fs.close();
}
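The same guard-and-create pattern recurs in each of these @Before methods; a possible refactoring would extract it into one helper. The helper name below is hypothetical and not part of the Hadoop test code:

// ensureSequenceInput is a hypothetical helper, shown only to highlight the
// pattern shared by the setup methods on this page.
private static void ensureSequenceInput(FileSystem fs, Configuration conf, String path,
    int filesize, String keyClass, String valueClass) throws Exception {
  if (!fs.exists(new Path(path))) {
    new TestInputFile(filesize, keyClass, valueClass, conf).createSequenceTestFile(path);
  }
}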
Use of org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile in project hadoop by apache.
The class OldAPICombinerTest, method startUp.
@Before
public void startUp() throws Exception {
  Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
  Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
  final ScenarioConfiguration conf = new ScenarioConfiguration();
  conf.addcombinerConf();
  this.fs = FileSystem.get(conf);
  this.inputpath = TestConstants.NATIVETASK_COMBINER_TEST_INPUTDIR + "/wordcount";
  if (!fs.exists(new Path(inputpath))) {
    new TestInputFile(conf.getInt(TestConstants.NATIVETASK_COMBINER_WORDCOUNT_FILESIZE, 1000000),
        Text.class.getName(), Text.class.getName(), conf)
        .createSequenceTestFile(inputpath, 1, (byte) 'a');
  }
}
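As the name suggests, OldAPICombinerTest targets the old org.apache.hadoop.mapred API, so its job (not shown in this excerpt) would be driven through a JobConf and JobClient rather than the mapreduce.Job used above. A hedged sketch of that style, not the actual test body; outputPath is a placeholder:

// Old-API job submission sketch; assumes imports from org.apache.hadoop.mapred.
JobConf jobConf = new JobConf(conf);
jobConf.setJobName("oldapiwordcount");
FileInputFormat.setInputPaths(jobConf, new Path(inputpath));
FileOutputFormat.setOutputPath(jobConf, new Path(outputPath));
RunningJob job = JobClient.runJob(jobConf);
assertTrue(job.isSuccessful());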
Use of org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile in project hadoop by apache.
The class CompressTest, method startUp.
@Before
public void startUp() throws Exception {
  Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
  Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
  final ScenarioConfiguration conf = new ScenarioConfiguration();
  final FileSystem fs = FileSystem.get(conf);
  final Path path = new Path(TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR);
  // The input is deleted first, so the exists() guard below always passes and
  // the test data is regenerated on every run.
  fs.delete(path, true);
  if (!fs.exists(path)) {
    // hadoopConf is a Configuration field of CompressTest, defined outside this excerpt.
    new TestInputFile(hadoopConf.getInt(TestConstants.NATIVETASK_COMPRESS_FILESIZE, 100000),
        Text.class.getName(), Text.class.getName(), conf)
        .createSequenceTestFile(TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR);
  }
  fs.close();
}
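CompressTest's scenarios hinge on map-output compression settings. As a point of reference, the standard Hadoop keys for switching it on look like this; the test's own configuration files may set them differently, and the codec choice here is illustrative:

// Standard Hadoop configuration keys; SnappyCodec is one example choice.
Configuration conf = ScenarioConfiguration.getNativeConfiguration();
conf.setBoolean("mapreduce.map.output.compress", true);
conf.set("mapreduce.map.output.compress.codec",
    "org.apache.hadoop.io.compress.SnappyCodec");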