
Example 21 with MapContextImpl

Use of org.apache.hadoop.mapreduce.task.MapContextImpl in project hadoop by apache.

From class TestDistCacheEmulation, method validateSetupGenDC.

/**
   * Validate setupGenerateDistCacheData by validating
   * <ul>
   *   <li>permissions of the distributed cache directories, and</li>
   *   <li>content of the generated sequence file. This includes validation of
   *       dist cache file paths and their file sizes.</li>
   * </ul>
   */
private void validateSetupGenDC(Configuration jobConf, long[] sortedFileSizes) throws IOException, InterruptedException {
    // build things needed for validation
    long sumOfFileSizes = 0;
    for (int i = 0; i < sortedFileSizes.length; i++) {
        sumOfFileSizes += sortedFileSizes[i];
    }
    FileSystem fs = FileSystem.get(jobConf);
    assertEquals("Number of distributed cache files to be generated is wrong.", sortedFileSizes.length, jobConf.getInt(GenerateDistCacheData.GRIDMIX_DISTCACHE_FILE_COUNT, -1));
    assertEquals("Total size of dist cache files to be generated is wrong.", sumOfFileSizes, jobConf.getLong(GenerateDistCacheData.GRIDMIX_DISTCACHE_BYTE_COUNT, -1));
    Path filesListFile = new Path(jobConf.get(GenerateDistCacheData.GRIDMIX_DISTCACHE_FILE_LIST));
    FileStatus stat = fs.getFileStatus(filesListFile);
    assertEquals("Wrong permissions of dist Cache files list file " + filesListFile, new FsPermission((short) 0644), stat.getPermission());
    InputSplit split = new FileSplit(filesListFile, 0, stat.getLen(), (String[]) null);
    TaskAttemptContext taskContext = MapReduceTestUtil.createDummyMapTaskAttemptContext(jobConf);
    RecordReader<LongWritable, BytesWritable> reader = new GenerateDistCacheData.GenDCDataFormat().createRecordReader(split, taskContext);
    MapContext<LongWritable, BytesWritable, NullWritable, BytesWritable> mapContext = new MapContextImpl<LongWritable, BytesWritable, NullWritable, BytesWritable>(jobConf, taskContext.getTaskAttemptID(), reader, null, null, MapReduceTestUtil.createDummyReporter(), split);
    reader.initialize(split, mapContext);
    // start validating setupGenerateDistCacheData
    doValidateSetupGenDC(reader, fs, sortedFileSizes);
}
Also used: Path (org.apache.hadoop.fs.Path), FileStatus (org.apache.hadoop.fs.FileStatus), MapContextImpl (org.apache.hadoop.mapreduce.task.MapContextImpl), TaskAttemptContext (org.apache.hadoop.mapreduce.TaskAttemptContext), BytesWritable (org.apache.hadoop.io.BytesWritable), FileSplit (org.apache.hadoop.mapreduce.lib.input.FileSplit), NullWritable (org.apache.hadoop.io.NullWritable), FileSystem (org.apache.hadoop.fs.FileSystem), FsPermission (org.apache.hadoop.fs.permission.FsPermission), LongWritable (org.apache.hadoop.io.LongWritable), InputSplit (org.apache.hadoop.mapreduce.InputSplit)
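The same pattern can be reused outside Gridmix to drive any RecordReader without running a real MapReduce task: build a FileSplit over the input, create a dummy TaskAttemptContext, and hand the reader to a MapContextImpl before calling initialize(). The following is a minimal, self-contained sketch of that pattern; the class name, the local demo file path, and the choice of TextInputFormat are illustrative assumptions, not taken from the example above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MapContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

public class MapContextImplReaderExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);

        // Write a small local input file for the demo (path is an illustrative assumption).
        Path input = new Path("file:///tmp/mapcontextimpl-demo.txt");
        try (FSDataOutputStream out = fs.create(input, true)) {
            out.writeBytes("first line\nsecond line\n");
        }

        // One split covering the whole file, and a dummy task attempt to run the reader under.
        FileStatus stat = fs.getFileStatus(input);
        InputSplit split = new FileSplit(input, 0, stat.getLen(), (String[]) null);
        TaskAttemptID id = new TaskAttemptID("demo", 0, TaskType.MAP, 0, 0);
        TaskAttemptContext attemptContext = new TaskAttemptContextImpl(conf, id);

        // Create the reader, then give it a MapContextImpl, mirroring the test above.
        RecordReader<LongWritable, Text> reader =
            new TextInputFormat().createRecordReader(split, attemptContext);
        MapContext<LongWritable, Text, NullWritable, Text> mapContext =
            new MapContextImpl<LongWritable, Text, NullWritable, Text>(
                conf, id, reader, null, null, new TaskAttemptContextImpl.DummyReporter(), split);
        reader.initialize(split, mapContext);

        // Drive the reader directly, as a test would, without a real MapReduce task.
        while (reader.nextKeyValue()) {
            System.out.println(reader.getCurrentKey() + "\t" + reader.getCurrentValue());
        }
        reader.close();
    }
}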

Example 22 with MapContextImpl

Use of org.apache.hadoop.mapreduce.task.MapContextImpl in project hadoop by apache.

From class TestResourceUsageEmulators, method testResourceUsageMatcherRunner.

/**
   * Test {@link LoadJob.ResourceUsageMatcherRunner}.
   */
@Test
@SuppressWarnings("unchecked")
public void testResourceUsageMatcherRunner() throws Exception {
    Configuration conf = new Configuration();
    FakeProgressive progress = new FakeProgressive();
    // set the resource calculator plugin
    conf.setClass(TTConfig.TT_RESOURCE_CALCULATOR_PLUGIN, DummyResourceCalculatorPlugin.class, ResourceCalculatorPlugin.class);
    // set the resources
    // set the resource implementation class
    conf.setClass(ResourceUsageMatcher.RESOURCE_USAGE_EMULATION_PLUGINS, TestResourceUsageEmulatorPlugin.class, ResourceUsageEmulatorPlugin.class);
    long currentTime = System.currentTimeMillis();
    // initialize the matcher class
    TaskAttemptID id = new TaskAttemptID("test", 1, TaskType.MAP, 1, 1);
    StatusReporter reporter = new DummyReporter(progress);
    TaskInputOutputContext context = new MapContextImpl(conf, id, null, null, null, reporter, null);
    FakeResourceUsageMatcherRunner matcher = new FakeResourceUsageMatcherRunner(context, null);
    // check if the matcher initialized the plugin
    String identifier = TestResourceUsageEmulatorPlugin.DEFAULT_IDENTIFIER;
    long initTime = TestResourceUsageEmulatorPlugin.testInitialization(identifier, conf);
    assertTrue("ResourceUsageMatcherRunner failed to initialize the" + " configured plugin", initTime > currentTime);
    // check the progress
    assertEquals("Progress mismatch in ResourceUsageMatcherRunner", 0, progress.getProgress(), 0D);
    // call match() and check progress
    progress.setProgress(0.01f);
    currentTime = System.currentTimeMillis();
    matcher.test();
    long emulateTime = TestResourceUsageEmulatorPlugin.testEmulation(identifier, conf);
    assertTrue("ProgressBasedResourceUsageMatcher failed to load and emulate" + " the configured plugin", emulateTime > currentTime);
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), MapContextImpl (org.apache.hadoop.mapreduce.task.MapContextImpl), TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID), TaskInputOutputContext (org.apache.hadoop.mapreduce.TaskInputOutputContext), StatusReporter (org.apache.hadoop.mapreduce.StatusReporter), Test (org.junit.Test)
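As this example shows, when the code under test only needs a TaskInputOutputContext for its Configuration and status/counter plumbing, the reader, writer, committer, and split arguments of MapContextImpl can all be null. Below is a minimal sketch of that stubbing pattern; the MapContextImplStubExample and TrackingReporter class names and the demo property name are illustrative assumptions, not part of the Hadoop tests above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.StatusReporter;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskInputOutputContext;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.task.MapContextImpl;

public class MapContextImplStubExample {

    /** Throwaway reporter backing counters with an in-memory Counters instance. */
    static class TrackingReporter extends StatusReporter {
        private final Counters counters = new Counters();
        @Override public Counter getCounter(Enum<?> name) { return counters.findCounter(name); }
        @Override public Counter getCounter(String group, String name) { return counters.findCounter(group, name); }
        @Override public void progress() { }
        @Override public float getProgress() { return 0f; }
        @Override public void setStatus(String status) { }
    }

    /** Builds a context whose only live parts are the Configuration and the reporter. */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    static TaskInputOutputContext newStubContext(Configuration conf) {
        TaskAttemptID id = new TaskAttemptID("test", 1, TaskType.MAP, 1, 1);
        // Reader, writer, committer, and split are unused by the code under test, so pass null.
        return new MapContextImpl(conf, id, null, null, null, new TrackingReporter(), null);
    }

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("example.test.property", "demo-value"); // illustrative property name
        TaskInputOutputContext context = newStubContext(conf);
        System.out.println(context.getConfiguration().get("example.test.property"));
        System.out.println(context.getTaskAttemptID());
    }
}

If the test does not need to inspect counters, the stock TaskAttemptContextImpl.DummyReporter can be passed instead of a custom StatusReporter.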

Aggregations

MapContextImpl (org.apache.hadoop.mapreduce.task.MapContextImpl): 22 usages
TaskAttemptContext (org.apache.hadoop.mapreduce.TaskAttemptContext): 20 usages
InputSplit (org.apache.hadoop.mapreduce.InputSplit): 14 usages
Test (org.junit.Test): 13 usages
LongWritable (org.apache.hadoop.io.LongWritable): 12 usages
Job (org.apache.hadoop.mapreduce.Job): 11 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 10 usages
Configuration (org.apache.hadoop.conf.Configuration): 9 usages
Path (org.apache.hadoop.fs.Path): 9 usages
Random (java.util.Random): 8 usages
Text (org.apache.hadoop.io.Text): 6 usages
BitSet (java.util.BitSet): 5 usages
IOException (java.io.IOException): 4 usages
IntWritable (org.apache.hadoop.io.IntWritable): 4 usages
ArrayList (java.util.ArrayList): 3 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 3 usages
NullWritable (org.apache.hadoop.io.NullWritable): 3 usages
OutputCommitter (org.apache.hadoop.mapreduce.OutputCommitter): 3 usages
StatusReporter (org.apache.hadoop.mapreduce.StatusReporter): 3 usages
TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID): 3 usages