Example 1 with Context

Use of org.apache.hadoop.mapreduce.Mapper.Context in project hadoop by apache.

The class TestGridMixClasses, method testLoadMapper.

/*
 * Test LoadMapper: run() should write one record to the writer for each
 * configured reducer (NUM_REDUCES = 2, so two records are expected).
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test(timeout = 10000)
public void testLoadMapper() throws Exception {
    Configuration conf = new Configuration();
    conf.setInt(JobContext.NUM_REDUCES, 2);
    CompressionEmulationUtil.setCompressionEmulationEnabled(conf, true);
    conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);
    TaskAttemptID taskId = new TaskAttemptID();
    RecordReader<NullWritable, GridmixRecord> reader = new FakeRecordReader();
    LoadRecordGkGrWriter writer = new LoadRecordGkGrWriter();
    OutputCommitter committer = new CustomOutputCommitter();
    StatusReporter reporter = new TaskAttemptContextImpl.DummyReporter();
    LoadSplit split = getLoadSplit();
    MapContext<NullWritable, GridmixRecord, GridmixKey, GridmixRecord> mapContext =
        new MapContextImpl<NullWritable, GridmixRecord, GridmixKey, GridmixRecord>(
            conf, taskId, reader, writer, committer, reporter, split);
    // wrap the MapContext in a Mapper.Context for the mapper under test
    Context ctx = new WrappedMapper<NullWritable, GridmixRecord, GridmixKey, GridmixRecord>()
            .getMapContext(mapContext);
    reader.initialize(split, ctx);
    ctx.getConfiguration().setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);
    CompressionEmulationUtil.setCompressionEmulationEnabled(ctx.getConfiguration(), true);
    LoadJob.LoadMapper mapper = new LoadJob.LoadMapper();
    // run() drives the full lifecycle: setup(), map() per input record, cleanup()
    mapper.run(ctx);
    Map<GridmixKey, GridmixRecord> data = writer.getData();
    // check result
    assertEquals(2, data.size());
}
Also used: Context(org.apache.hadoop.mapreduce.Mapper.Context) ReduceContext(org.apache.hadoop.mapreduce.ReduceContext) MapContext(org.apache.hadoop.mapreduce.MapContext) TaskAttemptContext(org.apache.hadoop.mapreduce.TaskAttemptContext) JobContext(org.apache.hadoop.mapred.JobContext) CustomOutputCommitter(org.apache.hadoop.CustomOutputCommitter) OutputCommitter(org.apache.hadoop.mapreduce.OutputCommitter) Configuration(org.apache.hadoop.conf.Configuration) MapContextImpl(org.apache.hadoop.mapreduce.task.MapContextImpl) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) DummyReporter(org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl.DummyReporter) NullWritable(org.apache.hadoop.io.NullWritable) StatusReporter(org.apache.hadoop.mapreduce.StatusReporter) Test(org.junit.Test)
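
The wiring in Example 1 is reusable beyond GridMix: a real MapContextImpl built over stub reader/writer objects and wrapped by WrappedMapper can drive any Mapper through its full lifecycle without a cluster. Below is a minimal sketch of that harness; MapperHarness, InMemoryReader, InMemoryWriter, and runMapper are hypothetical names invented here, while MapContextImpl, WrappedMapper, and TaskAttemptContextImpl.DummyReporter are the same Hadoop classes the test above uses.

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MapContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.lib.map.WrappedMapper;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

public class MapperHarness {

    // Feeds a fixed set of key/value pairs to the mapper under test.
    static class InMemoryReader extends RecordReader<LongWritable, Text> {
        private final Iterator<Map.Entry<Long, String>> it;
        private LongWritable key;
        private Text value;

        InMemoryReader(Map<Long, String> records) {
            this.it = records.entrySet().iterator();
        }

        @Override public void initialize(InputSplit split, TaskAttemptContext ctx) { }
        @Override public boolean nextKeyValue() {
            if (!it.hasNext()) {
                return false;
            }
            Map.Entry<Long, String> e = it.next();
            key = new LongWritable(e.getKey());
            value = new Text(e.getValue());
            return true;
        }
        @Override public LongWritable getCurrentKey() { return key; }
        @Override public Text getCurrentValue() { return value; }
        @Override public float getProgress() { return 0f; }
        @Override public void close() { }
    }

    // Collects mapper output in memory so the test can assert on it.
    static class InMemoryWriter extends RecordWriter<Text, LongWritable> {
        final List<String> records = new ArrayList<>();
        @Override public void write(Text k, LongWritable v) { records.add(k + "=" + v); }
        @Override public void close(TaskAttemptContext ctx) { }
    }

    static List<String> runMapper(Mapper<LongWritable, Text, Text, LongWritable> mapper,
                                  Map<Long, String> input) throws Exception {
        Configuration conf = new Configuration();
        InMemoryReader reader = new InMemoryReader(input);
        InMemoryWriter writer = new InMemoryWriter();
        // committer and split may be null only because the mapper under test
        // never touches them; supply stubs (as the examples here do) otherwise
        MapContext<LongWritable, Text, Text, LongWritable> mapContext =
            new MapContextImpl<LongWritable, Text, Text, LongWritable>(
                conf, new TaskAttemptID(), reader, writer,
                null, new TaskAttemptContextImpl.DummyReporter(), null);
        Mapper<LongWritable, Text, Text, LongWritable>.Context ctx =
            new WrappedMapper<LongWritable, Text, Text, LongWritable>()
                .getMapContext(mapContext);
        // run() drives setup(), one map() call per record, then cleanup()
        mapper.run(ctx);
        return writer.records;
    }
}

A test would call runMapper(new MyMapper(), input) for some hypothetical MyMapper and assert on the returned records, much as testLoadMapper asserts on writer.getData().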

Example 2 with Context

Use of org.apache.hadoop.mapreduce.Mapper.Context in project hbase by apache.

The class TestImportExport, method testKeyValueImporter.

/**
 * Test the map() method of Import.CellImporter.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testKeyValueImporter() throws Throwable {
    CellImporter importer = new CellImporter();
    Configuration configuration = new Configuration();
    Context ctx = mock(Context.class);
    when(ctx.getConfiguration()).thenReturn(configuration);
    doAnswer(new Answer<Void>() {

        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            // argument 0 is the map output key, argument 1 is the emitted cell
            ImmutableBytesWritable mapKey = (ImmutableBytesWritable) invocation.getArgument(0);
            MapReduceExtendedCell cell = (MapReduceExtendedCell) invocation.getArgument(1);
            assertEquals("Key", Bytes.toString(mapKey.get()));
            assertEquals("row", Bytes.toString(CellUtil.cloneRow(cell)));
            return null;
        }
    }).when(ctx).write(any(), any());
    importer.setup(ctx);
    Result value = mock(Result.class);
    KeyValue[] keys = {
        new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"),
            Bytes.toBytes("qualifier"), Bytes.toBytes("value")),
        new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"),
            Bytes.toBytes("qualifier"), Bytes.toBytes("value1"))
    };
    when(value.rawCells()).thenReturn(keys);
    importer.map(new ImmutableBytesWritable(Bytes.toBytes("Key")), value, ctx);
}
Also used: Context(org.apache.hadoop.mapreduce.Mapper.Context) ObserverContext(org.apache.hadoop.hbase.coprocessor.ObserverContext) ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) MapReduceExtendedCell(org.apache.hadoop.hbase.util.MapReduceExtendedCell) KeyValue(org.apache.hadoop.hbase.KeyValue) Configuration(org.apache.hadoop.conf.Configuration) Result(org.apache.hadoop.hbase.client.Result) CellImporter(org.apache.hadoop.hbase.mapreduce.Import.CellImporter) InvocationOnMock(org.mockito.invocation.InvocationOnMock) Test(org.junit.Test)
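
A variation on the doAnswer(...) pattern in Example 2: rather than asserting inside the stubbed write(), a test can record every written pair with Mockito ArgumentCaptor objects and assert after the call. The sketch below assumes a hypothetical TokenCountMapper invented for illustration; mock, verify, times, and ArgumentCaptor are standard Mockito API, used here just as the HBase test uses mock and doAnswer.

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.junit.Test;
import org.mockito.ArgumentCaptor;

public class CaptorStyleMapperTest {

    // Hypothetical mapper under test: emits (token, 1) for each whitespace-separated
    // word. map() is widened to public so the test can invoke it directly.
    static class TokenCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        @Override
        public void map(LongWritable key, Text value, Context context)
                throws java.io.IOException, InterruptedException {
            for (String token : value.toString().split("\\s+")) {
                context.write(new Text(token), new IntWritable(1));
            }
        }
    }

    @SuppressWarnings({ "unchecked", "rawtypes" })
    @Test
    public void emitsOneRecordPerToken() throws Exception {
        // mock the Mapper.Context, exactly as the HBase test above does
        Mapper<LongWritable, Text, Text, IntWritable>.Context ctx = mock(Mapper.Context.class);

        new TokenCountMapper().map(new LongWritable(0), new Text("hello world"), ctx);

        // capture every (key, value) pair the mapper wrote, then assert
        ArgumentCaptor<Text> keys = ArgumentCaptor.forClass(Text.class);
        ArgumentCaptor<IntWritable> values = ArgumentCaptor.forClass(IntWritable.class);
        verify(ctx, times(2)).write(keys.capture(), values.capture());
        assertEquals("hello", keys.getAllValues().get(0).toString());
        assertEquals("world", keys.getAllValues().get(1).toString());
        assertEquals(1, values.getAllValues().get(0).get());
    }
}

Captors keep the assertions out of the answer callback, which makes failures easier to read: a wrong value fails an assertEquals in the test body instead of surfacing as an exception thrown from inside the mock.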

Example 3 with Context

Use of org.apache.hadoop.mapreduce.Mapper.Context in project hadoop by apache.

The class TestGridMixClasses, method testSleepMapper.

/*
 * Test SleepMapper: map() should sleep until the absolute timestamp
 * carried by the key.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test(timeout = 30000)
public void testSleepMapper() throws Exception {
    SleepJob.SleepMapper test = new SleepJob.SleepMapper();
    Configuration conf = new Configuration();
    conf.setInt(JobContext.NUM_REDUCES, 2);
    CompressionEmulationUtil.setCompressionEmulationEnabled(conf, true);
    conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);
    TaskAttemptID taskId = new TaskAttemptID();
    FakeRecordLLReader reader = new FakeRecordLLReader();
    LoadRecordGkNullWriter writer = new LoadRecordGkNullWriter();
    OutputCommitter committer = new CustomOutputCommitter();
    StatusReporter reporter = new TaskAttemptContextImpl.DummyReporter();
    SleepSplit split = getSleepSplit();
    MapContext<LongWritable, LongWritable, GridmixKey, NullWritable> mapContext =
        new MapContextImpl<LongWritable, LongWritable, GridmixKey, NullWritable>(
            conf, taskId, reader, writer, committer, reporter, split);
    // wrap the MapContext so SleepMapper sees a Mapper.Context
    Context context = new WrappedMapper<LongWritable, LongWritable, GridmixKey, NullWritable>()
            .getMapContext(mapContext);
    long start = System.currentTimeMillis();
    LOG.info("start:" + start);
    LongWritable key = new LongWritable(start + 2000);
    LongWritable value = new LongWritable(start + 2000);
    // map() should sleep roughly 2 seconds, until the timestamp carried by the key
    test.map(key, value, context);
    LOG.info("finish:" + System.currentTimeMillis());
    assertTrue(System.currentTimeMillis() >= (start + 2000));
    test.cleanup(context);
    assertEquals(1, writer.getData().size());
}
Also used: Context(org.apache.hadoop.mapreduce.Mapper.Context) ReduceContext(org.apache.hadoop.mapreduce.ReduceContext) MapContext(org.apache.hadoop.mapreduce.MapContext) TaskAttemptContext(org.apache.hadoop.mapreduce.TaskAttemptContext) JobContext(org.apache.hadoop.mapred.JobContext) CustomOutputCommitter(org.apache.hadoop.CustomOutputCommitter) OutputCommitter(org.apache.hadoop.mapreduce.OutputCommitter) Configuration(org.apache.hadoop.conf.Configuration) MapContextImpl(org.apache.hadoop.mapreduce.task.MapContextImpl) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) DummyReporter(org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl.DummyReporter) NullWritable(org.apache.hadoop.io.NullWritable) LongWritable(org.apache.hadoop.io.LongWritable) SleepSplit(org.apache.hadoop.mapred.gridmix.SleepJob.SleepSplit) StatusReporter(org.apache.hadoop.mapreduce.StatusReporter) Test(org.junit.Test)
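
The contract this test exercises is that the key carries an absolute wake-up time in milliseconds and map() sleeps until the clock reaches it, which is why the assertion compares System.currentTimeMillis() against start + 2000. The sketch below is a hypothetical reconstruction of just that contract, not the actual GridMix SleepMapper (which, as the final assertion shows, also emits a record for the reducers during cleanup()).

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical mapper illustrating the sleep-until-key-timestamp contract.
public class SleepUntilKeyMapper
        extends Mapper<LongWritable, LongWritable, LongWritable, NullWritable> {

    @Override
    protected void map(LongWritable key, LongWritable value, Context context)
            throws java.io.IOException, InterruptedException {
        // key.get() is an absolute timestamp; sleep off whatever time remains
        long remaining = key.get() - System.currentTimeMillis();
        if (remaining > 0) {
            Thread.sleep(remaining);
        }
        context.write(key, NullWritable.get());
    }
}

Driving it through a WrappedMapper harness or a mocked Context, as in the earlier examples, reproduces the timing assertion above.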

Aggregations

Configuration (org.apache.hadoop.conf.Configuration): 3 uses
Context (org.apache.hadoop.mapreduce.Mapper.Context): 3 uses
Test (org.junit.Test): 3 uses
CustomOutputCommitter (org.apache.hadoop.CustomOutputCommitter): 2 uses
NullWritable (org.apache.hadoop.io.NullWritable): 2 uses
JobContext (org.apache.hadoop.mapred.JobContext): 2 uses
MapContext (org.apache.hadoop.mapreduce.MapContext): 2 uses
OutputCommitter (org.apache.hadoop.mapreduce.OutputCommitter): 2 uses
ReduceContext (org.apache.hadoop.mapreduce.ReduceContext): 2 uses
StatusReporter (org.apache.hadoop.mapreduce.StatusReporter): 2 uses
TaskAttemptContext (org.apache.hadoop.mapreduce.TaskAttemptContext): 2 uses
TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID): 2 uses
MapContextImpl (org.apache.hadoop.mapreduce.task.MapContextImpl): 2 uses
DummyReporter (org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl.DummyReporter): 2 uses
KeyValue (org.apache.hadoop.hbase.KeyValue): 1 use
Result (org.apache.hadoop.hbase.client.Result): 1 use
ObserverContext (org.apache.hadoop.hbase.coprocessor.ObserverContext): 1 use
ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable): 1 use
CellImporter (org.apache.hadoop.hbase.mapreduce.Import.CellImporter): 1 use
MapReduceExtendedCell (org.apache.hadoop.hbase.util.MapReduceExtendedCell): 1 use