
Example 6 with Reporter

Use of org.apache.hadoop.mapred.Reporter in project hbase by apache.

From the class TestGroupingTableMap, method shouldCreateNewKeyAlthoughExtraKey.

@Test
@SuppressWarnings({ "deprecation", "unchecked" })
public void shouldCreateNewKeyAlthoughExtraKey() throws Exception {
    GroupingTableMap gTableMap = null;
    try {
        Result result = mock(Result.class);
        Reporter reporter = mock(Reporter.class);
        gTableMap = new GroupingTableMap();
        Configuration cfg = new Configuration();
        cfg.set(GroupingTableMap.GROUP_COLUMNS, "familyA:qualifierA familyB:qualifierB");
        JobConf jobConf = new JobConf(cfg);
        gTableMap.configure(jobConf);
        byte[] row = {};
        List<Cell> keyValues = ImmutableList.<Cell>of(
            new KeyValue(row, "familyA".getBytes(), "qualifierA".getBytes(), Bytes.toBytes("1111")),
            new KeyValue(row, "familyB".getBytes(), "qualifierB".getBytes(), Bytes.toBytes("2222")),
            new KeyValue(row, "familyC".getBytes(), "qualifierC".getBytes(), Bytes.toBytes("3333")));
        when(result.listCells()).thenReturn(keyValues);
        OutputCollector<ImmutableBytesWritable, Result> outputCollectorMock = mock(OutputCollector.class);
        gTableMap.map(null, result, outputCollectorMock, reporter);
        verify(result).listCells();
        verify(outputCollectorMock, times(1)).collect(any(ImmutableBytesWritable.class), any(Result.class));
        verifyNoMoreInteractions(outputCollectorMock);
    } finally {
        if (gTableMap != null)
            gTableMap.close();
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) Configuration(org.apache.hadoop.conf.Configuration) Reporter(org.apache.hadoop.mapred.Reporter) JobConf(org.apache.hadoop.mapred.JobConf) Cell(org.apache.hadoop.hbase.Cell) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)
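
When the Reporter itself is never verified, as in this test, the Mockito mock could also be replaced with the old mapred API's built-in no-op instance. A minimal sketch of that variant (an illustrative alternative, not the actual HBase test code):

// Reporter.NULL is the framework's no-op Reporter; it is enough when the test
// only verifies the OutputCollector and never inspects progress or counters.
gTableMap.map(null, result, outputCollectorMock, Reporter.NULL);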

Example 7 with Reporter

Use of org.apache.hadoop.mapred.Reporter in project hbase by apache.

From the class TestIdentityTableMap, method shouldCollectPredefinedTimes.

@Test
@SuppressWarnings({ "deprecation", "unchecked" })
public void shouldCollectPredefinedTimes() throws IOException {
    int recordNumber = 999;
    Result resultMock = mock(Result.class);
    IdentityTableMap identityTableMap = null;
    try {
        Reporter reporterMock = mock(Reporter.class);
        identityTableMap = new IdentityTableMap();
        ImmutableBytesWritable bytesWritableMock = mock(ImmutableBytesWritable.class);
        OutputCollector<ImmutableBytesWritable, Result> outputCollectorMock = mock(OutputCollector.class);
        for (int i = 0; i < recordNumber; i++) {
            identityTableMap.map(bytesWritableMock, resultMock, outputCollectorMock, reporterMock);
        }
        verify(outputCollectorMock, times(recordNumber)).collect(Mockito.any(ImmutableBytesWritable.class), Mockito.any(Result.class));
    } finally {
        if (identityTableMap != null)
            identityTableMap.close();
    }
}
Also used : ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) Reporter(org.apache.hadoop.mapred.Reporter) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)
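
For context, the behavior exercised here is plain pass-through: an identity table map forwards each (row key, Result) pair to the collector unchanged, which is why collect() is expected exactly once per map() call. A rough sketch of that identity behavior (a simplified illustration, not the verbatim HBase source):

// Forward the incoming row key and Result to the output collector unchanged;
// the Reporter parameter is accepted but not used.
public void map(ImmutableBytesWritable key, Result value,
    OutputCollector<ImmutableBytesWritable, Result> output, Reporter reporter) throws IOException {
    output.collect(key, value);
}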

Example 8 with Reporter

Use of org.apache.hadoop.mapred.Reporter in project hbase by apache.

From the class TestRowCounter, method shouldRegInReportEveryIncomingRow.

@Test
@SuppressWarnings({ "deprecation", "unchecked" })
public void shouldRegInReportEveryIncomingRow() throws IOException {
    int iterationNumber = 999;
    RowCounter.RowCounterMapper mapper = new RowCounter.RowCounterMapper();
    Reporter reporter = mock(Reporter.class);
    for (int i = 0; i < iterationNumber; i++) {
        mapper.map(mock(ImmutableBytesWritable.class), mock(Result.class), mock(OutputCollector.class), reporter);
    }
    Mockito.verify(reporter, times(iterationNumber)).incrCounter(any(Enum.class), anyInt());
}
Also used : RowCounterMapper(org.apache.hadoop.hbase.mapred.RowCounter.RowCounterMapper) Reporter(org.apache.hadoop.mapred.Reporter) RowCounterMapper(org.apache.hadoop.hbase.mapred.RowCounter.RowCounterMapper) Test(org.junit.Test)
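
The pattern verified above is the standard old-API counter idiom: the framework hands a Reporter to every map() call, and the mapper bumps an enum-keyed counter per record. A self-contained sketch of that idiom, using hypothetical names (CountingMapper, Counters.ROWS) rather than the real RowCounterMapper internals:

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class CountingMapper extends MapReduceBase
    implements Mapper<LongWritable, Text, Text, LongWritable> {

    // Counters are keyed by an enum; the framework groups them under the enum's class name.
    public enum Counters { ROWS }

    @Override
    public void map(LongWritable key, Text value,
        OutputCollector<Text, LongWritable> output, Reporter reporter) throws IOException {
        // Register every incoming record with the job's counters,
        // which is exactly what the mock-based test above asserts.
        reporter.incrCounter(Counters.ROWS, 1);
    }
}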

Example 9 with Reporter

Use of org.apache.hadoop.mapred.Reporter in project hbase by apache.

From the class TestTableSnapshotInputFormat, method verifyWithMockedMapReduce.

private void verifyWithMockedMapReduce(JobConf job, int numRegions, int expectedNumSplits, byte[] startRow, byte[] stopRow) throws IOException, InterruptedException {
    TableSnapshotInputFormat tsif = new TableSnapshotInputFormat();
    InputSplit[] splits = tsif.getSplits(job, 0);
    Assert.assertEquals(expectedNumSplits, splits.length);
    HBaseTestingUtility.SeenRowTracker rowTracker = new HBaseTestingUtility.SeenRowTracker(startRow, stopRow);
    for (int i = 0; i < splits.length; i++) {
        // validate input split
        InputSplit split = splits[i];
        Assert.assertTrue(split instanceof TableSnapshotInputFormat.TableSnapshotRegionSplit);
        // validate record reader
        OutputCollector collector = mock(OutputCollector.class);
        Reporter reporter = mock(Reporter.class);
        RecordReader<ImmutableBytesWritable, Result> rr = tsif.getRecordReader(split, job, reporter);
        // validate we can read all the data back
        ImmutableBytesWritable key = rr.createKey();
        Result value = rr.createValue();
        while (rr.next(key, value)) {
            verifyRowFromMap(key, value);
            rowTracker.addRow(key.copyBytes());
        }
        rr.close();
    }
    // validate all rows are seen
    rowTracker.validate();
}
Also used : OutputCollector(org.apache.hadoop.mapred.OutputCollector) ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) Reporter(org.apache.hadoop.mapred.Reporter) Result(org.apache.hadoop.hbase.client.Result) HBaseTestingUtility(org.apache.hadoop.hbase.HBaseTestingUtility) InputSplit(org.apache.hadoop.mapred.InputSplit)

Example 10 with Reporter

Use of org.apache.hadoop.mapred.Reporter in project hive by apache.

From the class StaticPartitionFileRecordWriterContainer, method close.

@Override
public void close(TaskAttemptContext context) throws IOException, InterruptedException {
    Reporter reporter = InternalUtil.createReporter(context);
    getBaseRecordWriter().close(reporter);
}
Also used : Reporter(org.apache.hadoop.mapred.Reporter)
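
Here the Hive-internal helper InternalUtil.createReporter adapts the new-API TaskAttemptContext into the old mapred Reporter expected by the base record writer's close(). If no progress or counter updates are needed during close(), the old API's built-in no-op instance could stand in; a minimal hypothetical variant:

// Hypothetical variant: Reporter.NULL is the mapred API's built-in no-op Reporter.
getBaseRecordWriter().close(Reporter.NULL);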

Aggregations

Reporter (org.apache.hadoop.mapred.Reporter): 23
JobConf (org.apache.hadoop.mapred.JobConf): 13
Test (org.junit.Test): 12
FileSystem (org.apache.hadoop.fs.FileSystem): 6
Result (org.apache.hadoop.hbase.client.Result): 5
ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable): 5
InputSplit (org.apache.hadoop.mapred.InputSplit): 5
Path (org.apache.hadoop.fs.Path): 4
CompressionCodec (org.apache.hadoop.io.compress.CompressionCodec): 4
Counter (org.apache.hadoop.mapred.Counters.Counter): 4
File (java.io.File): 3
IOException (java.io.IOException): 3
Configuration (org.apache.hadoop.conf.Configuration): 3
LocalDirAllocator (org.apache.hadoop.fs.LocalDirAllocator): 3
Cell (org.apache.hadoop.hbase.Cell): 3
KeyValue (org.apache.hadoop.hbase.KeyValue): 3
LongWritable (org.apache.hadoop.io.LongWritable): 3
Text (org.apache.hadoop.io.Text): 3
MapOutputFile (org.apache.hadoop.mapred.MapOutputFile): 3
ShuffleConsumerPlugin (org.apache.hadoop.mapred.ShuffleConsumerPlugin): 3