Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project hbase by apache.
From the class TestIdentityTableMap, method shouldCollectPredefinedTimes.
@Test
@SuppressWarnings({ "deprecation", "unchecked" })
public void shouldCollectPredefinedTimes() throws IOException {
  int recordNumber = 999;
  Result resultMock = mock(Result.class);
  IdentityTableMap identityTableMap = null;
  try {
    Reporter reporterMock = mock(Reporter.class);
    identityTableMap = new IdentityTableMap();
    ImmutableBytesWritable bytesWritableMock = mock(ImmutableBytesWritable.class);
    OutputCollector<ImmutableBytesWritable, Result> outputCollectorMock =
        mock(OutputCollector.class);
    // Feed the mapper the same mocked record the predefined number of times...
    for (int i = 0; i < recordNumber; i++) {
      identityTableMap.map(bytesWritableMock, resultMock, outputCollectorMock, reporterMock);
    }
    // ...and verify the identity mapper forwarded every record to the collector.
    verify(outputCollectorMock, times(recordNumber))
        .collect(Mockito.any(ImmutableBytesWritable.class), Mockito.any(Result.class));
  } finally {
    if (identityTableMap != null) {
      identityTableMap.close();
    }
  }
}
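The test above only ever touches ImmutableBytesWritable through mocks. For context, here is a minimal standalone sketch of the class's everyday API; the calls used are part of the public HBase API, but the snippet itself is illustrative and not taken from the test:

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;

public class ImmutableBytesWritableDemo {
  public static void main(String[] args) {
    // Wrap an existing byte array; the wrapper does not copy it.
    ImmutableBytesWritable key = new ImmutableBytesWritable(Bytes.toBytes("row-1"));

    // get() exposes the backing array, so honor getOffset()/getLength();
    // copyBytes() returns a fresh, exact-length copy that is safe to retain.
    byte[] backing = key.get();
    byte[] copy = key.copyBytes();

    System.out.println(Bytes.toString(backing, key.getOffset(), key.getLength()));
    System.out.println(Bytes.toString(copy));
  }
}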
Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project hbase by apache.
From the class TestTableInputFormat, method runTestMapred.
/**
 * Create table data and run tests on the specified table using the
 * o.a.h.hbase.mapred API.
 *
 * @param table the table to read back
 * @throws IOException if the record reader fails
 */
static void runTestMapred(Table table) throws IOException {
  org.apache.hadoop.hbase.mapred.TableRecordReader trr =
      new org.apache.hadoop.hbase.mapred.TableRecordReader();
  trr.setStartRow("aaa".getBytes());
  trr.setEndRow("zzz".getBytes());
  trr.setHTable(table);
  trr.setInputColumns(columns);
  trr.init();
  Result r = new Result();
  ImmutableBytesWritable key = new ImmutableBytesWritable();
  // The reader should return the seeded rows in order.
  boolean more = trr.next(key, r);
  assertTrue(more);
  checkResult(r, key, "aaa".getBytes(), "value aaa".getBytes());
  more = trr.next(key, r);
  assertTrue(more);
  checkResult(r, key, "bbb".getBytes(), "value bbb".getBytes());
  // no more data
  more = trr.next(key, r);
  assertFalse(more);
}
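checkResult is a helper defined elsewhere in TestTableInputFormat and is not shown on this page. A hypothetical sketch of what such an assertion helper could look like, assuming it verifies the row key carried by the ImmutableBytesWritable and the value of the Result's first cell:

// Hypothetical reconstruction, not the actual helper from TestTableInputFormat.
static void checkResult(Result r, ImmutableBytesWritable key,
    byte[] expectedKey, byte[] expectedValue) {
  // The reader fills the key with the current row.
  assertTrue(Bytes.equals(expectedKey, key.copyBytes()));
  // The Result should carry the expected value in its first cell.
  assertTrue(Bytes.equals(expectedValue, CellUtil.cloneValue(r.rawCells()[0])));
}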
Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project hbase by apache.
From the class TestTableSnapshotInputFormat, method verifyWithMockedMapReduce (mapred API).
private void verifyWithMockedMapReduce(JobConf job, int numRegions, int expectedNumSplits,
    byte[] startRow, byte[] stopRow) throws IOException, InterruptedException {
  TableSnapshotInputFormat tsif = new TableSnapshotInputFormat();
  InputSplit[] splits = tsif.getSplits(job, 0);
  Assert.assertEquals(expectedNumSplits, splits.length);
  HBaseTestingUtility.SeenRowTracker rowTracker =
      new HBaseTestingUtility.SeenRowTracker(startRow, stopRow);
  for (int i = 0; i < splits.length; i++) {
    // validate input split
    InputSplit split = splits[i];
    Assert.assertTrue(split instanceof TableSnapshotInputFormat.TableSnapshotRegionSplit);
    // validate record reader
    OutputCollector collector = mock(OutputCollector.class);
    Reporter reporter = mock(Reporter.class);
    RecordReader<ImmutableBytesWritable, Result> rr = tsif.getRecordReader(split, job, reporter);
    // validate we can read all the data back
    ImmutableBytesWritable key = rr.createKey();
    Result value = rr.createValue();
    while (rr.next(key, value)) {
      verifyRowFromMap(key, value);
      rowTracker.addRow(key.copyBytes());
    }
    rr.close();
  }
  // validate all rows are seen
  rowTracker.validate();
}
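The JobConf handed to this method is prepared elsewhere in the test, snapshot name included. A hedged sketch of how that mapred-side setup typically looks; the snapshot name, column spec, restore directory, and the 'conf' variable below are placeholders, and the exact call site in the real test may differ:

// Sketch: wiring a JobConf to read from a snapshot via the mapred API.
JobConf job = new JobConf(conf);  // 'conf' is an HBase-aware Configuration (assumed)
org.apache.hadoop.hbase.mapred.TableMapReduceUtil.initTableSnapshotMapJob(
    "someSnapshot",                // snapshot to read (placeholder)
    "f1",                          // space-separated column spec (placeholder)
    IdentityTableMap.class,        // any TableMap implementation
    ImmutableBytesWritable.class,  // output key class
    Result.class,                  // output value class
    job,
    false,                         // addDependencyJars
    new Path("/tmp/restore"));     // temp dir where snapshot files are restored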
Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project hbase by apache.
From the class TestTableSnapshotInputFormat, method verifyWithMockedMapReduce (mapreduce API).
private void verifyWithMockedMapReduce(Job job, int numRegions, int expectedNumSplits,
    byte[] startRow, byte[] stopRow) throws IOException, InterruptedException {
  TableSnapshotInputFormat tsif = new TableSnapshotInputFormat();
  List<InputSplit> splits = tsif.getSplits(job);
  Assert.assertEquals(expectedNumSplits, splits.size());
  HBaseTestingUtility.SeenRowTracker rowTracker =
      new HBaseTestingUtility.SeenRowTracker(startRow, stopRow);
  for (int i = 0; i < splits.size(); i++) {
    // validate input split
    InputSplit split = splits.get(i);
    Assert.assertTrue(split instanceof TableSnapshotRegionSplit);
    // validate record reader
    TaskAttemptContext taskAttemptContext = mock(TaskAttemptContext.class);
    when(taskAttemptContext.getConfiguration()).thenReturn(job.getConfiguration());
    RecordReader<ImmutableBytesWritable, Result> rr =
        tsif.createRecordReader(split, taskAttemptContext);
    rr.initialize(split, taskAttemptContext);
    // validate we can read all the data back
    while (rr.nextKeyValue()) {
      byte[] row = rr.getCurrentKey().get();
      verifyRowFromMap(rr.getCurrentKey(), rr.getCurrentValue());
      rowTracker.addRow(row);
    }
    rr.close();
  }
  // validate all rows are seen
  rowTracker.validate();
}
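The mapreduce-flavored Job is likewise configured before this method runs. A hedged sketch of the usual setup via TableMapReduceUtil.initTableSnapshotMapperJob; the snapshot name, mapper, restore directory, and 'conf' are placeholders, not the test's actual wiring:

// Sketch: wiring a Job to read from a snapshot via the mapreduce API.
Job job = Job.getInstance(conf);          // 'conf' is an HBase-aware Configuration (assumed)
Scan scan = new Scan(startRow, stopRow);  // bound the rows the splits will cover
TableMapReduceUtil.initTableSnapshotMapperJob(
    "someSnapshot",                // snapshot to read (placeholder)
    scan,
    IdentityTableMapper.class,     // any TableMapper implementation
    ImmutableBytesWritable.class,  // output key class
    Result.class,                  // output value class
    job,
    false,                         // addDependencyJars
    new Path("/tmp/restore"));     // temp dir where snapshot files are restored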
Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project hbase by apache.
From the class TestGroupingTableMapper, method testGroupingTableMapper.
/**
* Test GroupingTableMapper class
*/
@Test
public void testGroupingTableMapper() throws Exception {
  GroupingTableMapper mapper = new GroupingTableMapper();
  Configuration configuration = new Configuration();
  configuration.set(GroupingTableMapper.GROUP_COLUMNS, "family1:clm family2:clm");
  mapper.setConf(configuration);
  Result result = mock(Result.class);
  @SuppressWarnings("unchecked")
  Mapper<ImmutableBytesWritable, Result, ImmutableBytesWritable, Result>.Context context =
      mock(Mapper.Context.class);
  context.write(any(ImmutableBytesWritable.class), any(Result.class));
  List<Cell> keyValue = new ArrayList<>();
  byte[] row = {};
  keyValue.add(new KeyValue(row, Bytes.toBytes("family2"), Bytes.toBytes("clm"),
      Bytes.toBytes("value1")));
  keyValue.add(new KeyValue(row, Bytes.toBytes("family1"), Bytes.toBytes("clm"),
      Bytes.toBytes("value2")));
  when(result.listCells()).thenReturn(keyValue);
  mapper.map(null, result, context);
  // template data: the values the mapper should have grouped into its key
  byte[][] data = { Bytes.toBytes("value1"), Bytes.toBytes("value2") };
  ImmutableBytesWritable ibw = mapper.createGroupKey(data);
  verify(context).write(ibw, result);
}
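The final verify works because ImmutableBytesWritable implements value equality over the wrapped bytes, so the key returned by createGroupKey matches the one the mapper emitted internally for the same column values. A tiny standalone illustration of that property:

ImmutableBytesWritable a = new ImmutableBytesWritable(Bytes.toBytes("value1 value2"));
ImmutableBytesWritable b = new ImmutableBytesWritable(Bytes.toBytes("value1 value2"));
// equals()/hashCode()/compareTo() are defined over the wrapped bytes,
// which is what lets Mockito match the mapper's emitted key above.
assertTrue(a.equals(b) && a.compareTo(b) == 0);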