Example usage of org.apache.hadoop.hbase.Cell in the Apache HBase project: class TestGroupingTableMap, method shouldCreateNewKeyAlthoughExtraKey.
/**
 * Verifies that GroupingTableMap emits exactly one record when the configured
 * group columns (familyA:qualifierA, familyB:qualifierB) are present, even
 * though the Result carries an extra, unconfigured cell (familyC:qualifierC).
 */
@Test
@SuppressWarnings({ "deprecation", "unchecked" })
public void shouldCreateNewKeyAlthoughExtraKey() throws Exception {
  GroupingTableMap gTableMap = null;
  try {
    Result result = mock(Result.class);
    Reporter reporter = mock(Reporter.class);
    gTableMap = new GroupingTableMap();
    Configuration cfg = new Configuration();
    cfg.set(GroupingTableMap.GROUP_COLUMNS, "familyA:qualifierA familyB:qualifierB");
    JobConf jobConf = new JobConf(cfg);
    gTableMap.configure(jobConf);
    byte[] row = {};
    // Bytes.toBytes is charset-safe (always UTF-8); the previous
    // String.getBytes() calls depended on the platform default charset.
    List<Cell> keyValues = ImmutableList.<Cell>of(
      new KeyValue(row, Bytes.toBytes("familyA"), Bytes.toBytes("qualifierA"), Bytes.toBytes("1111")),
      new KeyValue(row, Bytes.toBytes("familyB"), Bytes.toBytes("qualifierB"), Bytes.toBytes("2222")),
      new KeyValue(row, Bytes.toBytes("familyC"), Bytes.toBytes("qualifierC"), Bytes.toBytes("3333")));
    when(result.listCells()).thenReturn(keyValues);
    OutputCollector<ImmutableBytesWritable, Result> outputCollectorMock = mock(OutputCollector.class);
    gTableMap.map(null, result, outputCollectorMock, reporter);
    verify(result).listCells();
    // Exactly one emit: the extra familyC cell must not produce a second record.
    verify(outputCollectorMock, times(1)).collect(any(ImmutableBytesWritable.class), any(Result.class));
    verifyNoMoreInteractions(outputCollectorMock);
  } finally {
    if (gTableMap != null) {
      gTableMap.close();
    }
  }
}
Example usage of org.apache.hadoop.hbase.Cell in the Apache HBase project: class TestWALRecordReader, method testSplit.
/**
 * Create a new reader from the split, and match the edits against the passed columns.
 *
 * @param split   the input split to read back
 * @param columns expected qualifiers, one per edit, in read order
 * @throws Exception if reader initialization or iteration fails
 */
private void testSplit(InputSplit split, byte[]... columns) throws Exception {
  final WALRecordReader reader = getReader();
  // try/finally ensures the reader is closed even when an assertion fails
  // (the original leaked it on failure).
  try {
    reader.initialize(split, MapReduceTestUtil.createDummyMapTaskAttemptContext(conf));
    for (byte[] column : columns) {
      assertTrue(reader.nextKeyValue());
      Cell cell = reader.getCurrentValue().getCells().get(0);
      // Single assertTrue(message, condition) replaces the old
      // "if (!eq) assertTrue(msg, false)" anti-pattern.
      assertTrue(
        "expected [" + Bytes.toString(column) + "], actual ["
          + Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(),
            cell.getQualifierLength()) + "]",
        Bytes.equals(column, 0, column.length, cell.getQualifierArray(), cell.getQualifierOffset(),
          cell.getQualifierLength()));
    }
    // No edits beyond the requested columns.
    assertFalse(reader.nextKeyValue());
  } finally {
    reader.close();
  }
}
Example usage of org.apache.hadoop.hbase.Cell in the Apache HBase project: class TestTimeRangeMapRed, method verify.
/**
 * Scans the single configured column (latest version only) and asserts that
 * each cell's boolean value matches the expectation recorded in TIMESTAMP
 * for that cell's timestamp.
 *
 * @param table the table to scan
 * @throws IOException if the scan fails
 */
private void verify(final Table table) throws IOException {
  Scan scan = new Scan();
  scan.addColumn(FAMILY_NAME, COLUMN_NAME);
  scan.setMaxVersions(1);
  // try-with-resources: the original leaked the scanner if an assertion
  // threw before the explicit close().
  try (ResultScanner scanner = table.getScanner(scan)) {
    for (Result r : scanner) {
      for (Cell kv : r.listCells()) {
        log.debug(Bytes.toString(r.getRow()) + "\t" + Bytes.toString(CellUtil.cloneFamily(kv)) + "\t" + Bytes.toString(CellUtil.cloneQualifier(kv)) + "\t" + kv.getTimestamp() + "\t" + Bytes.toBoolean(CellUtil.cloneValue(kv)));
        org.junit.Assert.assertEquals(TIMESTAMP.get(kv.getTimestamp()), Bytes.toBoolean(CellUtil.cloneValue(kv)));
      }
    }
  }
}
Example usage of org.apache.hadoop.hbase.Cell in the Apache HBase project: class TestWideScanner, method testWideScanBatching.
/**
 * Verifies that a scan with setBatch(256) never returns more than 256 cells
 * per next() call, and that every batch returned in one call belongs to a
 * single row. Also forces reader updates between iterations and finally
 * checks that the total number of scanned cells equals the number inserted.
 */
@Test
public void testWideScanBatching() throws IOException {
final int batch = 256;
try {
this.r = createNewHRegion(TESTTABLEDESC, null, null);
int inserted = addWideContent(this.r);
List<Cell> results = new ArrayList<>();
Scan scan = new Scan();
scan.addFamily(A);
scan.addFamily(B);
scan.addFamily(C);
// up to 100 versions per cell; batch caps cells returned per next() call
scan.setMaxVersions(100);
scan.setBatch(batch);
InternalScanner s = r.getScanner(scan);
int total = 0;
int i = 0;
boolean more;
do {
more = s.next(results);
i++;
LOG.info("iteration #" + i + ", results.size=" + results.size());
// assert that the result set is no larger
assertTrue(results.size() <= batch);
total += results.size();
if (results.size() > 0) {
// assert that all results are from the same row
byte[] row = CellUtil.cloneRow(results.get(0));
for (Cell kv : results) {
assertTrue(Bytes.equals(row, CellUtil.cloneRow(kv)));
}
}
// next() appends into the caller's list, so clear before the next iteration
results.clear();
// trigger ChangedReadersObservers
// NOTE(review): reaches into RegionScannerImpl internals to call
// updateReaders with an empty scanner list on every StoreScanner —
// presumably to exercise the mid-scan reader-replacement path; confirm
// against StoreScanner.updateReaders semantics.
Iterator<KeyValueScanner> scanners = ((HRegion.RegionScannerImpl) s).storeHeap.getHeap().iterator();
while (scanners.hasNext()) {
StoreScanner ss = (StoreScanner) scanners.next();
ss.updateReaders(new ArrayList<>());
}
} while (more);
// assert that the scanner returned all values
LOG.info("inserted " + inserted + ", scanned " + total);
assertEquals(total, inserted);
s.close();
} finally {
// always release the test region and its WAL, even on assertion failure
HBaseTestingUtility.closeRegionAndWAL(this.r);
}
}
Example usage of org.apache.hadoop.hbase.Cell in the Apache HBase project: class TestStore, method testMultipleTimestamps.
/**
 * Test to ensure correctness when using Stores with multiple timestamps:
 * one batch of versions is flushed to a store file, a second batch stays in
 * the memstore, and Gets with various time ranges must see the right data.
 * @throws IOException
 */
@Test
public void testMultipleTimestamps() throws IOException {
  int numRows = 1;
  long[] timestamps1 = new long[] { 1, 5, 10, 20 };
  long[] timestamps2 = new long[] { 30, 80 };
  init(this.name.getMethodName());
  // First batch of versions: added, snapshotted, and flushed to a store file.
  for (Cell kv : getKeyValueSet(timestamps1, numRows, qf1, family)) {
    this.store.add(kv, null);
  }
  this.store.snapshot();
  flushStore(store, id++);
  // Second batch of versions stays in the memstore.
  for (Cell kv : getKeyValueSet(timestamps2, numRows, qf1, family)) {
    this.store.add(kv, null);
  }
  Get get = new Get(Bytes.toBytes(1));
  get.addColumn(family, qf1);
  // Each entry: { lowerBound, upperBound, 1 if results expected, else 0 }.
  // Ranges cover file-only, memstore-only, and straddling cases; the final
  // range lies entirely beyond every written timestamp.
  long[][] timeRanges = {
    { 0, 15, 1 },
    { 40, 90, 1 },
    { 10, 45, 1 },
    { 80, 145, 1 },
    { 1, 2, 1 },
    { 90, 200, 0 },
  };
  for (long[] range : timeRanges) {
    get.setTimeRange(range[0], range[1]);
    List<Cell> result = HBaseTestingUtility.getFromStoreFile(store, get);
    if (range[2] == 1) {
      Assert.assertTrue(result.size() > 0);
    } else {
      Assert.assertTrue(result.size() == 0);
    }
  }
}
Aggregations