use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.
the class TestStoreScanner method testScanSameTimestamp.
/**
 * Two Puts on the same row/column with the same timestamp: the scanner
 * returns only one of them, since maxVersions defaults to 1 and the two
 * cells are versions of the same column coordinate.
 */
@Test
public void testScanSameTimestamp() throws IOException {
  // Returns only 1 of these 2 even though same timestamp.
  KeyValue[] kvs = new KeyValue[] {
      KeyValueTestUtil.create("R1", "cf", "a", 1, KeyValue.Type.Put, "dont-care"),
      KeyValueTestUtil.create("R1", "cf", "a", 1, KeyValue.Type.Put, "dont-care") };
  List<KeyValueScanner> scanners = Arrays.asList(
      new KeyValueScanner[] { new KeyValueScanFixture(CellComparator.COMPARATOR, kvs) });
  Scan scanSpec = new Scan(Bytes.toBytes("R1"));
  // This only uses maxVersions (default=1) and TimeRange (default=all).
  try (StoreScanner scan = new StoreScanner(scanSpec, scanInfo, scanType, getCols("a"), scanners)) {
    List<Cell> results = new ArrayList<>();
    // assertTrue gives a clearer failure than assertEquals(true, ...).
    Assert.assertTrue(scan.next(results));
    Assert.assertEquals(1, results.size());
    // The first (higher-precedence) of the two identical-coordinate cells wins.
    Assert.assertEquals(kvs[0], results.get(0));
  }
}
use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.
the class TestStoreScanner method testDeleteColumn.
/**
 * A DeleteColumn at ts=10 masks every version of column "a" at or below
 * that timestamp (the Delete at 9 and the Put at 8), so the only surviving
 * cell is the Put on column "b".
 */
@Test
public void testDeleteColumn() throws IOException {
  KeyValue[] kvs = new KeyValue[] {
      KeyValueTestUtil.create("R1", "cf", "a", 10, KeyValue.Type.DeleteColumn, "dont-care"),
      KeyValueTestUtil.create("R1", "cf", "a", 9, KeyValue.Type.Delete, "dont-care"),
      KeyValueTestUtil.create("R1", "cf", "a", 8, KeyValue.Type.Put, "dont-care"),
      KeyValueTestUtil.create("R1", "cf", "b", 5, KeyValue.Type.Put, "dont-care") };
  List<KeyValueScanner> scanners = scanFixture(kvs);
  try (StoreScanner scan = new StoreScanner(new Scan(), scanInfo, scanType, null, scanners)) {
    List<Cell> results = new ArrayList<>();
    // assertTrue gives a clearer failure than assertEquals(true, ...).
    Assert.assertTrue(scan.next(results));
    Assert.assertEquals(1, results.size());
    // Only the "b" cell (kvs[3]) survives the DeleteColumn on "a".
    Assert.assertEquals(kvs[3], results.get(0));
  }
}
use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.
the class TestStoreScanner method testFullRowSpansBlocks.
/**
 * Get of a full row (row FOUR) that spans two blocks: every cell of the row
 * must come back, and the full-row Get must leave no opportunity for the
 * scanner "optimize" shortcut to fire.
 */
@Test
public void testFullRowSpansBlocks() throws IOException {
  // Do a Get against row FOUR. It spans two blocks.
  Get get = new Get(FOUR);
  Scan scan = new Scan(get);
  // try-with-resources, consistent with the other tests in this class.
  try (CellGridStoreScanner scanner = new CellGridStoreScanner(scan, this.scanInfo, this.scanType)) {
    List<Cell> results = new ArrayList<>();
    while (scanner.next(results)) {
      continue;
    }
    // All five cells of row FOUR are returned. (Original comment said "four
    // results of column 1", which contradicted the assertion below.)
    Assert.assertEquals(5, results.size());
    // We should have gone the optimize route 6 times in total: an INCLUDE for
    // each of the five cells in the row plus the DONE on the end.
    Assert.assertEquals(6, scanner.count.get());
    // For a full row Get, there should be no opportunity for scanner optimization.
    Assert.assertEquals(0, scanner.optimization.get());
  }
}
use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.
the class TestStoreScanner method testWildCardTtlScan.
/*
 * Test expiration of KeyValues in combination with a configured TTL for
 * a column family (as should be triggered in a major compaction).
 */
@Test
public void testWildCardTtlScan() throws IOException {
  long now = System.currentTimeMillis();
  // With a 500ms TTL (set on the ScanInfo below), cells stamped earlier than
  // now-500 expire:
  //   R1: a (now-1000) and d (now-10000) expire; b and c survive -> 2 results.
  //   R2: a, b, c (now-200) survive; the now-1000 version of c is expired and
  //       also shadowed by maxVersions=1 -> 3 results.
  KeyValue[] kvs = new KeyValue[] {
      KeyValueTestUtil.create("R1", "cf", "a", now - 1000, KeyValue.Type.Put, "dont-care"),
      KeyValueTestUtil.create("R1", "cf", "b", now - 10, KeyValue.Type.Put, "dont-care"),
      KeyValueTestUtil.create("R1", "cf", "c", now - 200, KeyValue.Type.Put, "dont-care"),
      KeyValueTestUtil.create("R1", "cf", "d", now - 10000, KeyValue.Type.Put, "dont-care"),
      KeyValueTestUtil.create("R2", "cf", "a", now, KeyValue.Type.Put, "dont-care"),
      KeyValueTestUtil.create("R2", "cf", "b", now - 10, KeyValue.Type.Put, "dont-care"),
      KeyValueTestUtil.create("R2", "cf", "c", now - 200, KeyValue.Type.Put, "dont-care"),
      KeyValueTestUtil.create("R2", "cf", "c", now - 1000, KeyValue.Type.Put, "dont-care") };
  List<KeyValueScanner> scanners = scanFixture(kvs);
  Scan scan = new Scan();
  scan.setMaxVersions(1);
  // NOTE(review): the 500 here is presumed to be the TTL in ms, per the test's
  // stated purpose — confirm against the ScanInfo constructor signature.
  ScanInfo scanInfo = new ScanInfo(CONF, CF, 0, 1, 500, KeepDeletedCells.FALSE, 0, CellComparator.COMPARATOR);
  ScanType scanType = ScanType.USER_SCAN;
  try (StoreScanner scanner = new StoreScanner(scan, scanInfo, scanType, null, scanners)) {
    List<Cell> results = new ArrayList<>();
    // assertTrue/assertFalse give clearer failures than assertEquals(bool, ...).
    Assert.assertTrue(scanner.next(results));
    Assert.assertEquals(2, results.size());
    Assert.assertEquals(kvs[1], results.get(0));
    Assert.assertEquals(kvs[2], results.get(1));
    results.clear();
    Assert.assertTrue(scanner.next(results));
    Assert.assertEquals(3, results.size());
    Assert.assertEquals(kvs[4], results.get(0));
    Assert.assertEquals(kvs[5], results.get(1));
    Assert.assertEquals(kvs[6], results.get(2));
    results.clear();
    // No third row: the scan is exhausted.
    Assert.assertFalse(scanner.next(results));
  }
}
use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.
the class TestStoreScanner method testScannerReseekDoesntNPE.
/**
 * Regression test: calling updateReaders twice back-to-back used to throw an
 * NPE (and in this test setup scan.store is null, which also used to NPE).
 * Getting through both calls plus a peek() proves the bug stays quashed.
 */
@Test
public void testScannerReseekDoesntNPE() throws Exception {
  List<KeyValueScanner> fixture = scanFixture(kvs);
  try (StoreScanner storeScanner =
      new StoreScanner(new Scan(), scanInfo, scanType, getCols("a", "d"), fixture)) {
    // Two consecutive updateReaders calls — the historical NPE trigger.
    storeScanner.updateReaders(new ArrayList<>());
    storeScanner.updateReaders(new ArrayList<>());
    // A peek afterwards confirms the scanner is still usable.
    storeScanner.peek();
  }
}
Aggregations