
Example 51 with Scan

Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.

From the class TestStoreScanner, method testScanSameTimestamp.

@Test
public void testScanSameTimestamp() throws IOException {
    // returns only 1 of these 2 even though same timestamp
    KeyValue[] kvs = new KeyValue[] {
        KeyValueTestUtil.create("R1", "cf", "a", 1, KeyValue.Type.Put, "dont-care"),
        KeyValueTestUtil.create("R1", "cf", "a", 1, KeyValue.Type.Put, "dont-care") };
    List<KeyValueScanner> scanners = Arrays.asList(
        new KeyValueScanner[] { new KeyValueScanFixture(CellComparator.COMPARATOR, kvs) });
    Scan scanSpec = new Scan(Bytes.toBytes("R1"));
    // this only uses maxVersions (default=1) and TimeRange (default=all)
    try (StoreScanner scan = new StoreScanner(scanSpec, scanInfo, scanType, getCols("a"), scanners)) {
        List<Cell> results = new ArrayList<>();
        Assert.assertEquals(true, scan.next(results));
        Assert.assertEquals(1, results.size());
        Assert.assertEquals(kvs[0], results.get(0));
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)
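
For context, a plain client-side version of the scan spec above would look roughly like the sketch below. The table name "t1" and the connection setup are illustrative assumptions, not part of the test; only the Scan configuration mirrors it (start at row "R1", read column cf:a, default maxVersions of 1, so two versions with the same timestamp collapse to a single returned cell).

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class SingleRowScanSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("t1"))) { // "t1" is a hypothetical table
            // Scan forward from row R1 and read only column cf:a; maxVersions defaults to 1,
            // so duplicate timestamps for the same column collapse to one cell per row.
            Scan scanSpec = new Scan(Bytes.toBytes("R1"));
            scanSpec.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("a"));
            try (ResultScanner scanner = table.getScanner(scanSpec)) {
                for (Result result : scanner) {
                    System.out.println(result);
                }
            }
        }
    }
}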

Example 52 with Scan

Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.

From the class TestStoreScanner, method testDeleteColumn.

@Test
public void testDeleteColumn() throws IOException {
    KeyValue[] kvs = new KeyValue[] {
        KeyValueTestUtil.create("R1", "cf", "a", 10, KeyValue.Type.DeleteColumn, "dont-care"),
        KeyValueTestUtil.create("R1", "cf", "a", 9, KeyValue.Type.Delete, "dont-care"),
        KeyValueTestUtil.create("R1", "cf", "a", 8, KeyValue.Type.Put, "dont-care"),
        KeyValueTestUtil.create("R1", "cf", "b", 5, KeyValue.Type.Put, "dont-care") };
    List<KeyValueScanner> scanners = scanFixture(kvs);
    try (StoreScanner scan = new StoreScanner(new Scan(), scanInfo, scanType, null, scanners)) {
        List<Cell> results = new ArrayList<>();
        Assert.assertEquals(true, scan.next(results));
        Assert.assertEquals(1, results.size());
        Assert.assertEquals(kvs[3], results.get(0));
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)
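
The two marker types in this fixture map to the two flavors of the client-side Delete API. A minimal sketch follows (the table name "t1" and the connection setup are assumptions, not part of the test): addColumns produces the DeleteColumn marker that masks every version of cf:a up to its timestamp, while addColumn produces the plain Delete marker that masks a single version, which is why only cf:b survives the scan above.

import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteMarkerSketch {
    public static void main(String[] args) throws IOException {
        byte[] row = Bytes.toBytes("R1");
        byte[] cf = Bytes.toBytes("cf");
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = connection.getTable(TableName.valueOf("t1"))) { // "t1" is a hypothetical table
            Delete delete = new Delete(row);
            // Writes a DeleteColumn marker: hides ALL versions of cf:a with ts <= 10,
            // like the DeleteColumn cell at ts=10 in the fixture.
            delete.addColumns(cf, Bytes.toBytes("a"), 10L);
            // Writes a plain Delete marker: hides only the single version at exactly ts=9,
            // like the Delete cell at ts=9 in the fixture.
            delete.addColumn(cf, Bytes.toBytes("a"), 9L);
            table.delete(delete);
        }
    }
}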

Example 53 with Scan

Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.

From the class TestStoreScanner, method testFullRowSpansBlocks.

@Test
public void testFullRowSpansBlocks() throws IOException {
    // Do a Get against row FOUR. It spans two blocks.
    Get get = new Get(FOUR);
    Scan scan = new Scan(get);
    CellGridStoreScanner scanner = new CellGridStoreScanner(scan, this.scanInfo, this.scanType);
    try {
        List<Cell> results = new ArrayList<>();
        while (scanner.next(results)) {
            continue;
        }
        // Should be five results: all the cells in row FOUR, which spans two blocks.
        Assert.assertEquals(5, results.size());
        // We should have gone the optimize route 6 times totally... an INCLUDE for the five cells
        // in the row plus the DONE on the end.
        Assert.assertEquals(6, scanner.count.get());
        // For a full row Get, there should be no opportunity for scanner optimization.
        Assert.assertEquals(0, scanner.optimization.get());
    } finally {
        scanner.close();
    }
}
Also used : Get(org.apache.hadoop.hbase.client.Get) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)
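
The Scan(Get) constructor used here is the standard way a Get is turned into a single-row scan. A rough standalone sketch (the row key literal is an assumption standing in for the test's FOUR constant):

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class GetToScanSketch {
    public static void main(String[] args) {
        // Scan(Get) copies the row, columns, filter, time range and max versions from the Get,
        // producing a scan whose start and stop rows describe exactly one row.
        Get get = new Get(Bytes.toBytes("row-four")); // stand-in for the FOUR row key
        Scan scan = new Scan(get);
        System.out.println(scan.isGetScan());                   // true
        System.out.println(Bytes.toString(scan.getStartRow())); // row-four
    }
}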

Example 54 with Scan

Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.

From the class TestStoreScanner, method testWildCardTtlScan.

/*
   * Test expiration of KeyValues in combination with a configured TTL for
   * a column family (as should be triggered in a major compaction).
   */
@Test
public void testWildCardTtlScan() throws IOException {
    long now = System.currentTimeMillis();
    KeyValue[] kvs = new KeyValue[] {
        KeyValueTestUtil.create("R1", "cf", "a", now - 1000, KeyValue.Type.Put, "dont-care"),
        KeyValueTestUtil.create("R1", "cf", "b", now - 10, KeyValue.Type.Put, "dont-care"),
        KeyValueTestUtil.create("R1", "cf", "c", now - 200, KeyValue.Type.Put, "dont-care"),
        KeyValueTestUtil.create("R1", "cf", "d", now - 10000, KeyValue.Type.Put, "dont-care"),
        KeyValueTestUtil.create("R2", "cf", "a", now, KeyValue.Type.Put, "dont-care"),
        KeyValueTestUtil.create("R2", "cf", "b", now - 10, KeyValue.Type.Put, "dont-care"),
        KeyValueTestUtil.create("R2", "cf", "c", now - 200, KeyValue.Type.Put, "dont-care"),
        KeyValueTestUtil.create("R2", "cf", "c", now - 1000, KeyValue.Type.Put, "dont-care") };
    List<KeyValueScanner> scanners = scanFixture(kvs);
    Scan scan = new Scan();
    scan.setMaxVersions(1);
    ScanInfo scanInfo = new ScanInfo(CONF, CF, 0, 1, 500, KeepDeletedCells.FALSE, 0, CellComparator.COMPARATOR);
    ScanType scanType = ScanType.USER_SCAN;
    try (StoreScanner scanner = new StoreScanner(scan, scanInfo, scanType, null, scanners)) {
        List<Cell> results = new ArrayList<>();
        Assert.assertEquals(true, scanner.next(results));
        Assert.assertEquals(2, results.size());
        Assert.assertEquals(kvs[1], results.get(0));
        Assert.assertEquals(kvs[2], results.get(1));
        results.clear();
        Assert.assertEquals(true, scanner.next(results));
        Assert.assertEquals(3, results.size());
        Assert.assertEquals(kvs[4], results.get(0));
        Assert.assertEquals(kvs[5], results.get(1));
        Assert.assertEquals(kvs[6], results.get(2));
        results.clear();
        Assert.assertEquals(false, scanner.next(results));
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)
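
In this test the 500 ms TTL is injected directly through ScanInfo. Against a real table the equivalent setup is a TTL on the column family, as in the sketch below; the table name "t1", the 1-second TTL and the admin boilerplate are illustrative assumptions, using the HTableDescriptor/HColumnDescriptor API of this HBase generation. Expired cells are filtered from reads and physically dropped at the next major compaction.

import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class TtlFamilySketch {
    public static void main(String[] args) throws IOException {
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = connection.getAdmin()) {
            HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf("t1")); // hypothetical table
            HColumnDescriptor family = new HColumnDescriptor("cf");
            // TTL is configured per column family, in seconds (here 1 s, roughly matching the
            // short TTL the test configures through ScanInfo).
            family.setTimeToLive(1);
            tableDesc.addFamily(family);
            admin.createTable(tableDesc);
        }
    }
}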

Example 55 with Scan

Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.

From the class TestStoreScanner, method testScannerReseekDoesntNPE.

@Test
public void testScannerReseekDoesntNPE() throws Exception {
    List<KeyValueScanner> scanners = scanFixture(kvs);
    try (StoreScanner scan = new StoreScanner(new Scan(), scanInfo, scanType, getCols("a", "d"), scanners)) {
        // Previously, calling updateReaders twice in a row would cause an NPE. In this test it
        // would also normally cause an NPE because scan.store is null. So as long as we get
        // through these two calls we are good and the bug was quashed.
        scan.updateReaders(new ArrayList<>());
        scan.updateReaders(new ArrayList<>());
        scan.peek();
    }
}
Also used : Scan(org.apache.hadoop.hbase.client.Scan) Test(org.junit.Test)

Aggregations

Scan (org.apache.hadoop.hbase.client.Scan) 950
Test (org.junit.Test) 495
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner) 302
Result (org.apache.hadoop.hbase.client.Result) 286
Cell (org.apache.hadoop.hbase.Cell) 258
ArrayList (java.util.ArrayList) 238
Table (org.apache.hadoop.hbase.client.Table) 178
Put (org.apache.hadoop.hbase.client.Put) 161
BaseConnectionlessQueryTest (org.apache.phoenix.query.BaseConnectionlessQueryTest) 153
IOException (java.io.IOException) 135
TableName (org.apache.hadoop.hbase.TableName) 98
Delete (org.apache.hadoop.hbase.client.Delete) 95
Filter (org.apache.hadoop.hbase.filter.Filter) 95
KeyValue (org.apache.hadoop.hbase.KeyValue) 84
Connection (org.apache.hadoop.hbase.client.Connection) 81
SkipScanFilter (org.apache.phoenix.filter.SkipScanFilter) 78
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection) 78
RowKeyComparisonFilter (org.apache.phoenix.filter.RowKeyComparisonFilter) 72
Configuration (org.apache.hadoop.conf.Configuration) 51
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor) 51