Example 36 with Scan

Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.

The class TestScannerFromBucketCache, method testBasicScanWithOffheapBucketCacheWithMBB:

@Test
public void testBasicScanWithOffheapBucketCacheWithMBB() throws IOException {
    setUp(true, true);
    byte[] row1 = Bytes.toBytes("row1offheap");
    byte[] qf1 = Bytes.toBytes("qualifier1");
    byte[] qf2 = Bytes.toBytes("qualifier2");
    byte[] fam1 = Bytes.toBytes("famoffheap");
    // Fixed timestamps (rather than System.currentTimeMillis()) keep the test deterministic
    long ts1 = 1;
    long ts2 = ts1 + 1;
    long ts3 = ts1 + 2;
    // Setting up region
    String method = this.getName();
    this.region = initHRegion(tableName, method, conf, test_util, fam1);
    try {
        List<Cell> expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, true);
        List<Cell> actual = performScan(row1, fam1);
        // Verify result
        for (int i = 0; i < expected.size(); i++) {
            assertFalse(actual.get(i) instanceof ByteBufferKeyValue);
            assertTrue(CellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
        }
        // Wait for the bucket cache threads to move the data to offheap
        Thread.sleep(500);
        // Do the scan again and verify. This time the data should come from the bucket
        // cache in offheap mode, but one of the cells will be copied due to the asSubByteBuff call
        Scan scan = new Scan(row1);
        scan.addFamily(fam1);
        scan.setMaxVersions(10);
        actual = new ArrayList<>();
        InternalScanner scanner = region.getScanner(scan);
        boolean hasNext = scanner.next(actual);
        assertFalse(hasNext);
        // Verify result
        for (int i = 0; i < expected.size(); i++) {
            // All but the last cell should be offheap ByteBufferKeyValues; the last cell
            // fetched is shareable but not offheap because the MBB is copied to form a single cell
            if (i != 5) {
                assertTrue(actual.get(i) instanceof ByteBufferKeyValue);
            }
        }
    } catch (InterruptedException e) {
        // Restore the interrupt status instead of silently swallowing the exception
        Thread.currentThread().interrupt();
    } finally {
        HBaseTestingUtility.closeRegionAndWAL(this.region);
        this.region = null;
    }
}
Also used : ByteBufferKeyValue(org.apache.hadoop.hbase.ByteBufferKeyValue) InternalScanner(org.apache.hadoop.hbase.regionserver.InternalScanner) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)
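
The test above drives an InternalScanner against the region directly. Against a live cluster, the same Scan configuration (addFamily plus setMaxVersions) is typically driven through the client Table API instead. A minimal sketch, assuming a hypothetical table "my_table" and family "fam":

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanAllVersionsSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("my_table"))) {
            // Same shape as the test: a start row, one family, up to 10 versions per cell
            Scan scan = new Scan(Bytes.toBytes("row1"));
            scan.addFamily(Bytes.toBytes("fam"));
            scan.setMaxVersions(10);
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    for (Cell cell : result.rawCells()) {
                        System.out.println(cell);
                    }
                }
            }
        }
    }
}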

Example 37 with Scan

Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.

The class MultiTableInputFormatTestBase, method testScan:

/**
   * Tests a MR scan using specific start and stop rows.
   *
   * @throws IOException
   * @throws ClassNotFoundException
   * @throws InterruptedException
   */
private void testScan(String start, String stop, String last) throws IOException, InterruptedException, ClassNotFoundException {
    String jobName = "Scan" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") + "To" + (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
    LOG.info("Before map/reduce startup - job " + jobName);
    Configuration c = new Configuration(TEST_UTIL.getConfiguration());
    c.set(KEY_STARTROW, start != null ? start : "");
    c.set(KEY_LASTROW, last != null ? last : "");
    List<Scan> scans = new ArrayList<>();
    for (String tableName : TABLES) {
        Scan scan = new Scan();
        scan.addFamily(INPUT_FAMILY);
        scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, Bytes.toBytes(tableName));
        if (start != null) {
            scan.setStartRow(Bytes.toBytes(start));
        }
        if (stop != null) {
            scan.setStopRow(Bytes.toBytes(stop));
        }
        scans.add(scan);
        LOG.info("scan before: " + scan);
    }
    runJob(jobName, c, scans);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan)
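
The runJob helper is not shown above. TableMapReduceUtil offers a List<Scan> overload of initTableMapperJob that a helper like this would typically wrap; it reads each scan's SCAN_ATTRIBUTES_TABLE_NAME attribute to pick the source table. A sketch under that assumption, with a hypothetical row-counting mapper:

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;

public class MultiScanJobSketch {
    // Placeholder mapper that just counts the rows it sees.
    static class RowCountMapper extends TableMapper<ImmutableBytesWritable, NullWritable> {
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context)
                throws IOException, InterruptedException {
            context.getCounter("scan", "rows").increment(1);
        }
    }

    static Job createJob(Configuration conf, String jobName, List<Scan> scans) throws IOException {
        Job job = Job.getInstance(conf, jobName);
        // The List<Scan> overload wires up the multi-table input format under the hood.
        TableMapReduceUtil.initTableMapperJob(scans, RowCountMapper.class,
            ImmutableBytesWritable.class, NullWritable.class, job);
        job.setOutputFormatClass(NullOutputFormat.class);
        job.setNumReduceTasks(0);
        return job;
    }
}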

Example 38 with Scan

Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.

The class TestTableMapReduceUtil, method testInitTableMapperJob2:

@Test
public void testInitTableMapperJob2() throws Exception {
    Configuration configuration = new Configuration();
    Job job = new Job(configuration, "tableName");
    TableMapReduceUtil.initTableMapperJob(Bytes.toBytes("Table"), new Scan(), Import.Importer.class, Text.class, Text.class, job, false, WALInputFormat.class);
    assertEquals(WALInputFormat.class, job.getInputFormatClass());
    assertEquals(Import.Importer.class, job.getMapperClass());
    assertEquals(LongWritable.class, job.getOutputKeyClass());
    assertEquals(Text.class, job.getOutputValueClass());
    assertNull(job.getCombinerClass());
    assertEquals("Table", job.getConfiguration().get(TableInputFormat.INPUT_TABLE));
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) Scan(org.apache.hadoop.hbase.client.Scan) Job(org.apache.hadoop.mapreduce.Job) Test(org.junit.Test)
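
The assertions on LongWritable and Text pass because initTableMapperJob sets only the map output key/value classes, so the job-level output classes stay at Hadoop's defaults. The seven-argument overload used here also overrides the input format (WALInputFormat rather than the default); for comparison, a sketch of the common form, which leaves the input format at its TableInputFormat default:

Configuration configuration = new Configuration();
Job job = Job.getInstance(configuration, "tableName");
// Default form: no input-format override, so TableInputFormat is used.
TableMapReduceUtil.initTableMapperJob("Table", new Scan(),
    Import.Importer.class, Text.class, Text.class, job);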

Example 39 with Scan

Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.

The class TestTableSnapshotInputFormat, method doTestWithMapReduce:

// this is also called by the IntegrationTestTableSnapshotInputFormat
public static void doTestWithMapReduce(HBaseTestingUtility util, TableName tableName, String snapshotName, byte[] startRow, byte[] endRow, Path tableDir, int numRegions, int expectedNumSplits, boolean shutdownCluster) throws Exception {
    // create the table and snapshot
    createTableAndSnapshot(util, tableName, snapshotName, startRow, endRow, numRegions);
    if (shutdownCluster) {
        util.shutdownMiniHBaseCluster();
    }
    try {
        // create the job
        Job job = new Job(util.getConfiguration());
        // limit the scan
        Scan scan = new Scan(startRow, endRow);
        job.setJarByClass(util.getClass());
        TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), TestTableSnapshotInputFormat.class);
        TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName, scan, TestTableSnapshotMapper.class, ImmutableBytesWritable.class, NullWritable.class, job, true, tableDir);
        job.setReducerClass(TestTableSnapshotInputFormat.TestTableSnapshotReducer.class);
        job.setNumReduceTasks(1);
        job.setOutputFormatClass(NullOutputFormat.class);
        Assert.assertTrue(job.waitForCompletion(true));
    } finally {
        if (!shutdownCluster) {
            util.getAdmin().deleteSnapshot(snapshotName);
            util.deleteTable(tableName);
        }
    }
}
Also used : Scan(org.apache.hadoop.hbase.client.Scan) Job(org.apache.hadoop.mapreduce.Job)
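
createTableAndSnapshot is a test helper; outside a test, the snapshot would typically be taken through the Admin API before the job is configured. A minimal sketch, assuming hypothetical table, snapshot, mapper, and restore-directory names:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;

public class SnapshotJobSketch {
    // Placeholder mapper; a real one would process each Result.
    static class NoOpMapper extends TableMapper<ImmutableBytesWritable, NullWritable> {
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context) {
            // no-op
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Take the snapshot through the Admin API.
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Admin admin = connection.getAdmin()) {
            admin.snapshot("my_snapshot", TableName.valueOf("my_table"));
        }
        // Configure a job that scans the snapshot files directly, bypassing the region servers.
        Job job = Job.getInstance(conf, "snapshot-scan");
        TableMapReduceUtil.initTableSnapshotMapperJob("my_snapshot", new Scan(),
            NoOpMapper.class, ImmutableBytesWritable.class, NullWritable.class,
            job, true, new Path("/tmp/snapshot_restore"));
    }
}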

Example 40 with Scan

Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.

The class TestTableSnapshotInputFormat, method testRestoreSnapshotDoesNotCreateBackRefLinksInit:

@Override
public void testRestoreSnapshotDoesNotCreateBackRefLinksInit(TableName tableName, String snapshotName, Path tmpTableDir) throws Exception {
    Job job = new Job(UTIL.getConfiguration());
    TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName, new Scan(), TestTableSnapshotMapper.class, ImmutableBytesWritable.class, NullWritable.class, job, false, tmpTableDir);
}
Also used : Scan(org.apache.hadoop.hbase.client.Scan) Job(org.apache.hadoop.mapreduce.Job)
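
For completeness: reading a snapshot does not require MapReduce at all. The client package also provides TableSnapshotScanner, which restores the snapshot into a temporary directory and scans the files directly. A minimal sketch, with hypothetical snapshot and restore-directory names:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableSnapshotScanner;

public class SnapshotScannerSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // The restore directory should live on the same filesystem as the HBase root dir.
        Path restoreDir = new Path("/tmp/snapshot_restore");
        try (TableSnapshotScanner scanner =
                 new TableSnapshotScanner(conf, restoreDir, "my_snapshot", new Scan())) {
            for (Result result : scanner) {
                System.out.println(result);
            }
        }
    }
}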

Aggregations

Scan (org.apache.hadoop.hbase.client.Scan): 950
Test (org.junit.Test): 495
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 302
Result (org.apache.hadoop.hbase.client.Result): 286
Cell (org.apache.hadoop.hbase.Cell): 258
ArrayList (java.util.ArrayList): 238
Table (org.apache.hadoop.hbase.client.Table): 178
Put (org.apache.hadoop.hbase.client.Put): 161
BaseConnectionlessQueryTest (org.apache.phoenix.query.BaseConnectionlessQueryTest): 153
IOException (java.io.IOException): 135
TableName (org.apache.hadoop.hbase.TableName): 98
Delete (org.apache.hadoop.hbase.client.Delete): 95
Filter (org.apache.hadoop.hbase.filter.Filter): 95
KeyValue (org.apache.hadoop.hbase.KeyValue): 84
Connection (org.apache.hadoop.hbase.client.Connection): 81
SkipScanFilter (org.apache.phoenix.filter.SkipScanFilter): 78
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 78
RowKeyComparisonFilter (org.apache.phoenix.filter.RowKeyComparisonFilter): 72
Configuration (org.apache.hadoop.conf.Configuration): 51
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 51