Example 1 with TScan

Use of org.apache.hadoop.hbase.thrift2.generated.TScan in project hbase by apache.

From the class ThriftHBaseServiceHandler, method getScannerResults:

@Override
public List<TResult> getScannerResults(ByteBuffer table, TScan scan, int numRows) throws TIOError, TException {
    Table htable = getTable(table);
    List<TResult> results = null;
    ResultScanner scanner = null;
    try {
        // Convert the Thrift TScan into a client-side Scan, then fetch up to numRows rows in one call
        scanner = htable.getScanner(scanFromThrift(scan));
        results = resultsFromHBase(scanner.next(numRows));
    } catch (IOException e) {
        throw getTIOError(e);
    } finally {
        if (scanner != null) {
            scanner.close();
        }
        closeTable(htable);
    }
    return results;
}
Also used: Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) IOException(java.io.IOException) TResult(org.apache.hadoop.hbase.thrift2.generated.TResult)
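
For context, a remote application reaches this handler through the generated THBaseService Thrift interface rather than by calling it directly. The sketch below shows one way a Java client might invoke getScannerResults; the host, port, transport, and table name are assumptions and depend on how the thrift2 ThriftServer is actually started (a framed transport or a different protocol may be required in your deployment).

import java.nio.ByteBuffer;
import java.util.List;

import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
import org.apache.hadoop.hbase.thrift2.generated.TResult;
import org.apache.hadoop.hbase.thrift2.generated.TScan;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class ScannerResultsClientSketch {
    public static void main(String[] args) throws Exception {
        // Assumed endpoint of a running thrift2 ThriftServer; adjust host, port, and transport as needed
        TTransport transport = new TSocket("localhost", 9090);
        transport.open();
        THBaseService.Iface client = new THBaseService.Client(new TBinaryProtocol(transport));

        TScan scan = new TScan();
        scan.setStartRow("row".getBytes());

        // One round trip: the handler above opens a scanner, reads up to 10 rows, and closes it
        List<TResult> results =
                client.getScannerResults(ByteBuffer.wrap("example_table".getBytes()), scan, 10);
        for (TResult result : results) {
            System.out.println(new String(result.getRow()));
        }
        transport.close();
    }
}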

Example 2 with TScan

Use of org.apache.hadoop.hbase.thrift2.generated.TScan in project hbase by apache.

From the class TestThriftHBaseServiceHandler, method testGetScannerResults:

@Test
public void testGetScannerResults() throws Exception {
    ThriftHBaseServiceHandler handler = createHandler();
    ByteBuffer table = wrap(tableAname);
    // insert data
    TColumnValue columnValue = new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
    List<TColumnValue> columnValues = new ArrayList<>(1);
    columnValues.add(columnValue);
    for (int i = 0; i < 20; i++) {
        TPut put = new TPut(wrap(("testGetScannerResults" + pad(i, (byte) 2)).getBytes()), columnValues);
        handler.put(table, put);
    }
    // create scan instance
    TScan scan = new TScan();
    List<TColumn> columns = new ArrayList<>(1);
    TColumn column = new TColumn();
    column.setFamily(familyAname);
    column.setQualifier(qualifierAname);
    columns.add(column);
    scan.setColumns(columns);
    scan.setStartRow("testGetScannerResults".getBytes());
    // get 5 rows and check the returned results
    scan.setStopRow("testGetScannerResults05".getBytes());
    List<TResult> results = handler.getScannerResults(table, scan, 5);
    assertEquals(5, results.size());
    for (int i = 0; i < 5; i++) {
        // check if the rows are returned and in order
        assertArrayEquals(("testGetScannerResults" + pad(i, (byte) 2)).getBytes(), results.get(i).getRow());
    }
    // get 10 rows and check the returned results
    scan.setStopRow("testGetScannerResults10".getBytes());
    results = handler.getScannerResults(table, scan, 10);
    assertEquals(10, results.size());
    for (int i = 0; i < 10; i++) {
        // check if the rows are returned and in order
        assertArrayEquals(("testGetScannerResults" + pad(i, (byte) 2)).getBytes(), results.get(i).getRow());
    }
    // get 20 rows and check the returned results
    scan.setStopRow("testGetScannerResults20".getBytes());
    results = handler.getScannerResults(table, scan, 20);
    assertEquals(20, results.size());
    for (int i = 0; i < 20; i++) {
        // check if the rows are returned and in order
        assertArrayEquals(("testGetScannerResults" + pad(i, (byte) 2)).getBytes(), results.get(i).getRow());
    }
    // reverse scan
    scan = new TScan();
    scan.setColumns(columns);
    scan.setReversed(true);
    scan.setStartRow("testGetScannerResults20".getBytes());
    scan.setStopRow("testGetScannerResults".getBytes());
    results = handler.getScannerResults(table, scan, 20);
    assertEquals(20, results.size());
    for (int i = 0; i < 20; i++) {
        // check if the rows are returned and in order
        assertArrayEquals(("testGetScannerResults" + pad(19 - i, (byte) 2)).getBytes(), results.get(i).getRow());
    }
}
Also used: TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) TScan(org.apache.hadoop.hbase.thrift2.generated.TScan) ArrayList(java.util.ArrayList) TColumnValue(org.apache.hadoop.hbase.thrift2.generated.TColumnValue) TPut(org.apache.hadoop.hbase.thrift2.generated.TPut) ByteBuffer(java.nio.ByteBuffer) TResult(org.apache.hadoop.hbase.thrift2.generated.TResult) Test(org.junit.Test)
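
The row keys above rely on a pad helper defined in the test class to zero-pad the row index so that lexicographic byte ordering matches numeric ordering (otherwise "testGetScannerResults10" would sort before "testGetScannerResults2"). A minimal sketch of such a helper follows; it is a re-creation for illustration, and the actual implementation in TestThriftHBaseServiceHandler may differ in detail.

public class PadSketch {
    // Hypothetical re-creation of the helper that builds row keys like
    // "testGetScannerResults05"; the second argument is the desired width
    // (2 in the scan tests above, 3 in the batch-size test below).
    static String pad(int n, byte padTo) {
        String res = Integer.toString(n);
        while (res.length() < padTo) {
            res = "0" + res;
        }
        return res;
    }

    public static void main(String[] args) {
        System.out.println("testGetScannerResults" + pad(5, (byte) 2)); // prints testGetScannerResults05
    }
}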

Example 3 with TScan

Use of org.apache.hadoop.hbase.thrift2.generated.TScan in project hbase by apache.

From the class TestThriftHBaseServiceHandler, method testScanWithBatchSize:

@Test
public void testScanWithBatchSize() throws Exception {
    ThriftHBaseServiceHandler handler = createHandler();
    ByteBuffer table = wrap(tableAname);
    // insert data
    List<TColumnValue> columnValues = new ArrayList<>(100);
    for (int i = 0; i < 100; i++) {
        String colNum = pad(i, (byte) 3);
        TColumnValue columnValue = new TColumnValue(wrap(familyAname), wrap(("col" + colNum).getBytes()), wrap(("val" + colNum).getBytes()));
        columnValues.add(columnValue);
    }
    TPut put = new TPut(wrap(("testScanWithBatchSize").getBytes()), columnValues);
    handler.put(table, put);
    // create scan instance
    TScan scan = new TScan();
    List<TColumn> columns = new ArrayList<>(1);
    TColumn column = new TColumn();
    column.setFamily(familyAname);
    columns.add(column);
    scan.setColumns(columns);
    scan.setStartRow("testScanWithBatchSize".getBytes());
    scan.setStopRow("testScanWithBatchSize￿".getBytes());
    // set batch size to 10 columns per call
    scan.setBatchSize(10);
    // get scanner
    int scanId = handler.openScanner(table, scan);
    List<TResult> results = null;
    for (int i = 0; i < 10; i++) {
        // get batch for single row (10x10 is what we expect)
        results = handler.getScannerRows(scanId, 1);
        assertEquals(1, results.size());
        // check length of batch
        List<TColumnValue> cols = results.get(0).getColumnValues();
        assertEquals(10, cols.size());
        // check if the columns are returned and in order
        for (int y = 0; y < 10; y++) {
            int colNum = y + (10 * i);
            String colNumPad = pad(colNum, (byte) 3);
            assertArrayEquals(("col" + colNumPad).getBytes(), cols.get(y).getQualifier());
        }
    }
    // check that we are at the end of the scan
    results = handler.getScannerRows(scanId, 1);
    assertEquals(0, results.size());
    // close scanner and check that it was indeed closed
    handler.closeScanner(scanId);
    try {
        handler.getScannerRows(scanId, 1);
        fail("Scanner id should be invalid");
    } catch (TIllegalArgument e) {
        // expected: the scanner id is no longer valid once the scanner has been closed
    }
}
Also used: TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) ArrayList(java.util.ArrayList) TColumnValue(org.apache.hadoop.hbase.thrift2.generated.TColumnValue) ByteBuffer(java.nio.ByteBuffer) TResult(org.apache.hadoop.hbase.thrift2.generated.TResult) TIllegalArgument(org.apache.hadoop.hbase.thrift2.generated.TIllegalArgument) TScan(org.apache.hadoop.hbase.thrift2.generated.TScan) TPut(org.apache.hadoop.hbase.thrift2.generated.TPut) Test(org.junit.Test)
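
The batch size set on the TScan only takes effect because the handler converts it into a client-side Scan (scanFromThrift in Example 1) before opening the scanner. Below is a simplified, hedged sketch of what that conversion could look like for the fields used in these tests; the real ThriftUtilities.scanFromThrift covers many more fields (columns, filters, time ranges, authorizations, reversed scans), so treat this as illustrative rather than a copy of the HBase code.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.thrift2.generated.TScan;

public class ScanMappingSketch {
    // Illustrative only: maps a few TScan fields onto a client-side Scan.
    public static Scan scanFromThriftSketch(TScan in) {
        Scan out = new Scan();
        if (in.isSetStartRow()) {
            out.setStartRow(in.getStartRow());
        }
        if (in.isSetStopRow()) {
            out.setStopRow(in.getStopRow());
        }
        if (in.isSetCaching()) {
            out.setCaching(in.getCaching());
        }
        if (in.isSetBatchSize()) {
            // caps the number of cells per returned Result, which is why
            // testScanWithBatchSize sees 10 columns per getScannerRows call
            out.setBatch(in.getBatchSize());
        }
        return out;
    }
}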

Example 4 with TScan

Use of org.apache.hadoop.hbase.thrift2.generated.TScan in project hbase by apache.

From the class TestThriftHBaseServiceHandler, method testScanWithFilter:

@Test
public void testScanWithFilter() throws Exception {
    ThriftHBaseServiceHandler handler = createHandler();
    ByteBuffer table = wrap(tableAname);
    // insert data
    TColumnValue columnValue = new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
    List<TColumnValue> columnValues = new ArrayList<>(1);
    columnValues.add(columnValue);
    for (int i = 0; i < 10; i++) {
        TPut put = new TPut(wrap(("testScanWithFilter" + i).getBytes()), columnValues);
        handler.put(table, put);
    }
    // create scan instance with filter
    TScan scan = new TScan();
    List<TColumn> columns = new ArrayList<>(1);
    TColumn column = new TColumn();
    column.setFamily(familyAname);
    column.setQualifier(qualifierAname);
    columns.add(column);
    scan.setColumns(columns);
    scan.setStartRow("testScanWithFilter".getBytes());
    scan.setStopRow("testScanWithFilter￿".getBytes());
    // only get the key part
    scan.setFilterString(wrap(("KeyOnlyFilter()").getBytes()));
    // get scanner and rows
    int scanId = handler.openScanner(table, scan);
    List<TResult> results = handler.getScannerRows(scanId, 10);
    assertEquals(10, results.size());
    for (int i = 0; i < 10; i++) {
        // check if the rows are returned and in order
        assertArrayEquals(("testScanWithFilter" + i).getBytes(), results.get(i).getRow());
        // check that the value is indeed stripped by the filter
        assertEquals(0, results.get(i).getColumnValues().get(0).getValue().length);
    }
    // check that we are at the end of the scan
    results = handler.getScannerRows(scanId, 10);
    assertEquals(0, results.size());
    // close scanner and check that it was indeed closed
    handler.closeScanner(scanId);
    try {
        handler.getScannerRows(scanId, 10);
        fail("Scanner id should be invalid");
    } catch (TIllegalArgument e) {
        // expected: the scanner id is no longer valid once the scanner has been closed
    }
}
Also used: TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) ArrayList(java.util.ArrayList) TColumnValue(org.apache.hadoop.hbase.thrift2.generated.TColumnValue) ByteBuffer(java.nio.ByteBuffer) TResult(org.apache.hadoop.hbase.thrift2.generated.TResult) TIllegalArgument(org.apache.hadoop.hbase.thrift2.generated.TIllegalArgument) TScan(org.apache.hadoop.hbase.thrift2.generated.TScan) TPut(org.apache.hadoop.hbase.thrift2.generated.TPut) Test(org.junit.Test)
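
The string "KeyOnlyFilter()" is parsed server-side into a real Filter before the scanner is opened, which is why each returned cell carries a zero-length value. The sketch below illustrates the general mechanism using HBase's ParseFilter; it is a hedged illustration of the idea, not the handler's actual code path.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.ParseFilter;

public class FilterStringSketch {
    // Sketch: parse a filter expression such as "KeyOnlyFilter()" into a Filter
    // and attach it to a client-side Scan, roughly what the Thrift layer does
    // with TScan.filterString before the scan runs.
    public static Scan withFilter(String filterExpression) throws Exception {
        Scan scan = new Scan();
        Filter filter = new ParseFilter().parseFilterString(filterExpression);
        scan.setFilter(filter);
        return scan;
    }
}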

Example 5 with TScan

Use of org.apache.hadoop.hbase.thrift2.generated.TScan in project hbase by apache.

From the class TestThriftHBaseServiceHandler, method testLongLivedScan:

/**
   * Tests keeping an HBase scanner alive for long periods of time. Each call to getScannerRows()
   * should reset the ConnectionCache timeout for the scanner's connection.
   * @throws Exception
   */
@Test
public void testLongLivedScan() throws Exception {
    int numTrials = 6;
    int trialPause = 1000;
    int cleanUpInterval = 100;
    Configuration conf = new Configuration(UTIL.getConfiguration());
    // Set the ConnectionCache timeout to trigger halfway through the trials
    conf.setInt(ThriftHBaseServiceHandler.MAX_IDLETIME, (numTrials / 2) * trialPause);
    conf.setInt(ThriftHBaseServiceHandler.CLEANUP_INTERVAL, cleanUpInterval);
    ThriftHBaseServiceHandler handler = new ThriftHBaseServiceHandler(conf, UserProvider.instantiate(conf));
    ByteBuffer table = wrap(tableAname);
    // insert data
    TColumnValue columnValue = new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
    List<TColumnValue> columnValues = new ArrayList<>(1);
    columnValues.add(columnValue);
    for (int i = 0; i < numTrials; i++) {
        TPut put = new TPut(wrap(("testScan" + i).getBytes()), columnValues);
        handler.put(table, put);
    }
    // create scan instance
    TScan scan = new TScan();
    List<TColumn> columns = new ArrayList<>(1);
    TColumn column = new TColumn();
    column.setFamily(familyAname);
    column.setQualifier(qualifierAname);
    columns.add(column);
    scan.setColumns(columns);
    scan.setStartRow("testScan".getBytes());
    scan.setStopRow("testScan￿".getBytes());
    // Prevent the scanner from caching results
    scan.setCaching(1);
    // get scanner and rows
    int scanId = handler.openScanner(table, scan);
    for (int i = 0; i < numTrials; i++) {
        // Make sure that the Scanner doesn't throw an exception after the ConnectionCache timeout
        List<TResult> results = handler.getScannerRows(scanId, 1);
        assertArrayEquals(("testScan" + i).getBytes(), results.get(0).getRow());
        Thread.sleep(trialPause);
    }
}
Also used: Configuration(org.apache.hadoop.conf.Configuration) TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) ArrayList(java.util.ArrayList) TColumnValue(org.apache.hadoop.hbase.thrift2.generated.TColumnValue) ByteBuffer(java.nio.ByteBuffer) TResult(org.apache.hadoop.hbase.thrift2.generated.TResult) TScan(org.apache.hadoop.hbase.thrift2.generated.TScan) TPut(org.apache.hadoop.hbase.thrift2.generated.TPut) Test(org.junit.Test)
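
The two configuration keys exercised above are what keep a long-lived scanner's cached connection from being evicted between calls. A minimal sketch of setting them outside the test harness follows; the concrete values, the assumption that they are in milliseconds, and the accessibility of this constructor outside the org.apache.hadoop.hbase.thrift2 package are all assumptions, not guarantees.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.thrift2.ThriftHBaseServiceHandler;

public class HandlerConfigSketch {
    public static ThriftHBaseServiceHandler createHandler() {
        Configuration conf = HBaseConfiguration.create();
        // Assumed values: evict idle connections after 10 minutes, sweep every 10 seconds
        conf.setInt(ThriftHBaseServiceHandler.MAX_IDLETIME, 10 * 60 * 1000);
        conf.setInt(ThriftHBaseServiceHandler.CLEANUP_INTERVAL, 10 * 1000);
        // Same construction as in the test above; assumes the constructor is reachable from your code
        return new ThriftHBaseServiceHandler(conf, UserProvider.instantiate(conf));
    }
}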

Aggregations

ByteBuffer (java.nio.ByteBuffer): 11
TScan (org.apache.hadoop.hbase.thrift2.generated.TScan): 11
ArrayList (java.util.ArrayList): 10
TColumnValue (org.apache.hadoop.hbase.thrift2.generated.TColumnValue): 10
TPut (org.apache.hadoop.hbase.thrift2.generated.TPut): 10
TResult (org.apache.hadoop.hbase.thrift2.generated.TResult): 10
Test (org.junit.Test): 10
TColumn (org.apache.hadoop.hbase.thrift2.generated.TColumn): 9
TIllegalArgument (org.apache.hadoop.hbase.thrift2.generated.TIllegalArgument): 6
HashMap (java.util.HashMap): 2
Scan (org.apache.hadoop.hbase.client.Scan): 2
TAuthorization (org.apache.hadoop.hbase.thrift2.generated.TAuthorization): 2
TCellVisibility (org.apache.hadoop.hbase.thrift2.generated.TCellVisibility): 2
TTimeRange (org.apache.hadoop.hbase.thrift2.generated.TTimeRange): 2
IOException (java.io.IOException): 1
Map (java.util.Map): 1
Configuration (org.apache.hadoop.conf.Configuration): 1
Delete (org.apache.hadoop.hbase.client.Delete): 1
Get (org.apache.hadoop.hbase.client.Get): 1
Increment (org.apache.hadoop.hbase.client.Increment): 1