Example 1 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project camel by apache.

From class HBaseConsumer, method poll():

@Override
protected int poll() throws Exception {
    try (Table table = endpoint.getTable()) {
        shutdownRunningTask = null;
        pendingExchanges = 0;
        Queue<Exchange> queue = new LinkedList<>();
        Scan scan = new Scan();
        List<Filter> filters = new LinkedList<>();
        if (endpoint.getFilters() != null) {
            filters.addAll(endpoint.getFilters());
        }
        if (maxMessagesPerPoll > 0) {
            filters.add(new PageFilter(maxMessagesPerPoll));
        }
        if (!filters.isEmpty()) {
            Filter compoundFilter = new FilterList(filters);
            scan.setFilter(compoundFilter);
        }
        if (rowModel != null && rowModel.getCells() != null) {
            Set<HBaseCell> cellModels = rowModel.getCells();
            for (HBaseCell cellModel : cellModels) {
                scan.addColumn(HBaseHelper.getHBaseFieldAsBytes(cellModel.getFamily()), HBaseHelper.getHBaseFieldAsBytes(cellModel.getQualifier()));
            }
        }
        ResultScanner scanner = table.getScanner(scan);
        int exchangeCount = 0;
        // The next three statements are used just to get a reference to the BodyCellMappingStrategy instance.
        Exchange exchange = endpoint.createExchange();
        exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
        CellMappingStrategy mappingStrategy = endpoint.getCellMappingStrategyFactory().getStrategy(exchange.getIn());
        for (Result result = scanner.next(); (exchangeCount < maxMessagesPerPoll || maxMessagesPerPoll <= 0) && result != null; result = scanner.next()) {
            HBaseData data = new HBaseData();
            HBaseRow resultRow = new HBaseRow();
            resultRow.apply(rowModel);
            byte[] row = result.getRow();
            resultRow.setId(endpoint.getCamelContext().getTypeConverter().convertTo(rowModel.getRowType(), row));
            List<Cell> cells = result.listCells();
            if (cells != null) {
                Set<HBaseCell> cellModels = rowModel.getCells();
                if (cellModels.size() > 0) {
                    for (HBaseCell modelCell : cellModels) {
                        HBaseCell resultCell = new HBaseCell();
                        String family = modelCell.getFamily();
                        String column = modelCell.getQualifier();
                        resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(modelCell.getValueType(), result.getValue(HBaseHelper.getHBaseFieldAsBytes(family), HBaseHelper.getHBaseFieldAsBytes(column))));
                        resultCell.setFamily(modelCell.getFamily());
                        resultCell.setQualifier(modelCell.getQualifier());
                        resultRow.getCells().add(resultCell);
                    }
                } else {
                    // just need to put every key value into the result Cells
                    for (Cell cell : cells) {
                        String qualifier = new String(CellUtil.cloneQualifier(cell));
                        String family = new String(CellUtil.cloneFamily(cell));
                        HBaseCell resultCell = new HBaseCell();
                        resultCell.setFamily(family);
                        resultCell.setQualifier(qualifier);
                        resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(String.class, CellUtil.cloneValue(cell)));
                        resultRow.getCells().add(resultCell);
                    }
                }
                data.getRows().add(resultRow);
                exchange = endpoint.createExchange();
                // Probably overkill but kept it here for consistency.
                exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
                mappingStrategy.applyScanResults(exchange.getIn(), data);
                //Make sure that there is a header containing the marked row ids, so that they can be deleted.
                exchange.getIn().setHeader(HBaseAttribute.HBASE_MARKED_ROW_ID.asHeader(), result.getRow());
                queue.add(exchange);
                exchangeCount++;
            }
        }
        scanner.close();
        return queue.isEmpty() ? 0 : processBatch(CastUtils.cast(queue));
    }
}
Also used: java.util.LinkedList, org.apache.camel.Exchange, org.apache.camel.component.hbase.mapping.CellMappingStrategy, org.apache.camel.component.hbase.model.HBaseCell, org.apache.camel.component.hbase.model.HBaseData, org.apache.camel.component.hbase.model.HBaseRow, org.apache.hadoop.hbase.Cell, org.apache.hadoop.hbase.client.Result, org.apache.hadoop.hbase.client.ResultScanner, org.apache.hadoop.hbase.client.Scan, org.apache.hadoop.hbase.client.Table, org.apache.hadoop.hbase.filter.Filter, org.apache.hadoop.hbase.filter.FilterList, org.apache.hadoop.hbase.filter.PageFilter
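Most of the consumer above is Camel plumbing; the HBase-specific core is a Scan capped by a PageFilter inside a FilterList, iterated through a ResultScanner while also counting rows on the client (PageFilter is evaluated per region, so more rows than the page size can come back). Below is a minimal standalone sketch of that pattern, not the project's code, assuming a reachable cluster and a hypothetical table my_table with column family cf and qualifier q:

import java.util.LinkedList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class PagedScanSketch {
    public static void main(String[] args) throws Exception {
        // cap on rows fetched per poll, playing the role of maxMessagesPerPoll in the consumer
        int maxMessagesPerPoll = 10;
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("my_table"))) { // hypothetical table
            Scan scan = new Scan();
            // restrict the scan to a single column, like scan.addColumn(...) above
            scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q")); // hypothetical family/qualifier
            List<Filter> filters = new LinkedList<>();
            filters.add(new PageFilter(maxMessagesPerPoll));
            scan.setFilter(new FilterList(filters));
            int count = 0;
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result = scanner.next();
                     result != null && count < maxMessagesPerPoll;
                     result = scanner.next()) {
                    System.out.println(Bytes.toStringBinary(result.getRow()));
                    count++;
                }
            }
        }
    }
}

Unlike the consumer, the sketch closes the scanner with try-with-resources; the explicit scanner.close() in the Camel code would be skipped if an exception were thrown during iteration.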

Example 2 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache.

From class MetaTableAccessor, method scanMeta():

/**
   * Performs a scan of the hbase:meta table.
   * @param connection connection we're using
   * @param startRow where to start the scan; pass null to begin at the first row
   * @param stopRow where to stop the scan; pass null to scan all rows from the start row onward
   * @param type the part of meta to scan
   * @param maxRows maximum number of rows to return
   * @param visitor Visitor invoked against each row
   * @throws IOException
   */
public static void scanMeta(Connection connection, @Nullable final byte[] startRow, @Nullable final byte[] stopRow, QueryType type, int maxRows, final Visitor visitor) throws IOException {
    int rowUpperLimit = maxRows > 0 ? maxRows : Integer.MAX_VALUE;
    Scan scan = getMetaScan(connection, rowUpperLimit);
    for (byte[] family : type.getFamilies()) {
        scan.addFamily(family);
    }
    if (startRow != null)
        scan.setStartRow(startRow);
    if (stopRow != null)
        scan.setStopRow(stopRow);
    if (LOG.isTraceEnabled()) {
        LOG.trace("Scanning META" + " starting at row=" + Bytes.toStringBinary(startRow) + " stopping at row=" + Bytes.toStringBinary(stopRow) + " for max=" + rowUpperLimit + " with caching=" + scan.getCaching());
    }
    int currentRow = 0;
    try (Table metaTable = getMetaHTable(connection)) {
        try (ResultScanner scanner = metaTable.getScanner(scan)) {
            Result data;
            while ((data = scanner.next()) != null) {
                if (data.isEmpty())
                    continue;
                // Break if visit returns false.
                if (!visitor.visit(data))
                    break;
                if (++currentRow >= rowUpperLimit)
                    break;
            }
        }
    }
    if (visitor != null && visitor instanceof Closeable) {
        try {
            ((Closeable) visitor).close();
        } catch (Throwable t) {
            ExceptionUtil.rethrowIfInterrupt(t);
            LOG.debug("Got exception in closing the meta scanner visitor", t);
        }
    }
}
Also used: java.io.Closeable, org.apache.hadoop.hbase.client.Result, org.apache.hadoop.hbase.client.ResultScanner, org.apache.hadoop.hbase.client.Scan, org.apache.hadoop.hbase.client.Table
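scanMeta bounds the scan with start and stop rows and enforces maxRows on the client; the same bounded-scan shape works against any table. A minimal sketch under the same assumptions as before (hypothetical table my_table, family cf, illustrative row keys); setStartRow/setStopRow match the API used above, while newer clients would use withStartRow/withStopRow:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class BoundedScanSketch {
    public static void main(String[] args) throws Exception {
        // client-side cap, playing the role of maxRows/rowUpperLimit in scanMeta
        int rowUpperLimit = 100;
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("my_table"))) { // hypothetical
            Scan scan = new Scan();
            scan.addFamily(Bytes.toBytes("cf"));        // hypothetical column family
            scan.setStartRow(Bytes.toBytes("row-000")); // inclusive start of the key range
            scan.setStopRow(Bytes.toBytes("row-100"));  // exclusive end of the key range
            int currentRow = 0;
            try (ResultScanner scanner = table.getScanner(scan)) {
                Result data;
                while ((data = scanner.next()) != null) {
                    if (data.isEmpty()) {
                        continue; // skip rows with no cells, as scanMeta does
                    }
                    System.out.println(Bytes.toStringBinary(data.getRow()));
                    if (++currentRow >= rowUpperLimit) {
                        break; // stop once the row limit is reached
                    }
                }
            }
        }
    }
}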

Example 3 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache.

From class MetaTableAccessor, method getClosestRegionInfo():

/**
   * @return the HRegionInfo stored in the hbase:meta row closest to the passed <code>row</code> for the given table
   * @throws java.io.IOException
   */
@NonNull
public static HRegionInfo getClosestRegionInfo(Connection connection, @NonNull final TableName tableName, @NonNull final byte[] row) throws IOException {
    byte[] searchRow = HRegionInfo.createRegionName(tableName, row, HConstants.NINES, false);
    Scan scan = getMetaScan(connection, 1);
    scan.setReversed(true);
    scan.setStartRow(searchRow);
    try (ResultScanner resultScanner = getMetaHTable(connection).getScanner(scan)) {
        Result result = resultScanner.next();
        if (result == null) {
            throw new TableNotFoundException("Cannot find row in META " + " for table: " + tableName + ", row=" + Bytes.toStringBinary(row));
        }
        HRegionInfo regionInfo = getHRegionInfo(result);
        if (regionInfo == null) {
            throw new IOException("HRegionInfo was null or empty in Meta for " + tableName + ", row=" + Bytes.toStringBinary(row));
        }
        return regionInfo;
    }
}
Also used: edu.umd.cs.findbugs.annotations.NonNull, java.io.IOException, java.io.InterruptedIOException, org.apache.hadoop.hbase.client.Result, org.apache.hadoop.hbase.client.ResultScanner, org.apache.hadoop.hbase.client.Scan
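getClosestRegionInfo works by scanning in reverse from a probe key and taking the first result, which is the last row at or before that key. A minimal sketch of the same reverse-scan lookup against an arbitrary table; the helper name is hypothetical and the caller supplies an open Connection:

import java.io.IOException;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

public class ClosestRowSketch {
    /** Returns the key of the last row at or before probeKey, or null if there is none. */
    static byte[] closestRowAtOrBefore(Connection connection, String tableName, byte[] probeKey)
            throws IOException {
        Scan scan = new Scan();
        scan.setReversed(true);     // walk the table backwards
        scan.setStartRow(probeKey); // a reversed scan starts at its highest key and moves down
        scan.setCaching(1);         // only the first result is needed
        try (Table table = connection.getTable(TableName.valueOf(tableName));
             ResultScanner scanner = table.getScanner(scan)) {
            Result result = scanner.next();
            return result == null ? null : result.getRow();
        }
    }
}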

Example 4 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache.

From class MetaTableAccessor, method getAllBarriers():

/**
   * Get all barriers in all regions.
   * @return a map of barrier lists in all regions
   * @throws IOException
   */
public static Map<String, List<Long>> getAllBarriers(Connection connection) throws IOException {
    Map<String, List<Long>> map = new HashMap<>();
    Scan scan = new Scan();
    scan.addFamily(HConstants.REPLICATION_BARRIER_FAMILY);
    try (Table t = getMetaHTable(connection);
        ResultScanner scanner = t.getScanner(scan)) {
        Result result;
        while ((result = scanner.next()) != null) {
            String key = Bytes.toString(result.getRow());
            List<Long> list = new ArrayList<>(result.rawCells().length);
            for (Cell cell : result.rawCells()) {
                list.add(Bytes.toLong(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()));
            }
            map.put(key, list);
        }
    }
    return map;
}
Also used: java.util.ArrayList, java.util.HashMap, java.util.LinkedHashMap, java.util.List, org.apache.hadoop.hbase.client.Result, org.apache.hadoop.hbase.client.ResultScanner, org.apache.hadoop.hbase.client.Scan, org.apache.hadoop.hbase.client.Table
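getAllBarriers decodes each barrier from the cell qualifier using the raw array/offset/length accessors; the copying CellUtil.cloneQualifier helper (used in Example 1) reads the same bytes with less ceremony. A small hedged sketch of that per-row decoding step, assuming, as the method above does, that every qualifier is an 8-byte encoded long:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class CellDecodeSketch {
    /** Collects the qualifiers of one scanned row as longs, mirroring getAllBarriers. */
    static List<Long> qualifiersAsLongs(Result result) {
        List<Long> values = new ArrayList<>(result.rawCells().length);
        for (Cell cell : result.rawCells()) {
            // cloneQualifier copies the qualifier into its own byte[]; the original code
            // avoids the copy by passing getQualifierArray/getQualifierOffset/getQualifierLength
            values.add(Bytes.toLong(CellUtil.cloneQualifier(cell)));
        }
        return values;
    }
}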

Example 5 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache.

From class TestRemoteTable, method testIteratorScaner():

/**
   * Tests the iterator() method of the scanner returned by RemoteHTable.
   */
@Test
public void testIteratorScaner() throws IOException {
    List<Put> puts = new ArrayList<>(4);
    Put put = new Put(ROW_1);
    put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
    puts.add(put);
    put = new Put(ROW_2);
    put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
    puts.add(put);
    put = new Put(ROW_3);
    put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
    puts.add(put);
    put = new Put(ROW_4);
    put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
    puts.add(put);
    remoteTable.put(puts);
    ResultScanner scanner = remoteTable.getScanner(new Scan());
    Iterator<Result> iterator = scanner.iterator();
    assertTrue(iterator.hasNext());
    int counter = 0;
    while (iterator.hasNext()) {
        iterator.next();
        counter++;
    }
    assertEquals(4, counter);
}
Also used: java.util.ArrayList, org.apache.hadoop.hbase.client.Put, org.apache.hadoop.hbase.client.Result, org.apache.hadoop.hbase.client.ResultScanner, org.apache.hadoop.hbase.client.Scan, org.junit.Test
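ResultScanner implements Iterable<Result>, so the explicit Iterator in the test can be replaced by an enhanced for loop. A minimal sketch that counts all rows of a hypothetical table my_table this way, closing the scanner with try-with-resources:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

public class IterateScannerSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("my_table")); // hypothetical table
             ResultScanner scanner = table.getScanner(new Scan())) {
            int counter = 0;
            // ResultScanner is Iterable<Result>, so iterator()/hasNext()/next()
            // collapses into an enhanced for loop
            for (Result result : scanner) {
                counter++;
            }
            System.out.println("rows: " + counter);
        }
    }
}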

Aggregations

ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 326
Scan (org.apache.hadoop.hbase.client.Scan): 295
Result (org.apache.hadoop.hbase.client.Result): 286
Table (org.apache.hadoop.hbase.client.Table): 160
Test (org.junit.Test): 143
Cell (org.apache.hadoop.hbase.Cell): 104
IOException (java.io.IOException): 102
TableName (org.apache.hadoop.hbase.TableName): 88
Connection (org.apache.hadoop.hbase.client.Connection): 75
Put (org.apache.hadoop.hbase.client.Put): 75
Delete (org.apache.hadoop.hbase.client.Delete): 70
ArrayList (java.util.ArrayList): 61
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 47
InterruptedIOException (java.io.InterruptedIOException): 46
CellScanner (org.apache.hadoop.hbase.CellScanner): 42
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 31
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 29
HTable (org.apache.hadoop.hbase.client.HTable): 29
Get (org.apache.hadoop.hbase.client.Get): 23
Admin (org.apache.hadoop.hbase.client.Admin): 22