Example 1 with Result

Use of org.apache.hadoop.hbase.client.Result in project camel by apache.

From the class HBaseConsumer, the method poll:

@Override
protected int poll() throws Exception {
    try (Table table = endpoint.getTable()) {
        shutdownRunningTask = null;
        pendingExchanges = 0;
        Queue<Exchange> queue = new LinkedList<>();
        Scan scan = new Scan();
        List<Filter> filters = new LinkedList<>();
        if (endpoint.getFilters() != null) {
            filters.addAll(endpoint.getFilters());
        }
        if (maxMessagesPerPoll > 0) {
            filters.add(new PageFilter(maxMessagesPerPoll));
        }
        if (!filters.isEmpty()) {
            Filter compoundFilter = new FilterList(filters);
            scan.setFilter(compoundFilter);
        }
        if (rowModel != null && rowModel.getCells() != null) {
            Set<HBaseCell> cellModels = rowModel.getCells();
            for (HBaseCell cellModel : cellModels) {
                scan.addColumn(HBaseHelper.getHBaseFieldAsBytes(cellModel.getFamily()), HBaseHelper.getHBaseFieldAsBytes(cellModel.getQualifier()));
            }
        }
        ResultScanner scanner = table.getScanner(scan);
        int exchangeCount = 0;
        // The next three statements are used just to get a reference to the BodyCellMappingStrategy instance.
        Exchange exchange = endpoint.createExchange();
        exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
        CellMappingStrategy mappingStrategy = endpoint.getCellMappingStrategyFactory().getStrategy(exchange.getIn());
        for (Result result = scanner.next(); (exchangeCount < maxMessagesPerPoll || maxMessagesPerPoll <= 0) && result != null; result = scanner.next()) {
            HBaseData data = new HBaseData();
            HBaseRow resultRow = new HBaseRow();
            resultRow.apply(rowModel);
            byte[] row = result.getRow();
            resultRow.setId(endpoint.getCamelContext().getTypeConverter().convertTo(rowModel.getRowType(), row));
            List<Cell> cells = result.listCells();
            if (cells != null) {
                Set<HBaseCell> cellModels = rowModel.getCells();
                if (!cellModels.isEmpty()) {
                    for (HBaseCell modelCell : cellModels) {
                        HBaseCell resultCell = new HBaseCell();
                        String family = modelCell.getFamily();
                        String column = modelCell.getQualifier();
                        resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(modelCell.getValueType(), result.getValue(HBaseHelper.getHBaseFieldAsBytes(family), HBaseHelper.getHBaseFieldAsBytes(column))));
                        resultCell.setFamily(modelCell.getFamily());
                        resultCell.setQualifier(modelCell.getQualifier());
                        resultRow.getCells().add(resultCell);
                    }
                } else {
                    // just need to put every key value into the result Cells
                    for (Cell cell : cells) {
                        String qualifier = new String(CellUtil.cloneQualifier(cell));
                        String family = new String(CellUtil.cloneFamily(cell));
                        HBaseCell resultCell = new HBaseCell();
                        resultCell.setFamily(family);
                        resultCell.setQualifier(qualifier);
                        resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(String.class, CellUtil.cloneValue(cell)));
                        resultRow.getCells().add(resultCell);
                    }
                }
                data.getRows().add(resultRow);
                exchange = endpoint.createExchange();
                // Probably overkill but kept it here for consistency.
                exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
                mappingStrategy.applyScanResults(exchange.getIn(), data);
                //Make sure that there is a header containing the marked row ids, so that they can be deleted.
                exchange.getIn().setHeader(HBaseAttribute.HBASE_MARKED_ROW_ID.asHeader(), result.getRow());
                queue.add(exchange);
                exchangeCount++;
            }
        }
        scanner.close();
        return queue.isEmpty() ? 0 : processBatch(CastUtils.cast(queue));
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) CellMappingStrategy(org.apache.camel.component.hbase.mapping.CellMappingStrategy) FilterList(org.apache.hadoop.hbase.filter.FilterList) LinkedList(java.util.LinkedList) HBaseCell(org.apache.camel.component.hbase.model.HBaseCell) Result(org.apache.hadoop.hbase.client.Result) Exchange(org.apache.camel.Exchange) PageFilter(org.apache.hadoop.hbase.filter.PageFilter) Filter(org.apache.hadoop.hbase.filter.Filter) HBaseData(org.apache.camel.component.hbase.model.HBaseData) HBaseRow(org.apache.camel.component.hbase.model.HBaseRow) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell)
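
For context, a minimal standalone sketch of the same scan-and-read pattern outside Camel. The table name "demo" and the page size are hypothetical; only the HBase client calls themselves are taken from the example above.

import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("demo"))) { // hypothetical table
            Scan scan = new Scan();
            // Cap the number of rows per scan, as maxMessagesPerPoll does above.
            scan.setFilter(new PageFilter(100));
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    System.out.println("row=" + Bytes.toString(result.getRow()));
                    List<Cell> cells = result.listCells();
                    if (cells == null) {
                        continue; // defensive: no cells in this row
                    }
                    for (Cell cell : cells) {
                        // CellUtil.clone* copies the bytes out of the shared backing buffer.
                        System.out.println("  " + Bytes.toString(CellUtil.cloneFamily(cell))
                                + ":" + Bytes.toString(CellUtil.cloneQualifier(cell))
                                + " = " + Bytes.toString(CellUtil.cloneValue(cell)));
                    }
                }
            }
        }
    }
}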

Example 2 with Result

Use of org.apache.hadoop.hbase.client.Result in project camel by apache.

From the class HBaseConvertionsTest, the method testPutMultiRows:

@Test
public void testPutMultiRows() throws Exception {
    if (systemReady) {
        ProducerTemplate template = context.createProducerTemplate();
        Map<String, Object> headers = new HashMap<String, Object>();
        headers.put(HBaseAttribute.HBASE_ROW_ID.asHeader(), key[0]);
        headers.put(HBaseAttribute.HBASE_FAMILY.asHeader(), INFO_FAMILY);
        headers.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(), column[0]);
        headers.put(HBaseAttribute.HBASE_VALUE.asHeader(), body[0]);
        headers.put(HBaseAttribute.HBASE_ROW_ID.asHeader(2), key[1]);
        headers.put(HBaseAttribute.HBASE_FAMILY.asHeader(2), INFO_FAMILY);
        headers.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(2), column[0]);
        headers.put(HBaseAttribute.HBASE_VALUE.asHeader(2), body[1]);
        headers.put(HBaseAttribute.HBASE_ROW_ID.asHeader(3), key[2]);
        headers.put(HBaseAttribute.HBASE_FAMILY.asHeader(3), INFO_FAMILY);
        headers.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(3), column[0]);
        headers.put(HBaseAttribute.HBASE_VALUE.asHeader(3), body[2]);
        headers.put(HBaseConstants.OPERATION, HBaseConstants.PUT);
        template.sendBodyAndHeaders("direct:start", null, headers);
        Configuration configuration = hbaseUtil.getHBaseAdmin().getConfiguration();
        Connection conn = ConnectionFactory.createConnection(configuration);
        Table bar = conn.getTable(TableName.valueOf(PERSON_TABLE));
        Get get = new Get(Bytes.toBytes((Integer) key[0]));
        //Check row 1
        get.addColumn(INFO_FAMILY.getBytes(), column[0].getBytes());
        Result result = bar.get(get);
        byte[] resultValue = result.value();
        assertArrayEquals(Bytes.toBytes((Long) body[0]), resultValue);
        //Check row 2
        get = new Get(Bytes.toBytes((String) key[1]));
        get.addColumn(INFO_FAMILY.getBytes(), column[0].getBytes());
        result = bar.get(get);
        resultValue = result.value();
        assertArrayEquals(Bytes.toBytes((Boolean) body[1]), resultValue);
        //Check row 3
        get = new Get(Bytes.toBytes((String) key[2]));
        get.addColumn(INFO_FAMILY.getBytes(), column[0].getBytes());
        result = bar.get(get);
        resultValue = result.value();
        assertArrayEquals(Bytes.toBytes((String) body[2]), resultValue);
        IOHelper.close(bar);
        // Close the connection as well to avoid leaking it between tests.
        IOHelper.close(conn);
    }
}
Also used : ProducerTemplate(org.apache.camel.ProducerTemplate) Table(org.apache.hadoop.hbase.client.Table) Configuration(org.apache.hadoop.conf.Configuration) HashMap(java.util.HashMap) Connection(org.apache.hadoop.hbase.client.Connection) Result(org.apache.hadoop.hbase.client.Result) Get(org.apache.hadoop.hbase.client.Get) Test(org.junit.Test)
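
The assertions above rely on Result.value(), which returns the value of the first (and here only) cell in the Result. A minimal sketch of that Get-and-read path, with a hypothetical table "person", family "info", and qualifier "firstName":

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class GetSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("person"))) { // hypothetical table
            Get get = new Get(Bytes.toBytes(1)); // an int row key, as key[0] is above
            get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("firstName")); // hypothetical column
            Result result = table.get(get);
            if (!result.isEmpty()) {
                // value() returns the value of the first cell in the Result.
                System.out.println(Bytes.toString(result.value()));
            }
        }
    }
}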

Example 3 with Result

Use of org.apache.hadoop.hbase.client.Result in project hbase by apache.

From the class MetaTableAccessor, the method scanMeta:

/**
   * Performs a scan of the META table.
   * @param connection connection we're using
   * @param startRow where to start the scan; pass null to begin at the first row
   * @param stopRow where to stop the scan; pass null to scan all rows from the start row
   * @param type the part of meta to scan
   * @param maxRows maximum number of rows to return
   * @param visitor visitor invoked against each row
   * @throws IOException if the scan fails
   */
public static void scanMeta(Connection connection, @Nullable final byte[] startRow, @Nullable final byte[] stopRow, QueryType type, int maxRows, final Visitor visitor) throws IOException {
    int rowUpperLimit = maxRows > 0 ? maxRows : Integer.MAX_VALUE;
    Scan scan = getMetaScan(connection, rowUpperLimit);
    for (byte[] family : type.getFamilies()) {
        scan.addFamily(family);
    }
    if (startRow != null)
        scan.setStartRow(startRow);
    if (stopRow != null)
        scan.setStopRow(stopRow);
    if (LOG.isTraceEnabled()) {
        LOG.trace("Scanning META" + " starting at row=" + Bytes.toStringBinary(startRow) + " stopping at row=" + Bytes.toStringBinary(stopRow) + " for max=" + rowUpperLimit + " with caching=" + scan.getCaching());
    }
    int currentRow = 0;
    try (Table metaTable = getMetaHTable(connection)) {
        try (ResultScanner scanner = metaTable.getScanner(scan)) {
            Result data;
            while ((data = scanner.next()) != null) {
                if (data.isEmpty())
                    continue;
                // Break if visit returns false.
                if (!visitor.visit(data))
                    break;
                if (++currentRow >= rowUpperLimit)
                    break;
            }
        }
    }
    // instanceof already rejects null, so no separate null check is needed.
    if (visitor instanceof Closeable) {
        try {
            ((Closeable) visitor).close();
        } catch (Throwable t) {
            ExceptionUtil.rethrowIfInterrupt(t);
            LOG.debug("Got exception in closing the meta scanner visitor", t);
        }
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Closeable(java.io.Closeable) Scan(org.apache.hadoop.hbase.client.Scan) Result(org.apache.hadoop.hbase.client.Result)
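
The same bounded-scan pattern works against any table, not just meta. A minimal sketch with explicit start/stop rows and a client-side row cap, mirroring the loop above; the table name "demo", family "cf", and row keys are all hypothetical:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class BoundedScanSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        int rowUpperLimit = 50; // like maxRows above
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("demo"))) { // hypothetical table
            Scan scan = new Scan();
            scan.addFamily(Bytes.toBytes("cf"));        // restrict to one family, like type.getFamilies()
            scan.setStartRow(Bytes.toBytes("row-010")); // inclusive start
            scan.setStopRow(Bytes.toBytes("row-020"));  // exclusive stop
            scan.setCaching(rowUpperLimit);             // fetch up to the cap per RPC
            int currentRow = 0;
            try (ResultScanner scanner = table.getScanner(scan)) {
                Result data;
                while ((data = scanner.next()) != null) {
                    if (data.isEmpty()) {
                        continue; // skip rows with no cells
                    }
                    System.out.println(Bytes.toStringBinary(data.getRow()));
                    if (++currentRow >= rowUpperLimit) {
                        break; // enforce the row cap client-side, as scanMeta does
                    }
                }
            }
        }
    }
}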

Example 4 with Result

Use of org.apache.hadoop.hbase.client.Result in project hbase by apache.

From the class MetaTableAccessor, the method getRegionsFromMergeQualifier:

/**
   * Gets the regions recorded in the merge qualifiers of the specified merged region.
   * @return null if the row doesn't contain a merge qualifier, else the two merged regions
   * @throws IOException if the meta lookup fails
   */
@Nullable
public static Pair<HRegionInfo, HRegionInfo> getRegionsFromMergeQualifier(Connection connection, byte[] regionName) throws IOException {
    Result result = getRegionResult(connection, regionName);
    HRegionInfo mergeA = getHRegionInfo(result, HConstants.MERGEA_QUALIFIER);
    HRegionInfo mergeB = getHRegionInfo(result, HConstants.MERGEB_QUALIFIER);
    if (mergeA == null && mergeB == null) {
        return null;
    }
    return new Pair<>(mergeA, mergeB);
}
Also used : Result(org.apache.hadoop.hbase.client.Result) Pair(org.apache.hadoop.hbase.util.Pair) Nullable(edu.umd.cs.findbugs.annotations.Nullable)
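
The method reads two well-known qualifiers out of a single Result row. A minimal sketch of that pattern using Result.getValue, with the family "info" and qualifiers "mergeA"/"mergeB" standing in as hypothetical placeholders:

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class MergeQualifierSketch {
    // Reads a pair of related qualifiers from one Result; null when neither is present.
    static byte[][] readMergePair(Result result) {
        byte[] a = result.getValue(Bytes.toBytes("info"), Bytes.toBytes("mergeA")); // hypothetical names
        byte[] b = result.getValue(Bytes.toBytes("info"), Bytes.toBytes("mergeB"));
        if (a == null && b == null) {
            return null; // neither qualifier recorded on this row
        }
        return new byte[][] { a, b };
    }
}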

Example 5 with Result

Use of org.apache.hadoop.hbase.client.Result in project hbase by apache.

From the class MetaTableAccessor, the method getReplicationPositionForAllPeer:

/**
   * Get replication positions for all peers in a region.
   * @param connection connection we're using
   * @param encodedRegionName region's encoded name
   * @return the map of positions for each peer
   */
public static Map<String, Long> getReplicationPositionForAllPeer(Connection connection, byte[] encodedRegionName) throws IOException {
    Get get = new Get(encodedRegionName);
    get.addFamily(HConstants.REPLICATION_POSITION_FAMILY);
    Result r = get(getMetaHTable(connection), get);
    Map<String, Long> map = new HashMap<>((int) (r.size() / 0.75 + 1));
    for (Cell c : r.listCells()) {
        map.put(Bytes.toString(c.getQualifierArray(), c.getQualifierOffset(), c.getQualifierLength()), Bytes.toLong(c.getValueArray(), c.getValueOffset(), c.getValueLength()));
    }
    return map;
}
Also used : HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) Get(org.apache.hadoop.hbase.client.Get) Result(org.apache.hadoop.hbase.client.Result)
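
The loop above decodes each cell's qualifier and value straight from the backing arrays via the offset/length accessors. A minimal equivalent using CellUtil, which copies the bytes out first and reads more simply at the cost of the extra copies (a hypothetical standalone helper):

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class ReplicationPositionSketch {
    // Builds a qualifier -> long-value map from one Result, as the method above does.
    static Map<String, Long> toPositionMap(Result r) {
        Map<String, Long> map = new HashMap<>();
        for (Cell c : r.listCells()) {
            map.put(Bytes.toString(CellUtil.cloneQualifier(c)),
                    Bytes.toLong(CellUtil.cloneValue(c)));
        }
        return map;
    }
}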

Aggregations

Result (org.apache.hadoop.hbase.client.Result): 715 usages
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 286 usages
Test (org.junit.Test): 280 usages
Scan (org.apache.hadoop.hbase.client.Scan): 279 usages
Get (org.apache.hadoop.hbase.client.Get): 269 usages
Table (org.apache.hadoop.hbase.client.Table): 224 usages
Put (org.apache.hadoop.hbase.client.Put): 183 usages
Cell (org.apache.hadoop.hbase.Cell): 177 usages
IOException (java.io.IOException): 164 usages
ArrayList (java.util.ArrayList): 143 usages
TableName (org.apache.hadoop.hbase.TableName): 124 usages
Connection (org.apache.hadoop.hbase.client.Connection): 101 usages
Delete (org.apache.hadoop.hbase.client.Delete): 101 usages
Configuration (org.apache.hadoop.conf.Configuration): 70 usages
KeyValue (org.apache.hadoop.hbase.KeyValue): 70 usages
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 62 usages
InterruptedIOException (java.io.InterruptedIOException): 58 usages
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 50 usages
CellScanner (org.apache.hadoop.hbase.CellScanner): 47 usages
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 45 usages