Search in sources:

Example 1 with Table

Use of org.apache.hadoop.hbase.client.Table in project camel by apache.

From the class HBaseConsumer, method poll().

@Override
protected int poll() throws Exception {
    try (Table table = endpoint.getTable()) {
        shutdownRunningTask = null;
        pendingExchanges = 0;
        Queue<Exchange> queue = new LinkedList<>();
        Scan scan = new Scan();
        List<Filter> filters = new LinkedList<>();
        if (endpoint.getFilters() != null) {
            filters.addAll(endpoint.getFilters());
        }
        if (maxMessagesPerPoll > 0) {
            filters.add(new PageFilter(maxMessagesPerPoll));
        }
        if (!filters.isEmpty()) {
            Filter compoundFilter = new FilterList(filters);
            scan.setFilter(compoundFilter);
        }
        if (rowModel != null && rowModel.getCells() != null) {
            Set<HBaseCell> cellModels = rowModel.getCells();
            for (HBaseCell cellModel : cellModels) {
                scan.addColumn(HBaseHelper.getHBaseFieldAsBytes(cellModel.getFamily()), HBaseHelper.getHBaseFieldAsBytes(cellModel.getQualifier()));
            }
        }
        ResultScanner scanner = table.getScanner(scan);
        int exchangeCount = 0;
        // The next three statements are used just to get a reference to the BodyCellMappingStrategy instance.
        Exchange exchange = endpoint.createExchange();
        exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
        CellMappingStrategy mappingStrategy = endpoint.getCellMappingStrategyFactory().getStrategy(exchange.getIn());
        for (Result result = scanner.next(); (exchangeCount < maxMessagesPerPoll || maxMessagesPerPoll <= 0) && result != null; result = scanner.next()) {
            HBaseData data = new HBaseData();
            HBaseRow resultRow = new HBaseRow();
            resultRow.apply(rowModel);
            byte[] row = result.getRow();
            resultRow.setId(endpoint.getCamelContext().getTypeConverter().convertTo(rowModel.getRowType(), row));
            List<Cell> cells = result.listCells();
            if (cells != null) {
                Set<HBaseCell> cellModels = rowModel.getCells();
                if (cellModels.size() > 0) {
                    for (HBaseCell modelCell : cellModels) {
                        HBaseCell resultCell = new HBaseCell();
                        String family = modelCell.getFamily();
                        String column = modelCell.getQualifier();
                        resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(modelCell.getValueType(), result.getValue(HBaseHelper.getHBaseFieldAsBytes(family), HBaseHelper.getHBaseFieldAsBytes(column))));
                        resultCell.setFamily(modelCell.getFamily());
                        resultCell.setQualifier(modelCell.getQualifier());
                        resultRow.getCells().add(resultCell);
                    }
                } else {
                    // just need to put every key value into the result Cells
                    for (Cell cell : cells) {
                        String qualifier = new String(CellUtil.cloneQualifier(cell));
                        String family = new String(CellUtil.cloneFamily(cell));
                        HBaseCell resultCell = new HBaseCell();
                        resultCell.setFamily(family);
                        resultCell.setQualifier(qualifier);
                        resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(String.class, CellUtil.cloneValue(cell)));
                        resultRow.getCells().add(resultCell);
                    }
                }
                data.getRows().add(resultRow);
                exchange = endpoint.createExchange();
                // Probably overkill but kept it here for consistency.
                exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
                mappingStrategy.applyScanResults(exchange.getIn(), data);
                //Make sure that there is a header containing the marked row ids, so that they can be deleted.
                exchange.getIn().setHeader(HBaseAttribute.HBASE_MARKED_ROW_ID.asHeader(), result.getRow());
                queue.add(exchange);
                exchangeCount++;
            }
        }
        scanner.close();
        return queue.isEmpty() ? 0 : processBatch(CastUtils.cast(queue));
    }
}
Also used: Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) CellMappingStrategy(org.apache.camel.component.hbase.mapping.CellMappingStrategy) FilterList(org.apache.hadoop.hbase.filter.FilterList) LinkedList(java.util.LinkedList) HBaseCell(org.apache.camel.component.hbase.model.HBaseCell) Result(org.apache.hadoop.hbase.client.Result) Exchange(org.apache.camel.Exchange) PageFilter(org.apache.hadoop.hbase.filter.PageFilter) Filter(org.apache.hadoop.hbase.filter.Filter) HBaseData(org.apache.camel.component.hbase.model.HBaseData) HBaseRow(org.apache.camel.component.hbase.model.HBaseRow) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell)
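
For context, a consumer like the one above is created from an hbase endpoint inside a Camel route. The sketch below is illustrative only: the table name mytable, the log endpoint, and the option values are assumptions rather than details taken from the example, but maxMessagesPerPoll is the same setting poll() consults when it builds the PageFilter.

import org.apache.camel.builder.RouteBuilder;

public class HBaseConsumerRouteSketch extends RouteBuilder {
    @Override
    public void configure() {
        // Poll a hypothetical table "mytable"; the consumer caps each poll
        // at maxMessagesPerPoll rows via the PageFilter seen in poll().
        from("hbase://mytable?maxMessagesPerPoll=10")
            .to("log:hbase-rows");
    }
}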

Example 2 with Table

Use of org.apache.hadoop.hbase.client.Table in project camel by apache.

From the class HBaseProducer, method process().

public void process(Exchange exchange) throws Exception {
    try (Table table = endpoint.getTable()) {
        updateHeaders(exchange);
        String operation = (String) exchange.getIn().getHeader(HBaseConstants.OPERATION);
        Integer maxScanResult = exchange.getIn().getHeader(HBaseConstants.HBASE_MAX_SCAN_RESULTS, Integer.class);
        String fromRowId = (String) exchange.getIn().getHeader(HBaseConstants.FROM_ROW);
        String stopRowId = (String) exchange.getIn().getHeader(HBaseConstants.STOP_ROW);
        CellMappingStrategy mappingStrategy = endpoint.getCellMappingStrategyFactory().getStrategy(exchange.getIn());
        HBaseData data = mappingStrategy.resolveModel(exchange.getIn());
        List<Put> putOperations = new LinkedList<>();
        List<Delete> deleteOperations = new LinkedList<>();
        List<HBaseRow> getOperationResult = new LinkedList<>();
        List<HBaseRow> scanOperationResult = new LinkedList<>();
        for (HBaseRow hRow : data.getRows()) {
            hRow.apply(rowModel);
            if (HBaseConstants.PUT.equals(operation)) {
                putOperations.add(createPut(hRow));
            } else if (HBaseConstants.GET.equals(operation)) {
                HBaseRow getResultRow = getCells(table, hRow);
                getOperationResult.add(getResultRow);
            } else if (HBaseConstants.DELETE.equals(operation)) {
                deleteOperations.add(createDeleteRow(hRow));
            } else if (HBaseConstants.SCAN.equals(operation)) {
                scanOperationResult = scanCells(table, hRow, fromRowId, stopRowId, maxScanResult, endpoint.getFilters());
            }
        }
        //Check if we have something to add.
        if (!putOperations.isEmpty()) {
            table.put(putOperations);
        } else if (!deleteOperations.isEmpty()) {
            table.delete(deleteOperations);
        } else if (!getOperationResult.isEmpty()) {
            mappingStrategy.applyGetResults(exchange.getOut(), new HBaseData(getOperationResult));
        } else if (!scanOperationResult.isEmpty()) {
            mappingStrategy.applyScanResults(exchange.getOut(), new HBaseData(scanOperationResult));
        }
    }
}
Also used: Delete(org.apache.hadoop.hbase.client.Delete) Table(org.apache.hadoop.hbase.client.Table) CellMappingStrategy(org.apache.camel.component.hbase.mapping.CellMappingStrategy) HBaseData(org.apache.camel.component.hbase.model.HBaseData) HBaseRow(org.apache.camel.component.hbase.model.HBaseRow) LinkedList(java.util.LinkedList) Put(org.apache.hadoop.hbase.client.Put)
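
The producer dispatches on the operation header, so one route can serve PUT, GET, DELETE, and SCAN requests. As a minimal sketch of driving the SCAN branch above, the snippet below sends an empty body with the headers process() reads; the direct:start endpoint and the concrete row id are assumptions.

import java.util.HashMap;
import java.util.Map;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.component.hbase.HBaseConstants;

public final class HBaseScanRequestSketch {
    public static void requestScan(ProducerTemplate template) {
        Map<String, Object> headers = new HashMap<>();
        // Select the SCAN branch of process() and bound the result set.
        headers.put(HBaseConstants.OPERATION, HBaseConstants.SCAN);
        headers.put(HBaseConstants.HBASE_MAX_SCAN_RESULTS, 10);
        headers.put(HBaseConstants.FROM_ROW, "row-000"); // hypothetical start row
        template.sendBodyAndHeaders("direct:start", null, headers);
    }
}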

Example 3 with Table

Use of org.apache.hadoop.hbase.client.Table in project camel by apache.

From the class CamelHBaseTestSupport, method putMultipleRows().

protected void putMultipleRows() throws IOException {
    Configuration configuration = hbaseUtil.getHBaseAdmin().getConfiguration();
    // Close both the connection and the table even if a put fails.
    try (Connection connection = ConnectionFactory.createConnection(configuration);
         Table table = connection.getTable(TableName.valueOf(PERSON_TABLE.getBytes()))) {
        for (int r = 0; r < key.length; r++) {
            Put put = new Put(key[r].getBytes());
            put.addColumn(family[0].getBytes(), column[0][0].getBytes(), body[r][0][0].getBytes());
            table.put(put);
        }
    }
}
Also used: Table(org.apache.hadoop.hbase.client.Table) Configuration(org.apache.hadoop.conf.Configuration) Connection(org.apache.hadoop.hbase.client.Connection) Put(org.apache.hadoop.hbase.client.Put)
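
One put() call per row means one round trip per row, which is fine for a small fixture. The Table API also accepts a batch, as Example 2 does with table.put(putOperations); here is a sketch of the same loop in batched form, reusing the fixture fields assumed by the test class above:

// Inside the same test-support class; java.util.ArrayList and java.util.List imports assumed.
List<Put> puts = new ArrayList<>();
for (int r = 0; r < key.length; r++) {
    Put put = new Put(key[r].getBytes());
    put.addColumn(family[0].getBytes(), column[0][0].getBytes(), body[r][0][0].getBytes());
    puts.add(put);
}
// One batched call instead of key.length round trips.
table.put(puts);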

Example 4 with Table

Use of org.apache.hadoop.hbase.client.Table in project camel by apache.

From the class HBaseConvertionsTest, method testPutMultiRows().

@Test
public void testPutMultiRows() throws Exception {
    if (systemReady) {
        ProducerTemplate template = context.createProducerTemplate();
        Map<String, Object> headers = new HashMap<String, Object>();
        headers.put(HBaseAttribute.HBASE_ROW_ID.asHeader(), key[0]);
        headers.put(HBaseAttribute.HBASE_FAMILY.asHeader(), INFO_FAMILY);
        headers.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(), column[0]);
        headers.put(HBaseAttribute.HBASE_VALUE.asHeader(), body[0]);
        headers.put(HBaseAttribute.HBASE_ROW_ID.asHeader(2), key[1]);
        headers.put(HBaseAttribute.HBASE_FAMILY.asHeader(2), INFO_FAMILY);
        headers.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(2), column[0]);
        headers.put(HBaseAttribute.HBASE_VALUE.asHeader(2), body[1]);
        headers.put(HBaseAttribute.HBASE_ROW_ID.asHeader(3), key[2]);
        headers.put(HBaseAttribute.HBASE_FAMILY.asHeader(3), INFO_FAMILY);
        headers.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(3), column[0]);
        headers.put(HBaseAttribute.HBASE_VALUE.asHeader(3), body[2]);
        headers.put(HBaseConstants.OPERATION, HBaseConstants.PUT);
        template.sendBodyAndHeaders("direct:start", null, headers);
        Configuration configuration = hbaseUtil.getHBaseAdmin().getConfiguration();
        Connection conn = ConnectionFactory.createConnection(configuration);
        Table bar = conn.getTable(TableName.valueOf(PERSON_TABLE));
        Get get = new Get(Bytes.toBytes((Integer) key[0]));
        //Check row 1
        get.addColumn(INFO_FAMILY.getBytes(), column[0].getBytes());
        Result result = bar.get(get);
        byte[] resultValue = result.value();
        assertArrayEquals(Bytes.toBytes((Long) body[0]), resultValue);
        //Check row 2
        get = new Get(Bytes.toBytes((String) key[1]));
        get.addColumn(INFO_FAMILY.getBytes(), column[0].getBytes());
        result = bar.get(get);
        resultValue = result.value();
        assertArrayEquals(Bytes.toBytes((Boolean) body[1]), resultValue);
        //Check row 3
        get = new Get(Bytes.toBytes((String) key[2]));
        get.addColumn(INFO_FAMILY.getBytes(), column[0].getBytes());
        result = bar.get(get);
        resultValue = result.value();
        assertArrayEquals(Bytes.toBytes((String) body[2]), resultValue);
        // Close the table and the underlying connection.
        IOHelper.close(bar);
        IOHelper.close(conn);
    }
}
Also used: ProducerTemplate(org.apache.camel.ProducerTemplate) Table(org.apache.hadoop.hbase.client.Table) Configuration(org.apache.hadoop.conf.Configuration) HashMap(java.util.HashMap) Connection(org.apache.hadoop.hbase.client.Connection) Result(org.apache.hadoop.hbase.client.Result) Get(org.apache.hadoop.hbase.client.Get) Test(org.junit.Test)
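
The numbered asHeader(n) form is how several rows ride on one exchange: the first row uses the bare asHeader() name, while asHeader(2) and asHeader(3) address the second and third rows. To read a row back through the same route instead of a raw HBase Get, a GET request could look like the sketch below; where the fetched value lands in the reply depends on the configured mapping strategy, so the reply header shown is an assumption.

import java.util.HashMap;
import java.util.Map;
import org.apache.camel.Exchange;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.component.hbase.HBaseAttribute;
import org.apache.camel.component.hbase.HBaseConstants;

// Inside the same test class, reusing its key/column fixtures.
Map<String, Object> getHeaders = new HashMap<>();
getHeaders.put(HBaseConstants.OPERATION, HBaseConstants.GET);
getHeaders.put(HBaseAttribute.HBASE_ROW_ID.asHeader(), key[0]);
getHeaders.put(HBaseAttribute.HBASE_FAMILY.asHeader(), INFO_FAMILY);
getHeaders.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(), column[0]);
Exchange reply = template.request("direct:start", e -> e.getIn().setHeaders(getHeaders));
// Assumption: with the header mapping strategy the value comes back as a header.
Object value = reply.getOut().getHeader(HBaseAttribute.HBASE_VALUE.asHeader());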

Example 5 with Table

Use of org.apache.hadoop.hbase.client.Table in project hbase by apache.

From the class MetaTableAccessor, method scanMeta().

/**
   * Performs a scan of the META table.
   * @param connection connection we're using
   * @param startRow Where to start the scan. Pass null to begin at the first row.
   * @param stopRow Where to stop the scan. Pass null to scan all rows from the start row.
   * @param type which part of meta to scan
   * @param maxRows maximum number of rows to return
   * @param visitor Visitor invoked against each row.
   * @throws IOException
   */
public static void scanMeta(Connection connection, @Nullable final byte[] startRow, @Nullable final byte[] stopRow, QueryType type, int maxRows, final Visitor visitor) throws IOException {
    int rowUpperLimit = maxRows > 0 ? maxRows : Integer.MAX_VALUE;
    Scan scan = getMetaScan(connection, rowUpperLimit);
    for (byte[] family : type.getFamilies()) {
        scan.addFamily(family);
    }
    if (startRow != null)
        scan.setStartRow(startRow);
    if (stopRow != null)
        scan.setStopRow(stopRow);
    if (LOG.isTraceEnabled()) {
        LOG.trace("Scanning META" + " starting at row=" + Bytes.toStringBinary(startRow) + " stopping at row=" + Bytes.toStringBinary(stopRow) + " for max=" + rowUpperLimit + " with caching=" + scan.getCaching());
    }
    int currentRow = 0;
    try (Table metaTable = getMetaHTable(connection)) {
        try (ResultScanner scanner = metaTable.getScanner(scan)) {
            Result data;
            while ((data = scanner.next()) != null) {
                if (data.isEmpty())
                    continue;
                // Break if visit returns false.
                if (!visitor.visit(data))
                    break;
                if (++currentRow >= rowUpperLimit)
                    break;
            }
        }
    }
    // instanceof is null-safe, so no separate null check is needed.
    if (visitor instanceof Closeable) {
        try {
            ((Closeable) visitor).close();
        } catch (Throwable t) {
            ExceptionUtil.rethrowIfInterrupt(t);
            LOG.debug("Got exception in closing the meta scanner visitor", t);
        }
    }
}
Also used: Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Closeable(java.io.Closeable) Scan(org.apache.hadoop.hbase.client.Scan) Result(org.apache.hadoop.hbase.client.Result)
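
A portability note: the HBase 2.x client deprecates Scan.setStartRow and Scan.setStopRow in favor of the chainable withStartRow and withStopRow, which keep the same default bounds (start inclusive, stop exclusive). The bounding above becomes a two-line change:

// HBase 2.x style; behavior matches the setStartRow/setStopRow defaults.
if (startRow != null) scan.withStartRow(startRow);
if (stopRow != null) scan.withStopRow(stopRow);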

Aggregations

Table (org.apache.hadoop.hbase.client.Table): 660
Test (org.junit.Test): 421
Put (org.apache.hadoop.hbase.client.Put): 237
TableName (org.apache.hadoop.hbase.TableName): 227
Result (org.apache.hadoop.hbase.client.Result): 224
Connection (org.apache.hadoop.hbase.client.Connection): 191
Scan (org.apache.hadoop.hbase.client.Scan): 174
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 160
IOException (java.io.IOException): 157
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 134
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 119
Get (org.apache.hadoop.hbase.client.Get): 107
Delete (org.apache.hadoop.hbase.client.Delete): 99
Admin (org.apache.hadoop.hbase.client.Admin): 95
ArrayList (java.util.ArrayList): 85
Cell (org.apache.hadoop.hbase.Cell): 83
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 73
Configuration (org.apache.hadoop.conf.Configuration): 71
Path (org.apache.hadoop.fs.Path): 60
RegionLocator (org.apache.hadoop.hbase.client.RegionLocator): 59