
Example 1 with ParseFilter

Use of org.apache.hadoop.hbase.filter.ParseFilter in project hbase by apache, from the class ThriftUtilities, method getFromThrift.

/**
 * Creates a {@link Get} (HBase) from a {@link TGet} (Thrift).
 *
 * This ignores any timestamps set on {@link TColumn} objects.
 *
 * @param in the <code>TGet</code> to convert
 *
 * @return <code>Get</code> object
 *
 * @throws IOException if an invalid time range or max version parameter is given
 */
public static Get getFromThrift(TGet in) throws IOException {
    Get out = new Get(in.getRow());
    // Timestamp overwrites time range if both are set
    if (in.isSetTimestamp()) {
        out.setTimestamp(in.getTimestamp());
    } else if (in.isSetTimeRange()) {
        out.setTimeRange(in.getTimeRange().getMinStamp(), in.getTimeRange().getMaxStamp());
    }
    if (in.isSetMaxVersions()) {
        out.readVersions(in.getMaxVersions());
    }
    if (in.isSetFilterString()) {
        ParseFilter parseFilter = new ParseFilter();
        out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
    }
    if (in.isSetAttributes()) {
        addAttributes(out, in.getAttributes());
    }
    if (in.isSetAuthorizations()) {
        out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
    }
    if (in.isSetConsistency()) {
        out.setConsistency(consistencyFromThrift(in.getConsistency()));
    }
    if (in.isSetTargetReplicaId()) {
        out.setReplicaId(in.getTargetReplicaId());
    }
    if (in.isSetCacheBlocks()) {
        out.setCacheBlocks(in.isCacheBlocks());
    }
    if (in.isSetStoreLimit()) {
        out.setMaxResultsPerColumnFamily(in.getStoreLimit());
    }
    if (in.isSetStoreOffset()) {
        out.setRowOffsetPerColumnFamily(in.getStoreOffset());
    }
    if (in.isSetExistence_only()) {
        out.setCheckExistenceOnly(in.isExistence_only());
    }
    if (in.isSetColumns()) {
        for (TColumn column : in.getColumns()) {
            if (column.isSetQualifier()) {
                out.addColumn(column.getFamily(), column.getQualifier());
            } else {
                out.addFamily(column.getFamily());
            }
        }
    }
    if (in.isSetFilterBytes()) {
        // A serialized filter takes precedence: setFilter here replaces any
        // filter parsed from filterString above.
        out.setFilter(filterFromThrift(in.getFilterBytes()));
    }
    return out;
}
Also used : Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) ParseFilter(org.apache.hadoop.hbase.filter.ParseFilter) TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) Get(org.apache.hadoop.hbase.client.Get) TGet(org.apache.hadoop.hbase.thrift2.generated.TGet)
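
The common core of these examples is ParseFilter.parseFilterString, which turns a filter expression written in HBase's textual filter language into a Filter instance. Below is a minimal, self-contained sketch of that pattern applied to a Get outside of Thrift; the class name, row key, and filter expression are made up for illustration.

import java.nio.charset.CharacterCodingException;

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ParseFilterGetSketch {

    public static Get buildGet() throws CharacterCodingException {
        Get get = new Get(Bytes.toBytes("row1"));
        // parseFilterString builds the Filter tree described by the expression.
        Filter filter = new ParseFilter().parseFilterString(
                "ValueFilter(=, 'binary:some-value') AND FirstKeyOnlyFilter()");
        get.setFilter(filter);
        return get;
    }
}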

Example 2 with ParseFilter

Use of org.apache.hadoop.hbase.filter.ParseFilter in project hbase by apache, from the class ThriftHBaseServiceHandler, method scannerOpenWithScan.

@Override
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
    Table table = null;
    try {
        table = getTable(tableName);
        Scan scan = new Scan();
        addAttributes(scan, attributes);
        if (tScan.isSetStartRow()) {
            scan.withStartRow(tScan.getStartRow());
        }
        if (tScan.isSetStopRow()) {
            scan.withStopRow(tScan.getStopRow());
        }
        if (tScan.isSetTimestamp()) {
            scan.setTimeRange(0, tScan.getTimestamp());
        }
        if (tScan.isSetCaching()) {
            scan.setCaching(tScan.getCaching());
        }
        if (tScan.isSetBatchSize()) {
            scan.setBatch(tScan.getBatchSize());
        }
        if (tScan.isSetColumns() && !tScan.getColumns().isEmpty()) {
            for (ByteBuffer column : tScan.getColumns()) {
                byte[][] famQf = CellUtil.parseColumn(getBytes(column));
                if (famQf.length == 1) {
                    scan.addFamily(famQf[0]);
                } else {
                    scan.addColumn(famQf[0], famQf[1]);
                }
            }
        }
        if (tScan.isSetFilterString()) {
            ParseFilter parseFilter = new ParseFilter();
            scan.setFilter(parseFilter.parseFilterString(tScan.getFilterString()));
        }
        if (tScan.isSetReversed()) {
            scan.setReversed(tScan.isReversed());
        }
        if (tScan.isSetCacheBlocks()) {
            scan.setCacheBlocks(tScan.isCacheBlocks());
        }
        return addScanner(table.getScanner(scan), tScan.sortColumns);
    } catch (IOException e) {
        LOG.warn(e.getMessage(), e);
        throw getIOError(e);
    } finally {
        closeTable(table);
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) ParseFilter(org.apache.hadoop.hbase.filter.ParseFilter) TScan(org.apache.hadoop.hbase.thrift.generated.TScan) Scan(org.apache.hadoop.hbase.client.Scan) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) IOException(java.io.IOException) ByteBuffer(java.nio.ByteBuffer)
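
For scans the shape is identical; only the target object changes. A sketch under the same assumptions (hypothetical class name, row keys, and filter expression), using the non-deprecated withStartRow/withStopRow also seen above:

import java.nio.charset.CharacterCodingException;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ParseFilterScanSketch {

    public static Scan buildScan() throws CharacterCodingException {
        Scan scan = new Scan()
                .withStartRow(Bytes.toBytes("row-000"))
                .withStopRow(Bytes.toBytes("row-999"));
        // AND combines the two filters inside the expression itself,
        // so no FilterList is needed on the client side.
        scan.setFilter(new ParseFilter().parseFilterString(
                "PrefixFilter('row-') AND PageFilter(25)"));
        return scan;
    }
}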

Example 3 with ParseFilter

Use of org.apache.hadoop.hbase.filter.ParseFilter in project hbase by apache, from the class TableResource, method getScanResource.

@Path("{scanspec: .*[*]$}")
public TableScanResource getScanResource(
        @Context final UriInfo uriInfo,
        @PathParam("scanspec") final String scanSpec,
        @HeaderParam("Accept") final String contentType,
        @DefaultValue(Integer.MAX_VALUE + "") @QueryParam(Constants.SCAN_LIMIT) int userRequestedLimit,
        @DefaultValue("") @QueryParam(Constants.SCAN_START_ROW) String startRow,
        @DefaultValue("") @QueryParam(Constants.SCAN_END_ROW) String endRow,
        @DefaultValue("") @QueryParam(Constants.SCAN_COLUMN) List<String> column,
        @DefaultValue("1") @QueryParam(Constants.SCAN_MAX_VERSIONS) int maxVersions,
        @DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize,
        @DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime,
        @DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime,
        @DefaultValue("true") @QueryParam(Constants.SCAN_CACHE_BLOCKS) boolean cacheBlocks,
        @DefaultValue("false") @QueryParam(Constants.SCAN_REVERSED) boolean reversed,
        @DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String filters) {
    try {
        Filter filter = null;
        Scan tableScan = new Scan();
        if (scanSpec.indexOf('*') > 0) {
            String prefix = scanSpec.substring(0, scanSpec.indexOf('*'));
            byte[] prefixBytes = Bytes.toBytes(prefix);
            filter = new PrefixFilter(prefixBytes);
            if (startRow.isEmpty()) {
                tableScan.setStartRow(prefixBytes);
            }
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("Query parameters  : Table Name = > " + this.table + " Start Row => " + startRow + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => " + maxVersions + " Batch Size => " + batchSize);
        }
        Table hTable = RESTServlet.getInstance().getTable(this.table);
        tableScan.setBatch(batchSize);
        tableScan.setMaxVersions(maxVersions);
        tableScan.setTimeRange(startTime, endTime);
        if (!startRow.isEmpty()) {
            tableScan.setStartRow(Bytes.toBytes(startRow));
        }
        tableScan.setStopRow(Bytes.toBytes(endRow));
        for (String csplit : column) {
            String[] familysplit = csplit.trim().split(":");
            if (familysplit.length == 2) {
                if (familysplit[1].length() > 0) {
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("Scan family and column : " + familysplit[0] + "  " + familysplit[1]);
                    }
                    tableScan.addColumn(Bytes.toBytes(familysplit[0]), Bytes.toBytes(familysplit[1]));
                } else {
                    tableScan.addFamily(Bytes.toBytes(familysplit[0]));
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("Scan family : " + familysplit[0] + " and empty qualifier.");
                    }
                    tableScan.addColumn(Bytes.toBytes(familysplit[0]), null);
                }
            } else if (StringUtils.isNotEmpty(familysplit[0])) {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Scan family : " + familysplit[0]);
                }
                tableScan.addFamily(Bytes.toBytes(familysplit[0]));
            }
        }
        FilterList filterList = null;
        if (StringUtils.isNotEmpty(filters)) {
            ParseFilter pf = new ParseFilter();
            Filter filterParam = pf.parseFilterString(filters);
            if (filter != null) {
                filterList = new FilterList(filter, filterParam);
            } else {
                filter = filterParam;
            }
        }
        if (filterList != null) {
            tableScan.setFilter(filterList);
        } else if (filter != null) {
            tableScan.setFilter(filter);
        }
        int fetchSize = this.servlet.getConfiguration().getInt(Constants.SCAN_FETCH_SIZE, 10);
        tableScan.setCaching(fetchSize);
        tableScan.setReversed(reversed);
        return new TableScanResource(hTable.getScanner(tableScan), userRequestedLimit);
    } catch (IOException exp) {
        servlet.getMetrics().incrementFailedScanRequests(1);
        processException(exp);
        LOG.warn(exp);
        return null;
    }
}
Also used : Table (org.apache.hadoop.hbase.client.Table) PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter) ParseFilter (org.apache.hadoop.hbase.filter.ParseFilter) Filter (org.apache.hadoop.hbase.filter.Filter) Scan (org.apache.hadoop.hbase.client.Scan) FilterList (org.apache.hadoop.hbase.filter.FilterList) IOException (java.io.IOException) Path (javax.ws.rs.Path)
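
The detail worth noting in this example is the combination step: a PrefixFilter derived from the scanspec in the URL is joined with whatever the filter query parameter parses to, and FilterList with no explicit operator defaults to Operator.MUST_PASS_ALL, so the two are effectively ANDed. A short sketch of just that step, with made-up inputs:

import java.nio.charset.CharacterCodingException;

import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class CombineFiltersSketch {

    public static FilterList combine() throws CharacterCodingException {
        // Stands in for the prefix derived from a scanspec such as "user123*".
        Filter prefix = new PrefixFilter(Bytes.toBytes("user123"));
        // Stands in for the expression arriving via the filter query parameter.
        Filter fromQuery = new ParseFilter().parseFilterString("KeyOnlyFilter()");
        // No operator given, so MUST_PASS_ALL (logical AND) applies.
        return new FilterList(prefix, fromQuery);
    }
}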

Example 4 with ParseFilter

Use of org.apache.hadoop.hbase.filter.ParseFilter in project nifi by apache, from the class HBase_1_1_2_ClientService, method getResults.

// 
protected ResultScanner getResults(final Table table, final String startRow, final String endRow, final String filterExpression, final Long timerangeMin, final Long timerangeMax, final Integer limitRows, final Boolean isReversed, final Collection<Column> columns) throws IOException {
    final Scan scan = new Scan();
    if (!StringUtils.isBlank(startRow)) {
        scan.setStartRow(startRow.getBytes(StandardCharsets.UTF_8));
    }
    if (!StringUtils.isBlank(endRow)) {
        scan.setStopRow(endRow.getBytes(StandardCharsets.UTF_8));
    }
    Filter filter = null;
    if (columns != null) {
        for (Column col : columns) {
            if (col.getQualifier() == null) {
                scan.addFamily(col.getFamily());
            } else {
                scan.addColumn(col.getFamily(), col.getQualifier());
            }
        }
    }
    if (!StringUtils.isBlank(filterExpression)) {
        ParseFilter parseFilter = new ParseFilter();
        filter = parseFilter.parseFilterString(filterExpression);
    }
    if (filter != null) {
        scan.setFilter(filter);
    }
    if (timerangeMin != null && timerangeMax != null) {
        scan.setTimeRange(timerangeMin, timerangeMax);
    }
    if (isReversed != null) {
        scan.setReversed(isReversed);
    }
    return table.getScanner(scan);
}
Also used : ParseFilter (org.apache.hadoop.hbase.filter.ParseFilter) Filter (org.apache.hadoop.hbase.filter.Filter) PutColumn (org.apache.nifi.hbase.put.PutColumn) Column (org.apache.nifi.hbase.scan.Column) Scan (org.apache.hadoop.hbase.client.Scan)

Example 5 with ParseFilter

Use of org.apache.hadoop.hbase.filter.ParseFilter in project nifi by apache, from the class HBase_1_1_2_ClientService, method scan.

@Override
public void scan(final String tableName, final Collection<Column> columns, final String filterExpression, final long minTime, final ResultHandler handler) throws IOException {
    Filter filter = null;
    if (!StringUtils.isBlank(filterExpression)) {
        ParseFilter parseFilter = new ParseFilter();
        filter = parseFilter.parseFilterString(filterExpression);
    }
    try (final Table table = connection.getTable(TableName.valueOf(tableName));
        final ResultScanner scanner = getResults(table, columns, filter, minTime)) {
        for (final Result result : scanner) {
            final byte[] rowKey = result.getRow();
            final Cell[] cells = result.rawCells();
            if (cells == null) {
                continue;
            }
            // convert HBase cells to NiFi cells
            final ResultCell[] resultCells = new ResultCell[cells.length];
            for (int i = 0; i < cells.length; i++) {
                final Cell cell = cells[i];
                final ResultCell resultCell = getResultCell(cell);
                resultCells[i] = resultCell;
            }
            // delegate to the handler
            handler.handle(rowKey, resultCells);
        }
    }
}
Also used : Table (org.apache.hadoop.hbase.client.Table) ResultScanner (org.apache.hadoop.hbase.client.ResultScanner) ParseFilter (org.apache.hadoop.hbase.filter.ParseFilter) Filter (org.apache.hadoop.hbase.filter.Filter) ResultCell (org.apache.nifi.hbase.scan.ResultCell) Cell (org.apache.hadoop.hbase.Cell) Result (org.apache.hadoop.hbase.client.Result) ValidationResult (org.apache.nifi.components.ValidationResult)
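
The handler passed into scan receives each row in turn. Assuming org.apache.nifi.hbase.scan.ResultHandler exposes only the handle(byte[] rowKey, ResultCell[] cells) callback used above, a caller can supply one as a lambda; the class name and the printing logic below are purely illustrative.

import java.nio.charset.StandardCharsets;

import org.apache.nifi.hbase.scan.ResultHandler;

public class LoggingHandlerSketch {

    public static ResultHandler loggingHandler() {
        // Invoked once per row the scanner returns.
        return (rowKey, cells) -> System.out.println(
                "row " + new String(rowKey, StandardCharsets.UTF_8)
                        + " returned " + cells.length + " cells");
    }
}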

Aggregations

ParseFilter (org.apache.hadoop.hbase.filter.ParseFilter) 7
Scan (org.apache.hadoop.hbase.client.Scan) 5
Table (org.apache.hadoop.hbase.client.Table) 4
Filter (org.apache.hadoop.hbase.filter.Filter) 4
IOException (java.io.IOException) 3
ByteBuffer (java.nio.ByteBuffer) 2
FilterList (org.apache.hadoop.hbase.filter.FilterList) 2
PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter) 2
Authorizations (org.apache.hadoop.hbase.security.visibility.Authorizations) 2
TColumn (org.apache.hadoop.hbase.thrift2.generated.TColumn) 2
Map (java.util.Map) 1
Path (javax.ws.rs.Path) 1
Cell (org.apache.hadoop.hbase.Cell) 1
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException) 1
Get (org.apache.hadoop.hbase.client.Get) 1
Result (org.apache.hadoop.hbase.client.Result) 1
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner) 1
TScan (org.apache.hadoop.hbase.thrift.generated.TScan) 1
TGet (org.apache.hadoop.hbase.thrift2.generated.TGet) 1
TScan (org.apache.hadoop.hbase.thrift2.generated.TScan) 1