Search in sources :

Example 6 with ParseFilter

Use of org.apache.hadoop.hbase.filter.ParseFilter in project hbase by apache.

The method getScanResource in class TableResource:

/**
 * REST endpoint for row-prefix scans: matches any scanspec ending in {@code *}.
 * The text before the {@code *} is treated as a row-key prefix; query parameters
 * refine the scan (limit, row range, columns, versions, time range, filter, etc.).
 *
 * @param scanSpec           path segment; everything before the trailing {@code *} is the row prefix
 * @param userRequestedLimit max rows the caller wants (default: unlimited)
 * @param startRow           explicit start row; overrides the prefix-derived start row when non-empty
 * @param endRow             explicit stop row; empty means scan to end of table
 * @param column             families or family:qualifier column specifiers
 * @param maxVersions        versions per cell to return (default 1)
 * @param batchSize          cells per batch, -1 for no batching
 * @param startTime          inclusive lower bound of the cell time range
 * @param endTime            exclusive upper bound of the cell time range
 * @param cacheBlocks        whether the server should cache blocks for this scan
 * @param reversed           whether to scan in reverse row order
 * @param paramFilter        optional filter expression parsed by {@link ParseFilter}
 * @return a streaming scan resource, or {@code null} after recording a failed request
 */
@Path("{scanspec: .*[*]$}")
public TableScanResource getScanResource(@PathParam("scanspec") final String scanSpec, @DefaultValue(Integer.MAX_VALUE + "") @QueryParam(Constants.SCAN_LIMIT) int userRequestedLimit, @DefaultValue("") @QueryParam(Constants.SCAN_START_ROW) String startRow, @DefaultValue("") @QueryParam(Constants.SCAN_END_ROW) String endRow, @QueryParam(Constants.SCAN_COLUMN) List<String> column, @DefaultValue("1") @QueryParam(Constants.SCAN_MAX_VERSIONS) int maxVersions, @DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize, @DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime, @DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime, @DefaultValue("true") @QueryParam(Constants.SCAN_CACHE_BLOCKS) boolean cacheBlocks, @DefaultValue("false") @QueryParam(Constants.SCAN_REVERSED) boolean reversed, @DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String paramFilter) {
    try {
        Filter prefixFilter = null;
        Scan tableScan = new Scan();
        if (scanSpec.indexOf('*') > 0) {
            // Everything before the '*' is the row-key prefix.
            String prefix = scanSpec.substring(0, scanSpec.indexOf('*'));
            byte[] prefixBytes = Bytes.toBytes(prefix);
            // Reuse the already-encoded bytes rather than re-encoding the prefix.
            prefixFilter = new PrefixFilter(prefixBytes);
            if (startRow.isEmpty()) {
                // Jump straight to the first row that can match the prefix.
                tableScan.withStartRow(prefixBytes);
            }
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("Query parameters  : Table Name = > " + this.table + " Start Row => " + startRow + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => " + maxVersions + " Batch Size => " + batchSize);
        }
        Table hTable = RESTServlet.getInstance().getTable(this.table);
        tableScan.setBatch(batchSize);
        tableScan.readVersions(maxVersions);
        tableScan.setTimeRange(startTime, endTime);
        if (!startRow.isEmpty()) {
            // An explicit start row overrides the prefix-derived start row.
            tableScan.withStartRow(Bytes.toBytes(startRow));
        }
        // An empty endRow encodes to an empty byte array, i.e. "no stop row".
        tableScan.withStopRow(Bytes.toBytes(endRow));
        for (String col : column) {
            byte[][] parts = CellUtil.parseColumn(Bytes.toBytes(col.trim()));
            if (parts.length == 1) {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Scan family : " + Bytes.toStringBinary(parts[0]));
                }
                tableScan.addFamily(parts[0]);
            } else if (parts.length == 2) {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Scan family and column : " + Bytes.toStringBinary(parts[0]) + "  " + Bytes.toStringBinary(parts[1]));
                }
                tableScan.addColumn(parts[0], parts[1]);
            } else {
                throw new IllegalArgumentException("Invalid column specifier.");
            }
        }
        FilterList filterList = new FilterList();
        if (StringUtils.isNotEmpty(paramFilter)) {
            ParseFilter pf = new ParseFilter();
            Filter parsedParamFilter = pf.parseFilterString(paramFilter);
            if (parsedParamFilter != null) {
                filterList.addFilter(parsedParamFilter);
            }
        }
        // BUG FIX: the prefix filter must apply whether or not a filter parameter
        // was supplied. Previously it was only added inside the paramFilter branch,
        // so a prefix scan with no explicit filter returned every row from the
        // prefix-derived start row to the end of the table.
        if (prefixFilter != null) {
            filterList.addFilter(prefixFilter);
        }
        if (filterList.size() > 0) {
            tableScan.setFilter(filterList);
        }
        int fetchSize = this.servlet.getConfiguration().getInt(Constants.SCAN_FETCH_SIZE, 10);
        tableScan.setCaching(fetchSize);
        tableScan.setReversed(reversed);
        tableScan.setCacheBlocks(cacheBlocks);
        // Ownership of the scanner passes to TableScanResource, which is
        // responsible for closing it when the response is complete.
        return new TableScanResource(hTable.getScanner(tableScan), userRequestedLimit);
    } catch (IOException exp) {
        servlet.getMetrics().incrementFailedScanRequests(1);
        processException(exp);
        LOG.warn(exp.toString(), exp);
        // null signals the JAX-RS layer that the request failed; processException
        // has already translated the error for the client.
        return null;
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) ParseFilter(org.apache.hadoop.hbase.filter.ParseFilter) FilterList(org.apache.hadoop.hbase.filter.FilterList) IOException(java.io.IOException) ParseFilter(org.apache.hadoop.hbase.filter.ParseFilter) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) Filter(org.apache.hadoop.hbase.filter.Filter) Scan(org.apache.hadoop.hbase.client.Scan) Path(org.apache.hbase.thirdparty.javax.ws.rs.Path)

Example 7 with ParseFilter

Use of org.apache.hadoop.hbase.filter.ParseFilter in project hbase by apache.

The method scanFromThrift in class ThriftUtilities:

/**
 * Converts a Thrift {@code TScan} request object into an HBase client {@code Scan}.
 * Each optional Thrift field is copied only when the client actually set it
 * ({@code isSetXxx}), so unset fields keep the Scan's defaults.
 *
 * @param in the Thrift scan specification received over the wire
 * @return an equivalent client-side {@code Scan}
 * @throws IOException if the filter string cannot be parsed or a filter cannot
 *         be deserialized from bytes
 */
public static Scan scanFromThrift(TScan in) throws IOException {
    Scan out = new Scan();
    if (in.isSetStartRow()) {
        out.withStartRow(in.getStartRow());
    }
    if (in.isSetStopRow()) {
        out.withStopRow(in.getStopRow());
    }
    if (in.isSetCaching()) {
        out.setCaching(in.getCaching());
    }
    if (in.isSetMaxVersions()) {
        out.readVersions(in.getMaxVersions());
    }
    if (in.isSetColumns()) {
        for (TColumn column : in.getColumns()) {
            // A column with a qualifier selects one column; without, the whole family.
            if (column.isSetQualifier()) {
                out.addColumn(column.getFamily(), column.getQualifier());
            } else {
                out.addFamily(column.getFamily());
            }
        }
    }
    TTimeRange timeRange = in.getTimeRange();
    // The time range is applied only when both endpoints were explicitly set.
    if (timeRange != null && timeRange.isSetMinStamp() && timeRange.isSetMaxStamp()) {
        out.setTimeRange(timeRange.getMinStamp(), timeRange.getMaxStamp());
    }
    if (in.isSetBatchSize()) {
        out.setBatch(in.getBatchSize());
    }
    if (in.isSetFilterString()) {
        // Textual filter expression, parsed with the standard HBase filter grammar.
        ParseFilter parseFilter = new ParseFilter();
        out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
    }
    if (in.isSetAttributes()) {
        addAttributes(out, in.getAttributes());
    }
    if (in.isSetAuthorizations()) {
        out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
    }
    if (in.isSetReversed()) {
        out.setReversed(in.isReversed());
    }
    if (in.isSetCacheBlocks()) {
        out.setCacheBlocks(in.isCacheBlocks());
    }
    if (in.isSetColFamTimeRangeMap()) {
        // Per-column-family time ranges, keyed by family name.
        Map<ByteBuffer, TTimeRange> colFamTimeRangeMap = in.getColFamTimeRangeMap();
        if (MapUtils.isNotEmpty(colFamTimeRangeMap)) {
            for (Map.Entry<ByteBuffer, TTimeRange> entry : colFamTimeRangeMap.entrySet()) {
                out.setColumnFamilyTimeRange(Bytes.toBytes(entry.getKey()), entry.getValue().getMinStamp(), entry.getValue().getMaxStamp());
            }
        }
    }
    if (in.isSetReadType()) {
        out.setReadType(readTypeFromThrift(in.getReadType()));
    }
    if (in.isSetLimit()) {
        out.setLimit(in.getLimit());
    }
    if (in.isSetConsistency()) {
        out.setConsistency(consistencyFromThrift(in.getConsistency()));
    }
    if (in.isSetTargetReplicaId()) {
        out.setReplicaId(in.getTargetReplicaId());
    }
    if (in.isSetFilterBytes()) {
        // NOTE(review): evaluated after isSetFilterString, so when both are set
        // this second setFilter call appears to take precedence — confirm this
        // override is the intended priority for serialized filters.
        out.setFilter(filterFromThrift(in.getFilterBytes()));
    }
    return out;
}
Also used : Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) ParseFilter(org.apache.hadoop.hbase.filter.ParseFilter) TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) TTimeRange(org.apache.hadoop.hbase.thrift2.generated.TTimeRange) TScan(org.apache.hadoop.hbase.thrift2.generated.TScan) Scan(org.apache.hadoop.hbase.client.Scan) ByteBuffer(java.nio.ByteBuffer) Map(java.util.Map)

Aggregations

ParseFilter (org.apache.hadoop.hbase.filter.ParseFilter)7 Scan (org.apache.hadoop.hbase.client.Scan)5 Table (org.apache.hadoop.hbase.client.Table)4 Filter (org.apache.hadoop.hbase.filter.Filter)4 IOException (java.io.IOException)3 ByteBuffer (java.nio.ByteBuffer)2 FilterList (org.apache.hadoop.hbase.filter.FilterList)2 PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter)2 Authorizations (org.apache.hadoop.hbase.security.visibility.Authorizations)2 TColumn (org.apache.hadoop.hbase.thrift2.generated.TColumn)2 Map (java.util.Map)1 Path (javax.ws.rs.Path)1 Cell (org.apache.hadoop.hbase.Cell)1 DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException)1 Get (org.apache.hadoop.hbase.client.Get)1 Result (org.apache.hadoop.hbase.client.Result)1 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)1 TScan (org.apache.hadoop.hbase.thrift.generated.TScan)1 TGet (org.apache.hadoop.hbase.thrift2.generated.TGet)1 TScan (org.apache.hadoop.hbase.thrift2.generated.TScan)1