Use of org.apache.hadoop.hbase.filter.ParseFilter in project hbase by apache.
In class TableResource, method getScanResource:
@Path("{scanspec: .*[*]$}")
public TableScanResource getScanResource(@PathParam("scanspec") final String scanSpec, @DefaultValue(Integer.MAX_VALUE + "") @QueryParam(Constants.SCAN_LIMIT) int userRequestedLimit, @DefaultValue("") @QueryParam(Constants.SCAN_START_ROW) String startRow, @DefaultValue("") @QueryParam(Constants.SCAN_END_ROW) String endRow, @QueryParam(Constants.SCAN_COLUMN) List<String> column, @DefaultValue("1") @QueryParam(Constants.SCAN_MAX_VERSIONS) int maxVersions, @DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize, @DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime, @DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime, @DefaultValue("true") @QueryParam(Constants.SCAN_CACHE_BLOCKS) boolean cacheBlocks, @DefaultValue("false") @QueryParam(Constants.SCAN_REVERSED) boolean reversed, @DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String paramFilter) {
try {
Filter prefixFilter = null;
Scan tableScan = new Scan();
if (scanSpec.indexOf('*') > 0) {
String prefix = scanSpec.substring(0, scanSpec.indexOf('*'));
byte[] prefixBytes = Bytes.toBytes(prefix);
prefixFilter = new PrefixFilter(Bytes.toBytes(prefix));
if (startRow.isEmpty()) {
tableScan.withStartRow(prefixBytes);
}
}
if (LOG.isTraceEnabled()) {
LOG.trace("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => " + maxVersions + " Batch Size => " + batchSize);
}
Table hTable = RESTServlet.getInstance().getTable(this.table);
tableScan.setBatch(batchSize);
tableScan.readVersions(maxVersions);
tableScan.setTimeRange(startTime, endTime);
if (!startRow.isEmpty()) {
tableScan.withStartRow(Bytes.toBytes(startRow));
}
tableScan.withStopRow(Bytes.toBytes(endRow));
for (String col : column) {
byte[][] parts = CellUtil.parseColumn(Bytes.toBytes(col.trim()));
if (parts.length == 1) {
if (LOG.isTraceEnabled()) {
LOG.trace("Scan family : " + Bytes.toStringBinary(parts[0]));
}
tableScan.addFamily(parts[0]);
} else if (parts.length == 2) {
if (LOG.isTraceEnabled()) {
LOG.trace("Scan family and column : " + Bytes.toStringBinary(parts[0]) + " " + Bytes.toStringBinary(parts[1]));
}
tableScan.addColumn(parts[0], parts[1]);
} else {
throw new IllegalArgumentException("Invalid column specifier.");
}
}
FilterList filterList = new FilterList();
if (StringUtils.isNotEmpty(paramFilter)) {
ParseFilter pf = new ParseFilter();
Filter parsedParamFilter = pf.parseFilterString(paramFilter);
if (parsedParamFilter != null) {
filterList.addFilter(parsedParamFilter);
}
if (prefixFilter != null) {
filterList.addFilter(prefixFilter);
}
}
if (filterList.size() > 0) {
tableScan.setFilter(filterList);
}
int fetchSize = this.servlet.getConfiguration().getInt(Constants.SCAN_FETCH_SIZE, 10);
tableScan.setCaching(fetchSize);
tableScan.setReversed(reversed);
tableScan.setCacheBlocks(cacheBlocks);
return new TableScanResource(hTable.getScanner(tableScan), userRequestedLimit);
} catch (IOException exp) {
servlet.getMetrics().incrementFailedScanRequests(1);
processException(exp);
LOG.warn(exp.toString(), exp);
return null;
}
}
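For orientation, here is a minimal, self-contained sketch of the ParseFilter step used above: turning the SCAN_FILTER query parameter into a Filter and combining it with a row-prefix filter in a FilterList. The filter expression, the "rowprefix" value, and the class name are made-up examples, not values from the project.

import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ParseFilterRestSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical filter expression, as a REST client might pass it in the filter query parameter.
    String paramFilter = "SingleColumnValueFilter('cf', 'q', =, 'binary:some-value')";
    // ParseFilter converts the textual filter language into a Filter instance.
    Filter parsedParamFilter = new ParseFilter().parseFilterString(paramFilter);
    // getScanResource above adds it to a FilterList together with an optional row-prefix filter;
    // FilterList defaults to MUST_PASS_ALL, so both conditions must hold.
    FilterList filterList = new FilterList();
    filterList.addFilter(parsedParamFilter);
    filterList.addFilter(new PrefixFilter(Bytes.toBytes("rowprefix")));
    System.out.println(filterList);
  }
}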
Use of org.apache.hadoop.hbase.filter.ParseFilter in project hbase by apache.
In class ThriftUtilities, method scanFromThrift:
public static Scan scanFromThrift(TScan in) throws IOException {
  Scan out = new Scan();
  if (in.isSetStartRow()) {
    out.withStartRow(in.getStartRow());
  }
  if (in.isSetStopRow()) {
    out.withStopRow(in.getStopRow());
  }
  if (in.isSetCaching()) {
    out.setCaching(in.getCaching());
  }
  if (in.isSetMaxVersions()) {
    out.readVersions(in.getMaxVersions());
  }
  if (in.isSetColumns()) {
    for (TColumn column : in.getColumns()) {
      if (column.isSetQualifier()) {
        out.addColumn(column.getFamily(), column.getQualifier());
      } else {
        out.addFamily(column.getFamily());
      }
    }
  }
  TTimeRange timeRange = in.getTimeRange();
  if (timeRange != null && timeRange.isSetMinStamp() && timeRange.isSetMaxStamp()) {
    out.setTimeRange(timeRange.getMinStamp(), timeRange.getMaxStamp());
  }
  if (in.isSetBatchSize()) {
    out.setBatch(in.getBatchSize());
  }
  if (in.isSetFilterString()) {
    // Parse the Thrift-supplied filter expression into a Filter instance.
    ParseFilter parseFilter = new ParseFilter();
    out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
  }
  if (in.isSetAttributes()) {
    addAttributes(out, in.getAttributes());
  }
  if (in.isSetAuthorizations()) {
    out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
  }
  if (in.isSetReversed()) {
    out.setReversed(in.isReversed());
  }
  if (in.isSetCacheBlocks()) {
    out.setCacheBlocks(in.isCacheBlocks());
  }
  if (in.isSetColFamTimeRangeMap()) {
    Map<ByteBuffer, TTimeRange> colFamTimeRangeMap = in.getColFamTimeRangeMap();
    if (MapUtils.isNotEmpty(colFamTimeRangeMap)) {
      for (Map.Entry<ByteBuffer, TTimeRange> entry : colFamTimeRangeMap.entrySet()) {
        out.setColumnFamilyTimeRange(Bytes.toBytes(entry.getKey()), entry.getValue().getMinStamp(),
            entry.getValue().getMaxStamp());
      }
    }
  }
  if (in.isSetReadType()) {
    out.setReadType(readTypeFromThrift(in.getReadType()));
  }
  if (in.isSetLimit()) {
    out.setLimit(in.getLimit());
  }
  if (in.isSetConsistency()) {
    out.setConsistency(consistencyFromThrift(in.getConsistency()));
  }
  if (in.isSetTargetReplicaId()) {
    out.setReplicaId(in.getTargetReplicaId());
  }
  if (in.isSetFilterBytes()) {
    // A serialized filter (filterBytes) is applied last and replaces any filter set from the string.
    out.setFilter(filterFromThrift(in.getFilterBytes()));
  }
  return out;
}
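Similarly, a minimal sketch of the filter-string path taken by scanFromThrift, assuming the client set TScan.filterString to a UTF-8 encoded filter expression; the expression and class name below are made-up examples for illustration.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ParseFilterThriftSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical filter expression; over Thrift it arrives as the raw bytes of TScan.filterString.
    byte[] filterString = Bytes.toBytes("PrefixFilter('row-') AND KeyOnlyFilter()");
    Scan scan = new Scan();
    // parseFilterString(byte[]) is the overload scanFromThrift uses with in.getFilterString().
    scan.setFilter(new ParseFilter().parseFilterString(filterString));
    System.out.println(scan);
  }
}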