Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project phoenix by apache.
In class MetaDataEndpointImpl, the method findChildViews_4_11:
/**
 * Finds the child views linked to the given table by scanning SYSTEM.CATALOG for
 * CHILD_TABLE link rows under the table's key prefix.
 *
 * @param region     the catalog region this coprocessor is running on
 * @param tenantId   tenant id component of the parent table key (may be empty)
 * @param schemaName schema name component of the parent table key
 * @param tableName  table name component of the parent table key
 * @return a TableViewFinder holding the discovered views; flagged as not-in-single-region
 *         when at least one child view row lives outside {@code region}
 * @throws IOException on HBase scan failure
 */
private TableViewFinder findChildViews_4_11(Region region, byte[] tenantId, byte[] schemaName, byte[] tableName) throws IOException {
    Scan scan = new Scan();
    // Restrict the scan to exactly the rows keyed by this table:
    // startRow inclusive, stopRow = next possible key, exclusive.
    byte[] startRow = SchemaUtil.getTableKey(tenantId, schemaName, tableName);
    byte[] stopRow = ByteUtil.nextKey(startRow);
    scan.setStartRow(startRow);
    scan.setStopRow(stopRow);
    // Keep only rows whose LINK_TYPE column equals CHILD_TABLE; rows lacking the
    // column are dropped entirely because of setFilterIfMissing(true).
    SingleColumnValueFilter linkFilter = new SingleColumnValueFilter(TABLE_FAMILY_BYTES, LINK_TYPE_BYTES, CompareOp.EQUAL, CHILD_TABLE_BYTES);
    linkFilter.setFilterIfMissing(true);
    scan.setFilter(linkFilter);
    scan.addColumn(TABLE_FAMILY_BYTES, LINK_TYPE_BYTES);
    scan.addColumn(TABLE_FAMILY_BYTES, PARENT_TENANT_ID_BYTES);
    // These deprecated calls (HTableInterface, getTableDesc) work around the issue;
    // they are retained for compatibility with the HBase API level in use here.
    try (HTableInterface hTable = ServerUtil.getHTableForCoprocessorScan(env, region.getTableDesc().getTableName().getName())) {
        boolean allViewsInCurrentRegion = true;
        int numOfChildViews = 0;
        List<ViewInfo> viewInfoList = Lists.newArrayList();
        // Reused across iterations: getKey() overwrites this writable each pass and
        // copyBytes() takes a fresh snapshot, so hoisting the allocation out of the
        // loop is safe and avoids one object per scanned row.
        ImmutableBytesWritable ptr = new ImmutableBytesWritable();
        try (ResultScanner scanner = hTable.getScanner(scan)) {
            for (Result result = scanner.next(); (result != null); result = scanner.next()) {
                numOfChildViews++;
                ResultTuple resultTuple = new ResultTuple(result);
                resultTuple.getKey(ptr);
                byte[] key = ptr.copyBytes();
                // A non-null result from checkTableKeyInRegion means this child view's
                // row key is not served by the current region.
                if (checkTableKeyInRegion(key, region) != null) {
                    allViewsInCurrentRegion = false;
                }
                // Link row key layout: split into 5 var-char components; the view's
                // tenant id and full name are read from fixed component indexes.
                byte[][] rowViewKeyMetaData = new byte[5][];
                getVarChars(result.getRow(), 5, rowViewKeyMetaData);
                byte[] viewTenantId = rowViewKeyMetaData[PhoenixDatabaseMetaData.COLUMN_NAME_INDEX];
                byte[] viewSchemaName = SchemaUtil.getSchemaNameFromFullName(rowViewKeyMetaData[PhoenixDatabaseMetaData.FAMILY_NAME_INDEX]).getBytes();
                byte[] viewName = SchemaUtil.getTableNameFromFullName(rowViewKeyMetaData[PhoenixDatabaseMetaData.FAMILY_NAME_INDEX]).getBytes();
                viewInfoList.add(new ViewInfo(viewTenantId, viewSchemaName, viewName));
            }
            TableViewFinder tableViewFinderResult = new TableViewFinder(viewInfoList);
            if (numOfChildViews > 0 && !allViewsInCurrentRegion) {
                tableViewFinderResult.setAllViewsNotInSingleRegion();
            }
            return tableViewFinderResult;
        }
    }
}
Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project cdap by caskdata.
In class HBase96QueueConsumer, the method createStateFilter:
/**
 * Builds an HBase filter that skips rows whose state column has final state byte
 * PROCESSED, ignoring the transaction portion of the value.
 */
private Filter createStateFilter() {
    // The mask is all zeroes except its last byte, so the bitwise AND isolates the
    // state byte at the end of the column value.
    int maskLength = Ints.BYTES * 2 + 1;
    byte[] processedMask = new byte[maskLength];
    processedMask[maskLength - 1] = ConsumerEntryState.PROCESSED.getState();
    BitComparator stateComparator = new BitComparator(processedMask, BitComparator.BitwiseOp.AND);
    return new SingleColumnValueFilter(QueueEntryRow.COLUMN_FAMILY, stateColumnName, CompareFilter.CompareOp.NOT_EQUAL, stateComparator);
}
Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project cdap by caskdata.
In class HBase11QueueConsumer, the method createStateFilter:
/**
 * Returns a filter that excludes rows whose state column ends in the PROCESSED
 * state byte; the rest of the column value (transaction data) is ignored.
 */
private Filter createStateFilter() {
    // Only the final byte of the mask is non-zero, so ANDing against the column
    // value leaves just the trailing state byte for comparison.
    byte[] mask = new byte[Ints.BYTES * 2 + 1];
    mask[mask.length - 1] = ConsumerEntryState.PROCESSED.getState();
    return new SingleColumnValueFilter(QueueEntryRow.COLUMN_FAMILY, stateColumnName, CompareFilter.CompareOp.NOT_EQUAL,
                                       new BitComparator(mask, BitComparator.BitwiseOp.AND));
}
Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project cdap by caskdata.
In class HBase10QueueConsumer, the method createStateFilter:
/**
 * Creates the scan filter used to drop rows already in state PROCESSED
 * (transaction bytes in the column value are masked out of the comparison).
 */
private Filter createStateFilter() {
    // Zero everywhere except the last position: the AND comparator therefore
    // compares only the state byte that terminates the column value.
    final byte[] processedMask = new byte[Ints.BYTES * 2 + 1];
    final int stateBytePosition = processedMask.length - 1;
    processedMask[stateBytePosition] = ConsumerEntryState.PROCESSED.getState();
    final BitComparator comparator = new BitComparator(processedMask, BitComparator.BitwiseOp.AND);
    return new SingleColumnValueFilter(QueueEntryRow.COLUMN_FAMILY, stateColumnName, CompareFilter.CompareOp.NOT_EQUAL, comparator);
}
Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project cdap by caskdata.
In class HBase12CDH570QueueConsumer, the method createStateFilter:
/**
 * Filter factory: keeps only rows whose state column does NOT end with the
 * PROCESSED state byte, disregarding the transaction part of the value.
 */
private Filter createStateFilter() {
    byte[] lastByteMask = new byte[Ints.BYTES * 2 + 1];
    // Set just the trailing byte; the AND mask blanks out everything before it,
    // so NOT_EQUAL matches any row whose state byte differs from PROCESSED.
    lastByteMask[lastByteMask.length - 1] = ConsumerEntryState.PROCESSED.getState();
    return new SingleColumnValueFilter(
        QueueEntryRow.COLUMN_FAMILY,
        stateColumnName,
        CompareFilter.CompareOp.NOT_EQUAL,
        new BitComparator(lastByteMask, BitComparator.BitwiseOp.AND));
}
Aggregations