Use of org.apache.hadoop.hbase.filter.FilterList in project camel by apache.
From the class HBaseConsumer, method poll:
@Override
protected int poll() throws Exception {
    try (Table table = endpoint.getTable()) {
        shutdownRunningTask = null;
        pendingExchanges = 0;
        Queue<Exchange> queue = new LinkedList<>();
        Scan scan = new Scan();
        List<Filter> filters = new LinkedList<>();
        if (endpoint.getFilters() != null) {
            filters.addAll(endpoint.getFilters());
        }
        if (maxMessagesPerPoll > 0) {
            filters.add(new PageFilter(maxMessagesPerPoll));
        }
        if (!filters.isEmpty()) {
            // Combine the user-configured filters with the page filter;
            // FilterList defaults to MUST_PASS_ALL (logical AND).
            Filter compoundFilter = new FilterList(filters);
            scan.setFilter(compoundFilter);
        }
        if (rowModel != null && rowModel.getCells() != null) {
            Set<HBaseCell> cellModels = rowModel.getCells();
            for (HBaseCell cellModel : cellModels) {
                scan.addColumn(HBaseHelper.getHBaseFieldAsBytes(cellModel.getFamily()), HBaseHelper.getHBaseFieldAsBytes(cellModel.getQualifier()));
            }
        }
        ResultScanner scanner = table.getScanner(scan);
        int exchangeCount = 0;
        // The next three statements are used just to get a reference to the BodyCellMappingStrategy instance.
        Exchange exchange = endpoint.createExchange();
        exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
        CellMappingStrategy mappingStrategy = endpoint.getCellMappingStrategyFactory().getStrategy(exchange.getIn());
        for (Result result = scanner.next(); (exchangeCount < maxMessagesPerPoll || maxMessagesPerPoll <= 0) && result != null; result = scanner.next()) {
            HBaseData data = new HBaseData();
            HBaseRow resultRow = new HBaseRow();
            resultRow.apply(rowModel);
            byte[] row = result.getRow();
            resultRow.setId(endpoint.getCamelContext().getTypeConverter().convertTo(rowModel.getRowType(), row));
            List<Cell> cells = result.listCells();
            if (cells != null) {
                Set<HBaseCell> cellModels = rowModel.getCells();
                if (cellModels.size() > 0) {
                    for (HBaseCell modelCell : cellModels) {
                        HBaseCell resultCell = new HBaseCell();
                        String family = modelCell.getFamily();
                        String column = modelCell.getQualifier();
                        resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(modelCell.getValueType(), result.getValue(HBaseHelper.getHBaseFieldAsBytes(family), HBaseHelper.getHBaseFieldAsBytes(column))));
                        resultCell.setFamily(modelCell.getFamily());
                        resultCell.setQualifier(modelCell.getQualifier());
                        resultRow.getCells().add(resultCell);
                    }
                } else {
                    // Just put every key value into the result cells.
                    for (Cell cell : cells) {
                        String qualifier = new String(CellUtil.cloneQualifier(cell));
                        String family = new String(CellUtil.cloneFamily(cell));
                        HBaseCell resultCell = new HBaseCell();
                        resultCell.setFamily(family);
                        resultCell.setQualifier(qualifier);
                        resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(String.class, CellUtil.cloneValue(cell)));
                        resultRow.getCells().add(resultCell);
                    }
                }
                data.getRows().add(resultRow);
                exchange = endpoint.createExchange();
                // Probably overkill but kept here for consistency.
                exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
                mappingStrategy.applyScanResults(exchange.getIn(), data);
                // Make sure that there is a header containing the marked row ids, so that they can be deleted.
                exchange.getIn().setHeader(HBaseAttribute.HBASE_MARKED_ROW_ID.asHeader(), result.getRow());
                queue.add(exchange);
                exchangeCount++;
            }
        }
        scanner.close();
        return queue.isEmpty() ? 0 : processBatch(CastUtils.cast(queue));
    }
}
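The key FilterList detail here is that optional filters (the endpoint's user-configured filters plus an optional PageFilter) are gathered into a plain List<Filter> and only wrapped in a FilterList if at least one is present, so an empty filter is never installed on the Scan. A minimal standalone sketch of the same pattern; the table name and connection setup are hypothetical and not part of the Camel component:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanWithFilterList {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        int maxRowsPerPoll = 100; // plays the role of maxMessagesPerPoll above
        List<Filter> filters = new ArrayList<>();
        if (maxRowsPerPoll > 0) {
            // PageFilter asks each region server to stop after this many rows.
            filters.add(new PageFilter(maxRowsPerPoll));
        }
        Scan scan = new Scan();
        if (!filters.isEmpty()) {
            // The default operator is MUST_PASS_ALL: a row must satisfy every filter.
            scan.setFilter(new FilterList(filters));
        }
        // "demo" is a hypothetical table name.
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("demo"));
             ResultScanner scanner = table.getScanner(scan)) {
            for (Result result : scanner) {
                System.out.println(Bytes.toString(result.getRow()));
            }
        }
    }
}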
Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.
From the class VisibilityController, method preGetOp:
@Override
public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> e, Get get, List<Cell> results) throws IOException {
    if (!initialized) {
        throw new VisibilityControllerNotReadyException("VisibilityController not yet initialized");
    }
    // Nothing useful to do if authorization is not enabled
    if (!authorizationEnabled) {
        return;
    }
    Region region = e.getEnvironment().getRegion();
    Authorizations authorizations = null;
    try {
        authorizations = get.getAuthorizations();
    } catch (DeserializationException de) {
        throw new IOException(de);
    }
    if (authorizations == null) {
        // No Authorizations present for this scan/Get!
        // In case of system tables other than "labels", just scan without visibility check and
        // filtering. Checking visibility labels for the META and NAMESPACE tables is not needed.
        TableName table = region.getRegionInfo().getTable();
        if (table.isSystemTable() && !table.equals(LABELS_TABLE_NAME)) {
            return;
        }
    }
    Filter visibilityLabelFilter = VisibilityUtils.createVisibilityLabelFilter(e.getEnvironment().getRegion(), authorizations);
    if (visibilityLabelFilter != null) {
        Filter filter = get.getFilter();
        if (filter != null) {
            // AND the mandatory visibility filter onto the caller's existing filter.
            get.setFilter(new FilterList(filter, visibilityLabelFilter));
        } else {
            get.setFilter(visibilityLabelFilter);
        }
    }
}
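Note the two-argument construction new FilterList(filter, visibilityLabelFilter): with the default MUST_PASS_ALL operator this ANDs the mandatory visibility filter onto whatever filter the caller already set, without clobbering it. A small sketch of the same composition as a hypothetical helper:

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;

public final class FilterComposition {
    private FilterComposition() {
    }

    // Hypothetical helper: AND a mandatory filter onto a Get without
    // discarding whatever filter the caller already installed.
    public static void andFilter(Get get, Filter mandatory) {
        Filter existing = get.getFilter();
        if (existing != null) {
            // FilterList defaults to Operator.MUST_PASS_ALL, so a cell must
            // pass both the caller's filter and the mandatory one.
            get.setFilter(new FilterList(existing, mandatory));
        } else {
            get.setFilter(mandatory);
        }
    }
}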
Use of org.apache.hadoop.hbase.filter.FilterList in project hadoop by apache.
From the class TimelineFilterUtils, method createFilterForConfsOrMetricsToRetrieve:
/**
 * Create filters for confs or metrics to retrieve. This list includes a
 * configs/metrics family filter and relevant filters for confs/metrics to
 * retrieve, if present.
 *
 * @param <T> Describes the type of column prefix.
 * @param confsOrMetricToRetrieve configs/metrics to retrieve.
 * @param columnFamily config or metric column family.
 * @param columnPrefix config or metric column prefix.
 * @return a filter list.
 * @throws IOException if any problem occurs while creating the filters.
 */
public static <T> Filter createFilterForConfsOrMetricsToRetrieve(TimelineFilterList confsOrMetricToRetrieve, ColumnFamily<T> columnFamily, ColumnPrefix<T> columnPrefix) throws IOException {
    Filter familyFilter = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(columnFamily.getBytes()));
    if (confsOrMetricToRetrieve != null && !confsOrMetricToRetrieve.getFilterList().isEmpty()) {
        // If confsOrMetricToRetrieve is specified, create a filter list based
        // on it and the family filter.
        FilterList filter = new FilterList(familyFilter);
        filter.addFilter(createHBaseFilterList(columnPrefix, confsOrMetricToRetrieve));
        return filter;
    } else {
        // Only the family filter needs to be added.
        return familyFilter;
    }
}
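The pattern here is to seed a FilterList with one mandatory filter and conditionally addFilter() more. A sketch of the same shape using stock HBase filters in place of the project's own helpers; the method name and column arguments are hypothetical:

import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;

public final class FamilyPlusPrefix {
    private FamilyPlusPrefix() {
    }

    // Hypothetical method: restrict results to one column family and,
    // optionally, to qualifiers starting with the given prefix.
    public static Filter forFamily(byte[] family, byte[] qualifierPrefix) {
        Filter familyFilter = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(family));
        if (qualifierPrefix == null) {
            return familyFilter; // only the family restriction is needed
        }
        FilterList list = new FilterList(familyFilter); // MUST_PASS_ALL by default
        list.addFilter(new ColumnPrefixFilter(qualifierPrefix));
        return list;
    }
}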
Use of org.apache.hadoop.hbase.filter.FilterList in project hadoop by apache.
From the class TimelineFilterUtils, method createHBaseFilterList:
/**
 * Creates an equivalent HBase {@link FilterList} from a {@link TimelineFilterList},
 * converting the different timeline filters (of type {@link TimelineFilter})
 * into their equivalent HBase filters.
 *
 * @param <T> Describes the type of column prefix.
 * @param colPrefix column prefix which will be used for conversion.
 * @param filterList timeline filter list which has to be converted.
 * @return A {@link FilterList} object.
 * @throws IOException if any problem occurs while creating the filter list.
 */
public static <T> FilterList createHBaseFilterList(ColumnPrefix<T> colPrefix, TimelineFilterList filterList) throws IOException {
    FilterList list = new FilterList(getHBaseOperator(filterList.getOperator()));
    for (TimelineFilter filter : filterList.getFilterList()) {
        switch (filter.getFilterType()) {
            case LIST:
                list.addFilter(createHBaseFilterList(colPrefix, (TimelineFilterList) filter));
                break;
            case PREFIX:
                list.addFilter(createHBaseColQualPrefixFilter(colPrefix, (TimelinePrefixFilter) filter));
                break;
            case COMPARE:
                TimelineCompareFilter compareFilter = (TimelineCompareFilter) filter;
                list.addFilter(createHBaseSingleColValueFilter(colPrefix.getColumnFamilyBytes(), colPrefix.getColumnPrefixBytes(compareFilter.getKey()), colPrefix.getValueConverter().encodeValue(compareFilter.getValue()), getHBaseCompareOp(compareFilter.getCompareOp()), compareFilter.getKeyMustExist()));
                break;
            case KEY_VALUE:
                TimelineKeyValueFilter kvFilter = (TimelineKeyValueFilter) filter;
                list.addFilter(createHBaseSingleColValueFilter(colPrefix.getColumnFamilyBytes(), colPrefix.getColumnPrefixBytes(kvFilter.getKey()), colPrefix.getValueConverter().encodeValue(kvFilter.getValue()), getHBaseCompareOp(kvFilter.getCompareOp()), kvFilter.getKeyMustExist()));
                break;
            default:
                LOG.info("Unexpected filter type " + filter.getFilterType());
                break;
        }
    }
    return list;
}
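Because getHBaseOperator() maps the timeline list's operator onto FilterList.Operator, and LIST entries recurse, arbitrary AND/OR trees can be expressed as nested FilterLists. A sketch of such nesting with stock HBase filters; all family, qualifier, and value names are made up for illustration:

import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public final class NestedFilterLists {
    private NestedFilterLists() {
    }

    // (qualifier starts with "cpu" OR "mem") AND column i:status equals "OK".
    public static Filter cpuOrMemWithOkStatus() {
        FilterList either = new FilterList(FilterList.Operator.MUST_PASS_ONE);
        either.addFilter(new ColumnPrefixFilter(Bytes.toBytes("cpu")));
        either.addFilter(new ColumnPrefixFilter(Bytes.toBytes("mem")));

        SingleColumnValueFilter status = new SingleColumnValueFilter(
                Bytes.toBytes("i"), Bytes.toBytes("status"),
                CompareOp.EQUAL, Bytes.toBytes("OK"));
        // Mirrors keyMustExist above: drop rows that lack the column entirely.
        status.setFilterIfMissing(true);

        FilterList both = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        both.addFilter(either);
        both.addFilter(status);
        return both;
    }
}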
Use of org.apache.hadoop.hbase.filter.FilterList in project hadoop by apache.
From the class TimelineEntityReader, method readEntities:
/**
 * Reads and deserializes a set of timeline entities from HBase storage.
 * It goes through all the results available and returns the number of
 * entries specified by the limit, in the entity's natural sort order.
 *
 * @param hbaseConf HBase Configuration.
 * @param conn HBase Connection.
 * @return a set of <cite>TimelineEntity</cite> objects.
 * @throws IOException if any exception is encountered while reading entities.
 */
public Set<TimelineEntity> readEntities(Configuration hbaseConf, Connection conn) throws IOException {
    validateParams();
    augmentParams(hbaseConf, conn);
    NavigableSet<TimelineEntity> entities = new TreeSet<>();
    FilterList filterList = createFilterList();
    if (LOG.isDebugEnabled() && filterList != null) {
        LOG.debug("FilterList created for scan is - " + filterList);
    }
    ResultScanner results = getResults(hbaseConf, conn, filterList);
    try {
        for (Result result : results) {
            TimelineEntity entity = parseEntity(result);
            if (entity == null) {
                continue;
            }
            entities.add(entity);
            if (!sortedKeys) {
                // Results are unsorted: keep the set capped at the limit by
                // evicting the entity that sorts last.
                if (entities.size() > filters.getLimit()) {
                    entities.pollLast();
                }
            } else {
                // Results arrive already sorted: stop once the limit is reached.
                if (entities.size() == filters.getLimit()) {
                    break;
                }
            }
        }
        return entities;
    } finally {
        results.close();
    }
}
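A detail worth noting in this pattern: the client enforces the limit itself (pollLast() or break) rather than relying on a server-side filter alone, since a PageFilter is applied independently by each region server and the scan can therefore return more rows than the page size in aggregate. A hypothetical, trimmed-down sketch of the same capped-TreeSet idea:

import java.io.IOException;
import java.util.NavigableSet;
import java.util.TreeSet;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.util.Bytes;

public final class CappedScan {
    private CappedScan() {
    }

    // Keep at most `limit` row keys, in natural (sorted) order.
    public static NavigableSet<String> readRowKeys(Table table, long limit) throws IOException {
        Scan scan = new Scan();
        // A server-side hint only; each region applies the page size
        // independently, so the client-side cap below is still required.
        scan.setFilter(new FilterList(new PageFilter(limit)));
        NavigableSet<String> keys = new TreeSet<>();
        try (ResultScanner results = table.getScanner(scan)) {
            for (Result result : results) {
                keys.add(Bytes.toString(result.getRow()));
                if (keys.size() > limit) {
                    keys.pollLast(); // evict the key that sorts last
                }
            }
        }
        return keys;
    }
}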