Search in sources:

Example 1 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project camel by apache.

The class HBaseConsumer, method poll.

@Override
protected int poll() throws Exception {
    try (Table table = endpoint.getTable()) {
        shutdownRunningTask = null;
        pendingExchanges = 0;
        Queue<Exchange> queue = new LinkedList<>();
        Scan scan = new Scan();
        List<Filter> filters = new LinkedList<>();
        if (endpoint.getFilters() != null) {
            filters.addAll(endpoint.getFilters());
        }
        if (maxMessagesPerPoll > 0) {
            filters.add(new PageFilter(maxMessagesPerPoll));
        }
        if (!filters.isEmpty()) {
            Filter compoundFilter = new FilterList(filters);
            scan.setFilter(compoundFilter);
        }
        if (rowModel != null && rowModel.getCells() != null) {
            Set<HBaseCell> cellModels = rowModel.getCells();
            for (HBaseCell cellModel : cellModels) {
                scan.addColumn(HBaseHelper.getHBaseFieldAsBytes(cellModel.getFamily()), HBaseHelper.getHBaseFieldAsBytes(cellModel.getQualifier()));
            }
        }
        ResultScanner scanner = table.getScanner(scan);
        int exchangeCount = 0;
        // The next three statements are used just to get a reference to the BodyCellMappingStrategy instance.
        Exchange exchange = endpoint.createExchange();
        exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
        CellMappingStrategy mappingStrategy = endpoint.getCellMappingStrategyFactory().getStrategy(exchange.getIn());
        for (Result result = scanner.next(); (exchangeCount < maxMessagesPerPoll || maxMessagesPerPoll <= 0) && result != null; result = scanner.next()) {
            HBaseData data = new HBaseData();
            HBaseRow resultRow = new HBaseRow();
            resultRow.apply(rowModel);
            byte[] row = result.getRow();
            resultRow.setId(endpoint.getCamelContext().getTypeConverter().convertTo(rowModel.getRowType(), row));
            List<Cell> cells = result.listCells();
            if (cells != null) {
                Set<HBaseCell> cellModels = rowModel.getCells();
                if (!cellModels.isEmpty()) {
                    for (HBaseCell modelCell : cellModels) {
                        HBaseCell resultCell = new HBaseCell();
                        String family = modelCell.getFamily();
                        String column = modelCell.getQualifier();
                        resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(modelCell.getValueType(), result.getValue(HBaseHelper.getHBaseFieldAsBytes(family), HBaseHelper.getHBaseFieldAsBytes(column))));
                        resultCell.setFamily(modelCell.getFamily());
                        resultCell.setQualifier(modelCell.getQualifier());
                        resultRow.getCells().add(resultCell);
                    }
                } else {
                    // Just put every key/value pair into the result cells.
                    for (Cell cell : cells) {
                        String qualifier = new String(CellUtil.cloneQualifier(cell));
                        String family = new String(CellUtil.cloneFamily(cell));
                        HBaseCell resultCell = new HBaseCell();
                        resultCell.setFamily(family);
                        resultCell.setQualifier(qualifier);
                        resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(String.class, CellUtil.cloneValue(cell)));
                        resultRow.getCells().add(resultCell);
                    }
                }
                data.getRows().add(resultRow);
                exchange = endpoint.createExchange();
                // Probably overkill but kept it here for consistency.
                exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
                mappingStrategy.applyScanResults(exchange.getIn(), data);
                // Make sure that there is a header containing the marked row ids, so that they can be deleted.
                exchange.getIn().setHeader(HBaseAttribute.HBASE_MARKED_ROW_ID.asHeader(), result.getRow());
                queue.add(exchange);
                exchangeCount++;
            }
        }
        scanner.close();
        return queue.isEmpty() ? 0 : processBatch(CastUtils.cast(queue));
    }
}
Also used: Table(org.apache.hadoop.hbase.client.Table), ResultScanner(org.apache.hadoop.hbase.client.ResultScanner), CellMappingStrategy(org.apache.camel.component.hbase.mapping.CellMappingStrategy), FilterList(org.apache.hadoop.hbase.filter.FilterList), LinkedList(java.util.LinkedList), HBaseCell(org.apache.camel.component.hbase.model.HBaseCell), Result(org.apache.hadoop.hbase.client.Result), Exchange(org.apache.camel.Exchange), PageFilter(org.apache.hadoop.hbase.filter.PageFilter), Filter(org.apache.hadoop.hbase.filter.Filter), HBaseData(org.apache.camel.component.hbase.model.HBaseData), HBaseRow(org.apache.camel.component.hbase.model.HBaseRow), Scan(org.apache.hadoop.hbase.client.Scan), Cell(org.apache.hadoop.hbase.Cell)
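
Below is a minimal, self-contained sketch of the core pattern from this consumer: optional filters are collected into a list, combined into a single FilterList (which defaults to MUST_PASS_ALL, a logical AND), and applied to the Scan. The table name "demo" and the "cf"/"q" column are hypothetical placeholders, not part of the camel source.

import java.util.LinkedList;
import java.util.List;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class PagedScanSketch {
    public static void main(String[] args) throws Exception {
        try (Connection connection = ConnectionFactory.createConnection();
             Table table = connection.getTable(TableName.valueOf("demo"))) {
            Scan scan = new Scan();
            // Restrict the scan to one column, as the consumer does per cell model.
            scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"));
            List<Filter> filters = new LinkedList<>();
            // Cap the rows returned per region server, as the consumer does with
            // maxMessagesPerPoll (PageFilter is applied independently per server).
            filters.add(new PageFilter(10));
            if (!filters.isEmpty()) {
                // FilterList defaults to Operator.MUST_PASS_ALL (logical AND).
                scan.setFilter(new FilterList(filters));
            }
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    System.out.println(Bytes.toString(result.getRow()));
                }
            }
        }
    }
}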

Example 2 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.

The class VisibilityController, method preGetOp.

@Override
public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> e, Get get, List<Cell> results) throws IOException {
    if (!initialized) {
        throw new VisibilityControllerNotReadyException("VisibilityController not yet initialized");
    }
    // Nothing useful to do if authorization is not enabled
    if (!authorizationEnabled) {
        return;
    }
    Region region = e.getEnvironment().getRegion();
    Authorizations authorizations = null;
    try {
        authorizations = get.getAuthorizations();
    } catch (DeserializationException de) {
        throw new IOException(de);
    }
    if (authorizations == null) {
        // No Authorizations present for this scan/Get!
        // For system tables other than "labels", just scan without visibility checks and
        // filtering. Checking visibility labels for the META and NAMESPACE tables is not needed.
        TableName table = region.getRegionInfo().getTable();
        if (table.isSystemTable() && !table.equals(LABELS_TABLE_NAME)) {
            return;
        }
    }
    Filter visibilityLabelFilter = VisibilityUtils.createVisibilityLabelFilter(e.getEnvironment().getRegion(), authorizations);
    if (visibilityLabelFilter != null) {
        Filter filter = get.getFilter();
        if (filter != null) {
            get.setFilter(new FilterList(filter, visibilityLabelFilter));
        } else {
            get.setFilter(visibilityLabelFilter);
        }
    }
}
Also used: TableName(org.apache.hadoop.hbase.TableName), Filter(org.apache.hadoop.hbase.filter.Filter), Region(org.apache.hadoop.hbase.regionserver.Region), FilterList(org.apache.hadoop.hbase.filter.FilterList), DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException), IOException(java.io.IOException), DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException)
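
The important detail in preGetOp is that an existing filter on the Get is preserved by wrapping both filters in a FilterList rather than overwriting it. A minimal sketch of that compose pattern, where extraFilter is a hypothetical stand-in for the visibility-label filter:

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;

public class ComposeFilterSketch {
    // Attach extraFilter to the Get without clobbering a filter the caller set.
    static void addFilter(Get get, Filter extraFilter) {
        if (extraFilter == null) {
            return;
        }
        Filter existing = get.getFilter();
        if (existing != null) {
            // The default operator is MUST_PASS_ALL: a cell must pass both filters.
            get.setFilter(new FilterList(existing, extraFilter));
        } else {
            get.setFilter(extraFilter);
        }
    }
}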

Example 3 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project hadoop by apache.

The class TimelineFilterUtils, method createFilterForConfsOrMetricsToRetrieve.

/**
   * Create filters for confs or metrics to retrieve. The returned filter
   * includes a configs/metrics family filter and, if present, the relevant
   * filters for the confs/metrics to retrieve.
   *
   * @param <T> Describes the type of column prefix.
   * @param confsOrMetricToRetrieve configs/metrics to retrieve.
   * @param columnFamily config or metric column family.
   * @param columnPrefix config or metric column prefix.
   * @return a filter: either the family filter alone, or a filter list
   *         combining it with the filters for the confs/metrics to retrieve.
   * @throws IOException if any problem occurs while creating the filters.
   */
public static <T> Filter createFilterForConfsOrMetricsToRetrieve(TimelineFilterList confsOrMetricToRetrieve, ColumnFamily<T> columnFamily, ColumnPrefix<T> columnPrefix) throws IOException {
    Filter familyFilter = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(columnFamily.getBytes()));
    if (confsOrMetricToRetrieve != null && !confsOrMetricToRetrieve.getFilterList().isEmpty()) {
        // If confsOrMetricsToRetrieve are specified, create a filter list based
        // on them and the family filter.
        FilterList filter = new FilterList(familyFilter);
        filter.addFilter(createHBaseFilterList(columnPrefix, confsOrMetricToRetrieve));
        return filter;
    } else {
        // Only the family filter needs to be added.
        return familyFilter;
    }
}
Also used: FamilyFilter(org.apache.hadoop.hbase.filter.FamilyFilter), QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter), Filter(org.apache.hadoop.hbase.filter.Filter), SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter), FilterList(org.apache.hadoop.hbase.filter.FilterList), BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator)
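
The same shape can be reused for any family-plus-qualifier restriction: seed a FilterList with a FamilyFilter, then narrow it only when a qualifier constraint is requested. A hedged sketch, where the family and qualifier-prefix bytes are hypothetical inputs rather than anything from the hadoop source:

import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.QualifierFilter;

public class FamilyFilterSketch {
    static Filter familyAndQualifierPrefix(byte[] family, byte[] qualifierPrefix) {
        Filter familyFilter = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(family));
        if (qualifierPrefix == null) {
            // Only the family filter needs to be applied.
            return familyFilter;
        }
        // FilterList(Filter...) uses MUST_PASS_ALL, so both conditions must hold.
        FilterList list = new FilterList(familyFilter);
        list.addFilter(new QualifierFilter(CompareOp.EQUAL, new BinaryPrefixComparator(qualifierPrefix)));
        return list;
    }
}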

Example 4 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project hadoop by apache.

The class TimelineFilterUtils, method createHBaseFilterList.

/**
   * Creates equivalent HBase {@link FilterList} from {@link TimelineFilterList}
   * while converting different timeline filters (of type {@link TimelineFilter})
   * into their equivalent HBase filters.
   *
   * @param <T> Describes the type of column prefix.
   * @param colPrefix column prefix which will be used for conversion.
   * @param filterList timeline filter list which has to be converted.
   * @return A {@link FilterList} object.
   * @throws IOException if any problem occurs while creating the filter list.
   */
public static <T> FilterList createHBaseFilterList(ColumnPrefix<T> colPrefix, TimelineFilterList filterList) throws IOException {
    FilterList list = new FilterList(getHBaseOperator(filterList.getOperator()));
    for (TimelineFilter filter : filterList.getFilterList()) {
        switch(filter.getFilterType()) {
            case LIST:
                list.addFilter(createHBaseFilterList(colPrefix, (TimelineFilterList) filter));
                break;
            case PREFIX:
                list.addFilter(createHBaseColQualPrefixFilter(colPrefix, (TimelinePrefixFilter) filter));
                break;
            case COMPARE:
                TimelineCompareFilter compareFilter = (TimelineCompareFilter) filter;
                list.addFilter(createHBaseSingleColValueFilter(colPrefix.getColumnFamilyBytes(), colPrefix.getColumnPrefixBytes(compareFilter.getKey()), colPrefix.getValueConverter().encodeValue(compareFilter.getValue()), getHBaseCompareOp(compareFilter.getCompareOp()), compareFilter.getKeyMustExist()));
                break;
            case KEY_VALUE:
                TimelineKeyValueFilter kvFilter = (TimelineKeyValueFilter) filter;
                list.addFilter(createHBaseSingleColValueFilter(colPrefix.getColumnFamilyBytes(), colPrefix.getColumnPrefixBytes(kvFilter.getKey()), colPrefix.getValueConverter().encodeValue(kvFilter.getValue()), getHBaseCompareOp(kvFilter.getCompareOp()), kvFilter.getKeyMustExist()));
                break;
            default:
                LOG.info("Unexpected filter type " + filter.getFilterType());
                break;
        }
    }
    return list;
}
Also used: FilterList(org.apache.hadoop.hbase.filter.FilterList)
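
Since FilterList itself extends Filter, lists can be nested to express boolean combinations, which is exactly what the recursive LIST case above relies on. A minimal sketch with hypothetical column prefixes, combining two prefixes with MUST_PASS_ONE (OR) inside an outer MUST_PASS_ALL (AND) list:

import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.util.Bytes;

public class NestedFilterListSketch {
    static FilterList configOrMetricColumns() {
        // Match columns whose qualifier starts with "cfg_" OR "metric_".
        FilterList either = new FilterList(Operator.MUST_PASS_ONE);
        either.addFilter(new ColumnPrefixFilter(Bytes.toBytes("cfg_")));
        either.addFilter(new ColumnPrefixFilter(Bytes.toBytes("metric_")));
        // FilterList extends Filter, so a nested list is added like any other filter.
        FilterList all = new FilterList(Operator.MUST_PASS_ALL);
        all.addFilter(either);
        return all;
    }
}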

Example 5 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project hadoop by apache.

The class TimelineEntityReader, method readEntities.

/**
   * Reads and deserializes a set of timeline entities from the HBase storage.
   * It goes through all available results and returns up to the configured
   * limit of entries, in the entities' natural sort order.
   *
   * @param hbaseConf HBase Configuration.
   * @param conn HBase Connection.
   * @return a set of <cite>TimelineEntity</cite> objects.
   * @throws IOException if any exception is encountered while reading entities.
   */
public Set<TimelineEntity> readEntities(Configuration hbaseConf, Connection conn) throws IOException {
    validateParams();
    augmentParams(hbaseConf, conn);
    NavigableSet<TimelineEntity> entities = new TreeSet<>();
    FilterList filterList = createFilterList();
    if (LOG.isDebugEnabled() && filterList != null) {
        LOG.debug("FilterList created for scan is - " + filterList);
    }
    ResultScanner results = getResults(hbaseConf, conn, filterList);
    try {
        for (Result result : results) {
            TimelineEntity entity = parseEntity(result);
            if (entity == null) {
                continue;
            }
            entities.add(entity);
            if (!sortedKeys) {
                if (entities.size() > filters.getLimit()) {
                    entities.pollLast();
                }
            } else {
                if (entities.size() == filters.getLimit()) {
                    break;
                }
            }
        }
        return entities;
    } finally {
        results.close();
    }
}
Also used: ResultScanner(org.apache.hadoop.hbase.client.ResultScanner), TreeSet(java.util.TreeSet), FilterList(org.apache.hadoop.hbase.filter.FilterList), TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), Result(org.apache.hadoop.hbase.client.Result)
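
A hedged sketch of the read loop above, reduced to its essentials: attach the FilterList to a Scan, iterate the ResultScanner, and stop once a caller-supplied limit is reached. The table name "timeline" is a hypothetical placeholder, and the entity parsing is elided:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.FilterList;

public class LimitedScanSketch {
    static int scanWithLimit(Connection conn, FilterList filterList, int limit) throws Exception {
        int count = 0;
        try (Table table = conn.getTable(TableName.valueOf("timeline"))) {
            Scan scan = new Scan();
            if (filterList != null) {
                // Server-side filtering cuts down what the scanner returns.
                scan.setFilter(filterList);
            }
            try (ResultScanner results = table.getScanner(scan)) {
                for (Result result : results) {
                    // A real reader would parse the Result into an entity here.
                    count++;
                    if (count == limit) {
                        // Stop early once the caller's limit is reached.
                        break;
                    }
                }
            }
        }
        return count;
    }
}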

Aggregations

FilterList (org.apache.hadoop.hbase.filter.FilterList): 64
Filter (org.apache.hadoop.hbase.filter.Filter): 32
Scan (org.apache.hadoop.hbase.client.Scan): 16
QualifierFilter (org.apache.hadoop.hbase.filter.QualifierFilter): 10
TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList): 10
SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter): 9
BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator): 8
Test (org.junit.Test): 8
ConsumerConfig (co.cask.cdap.data2.queue.ConsumerConfig): 7
FamilyFilter (org.apache.hadoop.hbase.filter.FamilyFilter): 7
Transaction (org.apache.tephra.Transaction): 7
PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter): 6
IOException (java.io.IOException): 5
ArrayList (java.util.ArrayList): 5
Result (org.apache.hadoop.hbase.client.Result): 5
PageFilter (org.apache.hadoop.hbase.filter.PageFilter): 5
Cell (org.apache.hadoop.hbase.Cell): 4
TableName (org.apache.hadoop.hbase.TableName): 4
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 4
FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter): 4