Search in sources:

Example 1 with HBaseDistributableFilter

use of org.locationtech.geowave.datastore.hbase.filters.HBaseDistributableFilter in project geowave by locationtech.

The class HBaseReader defines the method addDistFilter.

/**
 * Builds a server-side distributable filter from the reader parameters and
 * appends it to the supplied HBase filter list.
 *
 * @param params reader parameters carrying the query filter, index model, and authorizations
 * @param filterList the HBase filter list to extend; modified in place
 */
private void addDistFilter(final ReaderParams<T> params, final FilterList filterList) {
    final HBaseDistributableFilter distributableFilter = new HBaseDistributableFilter();
    // Whole-row encoding requires the filter to evaluate entire rows at once.
    if (wholeRowEncoding) {
        distributableFilter.setWholeRowFilter(true);
    }
    distributableFilter.setPartitionKeyLength(partitionKeyLength);
    final List<QueryFilter> queryFilters = Lists.newArrayList();
    queryFilters.add(params.getFilter());
    distributableFilter.init(
        queryFilters,
        params.getIndex().getIndexModel(),
        params.getAdditionalAuthorizations());
    filterList.addFilter(distributableFilter);
}
Also used : QueryFilter(org.locationtech.geowave.core.store.query.filter.QueryFilter) HBaseDistributableFilter(org.locationtech.geowave.datastore.hbase.filters.HBaseDistributableFilter)

Example 2 with HBaseDistributableFilter

use of org.locationtech.geowave.datastore.hbase.filters.HBaseDistributableFilter in project geowave by locationtech.

The class AggregationEndpoint defines the method aggregate.

/**
 * Coprocessor RPC entry point that runs a GeoWave aggregation server-side.
 *
 * <p>Deserializes the aggregation, builds the HBase filter chain (row-range,
 * numeric-index-strategy, and distributable filters) from the request, runs the
 * aggregation over the region, and invokes {@code done} with the serialized
 * result. On any failure the error is logged and an empty result is returned.
 *
 * @param controller RPC controller (unused for error propagation here; errors are logged)
 * @param request the aggregation request containing the serialized aggregation and filters
 * @param done callback invoked exactly once with the aggregation response
 */
@Override
public void aggregate(final RpcController controller, final AggregationProtosServer.AggregationRequest request, final RpcCallback<AggregationProtosServer.AggregationResponse> done) {
    FilterList filterList = null;
    InternalDataAdapter<?> dataAdapter = null;
    AdapterToIndexMapping indexMapping = null;
    Short internalAdapterId = null;
    AggregationProtosServer.AggregationResponse response = null;
    ByteString value = ByteString.EMPTY;
    // Deserialize the aggregation type; may be null if the class cannot be resolved.
    final Aggregation aggregation = (Aggregation) URLClassloaderUtils.fromClassId(request.getAggregation().getClassId().toByteArray());
    HBaseDistributableFilter hdFilter = null;
    if (aggregation != null) {
        // Handle aggregation params. This must happen inside the null check:
        // previously setParameters was invoked before aggregation was known to
        // be non-null, which could NPE when deserialization failed.
        if (request.getAggregation().hasParams()) {
            final byte[] parameterBytes = request.getAggregation().getParams().toByteArray();
            final Persistable aggregationParams = URLClassloaderUtils.fromBinary(parameterBytes);
            aggregation.setParameters(aggregationParams);
        }
        // Row-range filter restricts the scan to the requested key ranges.
        if (request.hasRangeFilter()) {
            final byte[] rfilterBytes = request.getRangeFilter().toByteArray();
            try {
                final MultiRowRangeFilter rangeFilter = MultiRowRangeFilter.parseFrom(rfilterBytes);
                filterList = new FilterList(rangeFilter);
            } catch (final Exception e) {
                LOGGER.error("Error creating range filter.", e);
            }
        } else {
            LOGGER.error("Input range filter is undefined.");
        }
        // Optional numeric index strategy filter for finer-grained pruning.
        if (request.hasNumericIndexStrategyFilter()) {
            final byte[] nisFilterBytes = request.getNumericIndexStrategyFilter().toByteArray();
            try {
                final HBaseNumericIndexStrategyFilter numericIndexStrategyFilter = HBaseNumericIndexStrategyFilter.parseFrom(nisFilterBytes);
                if (filterList == null) {
                    filterList = new FilterList(numericIndexStrategyFilter);
                } else {
                    filterList.addFilter(numericIndexStrategyFilter);
                }
            } catch (final Exception e) {
                LOGGER.error("Error creating index strategy filter.", e);
            }
        }
        try {
            // NOTE(review): original comment was truncated — presumably the
            // distributable filter must be in the filter list for the dedupe
            // filter to work correctly; confirm against upstream sources.
            if (request.hasModel()) {
                hdFilter = new HBaseDistributableFilter();
                if (request.hasWholeRowFilter()) {
                    hdFilter.setWholeRowFilter(request.getWholeRowFilter());
                }
                if (request.hasPartitionKeyLength()) {
                    hdFilter.setPartitionKeyLength(request.getPartitionKeyLength());
                }
                final byte[] filterBytes;
                if (request.hasFilter()) {
                    filterBytes = request.getFilter().toByteArray();
                } else {
                    filterBytes = null;
                }
                final byte[] modelBytes = request.getModel().toByteArray();
                if (hdFilter.init(filterBytes, modelBytes)) {
                    if (filterList == null) {
                        filterList = new FilterList(hdFilter);
                    } else {
                        filterList.addFilter(hdFilter);
                    }
                } else {
                    LOGGER.error("Error creating distributable filter.");
                }
            } else {
                LOGGER.error("Input distributable filter is undefined.");
            }
        } catch (final Exception e) {
            LOGGER.error("Error creating distributable filter.", e);
        }
        // Optional adapter / adapter-id / index-mapping context for the aggregation.
        if (request.hasAdapter()) {
            final byte[] adapterBytes = request.getAdapter().toByteArray();
            dataAdapter = (InternalDataAdapter<?>) URLClassloaderUtils.fromBinary(adapterBytes);
        }
        if (request.hasInternalAdapterId()) {
            final byte[] adapterIdBytes = request.getInternalAdapterId().toByteArray();
            internalAdapterId = ByteArrayUtils.byteArrayToShort(adapterIdBytes);
        }
        if (request.hasIndexMapping()) {
            final byte[] mappingBytes = request.getIndexMapping().toByteArray();
            indexMapping = (AdapterToIndexMapping) URLClassloaderUtils.fromBinary(mappingBytes);
        }
        // Visibility labels become scan authorizations; empty bytes mean none.
        final String[] authorizations;
        if (request.hasVisLabels()) {
            final byte[] visBytes = request.getVisLabels().toByteArray();
            if (visBytes.length > 0) {
                authorizations = StringUtils.stringsFromBinary(visBytes);
            } else {
                authorizations = null;
            }
        } else {
            authorizations = null;
        }
        try {
            final Object result = getValue(aggregation, filterList, dataAdapter, indexMapping, internalAdapterId, hdFilter, request.getBlockCaching(), request.getCacheSize(), authorizations);
            URLClassloaderUtils.initClassLoader();
            final byte[] bvalue = aggregation.resultToBinary(result);
            value = ByteString.copyFrom(bvalue);
        } catch (final IOException ioe) {
            LOGGER.error("Error during aggregation.", ioe);
        /*
         * ResponseConverter.setControllerException( controller, ioe);
         */
        } catch (final Exception e) {
            LOGGER.error("Error during aggregation.", e);
        }
    }
    // Always answer the callback, even on failure (value stays empty).
    response = AggregationProtosServer.AggregationResponse.newBuilder().setValue(value).build();
    done.run(response);
}
Also used : Persistable(org.locationtech.geowave.core.index.persist.Persistable) ByteString(com.google.protobuf.ByteString) MultiRowRangeFilter(org.apache.hadoop.hbase.filter.MultiRowRangeFilter) AdapterToIndexMapping(org.locationtech.geowave.core.store.AdapterToIndexMapping) FilterList(org.apache.hadoop.hbase.filter.FilterList) ByteString(com.google.protobuf.ByteString) IOException(java.io.IOException) CoprocessorException(org.apache.hadoop.hbase.coprocessor.CoprocessorException) IOException(java.io.IOException) Aggregation(org.locationtech.geowave.core.store.api.Aggregation) HBaseNumericIndexStrategyFilter(org.locationtech.geowave.datastore.hbase.filters.HBaseNumericIndexStrategyFilter) AggregationProtosServer(org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.AggregationProtosServer) HBaseDistributableFilter(org.locationtech.geowave.datastore.hbase.filters.HBaseDistributableFilter)

Example 3 with HBaseDistributableFilter

use of org.locationtech.geowave.datastore.hbase.filters.HBaseDistributableFilter in project geowave by locationtech.

The class HBaseBulkDeleteEndpoint defines the method delete.

/**
 * Coprocessor RPC entry point that bulk-deletes rows within this region.
 *
 * <p>Rebuilds the scan filter chain (row-range, numeric-index-strategy, and
 * distributable filters) from the request, scans matching rows in batches of
 * {@code rowBatchSize}, converts each batch into delete mutations, and applies
 * them via {@code batchMutate}. Invokes {@code done} with the count of rows
 * (and, for VERSION deletes, versions) deleted.
 *
 * @param controller RPC controller (errors are logged rather than propagated)
 * @param request the bulk delete request (filters, delete type, batch size, adapter ids)
 * @param done callback invoked exactly once with the delete response
 */
@Override
public void delete(final RpcController controller, final BulkDeleteRequest request, final RpcCallback<BulkDeleteResponse> done) {
    long totalRowsDeleted = 0L;
    long totalVersionsDeleted = 0L;
    FilterList filterList = null;
    final List<byte[]> adapterIds = new ArrayList<>();
    // Optional timestamp bound for the delete mutations.
    Long timestamp = null;
    if (request.hasTimestamp()) {
        timestamp = request.getTimestamp();
    }
    final BulkDeleteType deleteType = request.getDeleteType();
    /**
     * Extract the filter from the bulkDeleteRequest
     */
    HBaseDistributableFilter hdFilter = null;
    // Row-range filter restricts the scan to the requested key ranges.
    if (request.hasRangeFilter()) {
        final byte[] rfilterBytes = request.getRangeFilter().toByteArray();
        try {
            final MultiRowRangeFilter rangeFilter = MultiRowRangeFilter.parseFrom(rfilterBytes);
            filterList = new FilterList(rangeFilter);
        } catch (final Exception e) {
            LOGGER.error("Error creating range filter.", e);
        }
    } else {
        LOGGER.error("Input range filter is undefined.");
    }
    // Optional numeric index strategy filter for finer-grained pruning.
    if (request.hasNumericIndexStrategyFilter()) {
        final byte[] nisFilterBytes = request.getNumericIndexStrategyFilter().toByteArray();
        try {
            final HBaseNumericIndexStrategyFilter numericIndexStrategyFilter = HBaseNumericIndexStrategyFilter.parseFrom(nisFilterBytes);
            if (filterList == null) {
                filterList = new FilterList(numericIndexStrategyFilter);
            } else {
                filterList.addFilter(numericIndexStrategyFilter);
            }
        } catch (final Exception e) {
            LOGGER.error("Error creating index strategy filter.", e);
        }
    }
    try {
        // NOTE(review): original comment was truncated — presumably the
        // distributable filter must be in the filter list for the dedupe
        // filter to work correctly; confirm against upstream sources.
        if (request.hasModel()) {
            hdFilter = new HBaseDistributableFilter();
            final byte[] filterBytes;
            if (request.hasFilter()) {
                filterBytes = request.getFilter().toByteArray();
            } else {
                filterBytes = null;
            }
            final byte[] modelBytes = request.getModel().toByteArray();
            if (hdFilter.init(filterBytes, modelBytes)) {
                if (filterList == null) {
                    filterList = new FilterList(hdFilter);
                } else {
                    filterList.addFilter(hdFilter);
                }
            } else {
                LOGGER.error("Error creating distributable filter.");
            }
        } else {
            LOGGER.error("Input distributable filter is undefined.");
        }
    } catch (final Exception e) {
        LOGGER.error("Error creating distributable filter.", e);
    }
    // Adapter ids arrive packed as consecutive shorts; each becomes a column
    // family to restrict the scan to.
    if (request.hasAdapterIds()) {
        final ByteBuffer buf = ByteBuffer.wrap(request.getAdapterIds().toByteArray());
        adapterIds.clear();
        while (buf.hasRemaining()) {
            final short adapterId = buf.getShort();
            adapterIds.add(StringUtils.stringToBinary(ByteArrayUtils.shortToString(adapterId)));
        }
    }
    /**
     * Start the actual delete process
     */
    RegionScanner scanner = null;
    try {
        scanner = null;
        final Scan scan = new Scan();
        scan.setFilter(filterList);
        if (!adapterIds.isEmpty()) {
            for (final byte[] adapterId : adapterIds) {
                scan.addFamily(adapterId);
            }
        }
        final Region region = env.getRegion();
        scanner = region.getScanner(scan);
        boolean hasMore = true;
        final int rowBatchSize = request.getRowBatchSize();
        while (hasMore) {
            // Collect up to rowBatchSize rows per mutation batch.
            final List<List<Cell>> deleteRows = new ArrayList<>(rowBatchSize);
            for (int i = 0; i < rowBatchSize; i++) {
                final List<Cell> results = new ArrayList<>();
                hasMore = scanner.next(results);
                if (results.size() > 0) {
                    deleteRows.add(results);
                }
                if (!hasMore) {
                    // There are no more rows.
                    break;
                }
            }
            if (deleteRows.size() > 0) {
                final Mutation[] deleteArr = new Mutation[deleteRows.size()];
                int i = 0;
                for (final List<Cell> deleteRow : deleteRows) {
                    deleteArr[i++] = createDeleteMutation(deleteRow, deleteType, timestamp);
                }
                final OperationStatus[] opStatus = batchMutate(region, deleteArr);
                // Count successes; stop counting at the first failed mutation.
                for (i = 0; i < opStatus.length; i++) {
                    if (opStatus[i].getOperationStatusCode() != OperationStatusCode.SUCCESS) {
                        break;
                    }
                    totalRowsDeleted++;
                    if (deleteType == BulkDeleteType.VERSION) {
                        final byte[] versionsDeleted = deleteArr[i].getAttribute(NO_OF_VERSIONS_TO_DELETE);
                        if (versionsDeleted != null) {
                            totalVersionsDeleted += Bytes.toInt(versionsDeleted);
                        }
                    }
                }
            }
        }
    } catch (final IOException e) {
        LOGGER.error("Unable to delete rows", e);
    } finally {
        if (scanner != null) {
            try {
                scanner.close();
            } catch (final IOException ioe) {
                // Removed a stray empty statement (";") that followed this log call.
                LOGGER.error("Error during bulk delete in HBase.", ioe);
            }
        }
    }
    final Builder responseBuilder = BulkDeleteResponse.newBuilder();
    responseBuilder.setRowsDeleted(totalRowsDeleted);
    if (deleteType == BulkDeleteType.VERSION) {
        responseBuilder.setVersionsDeleted(totalVersionsDeleted);
    }
    // Send the response back
    final BulkDeleteResponse response = responseBuilder.build();
    done.run(response);
}
Also used : BulkDeleteResponse(org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.HBaseBulkDeleteProtosServer.BulkDeleteResponse) Builder(org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.HBaseBulkDeleteProtosServer.BulkDeleteResponse.Builder) ArrayList(java.util.ArrayList) BulkDeleteType(org.locationtech.geowave.datastore.hbase.coprocessors.protobuf.HBaseBulkDeleteProtosServer.BulkDeleteRequest.BulkDeleteType) OperationStatus(org.apache.hadoop.hbase.regionserver.OperationStatus) FilterList(org.apache.hadoop.hbase.filter.FilterList) ArrayList(java.util.ArrayList) List(java.util.List) Cell(org.apache.hadoop.hbase.Cell) HBaseDistributableFilter(org.locationtech.geowave.datastore.hbase.filters.HBaseDistributableFilter) MultiRowRangeFilter(org.apache.hadoop.hbase.filter.MultiRowRangeFilter) FilterList(org.apache.hadoop.hbase.filter.FilterList) IOException(java.io.IOException) ByteBuffer(java.nio.ByteBuffer) CoprocessorException(org.apache.hadoop.hbase.coprocessor.CoprocessorException) IOException(java.io.IOException) HBaseNumericIndexStrategyFilter(org.locationtech.geowave.datastore.hbase.filters.HBaseNumericIndexStrategyFilter) RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) Region(org.apache.hadoop.hbase.regionserver.Region) Scan(org.apache.hadoop.hbase.client.Scan) Mutation(org.apache.hadoop.hbase.client.Mutation)

Aggregations

HBaseDistributableFilter (org.locationtech.geowave.datastore.hbase.filters.HBaseDistributableFilter)3 IOException (java.io.IOException)2 CoprocessorException (org.apache.hadoop.hbase.coprocessor.CoprocessorException)2 FilterList (org.apache.hadoop.hbase.filter.FilterList)2 MultiRowRangeFilter (org.apache.hadoop.hbase.filter.MultiRowRangeFilter)2 HBaseNumericIndexStrategyFilter (org.locationtech.geowave.datastore.hbase.filters.HBaseNumericIndexStrategyFilter)2 ByteString (com.google.protobuf.ByteString)1 ByteBuffer (java.nio.ByteBuffer)1 ArrayList (java.util.ArrayList)1 List (java.util.List)1 Cell (org.apache.hadoop.hbase.Cell)1 Mutation (org.apache.hadoop.hbase.client.Mutation)1 Scan (org.apache.hadoop.hbase.client.Scan)1 OperationStatus (org.apache.hadoop.hbase.regionserver.OperationStatus)1 Region (org.apache.hadoop.hbase.regionserver.Region)1 RegionScanner (org.apache.hadoop.hbase.regionserver.RegionScanner)1 Persistable (org.locationtech.geowave.core.index.persist.Persistable)1 AdapterToIndexMapping (org.locationtech.geowave.core.store.AdapterToIndexMapping)1 Aggregation (org.locationtech.geowave.core.store.api.Aggregation)1 QueryFilter (org.locationtech.geowave.core.store.query.filter.QueryFilter)1