
Example 11 with AggregateResponse

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateResponse in project hbase by apache.

The class AsyncAggregationClient, method avg.

public static <R, S, P extends Message, Q extends Message, T extends Message> CompletableFuture<Double> avg(AsyncTable<?> table, ColumnInterpreter<R, S, P, Q, T> ci, Scan scan) {
    CompletableFuture<Double> future = new CompletableFuture<>();
    AggregateRequest req;
    try {
        req = validateArgAndGetPB(scan, ci, false);
    } catch (IOException e) {
        future.completeExceptionally(e);
        return future;
    }
    AbstractAggregationCallback<Double> callback = new AbstractAggregationCallback<Double>(future) {

        private S sum;

        long count = 0L;

        @Override
        protected void aggregate(RegionInfo region, AggregateResponse resp) throws IOException {
            if (resp.getFirstPartCount() > 0) {
                sum = ci.add(sum, getPromotedValueFromProto(ci, resp, 0));
                count += resp.getSecondPart().asReadOnlyByteBuffer().getLong();
            }
        }

        @Override
        protected Double getFinalResult() {
            return ci.divideForAvg(sum, count);
        }
    };
    table
        .<AggregateService, AggregateResponse>coprocessorService(AggregateService::newStub,
            (stub, controller, rpcCallback) -> stub.getAvg(controller, req, rpcCallback), callback)
        .fromRow(nullToEmpty(scan.getStartRow()), scan.includeStartRow())
        .toRow(nullToEmpty(scan.getStopRow()), scan.includeStopRow())
        .execute();
    return future;
}
Also used : AdvancedScanResultConsumer(org.apache.hadoop.hbase.client.AdvancedScanResultConsumer) FutureUtils.addListener(org.apache.hadoop.hbase.util.FutureUtils.addListener) CoprocessorCallback(org.apache.hadoop.hbase.client.AsyncTable.CoprocessorCallback) ColumnInterpreter(org.apache.hadoop.hbase.coprocessor.ColumnInterpreter) AggregateResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateResponse) Result(org.apache.hadoop.hbase.client.Result) CompletableFuture(java.util.concurrent.CompletableFuture) AggregateRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateRequest) Message(org.apache.hbase.thirdparty.com.google.protobuf.Message) HConstants(org.apache.hadoop.hbase.HConstants) Map(java.util.Map) AggregationHelper.validateArgAndGetPB(org.apache.hadoop.hbase.client.coprocessor.AggregationHelper.validateArgAndGetPB) NoSuchElementException(java.util.NoSuchElementException) Cell(org.apache.hadoop.hbase.Cell) Bytes(org.apache.hadoop.hbase.util.Bytes) ReflectionUtils(org.apache.hadoop.hbase.util.ReflectionUtils) IOException(java.io.IOException) NavigableSet(java.util.NavigableSet) NavigableMap(java.util.NavigableMap) AggregateService(org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateService) Scan(org.apache.hadoop.hbase.client.Scan) InterfaceAudience(org.apache.yetus.audience.InterfaceAudience) TreeMap(java.util.TreeMap) AggregationHelper.getParsedGenericInstance(org.apache.hadoop.hbase.client.coprocessor.AggregationHelper.getParsedGenericInstance) AsyncTable(org.apache.hadoop.hbase.client.AsyncTable) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo)
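
For orientation, here is a minimal client-side sketch of calling avg(). It assumes the AggregateImplementation endpoint is already loaded on the target table's region servers; the table name ("metrics") and column ("cf:value") are placeholders, and LongColumnInterpreter is the interpreter HBase ships for long-encoded cell values.

import java.util.concurrent.CompletableFuture;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.AsyncAggregationClient;
import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
import org.apache.hadoop.hbase.util.Bytes;

public class AvgUsageSketch {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (AsyncConnection conn = ConnectionFactory.createAsyncConnection(conf).get()) {
            // "metrics", "cf" and "value" are placeholder names.
            Scan scan = new Scan().addColumn(Bytes.toBytes("cf"), Bytes.toBytes("value"));
            CompletableFuture<Double> avg = AsyncAggregationClient.avg(
                conn.getTable(TableName.valueOf("metrics")), new LongColumnInterpreter(), scan);
            // The future completes once every region covered by the scan range has
            // reported its partial sum (first part) and row count (second part).
            System.out.println("average = " + avg.get());
        }
    }
}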

Example 12 with AggregateResponse

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateResponse in project hbase by apache.

The class AsyncAggregationClient, method sum.

public static <R, S, P extends Message, Q extends Message, T extends Message> CompletableFuture<S> sum(AsyncTable<?> table, ColumnInterpreter<R, S, P, Q, T> ci, Scan scan) {
    CompletableFuture<S> future = new CompletableFuture<>();
    AggregateRequest req;
    try {
        req = validateArgAndGetPB(scan, ci, false);
    } catch (IOException e) {
        future.completeExceptionally(e);
        return future;
    }
    AbstractAggregationCallback<S> callback = new AbstractAggregationCallback<S>(future) {

        private S sum;

        @Override
        protected void aggregate(RegionInfo region, AggregateResponse resp) throws IOException {
            if (resp.getFirstPartCount() > 0) {
                S s = getPromotedValueFromProto(ci, resp, 0);
                sum = ci.add(sum, s);
            }
        }

        @Override
        protected S getFinalResult() {
            return sum;
        }
    };
    table
        .<AggregateService, AggregateResponse>coprocessorService(AggregateService::newStub,
            (stub, controller, rpcCallback) -> stub.getSum(controller, req, rpcCallback), callback)
        .fromRow(nullToEmpty(scan.getStartRow()), scan.includeStartRow())
        .toRow(nullToEmpty(scan.getStopRow()), scan.includeStopRow())
        .execute();
    return future;
}
Also used : AdvancedScanResultConsumer(org.apache.hadoop.hbase.client.AdvancedScanResultConsumer) FutureUtils.addListener(org.apache.hadoop.hbase.util.FutureUtils.addListener) CoprocessorCallback(org.apache.hadoop.hbase.client.AsyncTable.CoprocessorCallback) ColumnInterpreter(org.apache.hadoop.hbase.coprocessor.ColumnInterpreter) AggregateResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateResponse) Result(org.apache.hadoop.hbase.client.Result) CompletableFuture(java.util.concurrent.CompletableFuture) AggregateRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateRequest) Message(org.apache.hbase.thirdparty.com.google.protobuf.Message) HConstants(org.apache.hadoop.hbase.HConstants) Map(java.util.Map) AggregationHelper.validateArgAndGetPB(org.apache.hadoop.hbase.client.coprocessor.AggregationHelper.validateArgAndGetPB) NoSuchElementException(java.util.NoSuchElementException) Cell(org.apache.hadoop.hbase.Cell) Bytes(org.apache.hadoop.hbase.util.Bytes) ReflectionUtils(org.apache.hadoop.hbase.util.ReflectionUtils) IOException(java.io.IOException) NavigableSet(java.util.NavigableSet) NavigableMap(java.util.NavigableMap) AggregateService(org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateService) Scan(org.apache.hadoop.hbase.client.Scan) InterfaceAudience(org.apache.yetus.audience.InterfaceAudience) TreeMap(java.util.TreeMap) AggregationHelper.getParsedGenericInstance(org.apache.hadoop.hbase.client.coprocessor.AggregationHelper.getParsedGenericInstance) AsyncTable(org.apache.hadoop.hbase.client.AsyncTable) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo)
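
The blocking AggregationClient in the same package exposes an equivalent sum() for callers that do not use the async API. A sketch under the same assumptions (placeholder table and column names, LongColumnInterpreter, endpoint deployed on the region servers):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
import org.apache.hadoop.hbase.util.Bytes;

public class SumUsageSketch {

    public static void main(String[] args) throws Throwable {
        Configuration conf = HBaseConfiguration.create();
        // Placeholder names; the scan must select exactly one column family.
        Scan scan = new Scan().addColumn(Bytes.toBytes("cf"), Bytes.toBytes("value"));
        try (AggregationClient aggregationClient = new AggregationClient(conf)) {
            // For LongColumnInterpreter the promoted type S is Long.
            Long sum = aggregationClient.sum(
                TableName.valueOf("metrics"), new LongColumnInterpreter(), scan);
            System.out.println("sum = " + sum);
        }
    }
}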

Example 13 with AggregateResponse

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateResponse in project hbase by apache.

The class AggregateImplementation, method getMax.

/**
 * Gives the maximum for a given combination of column qualifier and column
 * family, in the given row range as defined in the Scan object. In its
 * current implementation, it takes one column family and one column qualifier
 * (if provided). In case of null column qualifier, maximum value for the
 * entire column family will be returned.
 */
@Override
public void getMax(RpcController controller, AggregateRequest request, RpcCallback<AggregateResponse> done) {
    InternalScanner scanner = null;
    AggregateResponse response = null;
    T max = null;
    try {
        ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
        T temp;
        Scan scan = ProtobufUtil.toScan(request.getScan());
        scanner = env.getRegion().getScanner(scan);
        List<Cell> results = new ArrayList<>();
        byte[] colFamily = scan.getFamilies()[0];
        NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
        byte[] qualifier = null;
        if (qualifiers != null && !qualifiers.isEmpty()) {
            qualifier = qualifiers.pollFirst();
        }
        // qualifier can be null.
        boolean hasMoreRows = false;
        do {
            hasMoreRows = scanner.next(results);
            int listSize = results.size();
            for (int i = 0; i < listSize; i++) {
                temp = ci.getValue(colFamily, qualifier, results.get(i));
                max = (max == null || (temp != null && ci.compare(temp, max) > 0)) ? temp : max;
            }
            results.clear();
        } while (hasMoreRows);
        if (max != null) {
            AggregateResponse.Builder builder = AggregateResponse.newBuilder();
            builder.addFirstPart(ci.getProtoForCellType(max).toByteString());
            response = builder.build();
        }
    } catch (IOException e) {
        CoprocessorRpcUtils.setControllerException(controller, e);
    } finally {
        if (scanner != null) {
            try {
                scanner.close();
            } catch (IOException ignored) {
            }
        }
    }
    log.info("Maximum from this region is " + env.getRegion().getRegionInfo().getRegionNameAsString() + ": " + max);
    done.run(response);
}
Also used : InternalScanner(org.apache.hadoop.hbase.regionserver.InternalScanner) AggregateResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateResponse) ArrayList(java.util.ArrayList) IOException(java.io.IOException) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell)
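
None of these endpoint methods can be reached until AggregateImplementation is loaded as a region coprocessor. One way to do that, sketched below with placeholder table and family names, is to register the class per table through the Admin API; it can also be enabled cluster-wide via the hbase.coprocessor.region.classes property.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class EnableAggregateEndpointSketch {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
            // "metrics" and "cf" are placeholder names.
            admin.createTable(TableDescriptorBuilder.newBuilder(TableName.valueOf("metrics"))
                .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("cf")))
                // Attach the aggregation endpoint so getMax/getMin/getSum/... can be invoked.
                .setCoprocessor("org.apache.hadoop.hbase.coprocessor.AggregateImplementation")
                .build());
        }
    }
}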

Example 14 with AggregateResponse

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateResponse in project hbase by apache.

The class AggregateImplementation, method getMin.

/**
 * Gives the minimum for a given combination of column qualifier and column
 * family, in the given row range as defined in the Scan object. In its
 * current implementation, it takes one column family and one column qualifier
 * (if provided). In case of null column qualifier, minimum value for the
 * entire column family will be returned.
 */
@Override
public void getMin(RpcController controller, AggregateRequest request, RpcCallback<AggregateResponse> done) {
    AggregateResponse response = null;
    InternalScanner scanner = null;
    T min = null;
    try {
        ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
        T temp;
        Scan scan = ProtobufUtil.toScan(request.getScan());
        scanner = env.getRegion().getScanner(scan);
        List<Cell> results = new ArrayList<>();
        byte[] colFamily = scan.getFamilies()[0];
        NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
        byte[] qualifier = null;
        if (qualifiers != null && !qualifiers.isEmpty()) {
            qualifier = qualifiers.pollFirst();
        }
        boolean hasMoreRows = false;
        do {
            hasMoreRows = scanner.next(results);
            int listSize = results.size();
            for (int i = 0; i < listSize; i++) {
                temp = ci.getValue(colFamily, qualifier, results.get(i));
                min = (min == null || (temp != null && ci.compare(temp, min) < 0)) ? temp : min;
            }
            results.clear();
        } while (hasMoreRows);
        if (min != null) {
            response = AggregateResponse.newBuilder().addFirstPart(ci.getProtoForCellType(min).toByteString()).build();
        }
    } catch (IOException e) {
        CoprocessorRpcUtils.setControllerException(controller, e);
    } finally {
        if (scanner != null) {
            try {
                scanner.close();
            } catch (IOException ignored) {
            }
        }
    }
    log.info("Minimum from this region is " + env.getRegion().getRegionInfo().getRegionNameAsString() + ": " + min);
    done.run(response);
}
Also used : InternalScanner(org.apache.hadoop.hbase.regionserver.InternalScanner) AggregateResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateResponse) ArrayList(java.util.ArrayList) IOException(java.io.IOException) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell)
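
On the client side, the per-region extrema returned by getMin and getMax are merged by AsyncAggregationClient.min and AsyncAggregationClient.max. A hedged usage sketch follows; an already opened AsyncConnection is assumed, and the table and column names are placeholders.

import java.util.concurrent.CompletableFuture;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AdvancedScanResultConsumer;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.AsyncTable;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.AsyncAggregationClient;
import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
import org.apache.hadoop.hbase.util.Bytes;

public class MinMaxUsageSketch {

    // conn is an already opened AsyncConnection; table and column names are placeholders.
    static void printMinAndMax(AsyncConnection conn) {
        AsyncTable<AdvancedScanResultConsumer> table = conn.getTable(TableName.valueOf("metrics"));
        Scan scan = new Scan().addColumn(Bytes.toBytes("cf"), Bytes.toBytes("value"));
        CompletableFuture<Long> min =
            AsyncAggregationClient.min(table, new LongColumnInterpreter(), scan);
        CompletableFuture<Long> max =
            AsyncAggregationClient.max(table, new LongColumnInterpreter(), scan);
        // Each future completes after every region covered by the scan range has answered.
        min.thenAcceptBoth(max, (lo, hi) -> System.out.println("min=" + lo + ", max=" + hi)).join();
    }
}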

Example 15 with AggregateResponse

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateResponse in project hbase by apache.

The class AggregateImplementation, method getSum.

/**
 * Gives the sum for a given combination of column qualifier and column
 * family, in the given row range as defined in the Scan object. In its
 * current implementation, it takes one column family and one column qualifier
 * (if provided). In case of null column qualifier, sum for the entire column
 * family will be returned.
 */
@Override
public void getSum(RpcController controller, AggregateRequest request, RpcCallback<AggregateResponse> done) {
    AggregateResponse response = null;
    InternalScanner scanner = null;
    // Note: this local is never updated below; the aggregate is accumulated in
    // sumVal, so the trace log at the end of the method always prints 0.
    long sum = 0L;
    try {
        ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
        S sumVal = null;
        T temp;
        Scan scan = ProtobufUtil.toScan(request.getScan());
        scanner = env.getRegion().getScanner(scan);
        byte[] colFamily = scan.getFamilies()[0];
        NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
        byte[] qualifier = null;
        if (qualifiers != null && !qualifiers.isEmpty()) {
            qualifier = qualifiers.pollFirst();
        }
        List<Cell> results = new ArrayList<>();
        boolean hasMoreRows = false;
        do {
            hasMoreRows = scanner.next(results);
            int listSize = results.size();
            for (int i = 0; i < listSize; i++) {
                temp = ci.getValue(colFamily, qualifier, results.get(i));
                if (temp != null) {
                    sumVal = ci.add(sumVal, ci.castToReturnType(temp));
                }
            }
            results.clear();
        } while (hasMoreRows);
        if (sumVal != null) {
            response = AggregateResponse.newBuilder().addFirstPart(ci.getProtoForPromotedType(sumVal).toByteString()).build();
        }
    } catch (IOException e) {
        CoprocessorRpcUtils.setControllerException(controller, e);
    } finally {
        if (scanner != null) {
            try {
                scanner.close();
            } catch (IOException ignored) {
            }
        }
    }
    log.debug("Sum from this region is " + env.getRegion().getRegionInfo().getRegionNameAsString() + ": " + sum);
    done.run(response);
}
Also used : InternalScanner(org.apache.hadoop.hbase.regionserver.InternalScanner) AggregateResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateResponse) ArrayList(java.util.ArrayList) IOException(java.io.IOException) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell)
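
All of these endpoints read the first (and only) column family from the Scan and at most one qualifier: the client-side AggregationHelper.validateArgAndGetPB rejects scans with more than one family, and the server only uses the first qualifier it polls. A sketch of a Scan that satisfies those constraints (row keys and column names are placeholders):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class AggregationScanSketch {

    static Scan buildScan() {
        // One column family and a single qualifier; using addFamily(...) alone would
        // make the endpoint aggregate over every qualifier in the family instead.
        return new Scan()
            .withStartRow(Bytes.toBytes("row-00000"), true)
            .withStopRow(Bytes.toBytes("row-99999"), false)
            .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("value"));
    }
}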

Aggregations

IOException (java.io.IOException): 21
AggregateResponse (org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateResponse): 21
Cell (org.apache.hadoop.hbase.Cell): 14
Scan (org.apache.hadoop.hbase.client.Scan): 14
AggregateRequest (org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateRequest): 14
AggregateService (org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateService): 14
ArrayList (java.util.ArrayList): 9
ByteString (org.apache.hbase.thirdparty.com.google.protobuf.ByteString): 9
NavigableMap (java.util.NavigableMap): 8
TreeMap (java.util.TreeMap): 8
Map (java.util.Map): 7
NavigableSet (java.util.NavigableSet): 7
NoSuchElementException (java.util.NoSuchElementException): 7
CompletableFuture (java.util.concurrent.CompletableFuture): 7
HConstants (org.apache.hadoop.hbase.HConstants): 7
AdvancedScanResultConsumer (org.apache.hadoop.hbase.client.AdvancedScanResultConsumer): 7
AsyncTable (org.apache.hadoop.hbase.client.AsyncTable): 7
CoprocessorCallback (org.apache.hadoop.hbase.client.AsyncTable.CoprocessorCallback): 7
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 7
Result (org.apache.hadoop.hbase.client.Result): 7