Search in sources:

Example 6 with ByteArrayComparable

use of org.apache.hadoop.hbase.filter.ByteArrayComparable in project hbase by apache.

The following example shows the method matches of the class MultiRowMutationEndpoint.

/**
 * Evaluates the check part of a conditional multi-row mutation against the given region.
 * The condition is either a Filter-based check (any returned cell is a match) or a
 * family/qualifier/value compare performed with the supplied comparator.
 *
 * @param region    the region to read the condition row from
 * @param condition the protobuf condition carrying the row, optional filter or
 *                  family/qualifier/compare-type/comparator, and an optional time range
 * @return true if the condition holds for the row, false otherwise
 * @throws IOException if the read or the protobuf conversion fails
 */
private boolean matches(Region region, ClientProtos.Condition condition) throws IOException {
    byte[] row = condition.getRow().toByteArray();
    Filter filter = null;
    byte[] family = null;
    byte[] qualifier = null;
    CompareOperator op = null;
    ByteArrayComparable comparator = null;
    if (condition.hasFilter()) {
        // Filter-based condition: the filter alone decides whether the check passes.
        filter = ProtobufUtil.toFilter(condition.getFilter());
    } else {
        // Column/value-based condition.
        family = condition.getFamily().toByteArray();
        qualifier = condition.getQualifier().toByteArray();
        op = CompareOperator.valueOf(condition.getCompareType().name());
        comparator = ProtobufUtil.toComparator(condition.getComparator());
    }
    TimeRange timeRange =
        condition.hasTimeRange() ? ProtobufUtil.toTimeRange(condition.getTimeRange()) : TimeRange.allTime();
    Get get = new Get(row);
    if (family != null) {
        checkFamily(region, family);
        get.addColumn(family, qualifier);
    }
    if (filter != null) {
        get.setFilter(filter);
    }
    if (timeRange != null) {
        get.setTimeRange(timeRange.getMin(), timeRange.getMax());
    }
    boolean matches = false;
    // NOTE: Please don't use HRegion.get() instead,
    // because it will copy cells to heap. See HBASE-26036
    try (RegionScanner scanner = region.getScanner(new Scan(get))) {
        List<Cell> cells = new ArrayList<>();
        scanner.next(cells);
        if (filter != null) {
            matches = !cells.isEmpty();
        } else {
            byte[] expected = comparator.getValue();
            boolean valueIsNull = expected == null || expected.length == 0;
            if (cells.isEmpty()) {
                // Missing cell matches only when the expected value is absent/empty.
                matches = valueIsNull;
            } else if (valueIsNull) {
                // An empty expected value also matches an existing cell with an empty value.
                matches = cells.get(0).getValueLength() == 0;
            } else if (cells.size() == 1) {
                matches = matches(op, PrivateCellUtil.compareValue(cells.get(0), comparator));
            }
        }
    }
    return matches;
}
Also used : ArrayList(java.util.ArrayList) CompareOperator(org.apache.hadoop.hbase.CompareOperator) TimeRange(org.apache.hadoop.hbase.io.TimeRange) ByteArrayComparable(org.apache.hadoop.hbase.filter.ByteArrayComparable) RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) Filter(org.apache.hadoop.hbase.filter.Filter) Get(org.apache.hadoop.hbase.client.Get) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell)

Example 7 with ByteArrayComparable

use of org.apache.hadoop.hbase.filter.ByteArrayComparable in project hbase by apache.

The following example shows the method toComparator of the class ProtobufUtil.

/**
   * Convert a protocol buffer Comparator to a ByteArrayComparable.
   * <p>
   * The comparator class named in the proto is loaded via {@code CLASS_LOADER} and
   * instantiated through its static {@code parseFrom(byte[])} factory, fed with the
   * serialized comparator bytes from the proto.
   *
   * @param proto the protocol buffer Comparator to convert
   * @return the converted ByteArrayComparable
   * @throws IOException if the class cannot be loaded, lacks a {@code parseFrom(byte[])}
   *         factory, or deserialization fails (the original cause is preserved)
   */
@SuppressWarnings("unchecked")
public static ByteArrayComparable toComparator(ComparatorProtos.Comparator proto) throws IOException {
    String type = proto.getName();
    String funcName = "parseFrom";
    byte[] value = proto.getSerializedComparator().toByteArray();
    try {
        Class<? extends ByteArrayComparable> c = (Class<? extends ByteArrayComparable>) Class.forName(type, true, CLASS_LOADER);
        // Class.getMethod() never returns null: a missing factory throws
        // NoSuchMethodException, which the catch below wraps in IOException.
        Method parseFrom = c.getMethod(funcName, byte[].class);
        return (ByteArrayComparable) parseFrom.invoke(null, value);
    } catch (Exception e) {
        throw new IOException(e);
    }
}
Also used : ByteArrayComparable(org.apache.hadoop.hbase.filter.ByteArrayComparable) ByteString(com.google.protobuf.ByteString) Method(java.lang.reflect.Method) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) HBaseIOException(org.apache.hadoop.hbase.HBaseIOException) IOException(java.io.IOException) ServiceException(com.google.protobuf.ServiceException) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) InvalidProtocolBufferException(com.google.protobuf.InvalidProtocolBufferException) HBaseIOException(org.apache.hadoop.hbase.HBaseIOException) IOException(java.io.IOException) RemoteException(org.apache.hadoop.ipc.RemoteException)

Example 8 with ByteArrayComparable

use of org.apache.hadoop.hbase.filter.ByteArrayComparable in project drill by apache.

The following example shows the method createHBaseScanSpec of the class MapRDBFilterBuilder.

/**
 * Translates one Drill compare-style function call into an {@link HBaseScanSpec}.
 * <p>
 * For row-key predicates the comparison is folded into the scan's [startRow, stopRow)
 * range where possible; otherwise a RowFilter (row key) or SingleColumnValueFilter
 * (regular column) is produced. Returns null when the predicate cannot be pushed down.
 *
 * @param call      the original Drill function call (also consulted for LIKE parsing)
 * @param processor pre-parsed view of the call: function name, field path, literal value,
 *                  sort order, and row-key-prefix detection
 * @return a scan spec covering the predicate, or null if push-down is not possible
 */
private HBaseScanSpec createHBaseScanSpec(FunctionCall call, CompareFunctionsProcessor processor) {
    String functionName = processor.getFunctionName();
    SchemaPath field = processor.getPath();
    byte[] fieldValue = processor.getValue();
    boolean sortOrderAscending = processor.isSortOrderAscending();
    boolean isRowKey = field.getAsUnescapedPath().equals(ROW_KEY);
    if (!(isRowKey || (!field.getRootSegment().isLastPath() && field.getRootSegment().getChild().isLastPath() && field.getRootSegment().getChild().isNamed()))) {
        /*
       * if the field in this function is neither the row_key nor a qualified HBase column, return.
       */
        return null;
    }
    if (processor.isRowKeyPrefixComparison()) {
        return createRowKeyPrefixScanSpec(call, processor);
    }
    // Defaults: no comparison filter, full-table scan range, binary comparison of the literal.
    CompareOp compareOp = null;
    boolean isNullTest = false;
    ByteArrayComparable comparator = new BinaryComparator(fieldValue);
    byte[] startRow = HConstants.EMPTY_START_ROW;
    byte[] stopRow = HConstants.EMPTY_END_ROW;
    switch(functionName) {
        case "equal":
            compareOp = CompareOp.EQUAL;
            if (isRowKey) {
                startRow = fieldValue;
                /* stopRow should be just greater than 'value'*/
                stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                compareOp = CompareOp.EQUAL;
            }
            break;
        case "not_equal":
            compareOp = CompareOp.NOT_EQUAL;
            break;
        case "greater_than_or_equal_to":
            // For descending sort order the comparison (and the scan bound it implies) is mirrored.
            if (sortOrderAscending) {
                compareOp = CompareOp.GREATER_OR_EQUAL;
                if (isRowKey) {
                    startRow = fieldValue;
                }
            } else {
                compareOp = CompareOp.LESS_OR_EQUAL;
                if (isRowKey) {
                    // stopRow should be just greater than 'value'
                    stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            }
            break;
        case "greater_than":
            if (sortOrderAscending) {
                compareOp = CompareOp.GREATER;
                if (isRowKey) {
                    // startRow should be just greater than 'value'
                    startRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            } else {
                compareOp = CompareOp.LESS;
                if (isRowKey) {
                    stopRow = fieldValue;
                }
            }
            break;
        case "less_than_or_equal_to":
            if (sortOrderAscending) {
                compareOp = CompareOp.LESS_OR_EQUAL;
                if (isRowKey) {
                    // stopRow should be just greater than 'value'
                    stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            } else {
                compareOp = CompareOp.GREATER_OR_EQUAL;
                if (isRowKey) {
                    startRow = fieldValue;
                }
            }
            break;
        case "less_than":
            if (sortOrderAscending) {
                compareOp = CompareOp.LESS;
                if (isRowKey) {
                    stopRow = fieldValue;
                }
            } else {
                compareOp = CompareOp.GREATER;
                if (isRowKey) {
                    // startRow should be just greater than 'value'
                    startRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            }
            break;
        case "isnull":
        case "isNull":
        case "is null":
            // Null tests on the row key cannot be pushed down (a row key always exists).
            if (isRowKey) {
                return null;
            }
            isNullTest = true;
            compareOp = CompareOp.EQUAL;
            comparator = new NullComparator();
            break;
        case "isnotnull":
        case "isNotNull":
        case "is not null":
            if (isRowKey) {
                return null;
            }
            compareOp = CompareOp.NOT_EQUAL;
            comparator = new NullComparator();
            break;
        case "like":
            /*
       * Convert the LIKE operand to Regular Expression pattern so that we can
       * apply RegexStringComparator()
       */
            HBaseRegexParser parser = new HBaseRegexParser(call).parse();
            compareOp = CompareOp.EQUAL;
            comparator = new RegexStringComparator(parser.getRegexString());
            /*
       * We can possibly do better if the LIKE operator is on the row_key
       */
            if (isRowKey) {
                String prefix = parser.getPrefixString();
                if (prefix != null) {
                    /*
           * If there is a literal prefix, it can help us prune the scan to a sub range
           */
                    if (prefix.equals(parser.getLikeString())) {
                        /* The operand value is literal. This turns the LIKE operator to EQUAL operator */
                        startRow = stopRow = fieldValue;
                        compareOp = null;
                    } else {
                        startRow = prefix.getBytes(Charsets.UTF_8);
                        stopRow = startRow.clone();
                        boolean isMaxVal = true;
                        // Find the smallest row key strictly greater than every key starting with
                        // this prefix, by incrementing the last non-overflowing byte.
                        // NOTE(review): the `< 0xff` test also treats 0xfe as overflowing, which
                        // widens the stop row slightly beyond the minimal bound; the scan is still
                        // correct because the regex comparator is applied as well.
                        for (int i = stopRow.length - 1; i >= 0; --i) {
                            int nextByteValue = (0xff & stopRow[i]) + 1;
                            if (nextByteValue < 0xff) {
                                stopRow[i] = (byte) nextByteValue;
                                isMaxVal = false;
                                break;
                            } else {
                                stopRow[i] = 0;
                            }
                        }
                        if (isMaxVal) {
                            stopRow = HConstants.EMPTY_END_ROW;
                        }
                    }
                }
            }
            break;
    }
    // Emit a scan spec only if the predicate yielded a comparison or narrowed the scan range.
    if (compareOp != null || startRow != HConstants.EMPTY_START_ROW || stopRow != HConstants.EMPTY_END_ROW) {
        Filter filter = null;
        if (isRowKey) {
            if (compareOp != null) {
                filter = new RowFilter(compareOp, comparator);
            }
        } else {
            byte[] family = HBaseUtils.getBytes(field.getRootSegment().getPath());
            byte[] qualifier = HBaseUtils.getBytes(field.getRootSegment().getChild().getNameSegment().getPath());
            filter = new SingleColumnValueFilter(family, qualifier, compareOp, comparator);
            ((SingleColumnValueFilter) filter).setLatestVersionOnly(true);
            // For null tests a missing column must pass the filter; otherwise skip rows
            // that do not have the column at all.
            if (!isNullTest) {
                ((SingleColumnValueFilter) filter).setFilterIfMissing(true);
            }
        }
        return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, filter);
    }
    // else
    return null;
}
Also used : HBaseScanSpec(org.apache.drill.exec.store.hbase.HBaseScanSpec) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) HBaseRegexParser(org.apache.drill.exec.store.hbase.HBaseRegexParser) NullComparator(org.apache.hadoop.hbase.filter.NullComparator) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) ByteArrayComparable(org.apache.hadoop.hbase.filter.ByteArrayComparable) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) SchemaPath(org.apache.drill.common.expression.SchemaPath) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) Filter(org.apache.hadoop.hbase.filter.Filter) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) CompareOp(org.apache.hadoop.hbase.filter.CompareFilter.CompareOp)

Example 9 with ByteArrayComparable

use of org.apache.hadoop.hbase.filter.ByteArrayComparable in project drill by axbaretto.

The following example shows the method createHBaseScanSpec of the class MapRDBFilterBuilder.

/**
 * Translates one Drill compare-style function call into an {@link HBaseScanSpec}.
 * <p>
 * For row-key predicates the comparison is folded into the scan's [startRow, stopRow)
 * range where possible; otherwise a RowFilter (row key) or SingleColumnValueFilter
 * (regular column) is produced. Returns null when the predicate cannot be pushed down.
 *
 * @param call      the original Drill function call (also consulted for LIKE parsing)
 * @param processor pre-parsed view of the call: function name, field path, literal value,
 *                  sort order, and row-key-prefix detection
 * @return a scan spec covering the predicate, or null if push-down is not possible
 */
private HBaseScanSpec createHBaseScanSpec(FunctionCall call, MaprDBCompareFunctionsProcessor processor) {
    String functionName = processor.getFunctionName();
    SchemaPath field = processor.getPath();
    byte[] fieldValue = processor.getValue();
    boolean sortOrderAscending = processor.isSortOrderAscending();
    boolean isRowKey = field.getRootSegmentPath().equals(ROW_KEY);
    if (!(isRowKey || (!field.getRootSegment().isLastPath() && field.getRootSegment().getChild().isLastPath() && field.getRootSegment().getChild().isNamed()))) {
        /*
       * if the field in this function is neither the row_key nor a qualified HBase column, return.
       */
        return null;
    }
    if (processor.isRowKeyPrefixComparison()) {
        return createRowKeyPrefixScanSpec(call, processor);
    }
    // Defaults: no comparison filter, full-table scan range, binary comparison of the literal.
    CompareOp compareOp = null;
    boolean isNullTest = false;
    ByteArrayComparable comparator = new BinaryComparator(fieldValue);
    byte[] startRow = HConstants.EMPTY_START_ROW;
    byte[] stopRow = HConstants.EMPTY_END_ROW;
    switch(functionName) {
        case "equal":
            compareOp = CompareOp.EQUAL;
            if (isRowKey) {
                startRow = fieldValue;
                /* stopRow should be just greater than 'value'*/
                stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                compareOp = CompareOp.EQUAL;
            }
            break;
        case "not_equal":
            compareOp = CompareOp.NOT_EQUAL;
            break;
        case "greater_than_or_equal_to":
            // For descending sort order the comparison (and the scan bound it implies) is mirrored.
            if (sortOrderAscending) {
                compareOp = CompareOp.GREATER_OR_EQUAL;
                if (isRowKey) {
                    startRow = fieldValue;
                }
            } else {
                compareOp = CompareOp.LESS_OR_EQUAL;
                if (isRowKey) {
                    // stopRow should be just greater than 'value'
                    stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            }
            break;
        case "greater_than":
            if (sortOrderAscending) {
                compareOp = CompareOp.GREATER;
                if (isRowKey) {
                    // startRow should be just greater than 'value'
                    startRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            } else {
                compareOp = CompareOp.LESS;
                if (isRowKey) {
                    stopRow = fieldValue;
                }
            }
            break;
        case "less_than_or_equal_to":
            if (sortOrderAscending) {
                compareOp = CompareOp.LESS_OR_EQUAL;
                if (isRowKey) {
                    // stopRow should be just greater than 'value'
                    stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            } else {
                compareOp = CompareOp.GREATER_OR_EQUAL;
                if (isRowKey) {
                    startRow = fieldValue;
                }
            }
            break;
        case "less_than":
            if (sortOrderAscending) {
                compareOp = CompareOp.LESS;
                if (isRowKey) {
                    stopRow = fieldValue;
                }
            } else {
                compareOp = CompareOp.GREATER;
                if (isRowKey) {
                    // startRow should be just greater than 'value'
                    startRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            }
            break;
        case "isnull":
        case "isNull":
        case "is null":
            // Null tests on the row key cannot be pushed down (a row key always exists).
            if (isRowKey) {
                return null;
            }
            isNullTest = true;
            compareOp = CompareOp.EQUAL;
            comparator = new NullComparator();
            break;
        case "isnotnull":
        case "isNotNull":
        case "is not null":
            if (isRowKey) {
                return null;
            }
            compareOp = CompareOp.NOT_EQUAL;
            comparator = new NullComparator();
            break;
        case "like":
            /*
       * Convert the LIKE operand to Regular Expression pattern so that we can
       * apply RegexStringComparator()
       */
            HBaseRegexParser parser = new HBaseRegexParser(call).parse();
            compareOp = CompareOp.EQUAL;
            comparator = new RegexStringComparator(parser.getRegexString());
            /*
       * We can possibly do better if the LIKE operator is on the row_key
       */
            if (isRowKey) {
                String prefix = parser.getPrefixString();
                if (prefix != null) {
                    /*
           * If there is a literal prefix, it can help us prune the scan to a sub range
           */
                    if (prefix.equals(parser.getLikeString())) {
                        /* The operand value is literal. This turns the LIKE operator to EQUAL operator */
                        startRow = stopRow = fieldValue;
                        compareOp = null;
                    } else {
                        startRow = prefix.getBytes(Charsets.UTF_8);
                        stopRow = startRow.clone();
                        boolean isMaxVal = true;
                        // Find the smallest row key strictly greater than every key starting with
                        // this prefix, by incrementing the last non-overflowing byte.
                        // NOTE(review): the `< 0xff` test also treats 0xfe as overflowing, which
                        // widens the stop row slightly beyond the minimal bound; the scan is still
                        // correct because the regex comparator is applied as well.
                        for (int i = stopRow.length - 1; i >= 0; --i) {
                            int nextByteValue = (0xff & stopRow[i]) + 1;
                            if (nextByteValue < 0xff) {
                                stopRow[i] = (byte) nextByteValue;
                                isMaxVal = false;
                                break;
                            } else {
                                stopRow[i] = 0;
                            }
                        }
                        if (isMaxVal) {
                            stopRow = HConstants.EMPTY_END_ROW;
                        }
                    }
                }
            }
            break;
    }
    // Emit a scan spec only if the predicate yielded a comparison or narrowed the scan range.
    if (compareOp != null || startRow != HConstants.EMPTY_START_ROW || stopRow != HConstants.EMPTY_END_ROW) {
        Filter filter = null;
        if (isRowKey) {
            if (compareOp != null) {
                filter = new RowFilter(compareOp, comparator);
            }
        } else {
            byte[] family = HBaseUtils.getBytes(field.getRootSegment().getPath());
            byte[] qualifier = HBaseUtils.getBytes(field.getRootSegment().getChild().getNameSegment().getPath());
            filter = new SingleColumnValueFilter(family, qualifier, compareOp, comparator);
            ((SingleColumnValueFilter) filter).setLatestVersionOnly(true);
            // For null tests a missing column must pass the filter; otherwise skip rows
            // that do not have the column at all.
            if (!isNullTest) {
                ((SingleColumnValueFilter) filter).setFilterIfMissing(true);
            }
        }
        return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, filter);
    }
    // else
    return null;
}
Also used : HBaseScanSpec(org.apache.drill.exec.store.hbase.HBaseScanSpec) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) HBaseRegexParser(org.apache.drill.exec.store.hbase.HBaseRegexParser) NullComparator(org.apache.hadoop.hbase.filter.NullComparator) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) ByteArrayComparable(org.apache.hadoop.hbase.filter.ByteArrayComparable) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) SchemaPath(org.apache.drill.common.expression.SchemaPath) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) Filter(org.apache.hadoop.hbase.filter.Filter) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) CompareOp(org.apache.hadoop.hbase.filter.CompareFilter.CompareOp)

Example 10 with ByteArrayComparable

use of org.apache.hadoop.hbase.filter.ByteArrayComparable in project hbase by apache.

The following example shows the method checkAndMutateInternal of the class HRegion.

/**
 * Core implementation of checkAndMutate: evaluates the check (either a Filter or a single
 * column/value compare) under the row lock, and if it matches, applies the mutation or the
 * row mutations atomically.
 *
 * @param checkAndMutate the check plus the action (a Mutation or RowMutations) to perform
 *                       when the check passes
 * @param nonceGroup     nonce group used when applying the action
 * @param nonce          nonce used when applying the action
 * @return the result of the operation; success reflects whether the check matched
 * @throws IOException if the read, the checks, or the mutation fail
 */
private CheckAndMutateResult checkAndMutateInternal(CheckAndMutate checkAndMutate, long nonceGroup, long nonce) throws IOException {
    byte[] row = checkAndMutate.getRow();
    Filter filter = null;
    byte[] family = null;
    byte[] qualifier = null;
    CompareOperator op = null;
    ByteArrayComparable comparator = null;
    if (checkAndMutate.hasFilter()) {
        // Filter-based check: any returned cell counts as a match.
        filter = checkAndMutate.getFilter();
    } else {
        // Column compare check: the expected value is wrapped in a BinaryComparator.
        family = checkAndMutate.getFamily();
        qualifier = checkAndMutate.getQualifier();
        op = checkAndMutate.getCompareOp();
        comparator = new BinaryComparator(checkAndMutate.getValue());
    }
    TimeRange timeRange = checkAndMutate.getTimeRange();
    Mutation mutation = null;
    RowMutations rowMutations = null;
    // The action is either a single Mutation or a RowMutations batch; exactly one is set.
    if (checkAndMutate.getAction() instanceof Mutation) {
        mutation = (Mutation) checkAndMutate.getAction();
    } else {
        rowMutations = (RowMutations) checkAndMutate.getAction();
    }
    if (mutation != null) {
        checkMutationType(mutation);
        checkRow(mutation, row);
    } else {
        checkRow(rowMutations, row);
    }
    checkReadOnly();
    // TODO, add check for value length also move this check to the client
    checkResources();
    startRegionOperation();
    try {
        // Build the Get that reads the cell(s) the check is evaluated against.
        Get get = new Get(row);
        if (family != null) {
            checkFamily(family);
            get.addColumn(family, qualifier);
        }
        if (filter != null) {
            get.setFilter(filter);
        }
        if (timeRange != null) {
            get.setTimeRange(timeRange.getMin(), timeRange.getMax());
        }
        // Lock row - note that doBatchMutate will relock this row if called
        checkRow(row, "doCheckAndRowMutate");
        RowLock rowLock = getRowLock(get.getRow(), false, null);
        try {
            // A coprocessor may decide the whole operation before the built-in check runs.
            if (this.getCoprocessorHost() != null) {
                CheckAndMutateResult result = getCoprocessorHost().preCheckAndMutateAfterRowLock(checkAndMutate);
                if (result != null) {
                    return result;
                }
            }
            // NOTE: We used to wait here until mvcc caught up: mvcc.await();
            // Supposition is that now all changes are done under row locks, then when we go to read,
            // we'll get the latest on this row.
            boolean matches = false;
            long cellTs = 0;
            try (RegionScanner scanner = getScanner(new Scan(get))) {
                // NOTE: Please don't use HRegion.get() instead,
                // because it will copy cells to heap. See HBASE-26036
                List<Cell> result = new ArrayList<>(1);
                scanner.next(result);
                if (filter != null) {
                    if (!result.isEmpty()) {
                        matches = true;
                        cellTs = result.get(0).getTimestamp();
                    }
                } else {
                    // An absent/empty expected value matches a missing cell or a cell with an
                    // empty value; the sense is inverted for NOT_EQUAL.
                    boolean valueIsNull = comparator.getValue() == null || comparator.getValue().length == 0;
                    if (result.isEmpty() && valueIsNull) {
                        matches = op != CompareOperator.NOT_EQUAL;
                    } else if (result.size() > 0 && valueIsNull) {
                        matches = (result.get(0).getValueLength() == 0) == (op != CompareOperator.NOT_EQUAL);
                        cellTs = result.get(0).getTimestamp();
                    } else if (result.size() == 1) {
                        Cell kv = result.get(0);
                        cellTs = kv.getTimestamp();
                        int compareResult = PrivateCellUtil.compareValue(kv, comparator);
                        matches = matches(op, compareResult);
                    }
                }
            }
            // If matches, perform the mutation or the rowMutations
            if (matches) {
                // We have acquired the row lock already. If the system clock is NOT monotonically
                // non-decreasing (see HBASE-14070) we should make sure that the mutation has a
                // larger timestamp than what was observed via Get. doBatchMutate already does this, but
                // there is no way to pass the cellTs. See HBASE-14054.
                long now = EnvironmentEdgeManager.currentTime();
                // ensure write is not eclipsed
                long ts = Math.max(now, cellTs);
                byte[] byteTs = Bytes.toBytes(ts);
                if (mutation != null) {
                    if (mutation instanceof Put) {
                        updateCellTimestamps(mutation.getFamilyCellMap().values(), byteTs);
                    }
                // And else 'delete' is not needed since it already does a second get, and sets the
                // timestamp from get (see prepareDeleteTimestamps).
                } else {
                    for (Mutation m : rowMutations.getMutations()) {
                        if (m instanceof Put) {
                            updateCellTimestamps(m.getFamilyCellMap().values(), byteTs);
                        }
                    }
                // And else 'delete' is not needed since it already does a second get, and sets the
                // timestamp from get (see prepareDeleteTimestamps).
                }
                // All edits for the given row (across all column families) must happen atomically.
                Result r;
                if (mutation != null) {
                    r = mutate(mutation, true, nonceGroup, nonce).getResult();
                } else {
                    r = mutateRow(rowMutations, nonceGroup, nonce);
                }
                this.checkAndMutateChecksPassed.increment();
                return new CheckAndMutateResult(true, r);
            }
            this.checkAndMutateChecksFailed.increment();
            return new CheckAndMutateResult(false, null);
        } finally {
            rowLock.release();
        }
    } finally {
        closeRegionOperation();
    }
}
Also used : CheckAndMutateResult(org.apache.hadoop.hbase.client.CheckAndMutateResult) ArrayList(java.util.ArrayList) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) Put(org.apache.hadoop.hbase.client.Put) RowMutations(org.apache.hadoop.hbase.client.RowMutations) Result(org.apache.hadoop.hbase.client.Result) CheckAndMutateResult(org.apache.hadoop.hbase.client.CheckAndMutateResult) CompareOperator(org.apache.hadoop.hbase.CompareOperator) TimeRange(org.apache.hadoop.hbase.io.TimeRange) ByteArrayComparable(org.apache.hadoop.hbase.filter.ByteArrayComparable) Filter(org.apache.hadoop.hbase.filter.Filter) Get(org.apache.hadoop.hbase.client.Get) Scan(org.apache.hadoop.hbase.client.Scan) Mutation(org.apache.hadoop.hbase.client.Mutation) Cell(org.apache.hadoop.hbase.Cell) ByteBufferExtendedCell(org.apache.hadoop.hbase.ByteBufferExtendedCell)

Aggregations

ByteArrayComparable (org.apache.hadoop.hbase.filter.ByteArrayComparable)11 Filter (org.apache.hadoop.hbase.filter.Filter)8 BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator)7 CompareOp (org.apache.hadoop.hbase.filter.CompareFilter.CompareOp)6 RegexStringComparator (org.apache.hadoop.hbase.filter.RegexStringComparator)6 SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter)6 SchemaPath (org.apache.drill.common.expression.SchemaPath)5 NullComparator (org.apache.hadoop.hbase.filter.NullComparator)5 RowFilter (org.apache.hadoop.hbase.filter.RowFilter)5 HBaseRegexParser (org.apache.drill.exec.store.hbase.HBaseRegexParser)3 HBaseScanSpec (org.apache.drill.exec.store.hbase.HBaseScanSpec)3 IOException (java.io.IOException)2 Method (java.lang.reflect.Method)2 ArrayList (java.util.ArrayList)2 Cell (org.apache.hadoop.hbase.Cell)2 CompareOperator (org.apache.hadoop.hbase.CompareOperator)2 DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException)2 HBaseIOException (org.apache.hadoop.hbase.HBaseIOException)2 Get (org.apache.hadoop.hbase.client.Get)2 Scan (org.apache.hadoop.hbase.client.Scan)2