Search in sources:

Example 1 with CompareOp

Use of org.apache.hadoop.hbase.filter.CompareFilter.CompareOp in project phoenix by apache.

The class ExpressionCompiler, method visitLeave.

@Override
public Expression visitLeave(ComparisonParseNode node, List<Expression> children) throws SQLException {
    ParseNode lhsNode = node.getChildren().get(0);
    ParseNode rhsNode = node.getChildren().get(1);
    Expression lhsExpr = children.get(0);
    Expression rhsExpr = children.get(1);
    CompareOp op = node.getFilterOp();
    if (lhsNode instanceof RowValueConstructorParseNode && rhsNode instanceof RowValueConstructorParseNode) {
        int i = 0;
        for (; i < Math.min(lhsExpr.getChildren().size(), rhsExpr.getChildren().size()); i++) {
            addBindParamMetaData(lhsNode.getChildren().get(i), rhsNode.getChildren().get(i), lhsExpr.getChildren().get(i), rhsExpr.getChildren().get(i));
        }
        for (; i < lhsExpr.getChildren().size(); i++) {
            addBindParamMetaData(lhsNode.getChildren().get(i), null, lhsExpr.getChildren().get(i), null);
        }
        for (; i < rhsExpr.getChildren().size(); i++) {
            addBindParamMetaData(null, rhsNode.getChildren().get(i), null, rhsExpr.getChildren().get(i));
        }
    } else if (lhsExpr instanceof RowValueConstructorExpression) {
        addBindParamMetaData(lhsNode.getChildren().get(0), rhsNode, lhsExpr.getChildren().get(0), rhsExpr);
        for (int i = 1; i < lhsExpr.getChildren().size(); i++) {
            addBindParamMetaData(lhsNode.getChildren().get(i), null, lhsExpr.getChildren().get(i), null);
        }
    } else if (rhsExpr instanceof RowValueConstructorExpression) {
        addBindParamMetaData(lhsNode, rhsNode.getChildren().get(0), lhsExpr, rhsExpr.getChildren().get(0));
        for (int i = 1; i < rhsExpr.getChildren().size(); i++) {
            addBindParamMetaData(null, rhsNode.getChildren().get(i), null, rhsExpr.getChildren().get(i));
        }
    } else {
        addBindParamMetaData(lhsNode, rhsNode, lhsExpr, rhsExpr);
    }
    return wrapGroupByExpression(ComparisonExpression.create(op, children, context.getTempPtr(), context.getCurrentTable().getTable().rowKeyOrderOptimizable()));
}
Also used : DecimalAddExpression(org.apache.phoenix.expression.DecimalAddExpression) TimestampSubtractExpression(org.apache.phoenix.expression.TimestampSubtractExpression) ArrayConstructorExpression(org.apache.phoenix.expression.ArrayConstructorExpression) Expression(org.apache.phoenix.expression.Expression) LikeExpression(org.apache.phoenix.expression.LikeExpression) ByteBasedLikeExpression(org.apache.phoenix.expression.ByteBasedLikeExpression) DoubleSubtractExpression(org.apache.phoenix.expression.DoubleSubtractExpression) LiteralExpression(org.apache.phoenix.expression.LiteralExpression) InListExpression(org.apache.phoenix.expression.InListExpression) DateSubtractExpression(org.apache.phoenix.expression.DateSubtractExpression) ArrayElemRefExpression(org.apache.phoenix.expression.function.ArrayElemRefExpression) CaseExpression(org.apache.phoenix.expression.CaseExpression) DoubleDivideExpression(org.apache.phoenix.expression.DoubleDivideExpression) NotExpression(org.apache.phoenix.expression.NotExpression) DoubleAddExpression(org.apache.phoenix.expression.DoubleAddExpression) DecimalDivideExpression(org.apache.phoenix.expression.DecimalDivideExpression) RowKeyColumnExpression(org.apache.phoenix.expression.RowKeyColumnExpression) RowValueConstructorExpression(org.apache.phoenix.expression.RowValueConstructorExpression) RoundTimestampExpression(org.apache.phoenix.expression.function.RoundTimestampExpression) StringConcatExpression(org.apache.phoenix.expression.StringConcatExpression) ComparisonExpression(org.apache.phoenix.expression.ComparisonExpression) TimestampAddExpression(org.apache.phoenix.expression.TimestampAddExpression) CoerceExpression(org.apache.phoenix.expression.CoerceExpression) StringBasedLikeExpression(org.apache.phoenix.expression.StringBasedLikeExpression) ArrayAnyComparisonExpression(org.apache.phoenix.expression.function.ArrayAnyComparisonExpression) DecimalSubtractExpression(org.apache.phoenix.expression.DecimalSubtractExpression) ModulusExpression(org.apache.phoenix.expression.ModulusExpression) DoubleMultiplyExpression(org.apache.phoenix.expression.DoubleMultiplyExpression) DecimalMultiplyExpression(org.apache.phoenix.expression.DecimalMultiplyExpression) DateAddExpression(org.apache.phoenix.expression.DateAddExpression) RoundDecimalExpression(org.apache.phoenix.expression.function.RoundDecimalExpression) LongAddExpression(org.apache.phoenix.expression.LongAddExpression) LongSubtractExpression(org.apache.phoenix.expression.LongSubtractExpression) IsNullExpression(org.apache.phoenix.expression.IsNullExpression) AndExpression(org.apache.phoenix.expression.AndExpression) LongMultiplyExpression(org.apache.phoenix.expression.LongMultiplyExpression) ArrayAllComparisonExpression(org.apache.phoenix.expression.function.ArrayAllComparisonExpression) OrExpression(org.apache.phoenix.expression.OrExpression) LongDivideExpression(org.apache.phoenix.expression.LongDivideExpression) BindParseNode(org.apache.phoenix.parse.BindParseNode) ModulusParseNode(org.apache.phoenix.parse.ModulusParseNode) LikeParseNode(org.apache.phoenix.parse.LikeParseNode) UDFParseNode(org.apache.phoenix.parse.UDFParseNode) ComparisonParseNode(org.apache.phoenix.parse.ComparisonParseNode) SequenceValueParseNode(org.apache.phoenix.parse.SequenceValueParseNode) InListParseNode(org.apache.phoenix.parse.InListParseNode) AndParseNode(org.apache.phoenix.parse.AndParseNode) ExistsParseNode(org.apache.phoenix.parse.ExistsParseNode) SubtractParseNode(org.apache.phoenix.parse.SubtractParseNode) 
NotParseNode(org.apache.phoenix.parse.NotParseNode) DivideParseNode(org.apache.phoenix.parse.DivideParseNode) StringConcatParseNode(org.apache.phoenix.parse.StringConcatParseNode) OrParseNode(org.apache.phoenix.parse.OrParseNode) ParseNode(org.apache.phoenix.parse.ParseNode) LiteralParseNode(org.apache.phoenix.parse.LiteralParseNode) FunctionParseNode(org.apache.phoenix.parse.FunctionParseNode) RowValueConstructorParseNode(org.apache.phoenix.parse.RowValueConstructorParseNode) MultiplyParseNode(org.apache.phoenix.parse.MultiplyParseNode) ColumnParseNode(org.apache.phoenix.parse.ColumnParseNode) CaseParseNode(org.apache.phoenix.parse.CaseParseNode) CastParseNode(org.apache.phoenix.parse.CastParseNode) AddParseNode(org.apache.phoenix.parse.AddParseNode) SubqueryParseNode(org.apache.phoenix.parse.SubqueryParseNode) ArithmeticParseNode(org.apache.phoenix.parse.ArithmeticParseNode) IsNullParseNode(org.apache.phoenix.parse.IsNullParseNode) CompareOp(org.apache.hadoop.hbase.filter.CompareFilter.CompareOp)
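
As a side note, the three loops in visitLeave pair up the children of two row value constructors of possibly different arity: matching positions are bound together, and whatever remains on the longer side is bound against null. Below is a minimal, self-contained Java sketch of that pairing pattern; the class name and the bindMetaData helper are hypothetical stand-ins, not Phoenix code.

import java.util.Arrays;
import java.util.List;

public class RvcPairingSketch {

    // Hypothetical stand-in for addBindParamMetaData: just record the pairing.
    static void bindMetaData(String lhs, String rhs) {
        System.out.println("bind lhs=" + lhs + " rhs=" + rhs);
    }

    static void pair(List<String> lhsChildren, List<String> rhsChildren) {
        int i = 0;
        // Pair up matching positions first.
        for (; i < Math.min(lhsChildren.size(), rhsChildren.size()); i++) {
            bindMetaData(lhsChildren.get(i), rhsChildren.get(i));
        }
        // Whatever remains on the longer side is paired with null.
        for (; i < lhsChildren.size(); i++) {
            bindMetaData(lhsChildren.get(i), null);
        }
        for (; i < rhsChildren.size(); i++) {
            bindMetaData(null, rhsChildren.get(i));
        }
    }

    public static void main(String[] args) {
        // (a, b, c) compared to (x, y): 'c' is paired with null.
        pair(Arrays.asList("a", "b", "c"), Arrays.asList("x", "y"));
    }
}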

Example 2 with CompareOp

Use of org.apache.hadoop.hbase.filter.CompareFilter.CompareOp in project phoenix by apache.

The class PrefixFunction, method newKeyPart.

@Override
public KeyPart newKeyPart(final KeyPart childPart) {
    return new KeyPart() {

        private final List<Expression> extractNodes = extractNode() ? Collections.<Expression>singletonList(PrefixFunction.this) : Collections.<Expression>emptyList();

        @Override
        public PColumn getColumn() {
            return childPart.getColumn();
        }

        @Override
        public List<Expression> getExtractNodes() {
            return extractNodes;
        }

        @Override
        public KeyRange getKeyRange(CompareOp op, Expression rhs) {
            byte[] lowerRange = KeyRange.UNBOUND;
            byte[] upperRange = KeyRange.UNBOUND;
            boolean lowerInclusive = true;
            PDataType type = getColumn().getDataType();
            switch(op) {
                case EQUAL:
                    lowerRange = evaluateExpression(rhs);
                    upperRange = ByteUtil.nextKey(lowerRange);
                    break;
                case GREATER:
                    lowerRange = ByteUtil.nextKey(evaluateExpression(rhs));
                    break;
                case LESS_OR_EQUAL:
                    upperRange = ByteUtil.nextKey(evaluateExpression(rhs));
                    lowerInclusive = false;
                    break;
                default:
                    return childPart.getKeyRange(op, rhs);
            }
            PColumn column = getColumn();
            Integer length = column.getMaxLength();
            if (type.isFixedWidth()) {
                if (length != null) {
                    // Pad as if ascending regardless of the column's sort order; otherwise, for DESC
                    // columns, rows with padding would sort *after* rows with no padding and be skipped.
                    if (lowerRange != KeyRange.UNBOUND) {
                        lowerRange = type.pad(lowerRange, length, SortOrder.ASC);
                    }
                    if (upperRange != KeyRange.UNBOUND) {
                        upperRange = type.pad(upperRange, length, SortOrder.ASC);
                    }
                }
            } else if (column.getSortOrder() == SortOrder.DESC && getTable().rowKeyOrderOptimizable()) {
                // For DESC columns, append a zero (separator) byte: the \xFF that gets appended to the
                // lower range would otherwise skip rows, e.g. a lowerRange of 'a\xFF' would skip 'ab',
                // while 'a\x00\xFF' would not.
                if (lowerRange != KeyRange.UNBOUND) {
                    lowerRange = Arrays.copyOf(lowerRange, lowerRange.length + 1);
                    lowerRange[lowerRange.length - 1] = QueryConstants.SEPARATOR_BYTE;
                }
            }
            return KeyRange.getKeyRange(lowerRange, lowerInclusive, upperRange, false);
        }

        @Override
        public PTable getTable() {
            return childPart.getTable();
        }
    };
}
Also used : PColumn(org.apache.phoenix.schema.PColumn) PDataType(org.apache.phoenix.schema.types.PDataType) Expression(org.apache.phoenix.expression.Expression) KeyPart(org.apache.phoenix.compile.KeyPart) List(java.util.List) CompareOp(org.apache.hadoop.hbase.filter.CompareFilter.CompareOp)
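
The EQUAL branch above shows the core trick of newKeyPart: a comparison against a prefix becomes the half-open key range [prefix, nextKey(prefix)). The following standalone Java sketch illustrates the idea with an approximate nextKey; it is not Phoenix's ByteUtil implementation, and the class name is invented.

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class PrefixRangeSketch {

    // Smallest key that sorts after every key starting with 'prefix': increment the
    // right-most byte that is not 0xFF and drop everything after it.
    static byte[] nextKey(byte[] prefix) {
        byte[] next = Arrays.copyOf(prefix, prefix.length);
        for (int i = next.length - 1; i >= 0; i--) {
            if ((next[i] & 0xFF) != 0xFF) {
                next[i]++;
                return Arrays.copyOf(next, i + 1);
            }
        }
        return new byte[0]; // every byte was 0xFF: unbounded upper range
    }

    public static void main(String[] args) {
        byte[] prefix = "abc".getBytes(StandardCharsets.UTF_8);
        byte[] lower = prefix;            // inclusive lower bound: "abc"
        byte[] upper = nextKey(prefix);   // exclusive upper bound: "abd"
        System.out.println(new String(lower, StandardCharsets.UTF_8) + " <= key < "
                + new String(upper, StandardCharsets.UTF_8));
    }
}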

Example 3 with CompareOp

Use of org.apache.hadoop.hbase.filter.CompareFilter.CompareOp in project phoenix by apache.

The class RTrimFunction, method newKeyPart.

@Override
public KeyPart newKeyPart(final KeyPart childPart) {
    return new KeyPart() {

        @Override
        public KeyRange getKeyRange(CompareOp op, Expression rhs) {
            byte[] lowerRange = KeyRange.UNBOUND;
            byte[] upperRange = KeyRange.UNBOUND;
            boolean lowerInclusive = true;
            boolean upperInclusive = false;
            PDataType type = getColumn().getDataType();
            SortOrder sortOrder = getColumn().getSortOrder();
            switch(op) {
                case LESS_OR_EQUAL:
                    lowerInclusive = false;
                case EQUAL:
                    upperRange = evaluateExpression(rhs);
                    if (op == CompareOp.EQUAL) {
                        lowerRange = upperRange;
                    }
                    if (sortOrder == SortOrder.ASC || !getTable().rowKeyOrderOptimizable()) {
                        upperRange = Arrays.copyOf(upperRange, upperRange.length + 1);
                        upperRange[upperRange.length - 1] = StringUtil.SPACE_UTF8;
                        ByteUtil.nextKey(upperRange, upperRange.length);
                    } else {
                        upperInclusive = true;
                        if (op == CompareOp.LESS_OR_EQUAL) {
                            // Nothing more to do for LESS_OR_EQUAL: the (inclusive) upper range
                            // will be the RHS value.
                            break;
                        }
                        /*
                         * Somewhat tricky to get the range correct for the DESC equality case.
                         * The lower range is the RHS value followed by any number of inverted spaces.
                         * We need to add a zero byte as the lower range will have an \xFF byte
                         * appended to it and otherwise we'd skip past any rows where there is more
                         * than one space following the RHS.
                         * The upper range should span up to and including the RHS value. We need
                         * to add our own \xFF as otherwise this will look like a degenerate query
                         * since the lower would be bigger than the upper range.
                         */
                        lowerRange = Arrays.copyOf(lowerRange, lowerRange.length + 2);
                        lowerRange[lowerRange.length - 2] = StringUtil.INVERTED_SPACE_UTF8;
                        lowerRange[lowerRange.length - 1] = QueryConstants.SEPARATOR_BYTE;
                        upperRange = Arrays.copyOf(upperRange, upperRange.length + 1);
                        upperRange[upperRange.length - 1] = QueryConstants.DESC_SEPARATOR_BYTE;
                    }
                    break;
                default:
                    // TODO: Is this ok for DESC?
                    return childPart.getKeyRange(op, rhs);
            }
            Integer length = getColumn().getMaxLength();
            if (type.isFixedWidth() && length != null) {
                // Pad as if ascending regardless of the column's sort order; otherwise, for DESC
                // columns, rows with padding would sort *after* rows with no padding and be skipped.
                if (lowerRange != KeyRange.UNBOUND) {
                    lowerRange = type.pad(lowerRange, length, SortOrder.ASC);
                }
                if (upperRange != KeyRange.UNBOUND) {
                    upperRange = type.pad(upperRange, length, SortOrder.ASC);
                }
            }
            return KeyRange.getKeyRange(lowerRange, lowerInclusive, upperRange, upperInclusive);
        }

        @Override
        public List<Expression> getExtractNodes() {
            // Cannot extract this node: it could yield false positives for values with trailing
            // non-blank characters, such as 'foo  bar' where the RHS constant is 'foo'.
            return Collections.<Expression>emptyList();
        }

        @Override
        public PColumn getColumn() {
            return childPart.getColumn();
        }

        @Override
        public PTable getTable() {
            return childPart.getTable();
        }
    };
}
Also used : PDataType(org.apache.phoenix.schema.types.PDataType) Expression(org.apache.phoenix.expression.Expression) KeyPart(org.apache.phoenix.compile.KeyPart) SortOrder(org.apache.phoenix.schema.SortOrder) CompareOp(org.apache.hadoop.hbase.filter.CompareFilter.CompareOp)
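
In the ASC branch above, an EQUAL comparison against an RTRIM'd column has to match the RHS value plus any number of trailing spaces, so the upper bound is the RHS value with a space appended and then incremented. This is a rough, self-contained illustration of that bound; the simplified in-place increment ignores carry handling and only approximates ByteUtil.nextKey(byte[], int), and the class name is invented.

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class RTrimRangeSketch {

    // Increment the last byte in place (no carry; good enough for this example).
    static void nextKeyInPlace(byte[] key) {
        key[key.length - 1]++;
    }

    public static void main(String[] args) {
        byte[] rhs = "foo".getBytes(StandardCharsets.UTF_8);
        byte[] lower = rhs;                             // inclusive lower bound: "foo"
        byte[] upper = Arrays.copyOf(rhs, rhs.length + 1);
        upper[upper.length - 1] = ' ';                  // "foo "
        nextKeyInPlace(upper);                          // "foo!", the exclusive upper bound
        // "foo", "foo ", "foo  ", ... all fall inside [lower, upper); "food" does not.
        System.out.println(new String(lower, StandardCharsets.UTF_8) + " <= key < "
                + new String(upper, StandardCharsets.UTF_8));
    }
}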

Example 4 with CompareOp

Use of org.apache.hadoop.hbase.filter.CompareFilter.CompareOp in project cxf by apache.

The class HBaseQueryVisitor, method buildSimpleQuery.

private Filter buildSimpleQuery(ConditionType ct, String name, Object value) {
    name = super.getRealPropertyName(name);
    validatePropertyValue(name, value);
    Class<?> clazz = getPrimitiveFieldClass(name, value.getClass());
    CompareOp compareOp = null;
    boolean regexCompRequired = false;
    switch(ct) {
        case EQUALS:
            compareOp = CompareOp.EQUAL;
            regexCompRequired = String.class == clazz && value.toString().endsWith("*");
            break;
        case NOT_EQUALS:
            compareOp = CompareOp.NOT_EQUAL;
            regexCompRequired = String.class == clazz && value.toString().endsWith("*");
            break;
        case GREATER_THAN:
            compareOp = CompareOp.GREATER;
            break;
        case GREATER_OR_EQUALS:
            compareOp = CompareOp.GREATER_OR_EQUAL;
            break;
        case LESS_THAN:
            compareOp = CompareOp.LESS;
            break;
        case LESS_OR_EQUALS:
            compareOp = CompareOp.LESS_OR_EQUAL;
            break;
        default:
            break;
    }
    String qualifier = name;
    String theFamily = family != null ? family : familyMap.get(qualifier);
    ByteArrayComparable byteArrayComparable = regexCompRequired ? new RegexStringComparator(value.toString().replace("*", ".")) : new BinaryComparator(value.toString().getBytes(StandardCharsets.UTF_8));
    return new SingleColumnValueFilter(theFamily.getBytes(StandardCharsets.UTF_8), qualifier.getBytes(StandardCharsets.UTF_8), compareOp, byteArrayComparable);
}
Also used : RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) ByteArrayComparable(org.apache.hadoop.hbase.filter.ByteArrayComparable) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) CompareOp(org.apache.hadoop.hbase.filter.CompareFilter.CompareOp) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator)
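
For context, the filter returned by buildSimpleQuery would typically be attached to an HBase Scan. The usage sketch below assumes the pre-2.0 HBase client API (where the CompareOp-based SingleColumnValueFilter constructors live; newer clients prefer CompareOperator), and the column family and qualifier names are invented.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class SimpleQuerySketch {
    public static void main(String[] args) {
        // EQUALS on an exact value: binary comparison.
        SingleColumnValueFilter exact = new SingleColumnValueFilter(
                Bytes.toBytes("cf"), Bytes.toBytes("name"),
                CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("alice")));

        // EQUALS on a wildcard value such as "ali*": switch to a regex comparator,
        // mirroring the regexCompRequired branch above.
        SingleColumnValueFilter wildcard = new SingleColumnValueFilter(
                Bytes.toBytes("cf"), Bytes.toBytes("name"),
                CompareOp.EQUAL, new RegexStringComparator("ali."));

        Scan scan = new Scan();
        scan.setFilter(exact); // or 'wildcard' for the regex case
    }
}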

Example 5 with CompareOp

Use of org.apache.hadoop.hbase.filter.CompareFilter.CompareOp in project drill by axbaretto.

The class HBaseFilterBuilder, method createHBaseScanSpec.

private HBaseScanSpec createHBaseScanSpec(FunctionCall call, CompareFunctionsProcessor processor) {
    String functionName = processor.getFunctionName();
    SchemaPath field = processor.getPath();
    byte[] fieldValue = processor.getValue();
    boolean sortOrderAscending = processor.isSortOrderAscending();
    boolean isRowKey = field.getRootSegmentPath().equals(ROW_KEY);
    if (!(isRowKey || (!field.getRootSegment().isLastPath() && field.getRootSegment().getChild().isLastPath() && field.getRootSegment().getChild().isNamed()))) {
        /*
       * if the field in this function is neither the row_key nor a qualified HBase column, return.
       */
        return null;
    }
    if (processor.isRowKeyPrefixComparison()) {
        return createRowKeyPrefixScanSpec(call, processor);
    }
    CompareOp compareOp = null;
    boolean isNullTest = false;
    ByteArrayComparable comparator = new BinaryComparator(fieldValue);
    byte[] startRow = HConstants.EMPTY_START_ROW;
    byte[] stopRow = HConstants.EMPTY_END_ROW;
    switch(functionName) {
        case "equal":
            compareOp = CompareOp.EQUAL;
            if (isRowKey) {
                startRow = fieldValue;
                /* stopRow should be just greater than 'value'*/
                stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                compareOp = CompareOp.EQUAL;
            }
            break;
        case "not_equal":
            compareOp = CompareOp.NOT_EQUAL;
            break;
        case "greater_than_or_equal_to":
            if (sortOrderAscending) {
                compareOp = CompareOp.GREATER_OR_EQUAL;
                if (isRowKey) {
                    startRow = fieldValue;
                }
            } else {
                compareOp = CompareOp.LESS_OR_EQUAL;
                if (isRowKey) {
                    // stopRow should be just greater than 'value'
                    stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            }
            break;
        case "greater_than":
            if (sortOrderAscending) {
                compareOp = CompareOp.GREATER;
                if (isRowKey) {
                    // startRow should be just greater than 'value'
                    startRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            } else {
                compareOp = CompareOp.LESS;
                if (isRowKey) {
                    stopRow = fieldValue;
                }
            }
            break;
        case "less_than_or_equal_to":
            if (sortOrderAscending) {
                compareOp = CompareOp.LESS_OR_EQUAL;
                if (isRowKey) {
                    // stopRow should be just greater than 'value'
                    stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            } else {
                compareOp = CompareOp.GREATER_OR_EQUAL;
                if (isRowKey) {
                    startRow = fieldValue;
                }
            }
            break;
        case "less_than":
            if (sortOrderAscending) {
                compareOp = CompareOp.LESS;
                if (isRowKey) {
                    stopRow = fieldValue;
                }
            } else {
                compareOp = CompareOp.GREATER;
                if (isRowKey) {
                    // startRow should be just greater than 'value'
                    startRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            }
            break;
        case "isnull":
        case "isNull":
        case "is null":
            if (isRowKey) {
                return null;
            }
            isNullTest = true;
            compareOp = CompareOp.EQUAL;
            comparator = new NullComparator();
            break;
        case "isnotnull":
        case "isNotNull":
        case "is not null":
            if (isRowKey) {
                return null;
            }
            compareOp = CompareOp.NOT_EQUAL;
            comparator = new NullComparator();
            break;
        case "like":
            /*
       * Convert the LIKE operand to Regular Expression pattern so that we can
       * apply RegexStringComparator()
       */
            HBaseRegexParser parser = new HBaseRegexParser(call).parse();
            compareOp = CompareOp.EQUAL;
            comparator = new RegexStringComparator(parser.getRegexString());
            /*
       * We can possibly do better if the LIKE operator is on the row_key
       */
            if (isRowKey) {
                String prefix = parser.getPrefixString();
                if (prefix != null) {
                    /*
           * If there is a literal prefix, it can help us prune the scan to a sub range
           */
                    if (prefix.equals(parser.getLikeString())) {
                        /* The operand value is literal. This turns the LIKE operator to EQUAL operator */
                        startRow = stopRow = fieldValue;
                        compareOp = null;
                    } else {
                        startRow = prefix.getBytes(Charsets.UTF_8);
                        stopRow = startRow.clone();
                        boolean isMaxVal = true;
                        for (int i = stopRow.length - 1; i >= 0; --i) {
                            int nextByteValue = (0xff & stopRow[i]) + 1;
                            if (nextByteValue < 0xff) {
                                stopRow[i] = (byte) nextByteValue;
                                isMaxVal = false;
                                break;
                            } else {
                                stopRow[i] = 0;
                            }
                        }
                        if (isMaxVal) {
                            stopRow = HConstants.EMPTY_END_ROW;
                        }
                    }
                }
            }
            break;
    }
    if (compareOp != null || startRow != HConstants.EMPTY_START_ROW || stopRow != HConstants.EMPTY_END_ROW) {
        Filter filter = null;
        if (isRowKey) {
            if (compareOp != null) {
                filter = new RowFilter(compareOp, comparator);
            }
        } else {
            byte[] family = HBaseUtils.getBytes(field.getRootSegment().getPath());
            byte[] qualifier = HBaseUtils.getBytes(field.getRootSegment().getChild().getNameSegment().getPath());
            filter = new SingleColumnValueFilter(family, qualifier, compareOp, comparator);
            ((SingleColumnValueFilter) filter).setLatestVersionOnly(true);
            if (!isNullTest) {
                ((SingleColumnValueFilter) filter).setFilterIfMissing(true);
            }
        }
        return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, filter);
    }
    // else
    return null;
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) NullComparator(org.apache.hadoop.hbase.filter.NullComparator) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) ByteArrayComparable(org.apache.hadoop.hbase.filter.ByteArrayComparable) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) SchemaPath(org.apache.drill.common.expression.SchemaPath) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) Filter(org.apache.hadoop.hbase.filter.Filter) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) CompareOp(org.apache.hadoop.hbase.filter.CompareFilter.CompareOp)
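
Two row-key bound tricks recur in createHBaseScanSpec: an exact row-key match uses a stop row that is "just greater than" the value (the value with one extra 0x00 byte), and a LIKE prefix uses a stop row obtained by incrementing the right-most non-0xFF byte of the prefix. The standalone Java sketch below restates both ideas; it is an illustration under those assumptions, not Drill code, and the class name is invented.

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class RowKeyBoundsSketch {

    // "Just greater than value": the value followed by one extra 0x00 byte.
    static byte[] stopRowForExactMatch(byte[] value) {
        return Arrays.copyOf(value, value.length + 1);
    }

    // Exclusive stop row covering all keys that start with 'prefix'.
    static byte[] stopRowForPrefix(byte[] prefix) {
        byte[] stop = prefix.clone();
        for (int i = stop.length - 1; i >= 0; --i) {
            int next = (0xff & stop[i]) + 1;
            if (next <= 0xff) {
                stop[i] = (byte) next;
                return stop;
            }
            stop[i] = 0; // 0xFF rolls over; carry into the byte to the left
        }
        return new byte[0]; // every byte was 0xFF: scan to the end of the table
    }

    public static void main(String[] args) {
        byte[] key = "row-01".getBytes(StandardCharsets.UTF_8);
        System.out.println(Arrays.toString(stopRowForExactMatch(key)));                // "row-01" + 0x00
        System.out.println(new String(stopRowForPrefix(key), StandardCharsets.UTF_8)); // "row-02"
    }
}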

Aggregations

CompareOp (org.apache.hadoop.hbase.filter.CompareFilter.CompareOp) - 12 usages
BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator) - 6 usages
ByteArrayComparable (org.apache.hadoop.hbase.filter.ByteArrayComparable) - 6 usages
RegexStringComparator (org.apache.hadoop.hbase.filter.RegexStringComparator) - 6 usages
SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter) - 6 usages
SchemaPath (org.apache.drill.common.expression.SchemaPath) - 5 usages
Filter (org.apache.hadoop.hbase.filter.Filter) - 5 usages
NullComparator (org.apache.hadoop.hbase.filter.NullComparator) - 5 usages
RowFilter (org.apache.hadoop.hbase.filter.RowFilter) - 5 usages
Expression (org.apache.phoenix.expression.Expression) - 5 usages
HBaseRegexParser (org.apache.drill.exec.store.hbase.HBaseRegexParser) - 3 usages
HBaseScanSpec (org.apache.drill.exec.store.hbase.HBaseScanSpec) - 3 usages
KeyPart (org.apache.phoenix.compile.KeyPart) - 3 usages
LiteralExpression (org.apache.phoenix.expression.LiteralExpression) - 3 usages
PDataType (org.apache.phoenix.schema.types.PDataType) - 3 usages
List (java.util.List) - 2 usages
ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable) - 2 usages
AndExpression (org.apache.phoenix.expression.AndExpression) - 2 usages
ArrayConstructorExpression (org.apache.phoenix.expression.ArrayConstructorExpression) - 2 usages
ByteBasedLikeExpression (org.apache.phoenix.expression.ByteBasedLikeExpression) - 2 usages