
Example 46 with ParseNode

Use of org.apache.phoenix.parse.ParseNode in project phoenix by apache.

The class JoinCompiler, method optimize.

public static SelectStatement optimize(PhoenixStatement statement, SelectStatement select, final ColumnResolver resolver) throws SQLException {
    TableRef groupByTableRef = null;
    TableRef orderByTableRef = null;
    if (select.getGroupBy() != null && !select.getGroupBy().isEmpty()) {
        ColumnRefParseNodeVisitor groupByVisitor = new ColumnRefParseNodeVisitor(resolver, statement.getConnection());
        for (ParseNode node : select.getGroupBy()) {
            node.accept(groupByVisitor);
        }
        Set<TableRef> set = groupByVisitor.getTableRefSet();
        if (set.size() == 1) {
            groupByTableRef = set.iterator().next();
        }
    } else if (select.getOrderBy() != null && !select.getOrderBy().isEmpty()) {
        ColumnRefParseNodeVisitor orderByVisitor = new ColumnRefParseNodeVisitor(resolver, statement.getConnection());
        for (OrderByNode node : select.getOrderBy()) {
            node.getNode().accept(orderByVisitor);
        }
        Set<TableRef> set = orderByVisitor.getTableRefSet();
        if (set.size() == 1) {
            orderByTableRef = set.iterator().next();
        }
    }
    JoinTable join = compile(statement, select, resolver);
    if (groupByTableRef != null || orderByTableRef != null) {
        QueryCompiler compiler = new QueryCompiler(statement, select, resolver, false);
        List<Object> binds = statement.getParameters();
        StatementContext ctx = new StatementContext(statement, resolver, new Scan(), new SequenceManager(statement));
        QueryPlan plan = compiler.compileJoinQuery(ctx, binds, join, false, false, null);
        TableRef table = plan.getTableRef();
        if (groupByTableRef != null && !groupByTableRef.equals(table)) {
            groupByTableRef = null;
        }
        if (orderByTableRef != null && !orderByTableRef.equals(table)) {
            orderByTableRef = null;
        }
    }
    final Map<TableRef, TableRef> replacement = new HashMap<TableRef, TableRef>();
    for (Table table : join.getTables()) {
        if (table.isSubselect())
            continue;
        TableRef tableRef = table.getTableRef();
        List<ParseNode> groupBy = tableRef.equals(groupByTableRef) ? select.getGroupBy() : null;
        List<OrderByNode> orderBy = tableRef.equals(orderByTableRef) ? select.getOrderBy() : null;
        SelectStatement stmt = getSubqueryForOptimizedPlan(select.getHint(), table.getDynamicColumns(), tableRef, join.getColumnRefs(), table.getPreFiltersCombined(), groupBy, orderBy, table.isWildCardSelect(), select.hasSequence(), select.getUdfParseNodes());
        QueryPlan plan = statement.getConnection().getQueryServices().getOptimizer().optimize(statement, stmt);
        if (!plan.getTableRef().equals(tableRef)) {
            replacement.put(tableRef, plan.getTableRef());
        }
    }
    if (replacement.isEmpty())
        return select;
    TableNode from = select.getFrom();
    TableNode newFrom = from.accept(new TableNodeVisitor<TableNode>() {

        private TableRef resolveTable(String alias, TableName name) throws SQLException {
            if (alias != null)
                return resolver.resolveTable(null, alias);
            return resolver.resolveTable(name.getSchemaName(), name.getTableName());
        }

        private TableName getReplacedTableName(TableRef tableRef) {
            String schemaName = tableRef.getTable().getSchemaName().getString();
            return TableName.create(schemaName.length() == 0 ? null : schemaName, tableRef.getTable().getTableName().getString());
        }

        @Override
        public TableNode visit(BindTableNode boundTableNode) throws SQLException {
            TableRef tableRef = resolveTable(boundTableNode.getAlias(), boundTableNode.getName());
            TableRef replaceRef = replacement.get(tableRef);
            if (replaceRef == null)
                return boundTableNode;
            String alias = boundTableNode.getAlias();
            return NODE_FACTORY.bindTable(alias == null ? null : '"' + alias + '"', getReplacedTableName(replaceRef));
        }

        @Override
        public TableNode visit(JoinTableNode joinNode) throws SQLException {
            TableNode lhs = joinNode.getLHS();
            TableNode rhs = joinNode.getRHS();
            TableNode lhsReplace = lhs.accept(this);
            TableNode rhsReplace = rhs.accept(this);
            if (lhs == lhsReplace && rhs == rhsReplace)
                return joinNode;
            return NODE_FACTORY.join(joinNode.getType(), lhsReplace, rhsReplace, joinNode.getOnNode(), joinNode.isSingleValueOnly());
        }

        @Override
        public TableNode visit(NamedTableNode namedTableNode) throws SQLException {
            TableRef tableRef = resolveTable(namedTableNode.getAlias(), namedTableNode.getName());
            TableRef replaceRef = replacement.get(tableRef);
            if (replaceRef == null)
                return namedTableNode;
            String alias = namedTableNode.getAlias();
            return NODE_FACTORY.namedTable(alias == null ? null : '"' + alias + '"', getReplacedTableName(replaceRef), namedTableNode.getDynamicColumns());
        }

        @Override
        public TableNode visit(DerivedTableNode subselectNode) throws SQLException {
            return subselectNode;
        }
    });
    SelectStatement indexSelect = IndexStatementRewriter.translate(NODE_FACTORY.select(select, newFrom), resolver, replacement);
    for (TableRef indexTableRef : replacement.values()) {
        // replace expressions with corresponding matching columns for functional indexes
        indexSelect = ParseNodeRewriter.rewrite(indexSelect, new IndexExpressionParseNodeRewriter(indexTableRef.getTable(), indexTableRef.getTableAlias(), statement.getConnection(), indexSelect.getUdfParseNodes()));
    }
    return indexSelect;
}
Also used : Set(java.util.Set) HashSet(java.util.HashSet) HashMap(java.util.HashMap) SQLException(java.sql.SQLException) OrderByNode(org.apache.phoenix.parse.OrderByNode) SelectStatement(org.apache.phoenix.parse.SelectStatement) BindTableNode(org.apache.phoenix.parse.BindTableNode) UDFParseNode(org.apache.phoenix.parse.UDFParseNode) ComparisonParseNode(org.apache.phoenix.parse.ComparisonParseNode) ColumnParseNode(org.apache.phoenix.parse.ColumnParseNode) AndParseNode(org.apache.phoenix.parse.AndParseNode) WildcardParseNode(org.apache.phoenix.parse.WildcardParseNode) TableWildcardParseNode(org.apache.phoenix.parse.TableWildcardParseNode) EqualParseNode(org.apache.phoenix.parse.EqualParseNode) ParseNode(org.apache.phoenix.parse.ParseNode) PTable(org.apache.phoenix.schema.PTable) JoinTableNode(org.apache.phoenix.parse.JoinTableNode) TableName(org.apache.phoenix.parse.TableName) DerivedTableNode(org.apache.phoenix.parse.DerivedTableNode) TableNode(org.apache.phoenix.parse.TableNode) JoinTableNode(org.apache.phoenix.parse.JoinTableNode) NamedTableNode(org.apache.phoenix.parse.NamedTableNode) BindTableNode(org.apache.phoenix.parse.BindTableNode) DerivedTableNode(org.apache.phoenix.parse.DerivedTableNode) NamedTableNode(org.apache.phoenix.parse.NamedTableNode) Scan(org.apache.hadoop.hbase.client.Scan) IndexExpressionParseNodeRewriter(org.apache.phoenix.parse.IndexExpressionParseNodeRewriter) TableRef(org.apache.phoenix.schema.TableRef)
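
The parser-level entry points used by optimize() are easy to exercise on their own. Below is a minimal, hypothetical sketch (the table and column names are made up) that parses a join query with SQLParser and walks the same List<ParseNode> returned by SelectStatement.getGroupBy() that optimize() feeds to its ColumnRefParseNodeVisitor.

import java.sql.SQLException;
import java.util.List;

import org.apache.phoenix.parse.ParseNode;
import org.apache.phoenix.parse.SQLParser;
import org.apache.phoenix.parse.SelectStatement;

public class GroupByParseNodeDump {
    public static void main(String[] args) throws SQLException {
        // Parse a join query; T1 and T2 are hypothetical tables.
        SelectStatement select = new SQLParser(
                "SELECT a.k, COUNT(*) FROM T1 a JOIN T2 b ON a.k = b.k GROUP BY a.k").parseQuery();
        // getGroupBy() returns the List<ParseNode> that optimize() hands to a
        // ColumnRefParseNodeVisitor to decide which table the GROUP BY targets.
        List<ParseNode> groupBy = select.getGroupBy();
        for (ParseNode node : groupBy) {
            System.out.println(node);
        }
    }
}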

Example 47 with ParseNode

Use of org.apache.phoenix.parse.ParseNode in project phoenix by apache.

The class CreateIndexCompiler, method compile.

public MutationPlan compile(final CreateIndexStatement create) throws SQLException {
    final PhoenixConnection connection = statement.getConnection();
    final ColumnResolver resolver = FromCompiler.getResolver(create, connection, create.getUdfParseNodes());
    Scan scan = new Scan();
    final StatementContext context = new StatementContext(statement, resolver, scan, new SequenceManager(statement));
    ExpressionCompiler expressionCompiler = new ExpressionCompiler(context);
    List<ParseNode> splitNodes = create.getSplitNodes();
    if (create.getIndexType() == IndexType.LOCAL) {
        if (!splitNodes.isEmpty()) {
            throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_SPLIT_LOCAL_INDEX).build().buildException();
        }
        List<Pair<String, Object>> list = create.getProps() != null ? create.getProps().get("") : null;
        if (list != null) {
            for (Pair<String, Object> pair : list) {
                if (pair.getFirst().equals(PhoenixDatabaseMetaData.SALT_BUCKETS)) {
                    throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_SALT_LOCAL_INDEX).build().buildException();
                }
            }
        }
    }
    final byte[][] splits = new byte[splitNodes.size()][];
    for (int i = 0; i < splits.length; i++) {
        ParseNode node = splitNodes.get(i);
        if (!node.isStateless()) {
            throw new SQLExceptionInfo.Builder(SQLExceptionCode.SPLIT_POINT_NOT_CONSTANT).setMessage("Node: " + node).build().buildException();
        }
        LiteralExpression expression = (LiteralExpression) node.accept(expressionCompiler);
        splits[i] = expression.getBytes();
    }
    final MetaDataClient client = new MetaDataClient(connection);
    return new BaseMutationPlan(context, operation) {

        @Override
        public MutationState execute() throws SQLException {
            return client.createIndex(create, splits);
        }

        @Override
        public ExplainPlan getExplainPlan() throws SQLException {
            return new ExplainPlan(Collections.singletonList("CREATE INDEX"));
        }
    };
}
Also used : MetaDataClient(org.apache.phoenix.schema.MetaDataClient) PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) LiteralExpression(org.apache.phoenix.expression.LiteralExpression) ParseNode(org.apache.phoenix.parse.ParseNode) Scan(org.apache.hadoop.hbase.client.Scan) SQLExceptionInfo(org.apache.phoenix.exception.SQLExceptionInfo) Pair(org.apache.hadoop.hbase.util.Pair)
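
The split-point handling above is driven entirely by the SQL that creates the index. Here is a hedged JDBC-level sketch (the connection URL, table, and column names are placeholders) of the kind of statement that reaches this compiler.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class CreateIndexSplitSketch {
    public static void main(String[] args) throws Exception {
        // jdbc:phoenix:localhost is a placeholder quorum; MY_TABLE and V are made-up names.
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
             Statement stmt = conn.createStatement()) {
            // Each literal in SPLIT ON arrives in compile() as a ParseNode from
            // create.getSplitNodes(); it must be stateless, otherwise
            // SPLIT_POINT_NOT_CONSTANT is raised.
            stmt.execute("CREATE INDEX IDX_V ON MY_TABLE (V) SPLIT ON ('a', 'm', 's')");
            // A LOCAL index combined with SPLIT ON would instead hit CANNOT_SPLIT_LOCAL_INDEX.
        }
    }
}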

Example 48 with ParseNode

Use of org.apache.phoenix.parse.ParseNode in project phoenix by apache.

The class CreateTableCompiler, method compile.

public MutationPlan compile(CreateTableStatement create) throws SQLException {
    final PhoenixConnection connection = statement.getConnection();
    ColumnResolver resolver = FromCompiler.getResolverForCreation(create, connection);
    PTableType type = create.getTableType();
    PhoenixConnection connectionToBe = connection;
    PTable parentToBe = null;
    ViewType viewTypeToBe = null;
    Scan scan = new Scan();
    final StatementContext context = new StatementContext(statement, resolver, scan, new SequenceManager(statement));
    // TODO: support any statement for a VIEW instead of just a WHERE clause
    ParseNode whereNode = create.getWhereClause();
    String viewStatementToBe = null;
    byte[][] viewColumnConstantsToBe = null;
    BitSet isViewColumnReferencedToBe = null;
    // Disallow table creation if any column family name contains the local index
    // column family prefix.
    // Also validate the default value expressions.
    List<ColumnDef> columnDefs = create.getColumnDefs();
    List<ColumnDef> overideColumnDefs = null;
    PrimaryKeyConstraint pkConstraint = create.getPrimaryKeyConstraint();
    for (int i = 0; i < columnDefs.size(); i++) {
        ColumnDef columnDef = columnDefs.get(i);
        if (columnDef.getColumnDefName().getFamilyName() != null && columnDef.getColumnDefName().getFamilyName().contains(QueryConstants.LOCAL_INDEX_COLUMN_FAMILY_PREFIX)) {
            throw new SQLExceptionInfo.Builder(SQLExceptionCode.UNALLOWED_COLUMN_FAMILY).build().buildException();
        }
        // False means we do not need the default (because it evaluated to null)
        if (!columnDef.validateDefault(context, pkConstraint)) {
            if (overideColumnDefs == null) {
                overideColumnDefs = new ArrayList<>(columnDefs);
            }
            overideColumnDefs.set(i, new ColumnDef(columnDef, null));
        }
    }
    if (overideColumnDefs != null) {
        create = new CreateTableStatement(create, overideColumnDefs);
    }
    final CreateTableStatement finalCreate = create;
    if (type == PTableType.VIEW) {
        TableRef tableRef = resolver.getTables().get(0);
        int nColumns = tableRef.getTable().getColumns().size();
        isViewColumnReferencedToBe = new BitSet(nColumns);
        // Used to track column references in a view
        ExpressionCompiler expressionCompiler = new ColumnTrackingExpressionCompiler(context, isViewColumnReferencedToBe);
        parentToBe = tableRef.getTable();
        viewTypeToBe = parentToBe.getViewType() == ViewType.MAPPED ? ViewType.MAPPED : ViewType.UPDATABLE;
        if (whereNode == null) {
            viewStatementToBe = parentToBe.getViewStatement();
        } else {
            whereNode = StatementNormalizer.normalize(whereNode, resolver);
            if (whereNode.isStateless()) {
                throw new SQLExceptionInfo.Builder(SQLExceptionCode.VIEW_WHERE_IS_CONSTANT).build().buildException();
            }
            // If our parent has a VIEW statement, combine it with this one
            if (parentToBe.getViewStatement() != null) {
                SelectStatement select = new SQLParser(parentToBe.getViewStatement()).parseQuery().combine(whereNode);
                whereNode = select.getWhere();
            }
            Expression where = whereNode.accept(expressionCompiler);
            if (where != null && !LiteralExpression.isTrue(where)) {
                TableName baseTableName = create.getBaseTableName();
                StringBuilder buf = new StringBuilder();
                whereNode.toSQL(resolver, buf);
                viewStatementToBe = QueryUtil.getViewStatement(baseTableName.getSchemaName(), baseTableName.getTableName(), buf.toString());
            }
            if (viewTypeToBe != ViewType.MAPPED) {
                Long scn = connection.getSCN();
                // If there is no SCN on this connection and the parent table is not
                // transactional, wrap the connection so the view is created as of the
                // parent table's resolved timestamp, while any new table is still added
                // to the original connection's metadata cache.
                connectionToBe = (scn != null || tableRef.getTable().isTransactional())
                        ? connection
                        : new PhoenixConnection(new DelegateConnectionQueryServices(connection.getQueryServices()) {

                            @Override
                            public void addTable(PTable table, long resolvedTime) throws SQLException {
                                connection.addTable(table, resolvedTime);
                            }
                        }, connection, tableRef.getTimeStamp() + 1);
                viewColumnConstantsToBe = new byte[nColumns][];
                ViewWhereExpressionVisitor visitor = new ViewWhereExpressionVisitor(parentToBe, viewColumnConstantsToBe);
                where.accept(visitor);
                // If view is not updatable, viewColumnConstants should be empty. We will still
                // inherit our parent viewConstants, but we have no additional ones.
                viewTypeToBe = visitor.isUpdatable() ? ViewType.UPDATABLE : ViewType.READ_ONLY;
                if (viewTypeToBe != ViewType.UPDATABLE) {
                    viewColumnConstantsToBe = null;
                }
            }
        }
    }
    final ViewType viewType = viewTypeToBe;
    final String viewStatement = viewStatementToBe;
    final byte[][] viewColumnConstants = viewColumnConstantsToBe;
    final BitSet isViewColumnReferenced = isViewColumnReferencedToBe;
    List<ParseNode> splitNodes = create.getSplitNodes();
    final byte[][] splits = new byte[splitNodes.size()][];
    ImmutableBytesWritable ptr = context.getTempPtr();
    ExpressionCompiler expressionCompiler = new ExpressionCompiler(context);
    for (int i = 0; i < splits.length; i++) {
        ParseNode node = splitNodes.get(i);
        if (node instanceof BindParseNode) {
            context.getBindManager().addParamMetaData((BindParseNode) node, VARBINARY_DATUM);
        }
        if (node.isStateless()) {
            Expression expression = node.accept(expressionCompiler);
            if (expression.evaluate(null, ptr)) {
                splits[i] = ByteUtil.copyKeyBytesIfNecessary(ptr);
                continue;
            }
        }
        throw new SQLExceptionInfo.Builder(SQLExceptionCode.SPLIT_POINT_NOT_CONSTANT).setMessage("Node: " + node).build().buildException();
    }
    final MetaDataClient client = new MetaDataClient(connectionToBe);
    final PTable parent = parentToBe;
    return new BaseMutationPlan(context, operation) {

        @Override
        public MutationState execute() throws SQLException {
            try {
                return client.createTable(finalCreate, splits, parent, viewStatement, viewType, viewColumnConstants, isViewColumnReferenced);
            } finally {
                if (client.getConnection() != connection) {
                    client.getConnection().close();
                }
            }
        }

        @Override
        public ExplainPlan getExplainPlan() throws SQLException {
            return new ExplainPlan(Collections.singletonList("CREATE TABLE"));
        }
    };
}
Also used : PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) SQLException(java.sql.SQLException) DelegateConnectionQueryServices(org.apache.phoenix.query.DelegateConnectionQueryServices) PTable(org.apache.phoenix.schema.PTable) SelectStatement(org.apache.phoenix.parse.SelectStatement) BindParseNode(org.apache.phoenix.parse.BindParseNode) ColumnParseNode(org.apache.phoenix.parse.ColumnParseNode) ParseNode(org.apache.phoenix.parse.ParseNode) SQLExceptionInfo(org.apache.phoenix.exception.SQLExceptionInfo) MetaDataClient(org.apache.phoenix.schema.MetaDataClient) ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) PTableType(org.apache.phoenix.schema.PTableType) CreateTableStatement(org.apache.phoenix.parse.CreateTableStatement) BitSet(java.util.BitSet) ColumnDef(org.apache.phoenix.parse.ColumnDef) PrimaryKeyConstraint(org.apache.phoenix.parse.PrimaryKeyConstraint) PrimaryKeyConstraint(org.apache.phoenix.parse.PrimaryKeyConstraint) TableName(org.apache.phoenix.parse.TableName) SQLParser(org.apache.phoenix.parse.SQLParser) KeyValueColumnExpression(org.apache.phoenix.expression.KeyValueColumnExpression) Expression(org.apache.phoenix.expression.Expression) SingleCellColumnExpression(org.apache.phoenix.expression.SingleCellColumnExpression) LiteralExpression(org.apache.phoenix.expression.LiteralExpression) RowKeyColumnExpression(org.apache.phoenix.expression.RowKeyColumnExpression) IsNullExpression(org.apache.phoenix.expression.IsNullExpression) AndExpression(org.apache.phoenix.expression.AndExpression) ComparisonExpression(org.apache.phoenix.expression.ComparisonExpression) Scan(org.apache.hadoop.hbase.client.Scan) ViewType(org.apache.phoenix.schema.PTable.ViewType) TableRef(org.apache.phoenix.schema.TableRef) BindParseNode(org.apache.phoenix.parse.BindParseNode)
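
The VIEW branch above is easiest to see from the SQL side. A hedged sketch (connection URL and object names are placeholders) of a CREATE VIEW whose WHERE clause takes the ParseNode path in compile():

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class CreateViewWhereSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder connection string and object names.
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
             Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE TABLE BASE_T (K VARCHAR PRIMARY KEY, KIND VARCHAR, V BIGINT)");
            // The WHERE clause becomes the whereNode that compile() normalizes,
            // compiles to an Expression, and passes to ViewWhereExpressionVisitor.
            // Pinning KIND to a constant keeps the view UPDATABLE and records the
            // constant in viewColumnConstants.
            stmt.execute("CREATE VIEW V_A AS SELECT * FROM BASE_T WHERE KIND = 'A'");
            // A purely constant predicate such as WHERE 1 = 1 is stateless and
            // would fail with VIEW_WHERE_IS_CONSTANT.
        }
    }
}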

Example 49 with ParseNode

Use of org.apache.phoenix.parse.ParseNode in project phoenix by apache.

The class HashJoinPlan, method iterator.

@Override
public ResultIterator iterator(ParallelScanGrouper scanGrouper, Scan scan) throws SQLException {
    if (scan == null) {
        scan = delegate.getContext().getScan();
    }
    int count = subPlans.length;
    PhoenixConnection connection = getContext().getConnection();
    ConnectionQueryServices services = connection.getQueryServices();
    ExecutorService executor = services.getExecutor();
    List<Future<ServerCache>> futures = Lists.newArrayListWithExpectedSize(count);
    if (joinInfo != null) {
        hashClient = hashClient != null ? hashClient : new HashCacheClient(delegate.getContext().getConnection());
        firstJobEndTime = new AtomicLong(0);
        keyRangeExpressions = new CopyOnWriteArrayList<Expression>();
    }
    for (int i = 0; i < count; i++) {
        final int index = i;
        futures.add(executor.submit(new JobCallable<ServerCache>() {

            @Override
            public ServerCache call() throws Exception {
                ServerCache cache = subPlans[index].execute(HashJoinPlan.this);
                return cache;
            }

            @Override
            public Object getJobId() {
                return HashJoinPlan.this;
            }

            @Override
            public TaskExecutionMetricsHolder getTaskExecutionMetric() {
                return NO_OP_INSTANCE;
            }
        }));
    }
    SQLException firstException = null;
    for (int i = 0; i < count; i++) {
        try {
            ServerCache result = futures.get(i).get();
            if (result != null) {
                dependencies.add(result);
            }
            subPlans[i].postProcess(result, this);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            if (firstException == null) {
                firstException = new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION).setRootCause(e).setMessage("Sub plan [" + i + "] execution interrupted.").build().buildException();
            }
        } catch (ExecutionException e) {
            if (firstException == null) {
                firstException = new SQLException("Encountered exception in sub plan [" + i + "] execution.", e.getCause());
            }
        }
    }
    if (firstException != null) {
        SQLCloseables.closeAllQuietly(dependencies);
        throw firstException;
    }
    Expression postFilter = null;
    boolean hasKeyRangeExpressions = keyRangeExpressions != null && !keyRangeExpressions.isEmpty();
    if (recompileWhereClause || hasKeyRangeExpressions) {
        StatementContext context = delegate.getContext();
        PTable table = context.getCurrentTable().getTable();
        ParseNode viewWhere = table.getViewStatement() == null ? null : new SQLParser(table.getViewStatement()).parseQuery().getWhere();
        context.setResolver(FromCompiler.getResolverForQuery((SelectStatement) (delegate.getStatement()), delegate.getContext().getConnection()));
        if (recompileWhereClause) {
            postFilter = WhereCompiler.compile(delegate.getContext(), delegate.getStatement(), viewWhere, null);
        }
        if (hasKeyRangeExpressions) {
            WhereCompiler.compile(delegate.getContext(), delegate.getStatement(), viewWhere, keyRangeExpressions, true, null);
        }
    }
    if (joinInfo != null) {
        HashJoinInfo.serializeHashJoinIntoScan(scan, joinInfo);
    }
    ResultIterator iterator = joinInfo == null ? delegate.iterator(scanGrouper, scan) : ((BaseQueryPlan) delegate).iterator(dependencies, scanGrouper, scan);
    if (statement.getInnerSelectStatement() != null && postFilter != null) {
        iterator = new FilterResultIterator(iterator, postFilter);
    }
    return iterator;
}
Also used : ServerCache(org.apache.phoenix.cache.ServerCacheClient.ServerCache) PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) SQLException(java.sql.SQLException) FilterResultIterator(org.apache.phoenix.iterate.FilterResultIterator) ResultIterator(org.apache.phoenix.iterate.ResultIterator) JobCallable(org.apache.phoenix.job.JobManager.JobCallable) HashCacheClient(org.apache.phoenix.join.HashCacheClient) PTable(org.apache.phoenix.schema.PTable) StatementContext(org.apache.phoenix.compile.StatementContext) FilterResultIterator(org.apache.phoenix.iterate.FilterResultIterator) SelectStatement(org.apache.phoenix.parse.SelectStatement) AtomicLong(java.util.concurrent.atomic.AtomicLong) Expression(org.apache.phoenix.expression.Expression) LiteralExpression(org.apache.phoenix.expression.LiteralExpression) InListExpression(org.apache.phoenix.expression.InListExpression) RowValueConstructorExpression(org.apache.phoenix.expression.RowValueConstructorExpression) SQLParser(org.apache.phoenix.parse.SQLParser) ExecutorService(java.util.concurrent.ExecutorService) Future(java.util.concurrent.Future) ParseNode(org.apache.phoenix.parse.ParseNode) ExecutionException(java.util.concurrent.ExecutionException) ConnectionQueryServices(org.apache.phoenix.query.ConnectionQueryServices)
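
The fan-out/collect pattern in iterator() is plain java.util.concurrent. The sketch below reproduces it outside of Phoenix with made-up sub plans, to show the submit-all-then-join-in-order structure and the firstException bookkeeping.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class ParallelSubPlanSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        String[] subPlans = {"hash-cache-0", "hash-cache-1", "hash-cache-2"};
        List<Future<String>> futures = new ArrayList<>();

        // Submit every sub plan up front, as iterator() does, so all hash caches build concurrently.
        for (String plan : subPlans) {
            futures.add(executor.submit((Callable<String>) () -> "built " + plan));
        }

        // Join the futures in submission order, keeping only the first failure,
        // mirroring the firstException handling in iterator().
        Exception firstException = null;
        for (Future<String> future : futures) {
            try {
                System.out.println(future.get());
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                if (firstException == null) firstException = e;
            } catch (ExecutionException e) {
                if (firstException == null) firstException = e;
            }
        }
        executor.shutdown();
        if (firstException != null) throw firstException;
    }
}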

Aggregations

ParseNode (org.apache.phoenix.parse.ParseNode): 49
ColumnParseNode (org.apache.phoenix.parse.ColumnParseNode): 34
Expression (org.apache.phoenix.expression.Expression): 23
AndParseNode (org.apache.phoenix.parse.AndParseNode): 23
ComparisonParseNode (org.apache.phoenix.parse.ComparisonParseNode): 22
LiteralExpression (org.apache.phoenix.expression.LiteralExpression): 20
LiteralParseNode (org.apache.phoenix.parse.LiteralParseNode): 19
SubqueryParseNode (org.apache.phoenix.parse.SubqueryParseNode): 18
AliasedNode (org.apache.phoenix.parse.AliasedNode): 16
ExistsParseNode (org.apache.phoenix.parse.ExistsParseNode): 15
RowValueConstructorParseNode (org.apache.phoenix.parse.RowValueConstructorParseNode): 15
SelectStatement (org.apache.phoenix.parse.SelectStatement): 14
UDFParseNode (org.apache.phoenix.parse.UDFParseNode): 14
BindParseNode (org.apache.phoenix.parse.BindParseNode): 13
RowKeyColumnExpression (org.apache.phoenix.expression.RowKeyColumnExpression): 12
FunctionParseNode (org.apache.phoenix.parse.FunctionParseNode): 12
SequenceValueParseNode (org.apache.phoenix.parse.SequenceValueParseNode): 12
AndExpression (org.apache.phoenix.expression.AndExpression): 11
CoerceExpression (org.apache.phoenix.expression.CoerceExpression): 11
RowValueConstructorExpression (org.apache.phoenix.expression.RowValueConstructorExpression): 11