Example 1 with JoinType

Use of org.apache.phoenix.parse.JoinTableNode.JoinType in project phoenix by apache.

From the class QueryCompiler, method compileJoinQuery:

/*
     * Call compileJoinQuery() for join queries recursively down to the leaf JoinTable nodes.
     * This matches the input JoinTable node against patterns in the following order:
     * 1. A (leaf JoinTable node, which can be a named table reference or a subquery of any kind.)
     *    Returns the compilation result of a single table scan or of an independent subquery.
     * 2. Matching either of (when hint USE_SORT_MERGE_JOIN not specified):
     *        1) A LEFT/INNER JOIN B
     *        2) A LEFT/INNER JOIN B (LEFT/INNER JOIN C)+, if hint NO_STAR_JOIN not specified
     *        where A can be a named table reference or a flat subquery, and B, C, ... can be a named
     *        table reference, a sub-join or a subquery of any kind.
     *    Returns a HashJoinPlan{scan: A, hash: B, C, ...}.
     * 3. Matching pattern:
     *        A RIGHT/INNER JOIN B (when hint USE_SORT_MERGE_JOIN not specified)
     *        where B can be a named table reference or a flat subquery, and A can be a named table
     *        reference, a sub-join or a subquery of any kind.
     *    Returns a HashJoinPlan{scan: B, hash: A}.
     *    NOTE that "A LEFT/RIGHT/INNER/FULL JOIN B RIGHT/INNER JOIN C" is viewed as
     *    "(A LEFT/RIGHT/INNER/FULL JOIN B) RIGHT/INNER JOIN C" here, which means the left part in
     *    parentheses is considered a sub-join.
     * 4. All the rest that do not qualify for previous patterns or conditions, including FULL joins.
     *    Returns a SortMergeJoinPlan, the sorting part of which is pushed down to the JoinTable nodes
     *    of both sides as order-by clauses.
     * NOTE that SEMI or ANTI joins are treated the same way as LEFT joins in JoinTable pattern matching.
     *    
     * If no join algorithm hint is provided, according to the above compilation process, a join query 
     * plan can probably consist of both HashJoinPlan and SortMergeJoinPlan which may enclose each other.
     * TODO 1) Use table statistics to guide the choice of join plans.
     *      2) Make it possible to hint a certain join algorithm for a specific join step.
     */
@SuppressWarnings("unchecked")
protected QueryPlan compileJoinQuery(StatementContext context, List<Object> binds, JoinTable joinTable, boolean asSubquery, boolean projectPKColumns, List<OrderByNode> orderBy) throws SQLException {
    byte[] emptyByteArray = new byte[0];
    List<JoinSpec> joinSpecs = joinTable.getJoinSpecs();
    if (joinSpecs.isEmpty()) {
        Table table = joinTable.getTable();
        SelectStatement subquery = table.getAsSubquery(orderBy);
        if (!table.isSubselect()) {
            context.setCurrentTable(table.getTableRef());
            PTable projectedTable = table.createProjectedTable(!projectPKColumns, context);
            TupleProjector projector = new TupleProjector(projectedTable);
            TupleProjector.serializeProjectorIntoScan(context.getScan(), projector);
            context.setResolver(FromCompiler.getResolverForProjectedTable(projectedTable, context.getConnection(), subquery.getUdfParseNodes()));
            table.projectColumns(context.getScan());
            return compileSingleFlatQuery(context, subquery, binds, asSubquery, !asSubquery, null, projectPKColumns ? projector : null, true);
        }
        QueryPlan plan = compileSubquery(subquery, false);
        PTable projectedTable = table.createProjectedTable(plan.getProjector());
        context.setResolver(FromCompiler.getResolverForProjectedTable(projectedTable, context.getConnection(), subquery.getUdfParseNodes()));
        return new TupleProjectionPlan(plan, new TupleProjector(plan.getProjector()), table.compilePostFilterExpression(context));
    }
    boolean[] starJoinVector;
    if (!this.useSortMergeJoin && (starJoinVector = joinTable.getStarJoinVector()) != null) {
        Table table = joinTable.getTable();
        PTable initialProjectedTable;
        TableRef tableRef;
        SelectStatement query;
        TupleProjector tupleProjector;
        if (!table.isSubselect()) {
            context.setCurrentTable(table.getTableRef());
            initialProjectedTable = table.createProjectedTable(!projectPKColumns, context);
            tableRef = table.getTableRef();
            table.projectColumns(context.getScan());
            query = joinTable.getAsSingleSubquery(table.getAsSubquery(orderBy), asSubquery);
            tupleProjector = new TupleProjector(initialProjectedTable);
        } else {
            SelectStatement subquery = table.getAsSubquery(orderBy);
            QueryPlan plan = compileSubquery(subquery, false);
            initialProjectedTable = table.createProjectedTable(plan.getProjector());
            tableRef = plan.getTableRef();
            context.getScan().setFamilyMap(plan.getContext().getScan().getFamilyMap());
            query = joinTable.getAsSingleSubquery((SelectStatement) plan.getStatement(), asSubquery);
            tupleProjector = new TupleProjector(plan.getProjector());
        }
        context.setCurrentTable(tableRef);
        PTable projectedTable = initialProjectedTable;
        int count = joinSpecs.size();
        ImmutableBytesPtr[] joinIds = new ImmutableBytesPtr[count];
        List<Expression>[] joinExpressions = new List[count];
        JoinType[] joinTypes = new JoinType[count];
        PTable[] tables = new PTable[count];
        int[] fieldPositions = new int[count];
        StatementContext[] subContexts = new StatementContext[count];
        QueryPlan[] subPlans = new QueryPlan[count];
        HashSubPlan[] hashPlans = new HashSubPlan[count];
        fieldPositions[0] = projectedTable.getColumns().size() - projectedTable.getPKColumns().size();
        for (int i = 0; i < count; i++) {
            JoinSpec joinSpec = joinSpecs.get(i);
            Scan subScan = ScanUtil.newScan(originalScan);
            subContexts[i] = new StatementContext(statement, context.getResolver(), subScan, new SequenceManager(statement));
            subPlans[i] = compileJoinQuery(subContexts[i], binds, joinSpec.getJoinTable(), true, true, null);
            boolean hasPostReference = joinSpec.getJoinTable().hasPostReference();
            if (hasPostReference) {
                tables[i] = subContexts[i].getResolver().getTables().get(0).getTable();
                projectedTable = JoinCompiler.joinProjectedTables(projectedTable, tables[i], joinSpec.getType());
            } else {
                tables[i] = null;
            }
        }
        for (int i = 0; i < count; i++) {
            JoinSpec joinSpec = joinSpecs.get(i);
            context.setResolver(FromCompiler.getResolverForProjectedTable(projectedTable, context.getConnection(), query.getUdfParseNodes()));
            // place-holder
            joinIds[i] = new ImmutableBytesPtr(emptyByteArray);
            Pair<List<Expression>, List<Expression>> joinConditions = joinSpec.compileJoinConditions(context, subContexts[i], true);
            joinExpressions[i] = joinConditions.getFirst();
            List<Expression> hashExpressions = joinConditions.getSecond();
            Pair<Expression, Expression> keyRangeExpressions = new Pair<Expression, Expression>(null, null);
            boolean optimized = getKeyExpressionCombinations(keyRangeExpressions, context, joinTable.getStatement(), tableRef, joinSpec.getType(), joinExpressions[i], hashExpressions);
            Expression keyRangeLhsExpression = keyRangeExpressions.getFirst();
            Expression keyRangeRhsExpression = keyRangeExpressions.getSecond();
            joinTypes[i] = joinSpec.getType();
            if (i < count - 1) {
                fieldPositions[i + 1] = fieldPositions[i] + (tables[i] == null ? 0 : (tables[i].getColumns().size() - tables[i].getPKColumns().size()));
            }
            hashPlans[i] = new HashSubPlan(i, subPlans[i], optimized ? null : hashExpressions, joinSpec.isSingleValueOnly(), keyRangeLhsExpression, keyRangeRhsExpression);
        }
        TupleProjector.serializeProjectorIntoScan(context.getScan(), tupleProjector);
        QueryPlan plan = compileSingleFlatQuery(context, query, binds, asSubquery, !asSubquery && joinTable.isAllLeftJoin(), null, !table.isSubselect() && projectPKColumns ? tupleProjector : null, true);
        Expression postJoinFilterExpression = joinTable.compilePostFilterExpression(context, table);
        Integer limit = null;
        Integer offset = null;
        if (!query.isAggregate() && !query.isDistinct() && query.getOrderBy().isEmpty()) {
            limit = plan.getLimit();
            offset = plan.getOffset();
        }
        HashJoinInfo joinInfo = new HashJoinInfo(projectedTable, joinIds, joinExpressions, joinTypes, starJoinVector, tables, fieldPositions, postJoinFilterExpression, QueryUtil.getOffsetLimit(limit, offset));
        return HashJoinPlan.create(joinTable.getStatement(), plan, joinInfo, hashPlans);
    }
    JoinSpec lastJoinSpec = joinSpecs.get(joinSpecs.size() - 1);
    JoinType type = lastJoinSpec.getType();
    if (!this.useSortMergeJoin && (type == JoinType.Right || type == JoinType.Inner) && lastJoinSpec.getJoinTable().getJoinSpecs().isEmpty() && lastJoinSpec.getJoinTable().getTable().isFlat()) {
        JoinTable rhsJoinTable = lastJoinSpec.getJoinTable();
        Table rhsTable = rhsJoinTable.getTable();
        JoinTable lhsJoin = joinTable.getSubJoinTableWithoutPostFilters();
        Scan subScan = ScanUtil.newScan(originalScan);
        StatementContext lhsCtx = new StatementContext(statement, context.getResolver(), subScan, new SequenceManager(statement));
        QueryPlan lhsPlan = compileJoinQuery(lhsCtx, binds, lhsJoin, true, true, null);
        PTable rhsProjTable;
        TableRef rhsTableRef;
        SelectStatement rhs;
        TupleProjector tupleProjector;
        if (!rhsTable.isSubselect()) {
            context.setCurrentTable(rhsTable.getTableRef());
            rhsProjTable = rhsTable.createProjectedTable(!projectPKColumns, context);
            rhsTableRef = rhsTable.getTableRef();
            rhsTable.projectColumns(context.getScan());
            rhs = rhsJoinTable.getAsSingleSubquery(rhsTable.getAsSubquery(orderBy), asSubquery);
            tupleProjector = new TupleProjector(rhsProjTable);
        } else {
            SelectStatement subquery = rhsTable.getAsSubquery(orderBy);
            QueryPlan plan = compileSubquery(subquery, false);
            rhsProjTable = rhsTable.createProjectedTable(plan.getProjector());
            rhsTableRef = plan.getTableRef();
            context.getScan().setFamilyMap(plan.getContext().getScan().getFamilyMap());
            rhs = rhsJoinTable.getAsSingleSubquery((SelectStatement) plan.getStatement(), asSubquery);
            tupleProjector = new TupleProjector(plan.getProjector());
        }
        context.setCurrentTable(rhsTableRef);
        context.setResolver(FromCompiler.getResolverForProjectedTable(rhsProjTable, context.getConnection(), rhs.getUdfParseNodes()));
        ImmutableBytesPtr[] joinIds = new ImmutableBytesPtr[] { new ImmutableBytesPtr(emptyByteArray) };
        Pair<List<Expression>, List<Expression>> joinConditions = lastJoinSpec.compileJoinConditions(lhsCtx, context, true);
        List<Expression> joinExpressions = joinConditions.getSecond();
        List<Expression> hashExpressions = joinConditions.getFirst();
        boolean needsMerge = lhsJoin.hasPostReference();
        PTable lhsTable = needsMerge ? lhsCtx.getResolver().getTables().get(0).getTable() : null;
        int fieldPosition = needsMerge ? rhsProjTable.getColumns().size() - rhsProjTable.getPKColumns().size() : 0;
        PTable projectedTable = needsMerge ? JoinCompiler.joinProjectedTables(rhsProjTable, lhsTable, type == JoinType.Right ? JoinType.Left : type) : rhsProjTable;
        TupleProjector.serializeProjectorIntoScan(context.getScan(), tupleProjector);
        context.setResolver(FromCompiler.getResolverForProjectedTable(projectedTable, context.getConnection(), rhs.getUdfParseNodes()));
        QueryPlan rhsPlan = compileSingleFlatQuery(context, rhs, binds, asSubquery, !asSubquery && type == JoinType.Right, null, !rhsTable.isSubselect() && projectPKColumns ? tupleProjector : null, true);
        Expression postJoinFilterExpression = joinTable.compilePostFilterExpression(context, rhsTable);
        Integer limit = null;
        Integer offset = null;
        if (!rhs.isAggregate() && !rhs.isDistinct() && rhs.getOrderBy().isEmpty()) {
            limit = rhsPlan.getLimit();
            offset = rhsPlan.getOffset();
        }
        HashJoinInfo joinInfo = new HashJoinInfo(projectedTable, joinIds, new List[] { joinExpressions }, new JoinType[] { type == JoinType.Right ? JoinType.Left : type }, new boolean[] { true }, new PTable[] { lhsTable }, new int[] { fieldPosition }, postJoinFilterExpression, QueryUtil.getOffsetLimit(limit, offset));
        Pair<Expression, Expression> keyRangeExpressions = new Pair<Expression, Expression>(null, null);
        getKeyExpressionCombinations(keyRangeExpressions, context, joinTable.getStatement(), rhsTableRef, type, joinExpressions, hashExpressions);
        return HashJoinPlan.create(joinTable.getStatement(), rhsPlan, joinInfo, new HashSubPlan[] { new HashSubPlan(0, lhsPlan, hashExpressions, false, keyRangeExpressions.getFirst(), keyRangeExpressions.getSecond()) });
    }
    JoinTable lhsJoin = joinTable.getSubJoinTableWithoutPostFilters();
    JoinTable rhsJoin = lastJoinSpec.getJoinTable();
    if (type == JoinType.Right) {
        JoinTable temp = lhsJoin;
        lhsJoin = rhsJoin;
        rhsJoin = temp;
    }
    List<EqualParseNode> joinConditionNodes = lastJoinSpec.getOnConditions();
    List<OrderByNode> lhsOrderBy = Lists.<OrderByNode>newArrayListWithExpectedSize(joinConditionNodes.size());
    List<OrderByNode> rhsOrderBy = Lists.<OrderByNode>newArrayListWithExpectedSize(joinConditionNodes.size());
    for (EqualParseNode condition : joinConditionNodes) {
        lhsOrderBy.add(NODE_FACTORY.orderBy(type == JoinType.Right ? condition.getRHS() : condition.getLHS(), false, true));
        rhsOrderBy.add(NODE_FACTORY.orderBy(type == JoinType.Right ? condition.getLHS() : condition.getRHS(), false, true));
    }
    Scan lhsScan = ScanUtil.newScan(originalScan);
    StatementContext lhsCtx = new StatementContext(statement, context.getResolver(), lhsScan, new SequenceManager(statement));
    boolean preserveRowkey = !projectPKColumns && type != JoinType.Full;
    QueryPlan lhsPlan = compileJoinQuery(lhsCtx, binds, lhsJoin, true, !preserveRowkey, lhsOrderBy);
    PTable lhsProjTable = lhsCtx.getResolver().getTables().get(0).getTable();
    boolean isInRowKeyOrder = preserveRowkey && lhsPlan.getOrderBy().getOrderByExpressions().isEmpty();
    Scan rhsScan = ScanUtil.newScan(originalScan);
    StatementContext rhsCtx = new StatementContext(statement, context.getResolver(), rhsScan, new SequenceManager(statement));
    QueryPlan rhsPlan = compileJoinQuery(rhsCtx, binds, rhsJoin, true, true, rhsOrderBy);
    PTable rhsProjTable = rhsCtx.getResolver().getTables().get(0).getTable();
    Pair<List<Expression>, List<Expression>> joinConditions = lastJoinSpec.compileJoinConditions(type == JoinType.Right ? rhsCtx : lhsCtx, type == JoinType.Right ? lhsCtx : rhsCtx, false);
    List<Expression> lhsKeyExpressions = type == JoinType.Right ? joinConditions.getSecond() : joinConditions.getFirst();
    List<Expression> rhsKeyExpressions = type == JoinType.Right ? joinConditions.getFirst() : joinConditions.getSecond();
    boolean needsMerge = rhsJoin.hasPostReference();
    int fieldPosition = needsMerge ? lhsProjTable.getColumns().size() - lhsProjTable.getPKColumns().size() : 0;
    PTable projectedTable = needsMerge ? JoinCompiler.joinProjectedTables(lhsProjTable, rhsProjTable, type == JoinType.Right ? JoinType.Left : type) : lhsProjTable;
    ColumnResolver resolver = FromCompiler.getResolverForProjectedTable(projectedTable, context.getConnection(), joinTable.getStatement().getUdfParseNodes());
    TableRef tableRef = resolver.getTables().get(0);
    StatementContext subCtx = new StatementContext(statement, resolver, ScanUtil.newScan(originalScan), new SequenceManager(statement));
    subCtx.setCurrentTable(tableRef);
    QueryPlan innerPlan = new SortMergeJoinPlan(subCtx, joinTable.getStatement(), tableRef, type == JoinType.Right ? JoinType.Left : type, lhsPlan, rhsPlan, lhsKeyExpressions, rhsKeyExpressions, projectedTable, lhsProjTable, needsMerge ? rhsProjTable : null, fieldPosition, lastJoinSpec.isSingleValueOnly());
    context.setCurrentTable(tableRef);
    context.setResolver(resolver);
    TableNode from = NODE_FACTORY.namedTable(tableRef.getTableAlias(), NODE_FACTORY.table(tableRef.getTable().getSchemaName().getString(), tableRef.getTable().getTableName().getString()));
    ParseNode where = joinTable.getPostFiltersCombined();
    SelectStatement select = asSubquery ? NODE_FACTORY.select(from, joinTable.getStatement().getHint(), false, Collections.<AliasedNode>emptyList(), where, null, null, orderBy, null, null, 0, false, joinTable.getStatement().hasSequence(), Collections.<SelectStatement>emptyList(), joinTable.getStatement().getUdfParseNodes()) : NODE_FACTORY.select(joinTable.getStatement(), from, where);
    return compileSingleFlatQuery(context, select, binds, asSubquery, false, innerPlan, null, isInRowKeyOrder);
}
Also used : TupleProjector(org.apache.phoenix.execute.TupleProjector) TupleProjectionPlan(org.apache.phoenix.execute.TupleProjectionPlan) OrderByNode(org.apache.phoenix.parse.OrderByNode) PTable(org.apache.phoenix.schema.PTable) HashSubPlan(org.apache.phoenix.execute.HashJoinPlan.HashSubPlan) SelectStatement(org.apache.phoenix.parse.SelectStatement) SortMergeJoinPlan(org.apache.phoenix.execute.SortMergeJoinPlan) SubqueryParseNode(org.apache.phoenix.parse.SubqueryParseNode) EqualParseNode(org.apache.phoenix.parse.EqualParseNode) ParseNode(org.apache.phoenix.parse.ParseNode) List(java.util.List) ArrayList(java.util.ArrayList) Pair(org.apache.hadoop.hbase.util.Pair) Table(org.apache.phoenix.compile.JoinCompiler.Table) JoinTable(org.apache.phoenix.compile.JoinCompiler.JoinTable) ImmutableBytesPtr(org.apache.phoenix.hbase.index.util.ImmutableBytesPtr) JoinSpec(org.apache.phoenix.compile.JoinCompiler.JoinSpec) JoinType(org.apache.phoenix.parse.JoinTableNode.JoinType) AliasedNode(org.apache.phoenix.parse.AliasedNode) Hint(org.apache.phoenix.parse.HintNode.Hint) Expression(org.apache.phoenix.expression.Expression) LiteralExpression(org.apache.phoenix.expression.LiteralExpression) RowValueConstructorExpression(org.apache.phoenix.expression.RowValueConstructorExpression) HashJoinInfo(org.apache.phoenix.join.HashJoinInfo) TableNode(org.apache.phoenix.parse.TableNode) Scan(org.apache.hadoop.hbase.client.Scan) TableRef(org.apache.phoenix.schema.TableRef)
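
The comment at the top of compileJoinQuery() is the decision procedure: with no hint the compiler tries the hash-join patterns (2 and 3) first, and the USE_SORT_MERGE_JOIN hint forces the fall-through to pattern 4 and a SortMergeJoinPlan. The following is a minimal JDBC sketch of how a client could observe that choice with EXPLAIN; the orders/items tables, the localhost connection string and the JoinHintExplain class are assumptions for illustration, not part of the example above.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Hypothetical orders/items schema; the JDBC URL must point at a running cluster.
public class JoinHintExplain {

    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
             Statement stmt = conn.createStatement()) {
            // No hint: compileJoinQuery() matches pattern 2 above and typically returns a
            // HashJoinPlan that scans ORDERS and builds a hash cache over ITEMS.
            printPlan(stmt, "EXPLAIN SELECT o.order_id, i.name "
                    + "FROM orders o JOIN items i ON o.item_id = i.item_id");
            // With the hint, the hash-join patterns are skipped and the query falls
            // through to pattern 4, producing a SortMergeJoinPlan.
            printPlan(stmt, "EXPLAIN SELECT /*+ USE_SORT_MERGE_JOIN */ o.order_id, i.name "
                    + "FROM orders o JOIN items i ON o.item_id = i.item_id");
        }
    }

    private static void printPlan(Statement stmt, String sql) throws Exception {
        System.out.println(sql);
        try (ResultSet rs = stmt.executeQuery(sql)) {
            while (rs.next()) {
                System.out.println("  " + rs.getString(1));
            }
        }
    }
}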

Example 2 with JoinType

Use of org.apache.phoenix.parse.JoinTableNode.JoinType in project phoenix by apache.

From the class HashJoinRegionScanner, method processResults:

private void processResults(List<Cell> result, boolean hasBatchLimit) throws IOException {
    if (result.isEmpty())
        return;
    Tuple tuple = useQualifierAsListIndex ? new PositionBasedResultTuple(result) : new ResultTuple(Result.create(result));
    // For backward compatibility; in newer versions forceProjection() always returns true,
    // so the projection is applied to the probe tuple up front here.
    if (joinInfo.forceProjection()) {
        tuple = projector.projectResults(tuple, useNewValueColumnQualifier);
    }
    // TODO: fix below Scanner.next() and Scanner.nextRaw() methods as well.
    if (hasBatchLimit)
        throw new UnsupportedOperationException("Cannot support join operations in scans with limit");
    int count = joinInfo.getJoinIds().length;
    boolean cont = true;
    for (int i = 0; i < count; i++) {
        if (!(joinInfo.earlyEvaluation()[i]) || hashCaches[i] == null)
            continue;
        ImmutableBytesPtr key = TupleUtil.getConcatenatedValue(tuple, joinInfo.getJoinExpressions()[i]);
        tempTuples[i] = hashCaches[i].get(key);
        JoinType type = joinInfo.getJoinTypes()[i];
        if (((type == JoinType.Inner || type == JoinType.Semi) && tempTuples[i] == null) || (type == JoinType.Anti && tempTuples[i] != null)) {
            cont = false;
            break;
        }
    }
    if (cont) {
        if (projector == null) {
            int dup = 1;
            for (int i = 0; i < count; i++) {
                dup *= (tempTuples[i] == null ? 1 : tempTuples[i].size());
            }
            for (int i = 0; i < dup; i++) {
                resultQueue.offer(tuple);
            }
        } else {
            KeyValueSchema schema = joinInfo.getJoinedSchema();
            if (!joinInfo.forceProjection()) {
                // backward compatibility
                tuple = projector.projectResults(tuple, useNewValueColumnQualifier);
            }
            resultQueue.offer(tuple);
            for (int i = 0; i < count; i++) {
                boolean earlyEvaluation = joinInfo.earlyEvaluation()[i];
                JoinType type = joinInfo.getJoinTypes()[i];
                if (earlyEvaluation && (type == JoinType.Semi || type == JoinType.Anti))
                    continue;
                int j = resultQueue.size();
                while (j-- > 0) {
                    Tuple lhs = resultQueue.poll();
                    if (!earlyEvaluation) {
                        ImmutableBytesPtr key = TupleUtil.getConcatenatedValue(lhs, joinInfo.getJoinExpressions()[i]);
                        tempTuples[i] = hashCaches[i].get(key);
                        if (tempTuples[i] == null) {
                            if (type == JoinType.Inner || type == JoinType.Semi) {
                                continue;
                            } else if (type == JoinType.Anti) {
                                resultQueue.offer(lhs);
                                continue;
                            }
                        }
                    }
                    if (tempTuples[i] == null) {
                        Tuple joined = tempSrcBitSet[i] == ValueBitSet.EMPTY_VALUE_BITSET ? lhs : TupleProjector.mergeProjectedValue((ProjectedValueTuple) lhs, schema, tempDestBitSet, null, joinInfo.getSchemas()[i], tempSrcBitSet[i], joinInfo.getFieldPositions()[i], useNewValueColumnQualifier);
                        resultQueue.offer(joined);
                        continue;
                    }
                    for (Tuple t : tempTuples[i]) {
                        Tuple joined = tempSrcBitSet[i] == ValueBitSet.EMPTY_VALUE_BITSET ? lhs : TupleProjector.mergeProjectedValue((ProjectedValueTuple) lhs, schema, tempDestBitSet, t, joinInfo.getSchemas()[i], tempSrcBitSet[i], joinInfo.getFieldPositions()[i], useNewValueColumnQualifier);
                        resultQueue.offer(joined);
                    }
                }
            }
        }
        // apply post-join filter
        Expression postFilter = joinInfo.getPostJoinFilterExpression();
        if (postFilter != null) {
            for (Iterator<Tuple> iter = resultQueue.iterator(); iter.hasNext(); ) {
                Tuple t = iter.next();
                postFilter.reset();
                ImmutableBytesPtr tempPtr = new ImmutableBytesPtr();
                try {
                    if (!postFilter.evaluate(t, tempPtr)) {
                        iter.remove();
                        continue;
                    }
                } catch (IllegalDataException e) {
                    iter.remove();
                    continue;
                }
                Boolean b = (Boolean) postFilter.getDataType().toObject(tempPtr);
                if (!b.booleanValue()) {
                    iter.remove();
                }
            }
        }
    }
}
Also used : PositionBasedResultTuple(org.apache.phoenix.schema.tuple.PositionBasedResultTuple) ResultTuple(org.apache.phoenix.schema.tuple.ResultTuple) ImmutableBytesPtr(org.apache.phoenix.hbase.index.util.ImmutableBytesPtr) JoinType(org.apache.phoenix.parse.JoinTableNode.JoinType) ProjectedValueTuple(org.apache.phoenix.execute.TupleProjector.ProjectedValueTuple) Expression(org.apache.phoenix.expression.Expression) KeyValueSchema(org.apache.phoenix.schema.KeyValueSchema) Tuple(org.apache.phoenix.schema.tuple.Tuple) IllegalDataException(org.apache.phoenix.schema.IllegalDataException)
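
The core of processResults() is the queue-based expansion: for each join, every tuple already in resultQueue is polled and re-offered once per matching tuple from that join's hash cache, so the queue ends up holding the cross product of all matches before the post-join filter prunes it. Below is a standalone sketch of just that expansion pattern, using plain strings in place of Phoenix tuples; the CrossProductSketch class and its sample data are invented for illustration.

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;

// A minimal, Phoenix-free sketch of the queue-based expansion in processResults():
// each joined hash cache contributes a list of matches, and every row already in the
// queue is replaced by one row per match (the inner while loop in the method above).
public class CrossProductSketch {
    public static void main(String[] args) {
        Deque<String> resultQueue = new ArrayDeque<>();
        resultQueue.offer("probe-row");

        // Matches found in each hash cache for this probe row (hypothetical data).
        List<List<String>> matchesPerJoin = Arrays.asList(
                Arrays.asList("a1", "a2"),   // first join: two matches
                Arrays.asList("b1"));        // second join: one match

        for (List<String> matches : matchesPerJoin) {
            int j = resultQueue.size();
            while (j-- > 0) {                // re-expand every row queued so far
                String lhs = resultQueue.poll();
                for (String rhs : matches) {
                    resultQueue.offer(lhs + "+" + rhs);
                }
            }
        }
        // Prints the 2 x 1 = 2 combined rows: probe-row+a1+b1 and probe-row+a2+b1.
        resultQueue.forEach(System.out::println);
    }
}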

Example 3 with JoinType

Use of org.apache.phoenix.parse.JoinTableNode.JoinType in project phoenix by apache.

From the class SubqueryRewriter, method visitLeave:

@Override
public ParseNode visitLeave(ComparisonParseNode node, List<ParseNode> l) throws SQLException {
    boolean isTopNode = topNode == node;
    if (isTopNode) {
        topNode = null;
    }
    ParseNode secondChild = l.get(1);
    if (!(secondChild instanceof SubqueryParseNode)) {
        return super.visitLeave(node, l);
    }
    SubqueryParseNode subqueryNode = (SubqueryParseNode) secondChild;
    SelectStatement subquery = fixSubqueryStatement(subqueryNode.getSelectNode());
    String rhsTableAlias = ParseNodeFactory.createTempAlias();
    JoinConditionExtractor conditionExtractor = new JoinConditionExtractor(subquery, resolver, connection, rhsTableAlias);
    ParseNode where = subquery.getWhere() == null ? null : subquery.getWhere().accept(conditionExtractor);
    if (where == subquery.getWhere()) {
        // non-correlated comparison subquery, add LIMIT 2, expectSingleRow = true
        subquery = NODE_FACTORY.select(subquery, NODE_FACTORY.limit(NODE_FACTORY.literal(2)));
        subqueryNode = NODE_FACTORY.subquery(subquery, true);
        l = Lists.newArrayList(l.get(0), subqueryNode);
        node = NODE_FACTORY.comparison(node.getFilterOp(), l.get(0), l.get(1));
        return super.visitLeave(node, l);
    }
    ParseNode rhsNode = null;
    boolean isGroupby = !subquery.getGroupBy().isEmpty();
    boolean isAggregate = subquery.isAggregate();
    List<AliasedNode> aliasedNodes = subquery.getSelect();
    if (aliasedNodes.size() == 1) {
        rhsNode = aliasedNodes.get(0).getNode();
    } else {
        List<ParseNode> nodes = Lists.<ParseNode>newArrayListWithExpectedSize(aliasedNodes.size());
        for (AliasedNode aliasedNode : aliasedNodes) {
            nodes.add(aliasedNode.getNode());
        }
        rhsNode = NODE_FACTORY.rowValueConstructor(nodes);
    }
    List<AliasedNode> additionalSelectNodes = conditionExtractor.getAdditionalSelectNodes();
    List<AliasedNode> selectNodes = Lists.newArrayListWithExpectedSize(additionalSelectNodes.size() + 1);
    selectNodes.add(NODE_FACTORY.aliasedNode(ParseNodeFactory.createTempAlias(), rhsNode));
    selectNodes.addAll(additionalSelectNodes);
    if (!isAggregate) {
        subquery = NODE_FACTORY.select(subquery, subquery.isDistinct(), selectNodes, where);
    } else {
        List<ParseNode> groupbyNodes = Lists.newArrayListWithExpectedSize(additionalSelectNodes.size() + subquery.getGroupBy().size());
        for (AliasedNode aliasedNode : additionalSelectNodes) {
            groupbyNodes.add(aliasedNode.getNode());
        }
        groupbyNodes.addAll(subquery.getGroupBy());
        subquery = NODE_FACTORY.select(subquery, subquery.isDistinct(), selectNodes, where, groupbyNodes, true);
    }
    ParseNode onNode = conditionExtractor.getJoinCondition();
    TableNode rhsTable = NODE_FACTORY.derivedTable(rhsTableAlias, subquery);
    JoinType joinType = isTopNode ? JoinType.Inner : JoinType.Left;
    ParseNode ret = NODE_FACTORY.comparison(node.getFilterOp(), l.get(0), NODE_FACTORY.column(NODE_FACTORY.table(null, rhsTableAlias), selectNodes.get(0).getAlias(), null));
    tableNode = NODE_FACTORY.join(joinType, tableNode, rhsTable, onNode, !isAggregate || isGroupby);
    return ret;
}
Also used : SelectStatement(org.apache.phoenix.parse.SelectStatement) SubqueryParseNode(org.apache.phoenix.parse.SubqueryParseNode) TableNode(org.apache.phoenix.parse.TableNode) LiteralParseNode(org.apache.phoenix.parse.LiteralParseNode) AndParseNode(org.apache.phoenix.parse.AndParseNode) ExistsParseNode(org.apache.phoenix.parse.ExistsParseNode) RowValueConstructorParseNode(org.apache.phoenix.parse.RowValueConstructorParseNode) CompoundParseNode(org.apache.phoenix.parse.CompoundParseNode) ComparisonParseNode(org.apache.phoenix.parse.ComparisonParseNode) ColumnParseNode(org.apache.phoenix.parse.ColumnParseNode) InParseNode(org.apache.phoenix.parse.InParseNode) ParseNode(org.apache.phoenix.parse.ParseNode) JoinType(org.apache.phoenix.parse.JoinTableNode.JoinType) AliasedNode(org.apache.phoenix.parse.AliasedNode)
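
In effect, visitLeave(ComparisonParseNode) turns a correlated comparison subquery into a join against a derived table that carries the correlation columns (grouping by them when the subquery aggregates). The sketch below pairs an example query with an approximate rewritten form; the schema, class and alias names are assumptions, and the rewritten SQL is only a depiction of the parse-node result, not output captured from Phoenix.

// Hypothetical schema (orders, order_totals); the "rewritten" form is only an
// approximation of the parse-node transformation performed by visitLeave() above,
// and the $-prefixed aliases stand in for the temporary aliases produced by
// ParseNodeFactory.createTempAlias().
public class ComparisonSubqueryRewriteExample {

    // Correlated comparison subquery as written by the user.
    static final String ORIGINAL =
            "SELECT o.order_id FROM orders o "
          + "WHERE o.amount = (SELECT MAX(t.amount) FROM order_totals t "
          + "                  WHERE t.customer_id = o.customer_id)";

    // Roughly what the rewriter builds: JoinConditionExtractor pulls the correlated
    // predicate out of the subquery's WHERE clause, the correlation column is added to
    // the select (and GROUP BY, since the subquery aggregates), and the subquery becomes
    // a joined derived table. The join is INNER because the comparison is the top node.
    static final String REWRITTEN_APPROXIMATION =
            "SELECT o.order_id FROM orders o "
          + "INNER JOIN (SELECT MAX(t.amount) $1, t.customer_id $2 "
          + "            FROM order_totals t GROUP BY t.customer_id) $3 "
          + "ON $3.$2 = o.customer_id "
          + "WHERE o.amount = $3.$1";

    public static void main(String[] args) {
        System.out.println(ORIGINAL);
        System.out.println(REWRITTEN_APPROXIMATION);
    }
}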

Example 4 with JoinType

Use of org.apache.phoenix.parse.JoinTableNode.JoinType in project phoenix by apache.

From the class SubqueryRewriter, method visitLeave:

@Override
public ParseNode visitLeave(ExistsParseNode node, List<ParseNode> l) throws SQLException {
    boolean isTopNode = topNode == node;
    if (isTopNode) {
        topNode = null;
    }
    SubqueryParseNode subqueryNode = (SubqueryParseNode) l.get(0);
    SelectStatement subquery = fixSubqueryStatement(subqueryNode.getSelectNode());
    String rhsTableAlias = ParseNodeFactory.createTempAlias();
    JoinConditionExtractor conditionExtractor = new JoinConditionExtractor(subquery, resolver, connection, rhsTableAlias);
    ParseNode where = subquery.getWhere() == null ? null : subquery.getWhere().accept(conditionExtractor);
    if (where == subquery.getWhere()) {
        // non-correlated EXISTS subquery, add LIMIT 1
        subquery = NODE_FACTORY.select(subquery, NODE_FACTORY.limit(NODE_FACTORY.literal(1)));
        subqueryNode = NODE_FACTORY.subquery(subquery, false);
        node = NODE_FACTORY.exists(subqueryNode, node.isNegate());
        return super.visitLeave(node, Collections.<ParseNode>singletonList(subqueryNode));
    }
    List<AliasedNode> additionalSelectNodes = conditionExtractor.getAdditionalSelectNodes();
    List<AliasedNode> selectNodes = Lists.newArrayListWithExpectedSize(additionalSelectNodes.size() + 1);
    selectNodes.add(NODE_FACTORY.aliasedNode(ParseNodeFactory.createTempAlias(), LiteralParseNode.ONE));
    selectNodes.addAll(additionalSelectNodes);
    subquery = NODE_FACTORY.select(subquery, true, selectNodes, where);
    ParseNode onNode = conditionExtractor.getJoinCondition();
    TableNode rhsTable = NODE_FACTORY.derivedTable(rhsTableAlias, subquery);
    JoinType joinType = isTopNode ? (node.isNegate() ? JoinType.Anti : JoinType.Semi) : JoinType.Left;
    ParseNode ret = isTopNode ? null : NODE_FACTORY.isNull(NODE_FACTORY.column(NODE_FACTORY.table(null, rhsTableAlias), selectNodes.get(0).getAlias(), null), !node.isNegate());
    tableNode = NODE_FACTORY.join(joinType, tableNode, rhsTable, onNode, false);
    return ret;
}
Also used : SelectStatement(org.apache.phoenix.parse.SelectStatement) SubqueryParseNode(org.apache.phoenix.parse.SubqueryParseNode) TableNode(org.apache.phoenix.parse.TableNode) LiteralParseNode(org.apache.phoenix.parse.LiteralParseNode) AndParseNode(org.apache.phoenix.parse.AndParseNode) ExistsParseNode(org.apache.phoenix.parse.ExistsParseNode) RowValueConstructorParseNode(org.apache.phoenix.parse.RowValueConstructorParseNode) CompoundParseNode(org.apache.phoenix.parse.CompoundParseNode) ComparisonParseNode(org.apache.phoenix.parse.ComparisonParseNode) ColumnParseNode(org.apache.phoenix.parse.ColumnParseNode) InParseNode(org.apache.phoenix.parse.InParseNode) ParseNode(org.apache.phoenix.parse.ParseNode) JoinType(org.apache.phoenix.parse.JoinTableNode.JoinType) AliasedNode(org.apache.phoenix.parse.AliasedNode)
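
The join type chosen by visitLeave(ExistsParseNode) depends on where the EXISTS sits and whether it is negated: Semi or Anti when it is the top-level filter node, otherwise Left plus an IS NULL / IS NOT NULL test. The sketch below maps example queries to the resulting JoinType; the customers/orders schema and the class name are assumptions, and the rewrite itself operates on parse nodes rather than SQL text.

// Hypothetical customers/orders schema. The strings only illustrate which JoinType the
// rewriter picks for each shape of query; no "SEMI JOIN" SQL text is ever produced.
public class ExistsSubqueryRewriteExample {

    // Correlated EXISTS at the top of the WHERE clause: the subquery becomes a DISTINCT
    // derived table keyed on the correlation column and is attached with JoinType.Semi.
    static final String SEMI_JOIN_CASE =
            "SELECT c.customer_id FROM customers c "
          + "WHERE EXISTS (SELECT 1 FROM orders o WHERE o.customer_id = c.customer_id)";

    // The negated form at the top of the WHERE clause is attached with JoinType.Anti.
    static final String ANTI_JOIN_CASE =
            "SELECT c.customer_id FROM customers c "
          + "WHERE NOT EXISTS (SELECT 1 FROM orders o WHERE o.customer_id = c.customer_id)";

    // When the EXISTS node is nested under other boolean operators, visitLeave() falls
    // back to JoinType.Left and replaces the EXISTS with the IS NULL / IS NOT NULL check
    // returned as `ret` in the method above.

    public static void main(String[] args) {
        System.out.println(SEMI_JOIN_CASE);
        System.out.println(ANTI_JOIN_CASE);
    }
}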

Example 5 with JoinType

Use of org.apache.phoenix.parse.JoinTableNode.JoinType in project phoenix by apache.

From the class HashJoinInfo, method deserializeHashJoinFromScan:

@SuppressWarnings("unchecked")
public static HashJoinInfo deserializeHashJoinFromScan(Scan scan) {
    byte[] join = scan.getAttribute(HASH_JOIN);
    if (join == null) {
        return null;
    }
    ByteArrayInputStream stream = new ByteArrayInputStream(join);
    try {
        DataInputStream input = new DataInputStream(stream);
        KeyValueSchema joinedSchema = new KeyValueSchema();
        joinedSchema.readFields(input);
        int count = WritableUtils.readVInt(input);
        ImmutableBytesPtr[] joinIds = new ImmutableBytesPtr[count];
        List<Expression>[] joinExpressions = new List[count];
        JoinType[] joinTypes = new JoinType[count];
        boolean[] earlyEvaluation = new boolean[count];
        KeyValueSchema[] schemas = new KeyValueSchema[count];
        int[] fieldPositions = new int[count];
        for (int i = 0; i < count; i++) {
            joinIds[i] = new ImmutableBytesPtr();
            joinIds[i].readFields(input);
            int nExprs = WritableUtils.readVInt(input);
            joinExpressions[i] = new ArrayList<Expression>(nExprs);
            for (int j = 0; j < nExprs; j++) {
                int expressionOrdinal = WritableUtils.readVInt(input);
                Expression expression = ExpressionType.values()[expressionOrdinal].newInstance();
                expression.readFields(input);
                joinExpressions[i].add(expression);
            }
            int type = WritableUtils.readVInt(input);
            joinTypes[i] = JoinType.values()[type];
            earlyEvaluation[i] = input.readBoolean();
            schemas[i] = new KeyValueSchema();
            schemas[i].readFields(input);
            fieldPositions[i] = WritableUtils.readVInt(input);
        }
        Expression postJoinFilterExpression = null;
        int expressionOrdinal = WritableUtils.readVInt(input);
        if (expressionOrdinal != -1) {
            postJoinFilterExpression = ExpressionType.values()[expressionOrdinal].newInstance();
            postJoinFilterExpression.readFields(input);
        }
        int limit = -1;
        boolean forceProjection = false;
    // These trailing fields were added after the original format; tolerate their absence
    // (EOFException below) so that client and server need not both be upgraded in lock step.
        try {
            limit = WritableUtils.readVInt(input);
            forceProjection = input.readBoolean();
        } catch (EOFException ignore) {
        }
        return new HashJoinInfo(joinedSchema, joinIds, joinExpressions, joinTypes, earlyEvaluation, schemas, fieldPositions, postJoinFilterExpression, limit >= 0 ? limit : null, forceProjection);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        try {
            stream.close();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
Also used : ImmutableBytesPtr(org.apache.phoenix.hbase.index.util.ImmutableBytesPtr) JoinType(org.apache.phoenix.parse.JoinTableNode.JoinType) IOException(java.io.IOException) DataInputStream(java.io.DataInputStream) ByteArrayInputStream(java.io.ByteArrayInputStream) Expression(org.apache.phoenix.expression.Expression) EOFException(java.io.EOFException) ArrayList(java.util.ArrayList) List(java.util.List) KeyValueSchema(org.apache.phoenix.schema.KeyValueSchema)
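
Everything read here was serialized into the scan's HASH_JOIN attribute on the client side, and the accessors used by HashJoinRegionScanner in Example 2 mirror this layout field for field. The following small diagnostic sketch uses only those accessors to dump the per-join metadata carried by a scan; the HashJoinScanInspector class itself is hypothetical.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.phoenix.join.HashJoinInfo;
import org.apache.phoenix.parse.JoinTableNode.JoinType;

// Dumps the per-join metadata that the server-side HashJoinRegionScanner relies on.
// Accessor names are taken from the usages shown in Example 2 above.
public final class HashJoinScanInspector {

    private HashJoinScanInspector() {}

    public static void describe(Scan scan) {
        HashJoinInfo info = HashJoinInfo.deserializeHashJoinFromScan(scan);
        if (info == null) {
            System.out.println("No HASH_JOIN attribute on this scan");
            return;
        }
        JoinType[] types = info.getJoinTypes();
        for (int i = 0; i < types.length; i++) {
            System.out.println("join " + i
                    + ": type=" + types[i]
                    + ", earlyEvaluation=" + info.earlyEvaluation()[i]
                    + ", fieldPosition=" + info.getFieldPositions()[i]
                    + ", expressions=" + info.getJoinExpressions()[i].size());
        }
        System.out.println("post-join filter present: "
                + (info.getPostJoinFilterExpression() != null));
    }
}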

Aggregations

JoinType (org.apache.phoenix.parse.JoinTableNode.JoinType): 7 usages
AliasedNode (org.apache.phoenix.parse.AliasedNode): 5 usages
ParseNode (org.apache.phoenix.parse.ParseNode): 5 usages
SelectStatement (org.apache.phoenix.parse.SelectStatement): 5 usages
SubqueryParseNode (org.apache.phoenix.parse.SubqueryParseNode): 5 usages
TableNode (org.apache.phoenix.parse.TableNode): 5 usages
AndParseNode (org.apache.phoenix.parse.AndParseNode): 4 usages
ColumnParseNode (org.apache.phoenix.parse.ColumnParseNode): 4 usages
ComparisonParseNode (org.apache.phoenix.parse.ComparisonParseNode): 4 usages
CompoundParseNode (org.apache.phoenix.parse.CompoundParseNode): 4 usages
ExistsParseNode (org.apache.phoenix.parse.ExistsParseNode): 4 usages
InParseNode (org.apache.phoenix.parse.InParseNode): 4 usages
LiteralParseNode (org.apache.phoenix.parse.LiteralParseNode): 4 usages
RowValueConstructorParseNode (org.apache.phoenix.parse.RowValueConstructorParseNode): 4 usages
Expression (org.apache.phoenix.expression.Expression): 3 usages
ImmutableBytesPtr (org.apache.phoenix.hbase.index.util.ImmutableBytesPtr): 3 usages
ArrayList (java.util.ArrayList): 2 usages
List (java.util.List): 2 usages
OrderByNode (org.apache.phoenix.parse.OrderByNode): 2 usages
KeyValueSchema (org.apache.phoenix.schema.KeyValueSchema): 2 usages