Search in sources :

Example 91 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class ExprNodeConverter, method getPSpec.

/**
 * Converts the partition and order keys of a Calcite {@link RexWindow} into a
 * Hive {@link PartitioningSpec}.
 *
 * @param window the Calcite window whose partition/order keys are translated
 * @return a PartitioningSpec carrying a PartitionSpec and/or OrderSpec; either
 *         part is omitted when the corresponding key list is null or empty
 */
private PartitioningSpec getPSpec(RexWindow window) {
    PartitioningSpec result = new PartitioningSpec();
    Schema schema = new Schema(tabAlias, inputRowType.getFieldList());

    boolean hasPartitionKeys = window.partitionKeys != null && !window.partitionKeys.isEmpty();
    if (hasPartitionKeys) {
        PartitionSpec partSpec = new PartitionSpec();
        for (RexNode partitionKey : window.partitionKeys) {
            // Translate each partition key expression to its AST form.
            PartitionExpression partExpr = new PartitionExpression();
            partExpr.setExpression(partitionKey.accept(new RexVisitor(schema)));
            partSpec.addExpression(partExpr);
        }
        result.setPartSpec(partSpec);
    }

    boolean hasOrderKeys = window.orderKeys != null && !window.orderKeys.isEmpty();
    if (hasOrderKeys) {
        OrderSpec orderSpec = new OrderSpec();
        for (RexFieldCollation collation : window.orderKeys) {
            boolean ascending = collation.getDirection() == RelFieldCollation.Direction.ASCENDING;
            NullOrder nulls;
            if (collation.right.contains(SqlKind.NULLS_FIRST)) {
                nulls = NullOrder.NULLS_FIRST;
            } else if (collation.right.contains(SqlKind.NULLS_LAST)) {
                nulls = NullOrder.NULLS_LAST;
            } else {
                // Default when neither NULLS FIRST nor NULLS LAST was specified:
                // nulls first for ascending order, nulls last for descending.
                nulls = ascending ? NullOrder.NULLS_FIRST : NullOrder.NULLS_LAST;
            }
            OrderExpression orderExpr = new OrderExpression();
            orderExpr.setOrder(ascending ? Order.ASC : Order.DESC);
            orderExpr.setNullOrder(nulls);
            orderExpr.setExpression(collation.left.accept(new RexVisitor(schema)));
            orderSpec.addExpression(orderExpr);
        }
        result.setOrderSpec(orderSpec);
    }
    return result;
}
Also used : NullOrder(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.NullOrder) Order(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order) OrderSpec(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderSpec) RexVisitor(org.apache.hadoop.hive.ql.optimizer.calcite.translator.ASTConverter.RexVisitor) PartitionExpression(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionExpression) OrderExpression(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderExpression) Schema(org.apache.hadoop.hive.ql.optimizer.calcite.translator.ASTConverter.Schema) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) NullOrder(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.NullOrder) PartitionSpec(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionSpec) RexFieldCollation(org.apache.calcite.rex.RexFieldCollation) PartitioningSpec(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec) RexNode(org.apache.calcite.rex.RexNode)

Example 92 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class TestExprProcessorGetFuncExpr, method testLookupFunctionOnDemand.

/**
 * Verifies that converting a TOK_FUNCTION AST node for an unregistered
 * function name ("myupper") still yields a non-null generic-UDF expression,
 * i.e. the function is looked up on demand.
 */
@Test
public void testLookupFunctionOnDemand() throws Exception {
    TypeCheckProcFactory.DefaultExprProcessor processor = ExprNodeTypeCheck.getExprNodeDefaultExprProcessor();
    // Build the AST: TOK_FUNCTION(Identifier("myupper"), StringLiteral("test")).
    ASTNode funcExpr = new ASTNode(new CommonToken(HiveParser.TOK_FUNCTION, "TOK_FUNCTION"));
    funcExpr.addChild(new ASTNode(new CommonToken(HiveParser.Identifier, "myupper")));
    funcExpr.addChild(new ASTNode(new CommonToken(HiveParser.StringLiteral, "test")));
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "test"));
    // getXpathOrFuncExprNodeDesc cannot access from outside package
    ExprNodeDesc result = (ExprNodeDesc) processor.getXpathOrFuncExprNodeDesc(funcExpr, true, children, new TypeCheckCtx(null));
    Assert.assertNotNull(result);
    Assert.assertNotNull(((ExprNodeGenericFuncDesc) result).getGenericUDF());
}
Also used : ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ArrayList(java.util.ArrayList) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) CommonToken(org.antlr.runtime.CommonToken) Test(org.junit.Test)

Example 93 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class Context, method getDestNamePrefix.

/**
 * The suffix is always relative to a given ASTNode.
 * We need this so that FileSinkOperatorS corresponding to different branches of a multi-insert
 * statement which represents a SQL Merge statement get marked correctly with
 * {@link org.apache.hadoop.hive.ql.io.AcidUtils.Operation}.  See usages
 * of {@link #getDestNamePrefix(ASTNode, QB)} and
 * {@link org.apache.hadoop.hive.ql.parse.SemanticAnalyzer#updating(String)} and
 * {@link org.apache.hadoop.hive.ql.parse.SemanticAnalyzer#deleting(String)}.
 */
public DestClausePrefix getDestNamePrefix(ASTNode curNode, QB queryBlock) {
    assert curNode != null : "must supply curNode";
    if (queryBlock.isInsideView() || queryBlock.getParseInfo().getIsSubQ()) {
        /**
         * Views get inlined in the logical plan but not in the AST
         * {@link org.apache.hadoop.hive.ql.parse.SemanticAnalyzer#replaceViewReferenceWithDefinition(QB, Table, String, String)}
         * Since here we only care to identify clauses representing Update/Delete which are not
         * possible inside a view/subquery, we can immediately return the default {@link DestClausePrefix.INSERT}
         */
        return DestClausePrefix.INSERT;
    }
    if (curNode.getType() != HiveParser.TOK_INSERT_INTO) {
        // select statement
        assert curNode.getType() == HiveParser.TOK_DESTINATION;
        if (operation == Operation.OTHER) {
            // not an 'interesting' op
            return DestClausePrefix.INSERT;
        }
        // if it is an 'interesting' op but it's a select it must be a sub-query or a derived table
        // it doesn't require a special Acid code path - the rest of the code here is to ensure
        // the tree structure is what we expect
        boolean thisIsInASubquery = false;
        // Walk up the AST until an ancestor proves we are inside a sub-query or a
        // set operation; if we reach the root without finding one, fail below.
        parentLoop: while (curNode.getParent() != null) {
            curNode = (ASTNode) curNode.getParent();
            // Intentional fall-through: every listed token type marks a sub-query context.
            switch(curNode.getType()) {
                case HiveParser.TOK_SUBQUERY_EXPR:
                // this is a real subquery (foo IN (select ...))
                case HiveParser.TOK_SUBQUERY:
                // strictly speaking SetOps should have a TOK_SUBQUERY parent so next 6 items are redundant
                case HiveParser.TOK_UNIONALL:
                case HiveParser.TOK_UNIONDISTINCT:
                case HiveParser.TOK_EXCEPTALL:
                case HiveParser.TOK_EXCEPTDISTINCT:
                case HiveParser.TOK_INTERSECTALL:
                case HiveParser.TOK_INTERSECTDISTINCT:
                    thisIsInASubquery = true;
                    break parentLoop;
            }
        }
        if (!thisIsInASubquery) {
            throw new IllegalStateException("Expected '" + getMatchedText(curNode) + "' to be in sub-query or set operation.");
        }
        return DestClausePrefix.INSERT;
    }
    // curNode is a TOK_INSERT_INTO clause; choose the prefix based on the current operation.
    switch(operation) {
        case OTHER:
            return DestClausePrefix.INSERT;
        case UPDATE:
            return DestClausePrefix.UPDATE;
        case DELETE:
            return DestClausePrefix.DELETE;
        case MERGE:
            /* This is the structure expected here
        HiveParser.TOK_QUERY;
          HiveParser.TOK_FROM
          HiveParser.TOK_INSERT;
            HiveParser.TOK_INSERT_INTO;
          HiveParser.TOK_INSERT;
            HiveParser.TOK_INSERT_INTO;
          .....*/
            ASTNode insert = (ASTNode) curNode.getParent();
            assert insert != null && insert.getType() == HiveParser.TOK_INSERT;
            ASTNode query = (ASTNode) insert.getParent();
            assert query != null && query.getType() == HiveParser.TOK_QUERY;
            // A Merge becomes a multi-insert (see class javadoc above); locate which
            // TOK_INSERT branch this clause belongs to and look up the DestClausePrefix
            // recorded for that branch index in insertBranchToNamePrefix.
            for (int childIdx = 1; childIdx < query.getChildCount(); childIdx++) {
                // 1st child is TOK_FROM
                assert query.getChild(childIdx).getType() == HiveParser.TOK_INSERT;
                if (insert == query.getChild(childIdx)) {
                    DestClausePrefix prefix = insertBranchToNamePrefix.get(childIdx);
                    if (prefix == null) {
                        throw new IllegalStateException("Found a node w/o branch mapping: '" + getMatchedText(insert) + "'");
                    }
                    return prefix;
                }
            }
            throw new IllegalStateException("Could not locate '" + getMatchedText(insert) + "'");
        default:
            throw new IllegalStateException("Unexpected operation: " + operation);
    }
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode)

Example 94 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class TestTransactionStatement, method testTxnCommitRollback.

/**
 * COMMIT and ROLLBACK must parse to the same single-token AST whether or not
 * the optional WORK keyword is present.
 */
@Test
public void testTxnCommitRollback() throws ParseException {
    for (String sql : new String[] { "COMMIT", "COMMIT WORK" }) {
        Assert.assertEquals("AST doesn't match", "tok_commit", parse(sql).toStringTree());
    }
    for (String sql : new String[] { "ROLLBACK", "ROLLBACK WORK" }) {
        Assert.assertEquals("AST doesn't match", "tok_rollback", parse(sql).toStringTree());
    }
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) Test(org.junit.Test)

Example 95 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class TestTransactionStatement, method testAutoCommit.

/**
 * SET AUTOCOMMIT TRUE/FALSE must parse to a tok_set_autocommit node whose
 * child reflects the boolean value.
 */
@Test
public void testAutoCommit() throws ParseException {
    ASTNode enableAst = parse("SET AUTOCOMMIT TRUE");
    Assert.assertEquals("AST doesn't match", "(tok_set_autocommit tok_true)", enableAst.toStringTree());
    ASTNode disableAst = parse("SET AUTOCOMMIT FALSE");
    Assert.assertEquals("AST doesn't match", "(tok_set_autocommit tok_false)", disableAst.toStringTree());
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) Test(org.junit.Test)

Aggregations

ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode)116 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)37 DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)24 ArrayList (java.util.ArrayList)21 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)13 HashMap (java.util.HashMap)11 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)11 Table (org.apache.hadoop.hive.ql.metadata.Table)10 Node (org.apache.hadoop.hive.ql.lib.Node)9 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)8 TableName (org.apache.hadoop.hive.common.TableName)7 ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo)7 RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver)7 ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity)6 RelNode (org.apache.calcite.rel.RelNode)5 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)5 Context (org.apache.hadoop.hive.ql.Context)5 ParseDriver (org.apache.hadoop.hive.ql.parse.ParseDriver)5 SemanticAnalyzer (org.apache.hadoop.hive.ql.parse.SemanticAnalyzer)5 WindowingException (com.sap.hadoop.windowing.WindowingException)4