Search in sources :

Example 1 with Tree

use of org.antlr.runtime.tree.Tree in project hive by apache.

In the class BaseSemanticAnalyzer, the method processForeignKeys:

/**
 * Process the foreign keys from the AST and populate the foreign keys in the SQLForeignKey list.
 * One {@link SQLForeignKey} entry is produced per referencing/referenced column pair.
 * @param parent  Parent of the foreign key token node; its first child names the referencing table
 * @param child Foreign Key token node (TOK_FOREIGN_KEY)
 * @param foreignKeys SQLForeignKey list the parsed constraints are appended to
 * @throws SemanticException if the AST is malformed or uses an unsupported constraint option
 */
protected static void processForeignKeys(ASTNode parent, ASTNode child, List<SQLForeignKey> foreignKeys) throws SemanticException {
    String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
    // The ANTLR grammar looks like :
    // 1.  KW_CONSTRAINT idfr=identifier KW_FOREIGN KW_KEY fkCols=columnParenthesesList
    // KW_REFERENCES tabName=tableName parCols=columnParenthesesList
    // enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification
    // -> ^(TOK_FOREIGN_KEY $idfr $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec)
    // when the user specifies the constraint name (i.e. child.getChildCount() == 7)
    // 2.  KW_FOREIGN KW_KEY fkCols=columnParenthesesList
    // KW_REFERENCES tabName=tableName parCols=columnParenthesesList
    // enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification
    // -> ^(TOK_FOREIGN_KEY $fkCols  $tabName $parCols $relySpec $enableSpec $validateSpec)
    // when the user does not specify the constraint name (i.e. child.getChildCount() == 6)
    boolean userSpecifiedConstraintName = child.getChildCount() == 7;
    // Every child position shifts by one when an explicit constraint name is present.
    int fkIndex = userSpecifiedConstraintName ? 1 : 0;
    int ptIndex = fkIndex + 1;
    int pkIndex = ptIndex + 1;
    int relyIndex = pkIndex + 1;
    // Validate every child position read below, including the rely/enable/validate
    // specs at relyIndex..relyIndex+2. The previous check only went up to pkIndex,
    // so a malformed AST with 4 or 5 children slipped past it and triggered a
    // NullPointerException (instead of a SemanticException) further down.
    if (child.getChildCount() <= relyIndex + 2) {
        throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg());
    }
    String[] parentDBTbl = getQualifiedTableName((ASTNode) child.getChild(ptIndex));
    if (child.getChild(fkIndex).getChildCount() != child.getChild(pkIndex).getChildCount()) {
        throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg(" The number of foreign key columns should be same as number of parent key columns "));
    }
    // The rely/enable/validate specs apply to the constraint as a whole, so evaluate
    // them once here instead of re-reading them on every column iteration.
    // NOTE(review): rely is matched against TOK_VALIDATE — presumably the grammar
    // rewrites KW_RELY to that token; confirm against relySpecification in HiveParser.g.
    boolean rely = child.getChild(relyIndex).getType() == HiveParser.TOK_VALIDATE;
    boolean enable = child.getChild(relyIndex + 1).getType() == HiveParser.TOK_ENABLE;
    boolean validate = child.getChild(relyIndex + 2).getType() == HiveParser.TOK_VALIDATE;
    if (enable) {
        throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg(" ENABLE feature not supported yet"));
    }
    if (validate) {
        throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg(" VALIDATE feature not supported yet"));
    }
    // Emit one SQLForeignKey per (fk column, pk column) pair, in declaration order.
    for (int j = 0; j < child.getChild(fkIndex).getChildCount(); j++) {
        SQLForeignKey sqlForeignKey = new SQLForeignKey();
        Tree fkgrandChild = child.getChild(fkIndex).getChild(j);
        checkColumnName(fkgrandChild.getText());
        sqlForeignKey.setRely_cstr(rely);
        sqlForeignKey.setPktable_db(parentDBTbl[0]);
        sqlForeignKey.setPktable_name(parentDBTbl[1]);
        sqlForeignKey.setFktable_db(qualifiedTabName[0]);
        sqlForeignKey.setFktable_name(qualifiedTabName[1]);
        sqlForeignKey.setFkcolumn_name(unescapeIdentifier(fkgrandChild.getText().toLowerCase()));
        Tree pkgrandChild = child.getChild(pkIndex).getChild(j);
        sqlForeignKey.setPkcolumn_name(unescapeIdentifier(pkgrandChild.getText().toLowerCase()));
        // Key sequence numbers are 1-based, following the metastore/JDBC convention.
        sqlForeignKey.setKey_seq(j + 1);
        if (userSpecifiedConstraintName) {
            // Child 0 is the identifier token when a constraint name was given.
            sqlForeignKey.setFk_name(unescapeIdentifier(child.getChild(0).getText().toLowerCase()));
        }
        foreignKeys.add(sqlForeignKey);
    }
}
Also used : SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) Tree(org.antlr.runtime.tree.Tree)

Example 2 with Tree

use of org.antlr.runtime.tree.Tree in project hive by apache.

In the class BaseSemanticAnalyzer, the method getColumns:

/**
 * Get the list of FieldSchema out of the ASTNode.
 * Additionally, populate the primaryKeys and foreignKeys if any.
 */
public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase, List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys) throws SemanticException {
    List<FieldSchema> colList = new ArrayList<FieldSchema>();
    List<PKInfo> pkInfos = new ArrayList<PKInfo>();
    Map<String, FieldSchema> nametoFS = new HashMap<String, FieldSchema>();
    Tree parent = ast.getParent();
    int childCount = ast.getChildCount();
    for (int idx = 0; idx < childCount; idx++) {
        ASTNode node = (ASTNode) ast.getChild(idx);
        int tokenType = node.getToken().getType();
        // Constraint nodes are routed to their dedicated handlers; everything
        // else is treated as a column definition.
        if (tokenType == HiveParser.TOK_PRIMARY_KEY) {
            processPrimaryKeyInfos(node, pkInfos);
            continue;
        }
        if (tokenType == HiveParser.TOK_FOREIGN_KEY) {
            processForeignKeys((ASTNode) parent, node, foreignKeys);
            continue;
        }
        FieldSchema col = new FieldSchema();
        Tree nameNode = node.getChild(0);
        if (nameNode != null) {
            // child 0 is the name of the column
            String colName = nameNode.getText();
            if (lowerCase) {
                colName = colName.toLowerCase();
            }
            checkColumnName(colName);
            col.setName(unescapeIdentifier(colName));
            // child 1 is the type of the column
            col.setType(getTypeStringFromAST((ASTNode) node.getChild(1)));
            // child 2 is the optional comment of the column
            if (node.getChildCount() == 3) {
                col.setComment(unescapeSQLString(node.getChild(2).getText()));
            }
        }
        // An unnamed column node still contributes an (empty) FieldSchema,
        // mirroring the long-standing behavior of this method.
        nametoFS.put(col.getName(), col);
        colList.add(col);
    }
    if (!pkInfos.isEmpty()) {
        processPrimaryKeys((ASTNode) parent, pkInfos, primaryKeys, nametoFS);
    }
    return colList;
}
Also used : HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList) Tree(org.antlr.runtime.tree.Tree)

Example 3 with Tree

use of org.antlr.runtime.tree.Tree in project hive by apache.

In the class ParseUtils, the method sameTree:

/**
 * Compares two ASTs for structural equality: identical shape, and identical
 * node text for non-nil nodes. Implemented iteratively with explicit stacks
 * to avoid deep recursion on large parse trees.
 */
public static boolean sameTree(ASTNode node, ASTNode otherNode) {
    if (node == null || otherNode == null) {
        // Both null compare equal; exactly one null does not.
        return node == otherNode;
    }
    Stack<Tree> pending = new Stack<Tree>();
    Stack<Tree> otherPending = new Stack<Tree>();
    pending.push(node);
    otherPending.push(otherNode);
    while (!pending.empty() && !otherPending.empty()) {
        Tree a = pending.pop();
        Tree b = otherPending.pop();
        if (a.isNil() != b.isNil()) {
            return false;
        }
        // Node text is only meaningful (and only compared) on non-nil nodes.
        if (!a.isNil() && !a.toString().equals(b.toString())) {
            return false;
        }
        if (a.getChildCount() != b.getChildCount()) {
            return false;
        }
        // Push children right-to-left so they are examined left-to-right.
        for (int i = a.getChildCount() - 1; i >= 0; i--) {
            pending.push(a.getChild(i));
            otherPending.push(b.getChild(i));
        }
    }
    return pending.empty() && otherPending.empty();
}
Also used : CommonTree(org.antlr.runtime.tree.CommonTree) Tree(org.antlr.runtime.tree.Tree) Stack(java.util.Stack)

Example 4 with Tree

use of org.antlr.runtime.tree.Tree in project gerrit by GerritCodeReview.

In the class QueryParserTest, the method projectBare:

/**
 * Bare project operator values, with and without a glob wildcard,
 * should each parse into a single-word predicate node.
 */
@Test
public void projectBare() throws QueryParseException {
    Tree exact = parse("project:tools/gerrit");
    assertSingleWord("project", "tools/gerrit", exact);

    Tree wildcard = parse("project:tools/*");
    assertSingleWord("project", "tools/*", wildcard);
}
Also used : Tree(org.antlr.runtime.tree.Tree) Test(org.junit.Test)

Example 5 with Tree

use of org.antlr.runtime.tree.Tree in project cuba by cuba-platform.

In the class QueryAnalyzerTest, the method replaceOrderBy:

/**
 * Replacing the ORDER BY clause should swap in the new path expression
 * and append the requested sort direction marker.
 */
@Test
public void replaceOrderBy() throws RecognitionException {
    DomainModel model = prepareDomainModel();
    QueryTreeTransformer transformer = new QueryTreeTransformer();
    transformer.prepare(model, "select c from Car c order by c.model");
    CommonTree queryTree = transformer.getTree();

    // The initial query orders by a single field: c.model.
    CommonTree orderBy = (CommonTree) queryTree.getFirstChildWithType(JPA2Lexer.T_ORDER_BY);
    Tree orderByField = orderBy.getFirstChildWithType(JPA2Lexer.T_ORDER_BY_FIELD);
    assertEquals(1, orderByField.getChildCount());
    PathNode path = (PathNode) orderByField.getChild(0);
    assertEquals("c", path.getEntityVariableName());
    assertEquals("model", path.getChild(0).getText());

    // Replace the ordering with c.regNumber, descending.
    PathNode replacement = new PathNode(JPA2Lexer.T_SELECTED_FIELD, "c");
    replacement.addDefaultChild("regNumber");
    transformer.replaceOrderBy(true, new PathEntityReference(replacement, "Car"));

    // After the replacement the ORDER BY field holds the new path plus a "desc" child.
    orderBy = (CommonTree) queryTree.getFirstChildWithType(JPA2Lexer.T_ORDER_BY);
    orderByField = orderBy.getFirstChildWithType(JPA2Lexer.T_ORDER_BY_FIELD);
    assertEquals(2, orderByField.getChildCount());
    path = (PathNode) orderByField.getChild(0);
    assertEquals("c", path.getEntityVariableName());
    assertEquals("regNumber", path.getChild(0).getText());
    assertEquals("desc", orderByField.getChild(1).getText());
}
Also used : PathEntityReference(com.haulmont.cuba.core.sys.jpql.transform.PathEntityReference) DomainModel(com.haulmont.cuba.core.sys.jpql.DomainModel) CommonTree(org.antlr.runtime.tree.CommonTree) CommonTree(org.antlr.runtime.tree.CommonTree) Tree(org.antlr.runtime.tree.Tree) QueryTreeTransformer(com.haulmont.cuba.core.sys.jpql.transform.QueryTreeTransformer) Test(org.junit.Test)

Aggregations

Tree (org.antlr.runtime.tree.Tree)215 Test (org.junit.Test)98 RuleReturnScope (org.antlr.runtime.RuleReturnScope)82 CommonTree (org.antlr.runtime.tree.CommonTree)39 ArrayList (java.util.ArrayList)27 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)18 SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)18 SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)18 SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint)18 DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint)12 DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)11 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)11 HashMap (java.util.HashMap)9 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)9 Node (org.apache.hadoop.hive.ql.lib.Node)9 IOException (java.io.IOException)8 HashSet (java.util.HashSet)8 LinkedHashMap (java.util.LinkedHashMap)8 List (java.util.List)8 GrammarAST (org.antlr.v4.tool.ast.GrammarAST)8