Search in sources :

Example 71 with CommonTree

use of org.antlr.runtime.tree.CommonTree in project jabref by JabRef.

In the class VM, the method charStream2CommonTree:

/**
 * Runs the BST lexer/parser pipeline over the given character stream and
 * returns the root of the resulting AST.
 *
 * @param bst character stream containing the BST source
 * @return the parsed program as a {@link CommonTree}
 * @throws RecognitionException if the input cannot be parsed
 */
private static CommonTree charStream2CommonTree(CharStream bst) throws RecognitionException {
    BstLexer lexer = new BstLexer(bst);
    BstParser parser = new BstParser(new CommonTokenStream(lexer));
    BstParser.program_return parseResult = parser.program();
    return (CommonTree) parseResult.getTree();
}
Also used : CommonTokenStream(org.antlr.runtime.CommonTokenStream) CommonTree(org.antlr.runtime.tree.CommonTree)

Example 72 with CommonTree

use of org.antlr.runtime.tree.CommonTree in project SQLWindowing by hbutani.

In the class WindowingShell, the method checkQuery:

/**
 * Parses the given windowing query purely to validate it, translating any
 * recorded parse errors into a {@link WindowingException}.
 *
 * @param query the query text to check
 * @throws WindowingException if the query has parse errors or parsing fails
 */
public void checkQuery(String query) throws WindowingException {
    Windowing2Parser parser = null;
    try {
        Windowing2Lexer lexer = new Windowing2Lexer(new ANTLRStringStream(query));
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        parser = new Windowing2Parser(tokens);
        parser.setTreeAdaptor(TranslateUtils.adaptor);
        // The tree is built only for its side effect of recording parse errors.
        @SuppressWarnings("unused")
        CommonTree t = (CommonTree) parser.query().getTree();
        String err = parser.getWindowingParseErrors();
        if (err != null) {
            throw new WindowingException(err);
        }
    } catch (WindowingException we) {
        // Propagate our own exception as-is instead of re-wrapping it.
        throw we;
    } catch (Throwable te) {
        // BUG FIX: guard against NPE — parser is still null if lexer or
        // token-stream construction threw before it was assigned.
        if (parser != null) {
            String err = parser.getWindowingParseErrors();
            if (err != null) {
                throw new WindowingException(err);
            }
        }
        throw new WindowingException("Parse Error:" + te.toString(), te);
    }
}
Also used : Windowing2Lexer(com.sap.hadoop.windowing.parser.Windowing2Lexer) ANTLRStringStream(org.antlr.runtime.ANTLRStringStream) CommonTokenStream(org.antlr.runtime.CommonTokenStream) CommonTree(org.antlr.runtime.tree.CommonTree) Windowing2Parser(com.sap.hadoop.windowing.parser.Windowing2Parser) WindowingException(com.sap.hadoop.windowing.WindowingException)

Example 73 with CommonTree

use of org.antlr.runtime.tree.CommonTree in project hive by apache.

In the class DDLSemanticAnalyzer, the method getFullPartitionSpecs:

/**
 * Get the partition specs from the tree. This stores the full specification
 * with the comparator operator into the output list.
 *
 * @param ast Tree to extract partitions from.
 * @param tab Table.
 * @param canGroupExprs whether specs with the same prefix length may be OR-ed
 *                      into a single expression.
 * @return    Map of partitions by prefix length. Most of the time prefix length will
 *            be the same for all partition specs, so we can just OR the expressions.
 * @throws SemanticException if a referenced column is not a partition key, or the
 *                           operator is unusable for a default partition spec.
 */
private Map<Integer, List<ExprNodeGenericFuncDesc>> getFullPartitionSpecs(CommonTree ast, Table tab, boolean canGroupExprs) throws SemanticException {
    String defaultPartitionName = HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULTPARTITIONNAME);
    // Lower-cased partition column name -> declared type name.
    Map<String, String> colTypes = new HashMap<String, String>();
    for (FieldSchema fs : tab.getPartitionKeys()) {
        colTypes.put(fs.getName().toLowerCase(), fs.getType());
    }
    Map<Integer, List<ExprNodeGenericFuncDesc>> result = new HashMap<Integer, List<ExprNodeGenericFuncDesc>>();
    for (int childIndex = 0; childIndex < ast.getChildCount(); childIndex++) {
        Tree partSpecTree = ast.getChild(childIndex);
        if (partSpecTree.getType() != HiveParser.TOK_PARTSPEC) {
            continue;
        }
        ExprNodeGenericFuncDesc expr = null;
        HashSet<String> names = new HashSet<String>(partSpecTree.getChildCount());
        for (int i = 0; i < partSpecTree.getChildCount(); ++i) {
            // Each TOK_PARTVAL child is (column, operator, value).
            CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i);
            assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
            String key = stripIdentifierQuotes(partSpecSingleKey.getChild(0).getText()).toLowerCase();
            String operator = partSpecSingleKey.getChild(1).getText();
            ASTNode partValNode = (ASTNode) partSpecSingleKey.getChild(2);
            TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
            ExprNodeConstantDesc valExpr = (ExprNodeConstantDesc) TypeCheckProcFactory.genExprNode(partValNode, typeCheckCtx).get(partValNode);
            Object val = valExpr.getValue();
            boolean isDefaultPartitionName = val.equals(defaultPartitionName);
            String type = colTypes.get(key);
            // BUG FIX: validate the column BEFORE asking the type factory; the
            // original looked up getPrimitiveTypeInfo(type) first, which fails on a
            // null type name instead of raising the intended SemanticException.
            if (type == null) {
                throw new SemanticException("Column " + key + " not found");
            }
            PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
            // Create the corresponding hive expression to filter on partition columns.
            if (!isDefaultPartitionName) {
                if (!valExpr.getTypeString().equals(type)) {
                    // Coerce the literal to the partition column's declared type.
                    Converter converter = ObjectInspectorConverters.getConverter(TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(valExpr.getTypeInfo()), TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(pti));
                    val = converter.convert(valExpr.getValue());
                }
            }
            ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true);
            ExprNodeGenericFuncDesc op;
            if (!isDefaultPartitionName) {
                op = makeBinaryPredicate(operator, column, new ExprNodeConstantDesc(pti, val));
            } else {
                // The default partition stores NULL, so '=' / '!=' become
                // isnull / isnotnull; other comparators are meaningless here.
                GenericUDF originalOp = FunctionRegistry.getFunctionInfo(operator).getGenericUDF();
                String fnName;
                if (FunctionRegistry.isEq(originalOp)) {
                    fnName = "isnull";
                } else if (FunctionRegistry.isNeq(originalOp)) {
                    fnName = "isnotnull";
                } else {
                    throw new SemanticException("Cannot use " + operator + " in a default partition spec; only '=' and '!=' are allowed.");
                }
                op = makeUnaryPredicate(fnName, column);
            }
            // If it's multi-expr filter (e.g. a='5', b='2012-01-02'), AND with previous exprs.
            expr = (expr == null) ? op : makeBinaryPredicate("and", expr, op);
            names.add(key);
        }
        if (expr == null) {
            continue;
        }
        // We got the expr for one full partition spec. Determine the prefix length.
        int prefixLength = calculatePartPrefix(tab, names);
        List<ExprNodeGenericFuncDesc> orExpr = result.get(prefixLength);
        // If we don't, create a new separate filter. In most cases there will only be one.
        if (orExpr == null) {
            result.put(prefixLength, Lists.newArrayList(expr));
        } else if (canGroupExprs) {
            orExpr.set(0, makeBinaryPredicate("or", expr, orExpr.get(0)));
        } else {
            orExpr.add(expr);
        }
    }
    return result;
}
Also used : ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CommonTree(org.antlr.runtime.tree.CommonTree) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) NotNullConstraint(org.apache.hadoop.hive.ql.metadata.NotNullConstraint) DefaultConstraint(org.apache.hadoop.hive.ql.metadata.DefaultConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) CommonTree(org.antlr.runtime.tree.CommonTree) Tree(org.antlr.runtime.tree.Tree) Converter(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) LinkedList(java.util.LinkedList) HashSet(java.util.HashSet)

Example 74 with CommonTree

use of org.antlr.runtime.tree.CommonTree in project hive by apache.

In the class ParseUtils, the method processSetColsNode:

/**
 * Replaces a spurious TOK_SETCOLREF added by parser with column names referring to the query
 * in e.g. a union. This is to maintain the expectations that some code, like order by position
 * alias, might have about not having ALLCOLREF. If it cannot find the columns with confidence
 * it will just replace SETCOLREF with ALLCOLREF. Most of the cases where that happens are
 * easy to work around in the query (e.g. by adding column aliases in the union).
 * Note: mutates the AST in place (retypes the SETCOLREF token, or replaces the
 * parent select-expression's children); may recurse on nested SETCOLREFs.
 * @param setCols TOK_SETCOLREF ASTNode.
 * @param searcher AST searcher to reuse.
 */
private static void processSetColsNode(ASTNode setCols, ASTSearcher searcher) {
    searcher.reset();
    // Walk up from the SETCOLREF to its enclosing TOK_INSERT, if any.
    CommonTree rootNode = setCols;
    while (rootNode != null && rootNode.getType() != HiveParser.TOK_INSERT) {
        rootNode = rootNode.parent;
    }
    if (rootNode == null || rootNode.parent == null) {
        // Couldn't find the parent insert; replace with ALLCOLREF.
        LOG.debug("Replacing SETCOLREF with ALLCOLREF because we couldn't find the root INSERT");
        setCols.token.setType(HiveParser.TOK_ALLCOLREF);
        return;
    }
    // TOK_QUERY above insert
    rootNode = rootNode.parent;
    // Find the TOK_FROM child of that query.
    Tree fromNode = null;
    for (int j = 0; j < rootNode.getChildCount(); ++j) {
        Tree child = rootNode.getChild(j);
        if (child.getType() == HiveParser.TOK_FROM) {
            fromNode = child;
            break;
        }
    }
    if (!(fromNode instanceof ASTNode)) {
        // Couldn't find the from that contains subquery; replace with ALLCOLREF.
        LOG.debug("Replacing SETCOLREF with ALLCOLREF because we couldn't find the FROM");
        setCols.token.setType(HiveParser.TOK_ALLCOLREF);
        return;
    }
    // We are making what we are trying to do more explicit if there's a union alias; so
    // that if we do something we didn't expect to do, it'd be more likely to fail.
    String alias = null;
    if (fromNode.getChildCount() > 0) {
        Tree fromWhat = fromNode.getChild(0);
        if (fromWhat.getType() == HiveParser.TOK_SUBQUERY && fromWhat.getChildCount() > 1) {
            // A subquery's last child is its alias identifier, when present.
            Tree child = fromWhat.getChild(fromWhat.getChildCount() - 1);
            if (child.getType() == HiveParser.Identifier) {
                alias = child.getText();
            }
        }
    }
    // Note: we assume that this isn't an already malformed query;
    // we don't check for that here - it will fail later anyway.
    // First, we find the SELECT closest to the top.
    ASTNode select = searcher.simpleBreadthFirstSearchAny((ASTNode) fromNode, HiveParser.TOK_SELECT, HiveParser.TOK_SELECTDI);
    if (select == null) {
        // Couldn't find the from that contains subquery; replace with ALLCOLREF.
        LOG.debug("Replacing SETCOLREF with ALLCOLREF because we couldn't find the SELECT");
        setCols.token.setType(HiveParser.TOK_ALLCOLREF);
        return;
    }
    // Then, find the leftmost logical sibling select, because that's what Hive uses for aliases.
    while (true) {
        // Climb to the TOK_QUERY that owns the current select.
        CommonTree queryOfSelect = select.parent;
        while (queryOfSelect != null && queryOfSelect.getType() != HiveParser.TOK_QUERY) {
            queryOfSelect = queryOfSelect.parent;
        }
        // We should have some QUERY; and also its parent because by supposition we are in subq.
        if (queryOfSelect == null || queryOfSelect.parent == null) {
            LOG.debug("Replacing SETCOLREF with ALLCOLREF because we couldn't find the QUERY");
            setCols.token.setType(HiveParser.TOK_ALLCOLREF);
            return;
        }
        // We are the left-most child.
        if (queryOfSelect.childIndex == 0)
            break;
        // Otherwise hop to the leftmost sibling branch and search it for its select.
        Tree moreToTheLeft = queryOfSelect.parent.getChild(0);
        Preconditions.checkState(moreToTheLeft != queryOfSelect);
        ASTNode newSelect = searcher.simpleBreadthFirstSearchAny((ASTNode) moreToTheLeft, HiveParser.TOK_SELECT, HiveParser.TOK_SELECTDI);
        Preconditions.checkState(newSelect != select);
        select = newSelect;
    // Repeat the procedure for the new select.
    }
    // Found the proper columns.
    List<ASTNode> newChildren = new ArrayList<>(select.getChildCount());
    HashSet<String> aliases = new HashSet<>();
    for (int i = 0; i < select.getChildCount(); ++i) {
        Tree selExpr = select.getChild(i);
        if (selExpr.getType() == HiveParser.QUERY_HINT)
            continue;
        assert selExpr.getType() == HiveParser.TOK_SELEXPR;
        assert selExpr.getChildCount() > 0;
        // Examine the last child. It could be an alias.
        Tree child = selExpr.getChild(selExpr.getChildCount() - 1);
        switch(child.getType()) {
            case HiveParser.TOK_SETCOLREF:
                // We have a nested setcolref. Process that and start from scratch TODO: use stack?
                processSetColsNode((ASTNode) child, searcher);
                processSetColsNode(setCols, searcher);
                return;
            case HiveParser.TOK_ALLCOLREF:
                // We should find an alias of this insert and do (alias).*. This however won't fix e.g.
                // positional order by alias case, cause we'd still have a star on the top level. Bail.
                LOG.debug("Replacing SETCOLREF with ALLCOLREF because of nested ALLCOLREF");
                setCols.token.setType(HiveParser.TOK_ALLCOLREF);
                return;
            case HiveParser.TOK_TABLE_OR_COL:
                // Bare column reference: take its identifier child.
                Tree idChild = child.getChild(0);
                assert idChild.getType() == HiveParser.Identifier : idChild;
                if (!createChildColumnRef(idChild, alias, newChildren, aliases)) {
                    setCols.token.setType(HiveParser.TOK_ALLCOLREF);
                    return;
                }
                break;
            case HiveParser.Identifier:
                // Explicit column alias.
                if (!createChildColumnRef(child, alias, newChildren, aliases)) {
                    setCols.token.setType(HiveParser.TOK_ALLCOLREF);
                    return;
                }
                break;
            case HiveParser.DOT:
                {
                    // Qualified reference (tbl.col): take the rightmost identifier.
                    Tree colChild = child.getChild(child.getChildCount() - 1);
                    assert colChild.getType() == HiveParser.Identifier : colChild;
                    if (!createChildColumnRef(colChild, alias, newChildren, aliases)) {
                        setCols.token.setType(HiveParser.TOK_ALLCOLREF);
                        return;
                    }
                    break;
                }
            default:
                // Not really sure how to refer to this (or if we can).
                // TODO: We could find a different from branch for the union, that might have an alias?
                // Or we could add an alias here to refer to, but that might break other branches.
                LOG.debug("Replacing SETCOLREF with ALLCOLREF because of the nested node " + child.getType() + " " + child.getText());
                setCols.token.setType(HiveParser.TOK_ALLCOLREF);
                return;
        }
    }
    // Insert search in the beginning would have failed if these parents didn't exist.
    ASTNode parent = (ASTNode) setCols.parent.parent;
    int t = parent.getType();
    assert t == HiveParser.TOK_SELECT || t == HiveParser.TOK_SELECTDI : t;
    // Replace the SETCOLREF's select-expression with the resolved column refs.
    int ix = setCols.parent.childIndex;
    parent.deleteChild(ix);
    for (ASTNode node : newChildren) {
        parent.insertChild(ix++, node);
    }
}
Also used : CommonTree(org.antlr.runtime.tree.CommonTree) ArrayList(java.util.ArrayList) CommonTree(org.antlr.runtime.tree.CommonTree) Tree(org.antlr.runtime.tree.Tree) HashSet(java.util.HashSet)

Example 75 with CommonTree

use of org.antlr.runtime.tree.CommonTree in project Apktool by iBotPeaches.

In the class SmaliMod, the method assembleSmaliFile:

/**
 * Lexes, parses, and tree-walks a single smali file into the given DexBuilder.
 *
 * @param smaliFile the smali source file to assemble
 * @param dexBuilder the builder receiving the assembled classes
 * @param verboseErrors whether the parser/walker should print verbose errors
 * @param printTokens whether to dump the non-hidden token stream to stdout
 * @return true if lexing, parsing, and tree-walking all completed without
 *         syntax errors; false otherwise
 * @throws IOException if the file cannot be read
 * @throws RecognitionException if parsing fails irrecoverably
 */
public static boolean assembleSmaliFile(File smaliFile, DexBuilder dexBuilder, boolean verboseErrors, boolean printTokens) throws IOException, RecognitionException {
    // BUG FIX: the original closed is/reader only on the success path, leaking
    // them on the early "syntax errors" return and on any thrown exception.
    // try-with-resources closes both on every exit path.
    try (InputStream is = new FileInputStream(smaliFile);
            InputStreamReader reader = new InputStreamReader(is, "UTF-8")) {
        LexerErrorInterface lexer = new smaliFlexLexer(reader);
        ((smaliFlexLexer) lexer).setSourceFile(smaliFile);
        CommonTokenStream tokens = new CommonTokenStream((TokenSource) lexer);
        if (printTokens) {
            // Force the stream to fill, then dump every non-hidden token.
            tokens.getTokens();
            for (int i = 0; i < tokens.size(); i++) {
                Token token = tokens.get(i);
                if (token.getChannel() == smaliParser.HIDDEN) {
                    continue;
                }
                System.out.println(smaliParser.tokenNames[token.getType()] + ": " + token.getText());
            }
        }
        smaliParser parser = new smaliParser(tokens);
        parser.setVerboseErrors(verboseErrors);
        smaliParser.smali_file_return result = parser.smali_file();
        if (parser.getNumberOfSyntaxErrors() > 0 || lexer.getNumberOfSyntaxErrors() > 0) {
            return false;
        }
        // Walk the parse tree to emit dex structures into the builder.
        CommonTree t = (CommonTree) result.getTree();
        CommonTreeNodeStream treeStream = new CommonTreeNodeStream(t);
        treeStream.setTokenStream(tokens);
        smaliTreeWalker dexGen = new smaliTreeWalker(treeStream);
        dexGen.setVerboseErrors(verboseErrors);
        dexGen.setDexBuilder(dexBuilder);
        dexGen.smali_file();
        return dexGen.getNumberOfSyntaxErrors() == 0;
    }
}
Also used : CommonTree(org.antlr.runtime.tree.CommonTree) CommonTreeNodeStream(org.antlr.runtime.tree.CommonTreeNodeStream)

Aggregations

CommonTree (org.antlr.runtime.tree.CommonTree)155 RecognitionException (org.antlr.runtime.RecognitionException)132 Token (org.antlr.runtime.Token)106 RewriteRuleSubtreeStream (org.antlr.runtime.tree.RewriteRuleSubtreeStream)81 RewriteRuleTokenStream (org.antlr.runtime.tree.RewriteRuleTokenStream)62 NoViableAltException (org.antlr.runtime.NoViableAltException)33 RewriteEarlyExitException (org.antlr.runtime.tree.RewriteEarlyExitException)15 CommonTokenStream (org.antlr.runtime.CommonTokenStream)12 CommonTreeNodeStream (org.antlr.runtime.tree.CommonTreeNodeStream)8 ANTLRStringStream (org.antlr.runtime.ANTLRStringStream)6 EarlyExitException (org.antlr.runtime.EarlyExitException)4 MismatchedSetException (org.antlr.runtime.MismatchedSetException)4 WindowingException (com.sap.hadoop.windowing.WindowingException)3 FileInputStream (java.io.FileInputStream)3 InputStreamReader (java.io.InputStreamReader)3 CharStream (org.antlr.runtime.CharStream)3 Tree (org.antlr.runtime.tree.Tree)3 ArrayList (java.util.ArrayList)2 HashSet (java.util.HashSet)2 TokenSource (org.antlr.runtime.TokenSource)2