Search in sources :

Example 66 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project SQLWindowing by hbutani.

The method buildASTNode of the class TranslateUtils:

/**
 * Builds a small {@code (TABLEORCOL Identifier)} AST fragment wrapping the
 * given column name.
 *
 * @param colName the column identifier to embed in the subtree
 * @return the constructed {@link ASTNode}
 */
public static ASTNode buildASTNode(String colName) {
    // TreeWizard assembles the subtree from a textual tree pattern using the
    // Windowing2Parser token vocabulary.
    TreeWizard wizard = new TreeWizard(adaptor, Windowing2Parser.tokenNames);
    String pattern = "(TABLEORCOL Identifier[" + colName + "])";
    return (ASTNode) wizard.create(pattern);
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) TreeWizard(org.antlr.runtime.tree.TreeWizard)

Example 67 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project SQLWindowing by hbutani.

The method translate of the class WhereTranslation:

/**
 * Translates the WHERE clause of the query (if any) into a {@link WhereDef},
 * wiring up the expression node, its evaluator, and the resulting
 * ObjectInspector, and attaches it to the query definition.
 *
 * @param qDef the query definition being translated
 * @throws WindowingException if the WHERE expression is not convertible to a
 *         boolean value
 */
public static void translate(QueryDef qDef) throws WindowingException {
    QuerySpec querySpec = qDef.getSpec();
    ASTNode whereAst = querySpec.getWhereExpr();
    // Queries without a WHERE clause need no translation.
    if (whereAst == null) {
        return;
    }
    QueryTranslationInfo translationInfo = qDef.getTranslationInfo();
    InputInfo inputInfo = translationInfo.getInputInfo(qDef.getInput());
    ExprNodeDesc whereNode = TranslateUtils.buildExprNode(whereAst, inputInfo.getTypeCheckCtx());
    ExprNodeEvaluator evaluator = WindowingExprNodeEvaluatorFactory.get(translationInfo, whereNode);
    ObjectInspector inspector = TranslateUtils.initExprNodeEvaluator(qDef, whereNode, evaluator, inputInfo);
    // The predicate must be coercible to boolean; a failed converter lookup
    // means the expression's type cannot serve as a WHERE condition.
    try {
        ObjectInspectorConverters.getConverter(inspector, PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
    } catch (Throwable t) {
        throw new WindowingException("Where Expr must be convertible to a boolean value", t);
    }
    WhereDef whereDef = new WhereDef();
    whereDef.setExpression(whereAst);
    whereDef.setExprNode(whereNode);
    whereDef.setExprEvaluator(evaluator);
    whereDef.setOI(inspector);
    qDef.setWhere(whereDef);
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) WhereDef(com.sap.hadoop.windowing.query2.definition.WhereDef) InputInfo(com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo) QueryInputDef(com.sap.hadoop.windowing.query2.definition.QueryInputDef) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator) WindowingException(com.sap.hadoop.windowing.WindowingException) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) QuerySpec(com.sap.hadoop.windowing.query2.specification.QuerySpec)

Example 68 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project incubator-atlas by apache.

The method rewrite of the class HiveASTRewriter:

/**
 * Parses the given query, runs the AST rewrite over it, and returns the
 * rewritten SQL text.
 *
 * @param sourceQry the original query string
 * @return the rewritten query text
 * @throws RewriteException if the query cannot be parsed
 */
public String rewrite(String sourceQry) throws RewriteException {
    try {
        ParseDriver driver = new ParseDriver();
        ASTNode root = driver.parse(sourceQry, queryContext, true);
        // Strip any synthetic wrapper nodes the parser placed above the real root.
        root = ParseUtils.findRootNonNullToken(root);
        this.rwCtx = new RewriteContext(sourceQry, root, queryContext.getTokenRewriteStream());
        rewrite(root);
        return toSQL();
    } catch (ParseException e) {
        LOG.error("Could not parse the query {} ", sourceQry, e);
        throw new RewriteException("Could not parse query : ", e);
    }
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ParseDriver(org.apache.hadoop.hive.ql.parse.ParseDriver) ParseException(org.apache.hadoop.hive.ql.parse.ParseException)

Example 69 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The method preAnalyze of the class CreateDatabaseHook:

/**
 * Pre-analysis hook for CREATE DATABASE: when the statement carries
 * IF NOT EXISTS and the database already exists, returns the AST immediately
 * so that no further (redundant) analysis work is done.
 *
 * @param context hook context supplying the Hive metastore client
 * @param ast the CREATE DATABASE parse tree
 * @return the (unmodified) AST
 * @throws SemanticException if the Hive instance or the metastore lookup fails
 */
@Override
public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
    final Hive db;
    try {
        db = context.getHive();
    } catch (HiveException e) {
        throw new SemanticException("Couldn't get Hive DB instance in semantic analysis phase.", e);
    }
    // Analyze and create tbl properties object
    databaseName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) ast.getChild(0));
    int childCount = ast.getChildCount();
    // Child 0 is the database name; the remaining children are clause tokens.
    for (int i = 1; i < childCount; i++) {
        ASTNode clause = (ASTNode) ast.getChild(i);
        if (clause.getToken().getType() == HiveParser.TOK_IFNOTEXISTS) {
            try {
                List<String> matches = db.getDatabasesByPattern(databaseName);
                if (matches != null && !matches.isEmpty()) {
                    // db exists
                    return ast;
                }
            } catch (HiveException e) {
                throw new SemanticException(e);
            }
        }
    }
    return ast;
}
Also used : Hive(org.apache.hadoop.hive.ql.metadata.Hive) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 70 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The method preAnalyze of the class CreateTableHook:

/**
 * Pre-analysis hook for CREATE TABLE that enforces HCatalog's restrictions:
 * CTAS is rejected, partition columns must be of type string, and the STORED
 * AS specification must be complete (unless LIKE or a storage handler is
 * used). Returns early when IF NOT EXISTS is present and the table exists.
 *
 * @param context hook context supplying the Hive metastore client and conf
 * @param ast the CREATE TABLE parse tree
 * @return the (unmodified) AST
 * @throws SemanticException on any rejected construct or metastore failure
 */
@Override
public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
    final Hive db;
    try {
        db = context.getHive();
    } catch (HiveException e) {
        throw new SemanticException("Couldn't get Hive DB instance in semantic analysis phase.", e);
    }
    // Analyze and create tbl properties object
    tableName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) ast.getChild(0));
    boolean isLikeTable = false;
    StorageFormat storageFormat = new StorageFormat(context.getConf());
    int childCount = ast.getChildCount();
    // Child 0 is the table name; the rest are clause tokens.
    for (int i = 1; i < childCount; i++) {
        ASTNode clause = (ASTNode) ast.getChild(i);
        if (storageFormat.fillStorageFormat(clause)) {
            // An explicit storage handler takes over; skip further checks.
            if (org.apache.commons.lang3.StringUtils.isNotEmpty(storageFormat.getStorageHandler())) {
                return ast;
            }
            continue;
        }
        int tokenType = clause.getToken().getType();
        if (tokenType == HiveParser.TOK_QUERY) {
            // CTAS
            throw new SemanticException("Operation not supported. Create table as Select is not a valid operation.");
        } else if (tokenType == HiveParser.TOK_LIKETABLE) {
            isLikeTable = true;
        } else if (tokenType == HiveParser.TOK_IFNOTEXISTS) {
            try {
                List<String> existing = db.getTablesByPattern(tableName);
                if (existing != null && !existing.isEmpty()) {
                    // exists
                    return ast;
                }
            } catch (HiveException e) {
                throw new SemanticException(e);
            }
        } else if (tokenType == HiveParser.TOK_TABLEPARTCOLS) {
            List<FieldSchema> partCols = BaseSemanticAnalyzer.getColumns(clause, false, context.getConf());
            for (FieldSchema fs : partCols) {
                if (!fs.getType().equalsIgnoreCase("string")) {
                    throw new SemanticException("Operation not supported. HCatalog only supports partition columns of type string. For column: " + fs.getName() + " Found type: " + fs.getType());
                }
            }
        }
        // TOK_ALTERTABLE_BUCKETS and any other token require no action here.
    }
    // LIKE copies the source table's storage; otherwise both formats are required.
    if (!isLikeTable && (storageFormat.getInputFormat() == null || storageFormat.getOutputFormat() == null)) {
        throw new SemanticException("STORED AS specification is either incomplete or incorrect.");
    }
    return ast;
}
Also used : Hive(org.apache.hadoop.hive.ql.metadata.Hive) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) StorageFormat(org.apache.hadoop.hive.ql.parse.StorageFormat) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Aggregations

ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode)116 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)37 DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)24 ArrayList (java.util.ArrayList)21 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)13 HashMap (java.util.HashMap)11 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)11 Table (org.apache.hadoop.hive.ql.metadata.Table)10 Node (org.apache.hadoop.hive.ql.lib.Node)9 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)8 TableName (org.apache.hadoop.hive.common.TableName)7 ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo)7 RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver)7 ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity)6 RelNode (org.apache.calcite.rel.RelNode)5 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)5 Context (org.apache.hadoop.hive.ql.Context)5 ParseDriver (org.apache.hadoop.hive.ql.parse.ParseDriver)5 SemanticAnalyzer (org.apache.hadoop.hive.ql.parse.SemanticAnalyzer)5 WindowingException (com.sap.hadoop.windowing.WindowingException)4