Example 81 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class MatchPath, method createSelectListRR.

/*
 * Adds an array<struct> column (the path attribute) to the list of columns.
 */
protected static RowResolver createSelectListRR(MatchPath evaluator, PTFInputDef inpDef) throws SemanticException {
    RowResolver rr = new RowResolver();
    RowResolver inputRR = inpDef.getOutputShape().getRr();
    evaluator.inputColumnNamesMap = new HashMap<String, String>();
    ArrayList<String> inputColumnNames = new ArrayList<String>();
    ArrayList<ObjectInspector> inpColOIs = new ArrayList<ObjectInspector>();
    for (ColumnInfo inpCInfo : inputRR.getColumnInfos()) {
        ColumnInfo cInfo = new ColumnInfo(inpCInfo);
        String colAlias = cInfo.getAlias();
        String[] tabColAlias = inputRR.reverseLookup(inpCInfo.getInternalName());
        if (tabColAlias != null) {
            colAlias = tabColAlias[1];
        }
        ASTNode inExpr = PTFTranslator.getASTNode(inpCInfo, inputRR);
        if (inExpr != null) {
            rr.putExpression(inExpr, cInfo);
            colAlias = inExpr.toStringTree().toLowerCase();
        } else {
            colAlias = colAlias == null ? cInfo.getInternalName() : colAlias;
            rr.put(cInfo.getTabAlias(), colAlias, cInfo);
        }
        evaluator.inputColumnNamesMap.put(cInfo.getInternalName(), colAlias);
        inputColumnNames.add(colAlias);
        inpColOIs.add(cInfo.getObjectInspector());
    }
    StandardListObjectInspector pathAttrOI = ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorFactory.getStandardStructObjectInspector(inputColumnNames, inpColOIs));
    ColumnInfo pathColumn = new ColumnInfo(PATHATTR_NAME, TypeInfoUtils.getTypeInfoFromObjectInspector(pathAttrOI), null, false, false);
    rr.put(null, PATHATTR_NAME, pathColumn);
    return rr;
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StandardListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) ArrayList(java.util.ArrayList) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) RowResolver(org.apache.hadoop.hive.ql.parse.RowResolver)
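
A practical note: every example on this page starts from an ASTNode that Hive's parser has already produced. To experiment with helpers like the one above outside a full query compilation, a tree can be built straight from a query string. A minimal sketch, assuming the classic ParseDriver.parse(String) entry point that returns the root ASTNode (newer Hive releases may expose a different signature):

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;

public class AstDemo {
    public static void main(String[] args) throws ParseException {
        ParseDriver pd = new ParseDriver();
        // parse() returns the root of the query's abstract syntax tree
        ASTNode root = pd.parse("SELECT key, value FROM src WHERE key > 10");
        // toStringTree() renders the tree in LISP-like form: (TOK_QUERY ...)
        System.out.println(root.toStringTree());
    }
}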

Example 82 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class ShowColumnsAnalyzer, method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    // the table name has to be present, so at least 1 and at most 5 children
    if (root.getChildCount() > 5 || root.getChildCount() < 1) {
        throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg(root.toStringTree()));
    }
    ctx.setResFile(ctx.getLocalTmpPath());
    String tableName = getUnescapedName((ASTNode) root.getChild(0));
    String pattern = null;
    boolean isSorted = (root.getFirstChildWithType(HiveParser.KW_SORTED) != null);
    int childCount = root.getChildCount();
    // if SORTED is present, exclude its token from the child count
    if (isSorted) {
        childCount--;
    }
    switch (childCount) {
        case 1:
            // only the table name, no pattern or db
            break;
        case 2:
            // table name and pattern
            pattern = unescapeSQLString(root.getChild(1).getText());
            break;
        case 3:
            // specifies db
            if (tableName.contains(".")) {
                throw new SemanticException("Duplicates declaration for database name");
            }
            tableName = getUnescapedName((ASTNode) root.getChild(2)) + "." + tableName;
            break;
        case 4:
            // specifies db and pattern
            if (tableName.contains(".")) {
                throw new SemanticException("Duplicates declaration for database name");
            }
            tableName = getUnescapedName((ASTNode) root.getChild(2)) + "." + tableName;
            pattern = unescapeSQLString(root.getChild(3).getText());
            break;
        default:
            break;
    }
    Table table = getTable(tableName);
    inputs.add(new ReadEntity(table));
    ShowColumnsDesc desc = new ShowColumnsDesc(ctx.getResFile(), tableName, pattern, isSorted);
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    task.setFetchSource(true);
    setFetchTask(createFetchTask(ShowColumnsDesc.SCHEMA));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) Table(org.apache.hadoop.hive.ql.metadata.Table) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
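
The switch above relies entirely on the positional layout of children under the SHOW COLUMNS node. When working out such layouts, a recursive dump of the tree is handy. A minimal sketch using only the ANTLR CommonTree accessors that ASTNode inherits (getText, getType, getChildCount, getChild):

import org.apache.hadoop.hive.ql.parse.ASTNode;

public final class AstDump {
    private AstDump() {
    }

    /** Recursively prints each node's token text and type, indented by depth. */
    public static void dump(ASTNode node, int depth) {
        StringBuilder indent = new StringBuilder();
        for (int i = 0; i < depth; i++) {
            indent.append("  ");
        }
        System.out.println(indent + node.getText() + " (type=" + node.getType() + ")");
        for (int i = 0; i < node.getChildCount(); i++) {
            dump((ASTNode) node.getChild(i), depth + 1);
        }
    }
}

Calling dump(root, 0) on a parsed SHOW COLUMNS statement makes the child-count cases above easy to verify.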

Example 83 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class ConstraintsUtils, method processForeignKeys.

public static void processForeignKeys(TableName tableName, ASTNode node, List<SQLForeignKey> foreignKeys) throws SemanticException {
    // The ANTLR grammar looks like:
    // 1.  KW_CONSTRAINT idfr=identifier KW_FOREIGN KW_KEY fkCols=columnParenthesesList
    // KW_REFERENCES tabName=tableName parCols=columnParenthesesList
    // enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification
    // -> ^(TOK_FOREIGN_KEY $idfr $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec)
    // when the user specifies the constraint name (i.e. child.getChildCount() == 7)
    // 2.  KW_FOREIGN KW_KEY fkCols=columnParenthesesList
    // KW_REFERENCES tabName=tableName parCols=columnParenthesesList
    // enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification
    // -> ^(TOK_FOREIGN_KEY $fkCols  $tabName $parCols $relySpec $enableSpec $validateSpec)
    // when the user does not specify the constraint name (i.e. child.getChildCount() == 6)
    String constraintName = null;
    boolean enable = true;
    boolean validate = true;
    boolean rely = false;
    int fkIndex = -1;
    for (int i = 0; i < node.getChildCount(); i++) {
        ASTNode grandChild = (ASTNode) node.getChild(i);
        int type = grandChild.getToken().getType();
        if (type == HiveParser.TOK_CONSTRAINT_NAME) {
            constraintName = BaseSemanticAnalyzer.unescapeIdentifier(grandChild.getChild(0).getText().toLowerCase());
        } else if (type == HiveParser.TOK_ENABLE) {
            enable = true;
            // validate is true by default if we enable the constraint
            validate = true;
        } else if (type == HiveParser.TOK_DISABLE) {
            enable = false;
            // validate is false by default if we disable the constraint
            validate = false;
        } else if (type == HiveParser.TOK_VALIDATE) {
            validate = true;
        } else if (type == HiveParser.TOK_NOVALIDATE) {
            validate = false;
        } else if (type == HiveParser.TOK_RELY) {
            rely = true;
        } else if (type == HiveParser.TOK_TABCOLNAME && fkIndex == -1) {
            fkIndex = i;
        }
    }
    if (enable) {
        throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg("ENABLE feature not supported yet. " + "Please use DISABLE instead."));
    }
    if (validate) {
        throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg("VALIDATE feature not supported yet. " + "Please use NOVALIDATE instead."));
    }
    int ptIndex = fkIndex + 1;
    int pkIndex = ptIndex + 1;
    if (node.getChild(fkIndex).getChildCount() != node.getChild(pkIndex).getChildCount()) {
        throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg(" The number of foreign key columns should be same as number of parent key columns "));
    }
    TableName parentTblName = BaseSemanticAnalyzer.getQualifiedTableName((ASTNode) node.getChild(ptIndex));
    for (int j = 0; j < node.getChild(fkIndex).getChildCount(); j++) {
        SQLForeignKey sqlForeignKey = new SQLForeignKey();
        sqlForeignKey.setFktable_db(tableName.getDb());
        sqlForeignKey.setFktable_name(tableName.getTable());
        Tree fkgrandChild = node.getChild(fkIndex).getChild(j);
        BaseSemanticAnalyzer.checkColumnName(fkgrandChild.getText());
        sqlForeignKey.setFkcolumn_name(BaseSemanticAnalyzer.unescapeIdentifier(fkgrandChild.getText().toLowerCase()));
        sqlForeignKey.setPktable_db(parentTblName.getDb());
        sqlForeignKey.setPktable_name(parentTblName.getTable());
        Tree pkgrandChild = node.getChild(pkIndex).getChild(j);
        sqlForeignKey.setPkcolumn_name(BaseSemanticAnalyzer.unescapeIdentifier(pkgrandChild.getText().toLowerCase()));
        sqlForeignKey.setKey_seq(j + 1);
        sqlForeignKey.setFk_name(constraintName);
        sqlForeignKey.setEnable_cstr(enable);
        sqlForeignKey.setValidate_cstr(validate);
        sqlForeignKey.setRely_cstr(rely);
        foreignKeys.add(sqlForeignKey);
    }
}
Also used : TableName(org.apache.hadoop.hive.common.TableName) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) Tree(org.antlr.runtime.tree.Tree) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
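
The loop above locates the foreign key column list by scanning for the first TOK_TABCOLNAME child instead of hard-coding an index, which is what lets one method serve both grammar alternatives (7 children when a constraint name is given, 6 when it is not). A hypothetical standalone helper, not part of ConstraintsUtils, isolating just that scan:

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;

public final class ForeignKeyAstHelper {
    private ForeignKeyAstHelper() {
    }

    /**
     * Returns the index of the first TOK_TABCOLNAME child of a
     * TOK_FOREIGN_KEY node, or -1 if none is present. With a constraint
     * name the column list is the second child; without one it is the first.
     */
    public static int findForeignKeyColumnList(ASTNode node) {
        for (int i = 0; i < node.getChildCount(); i++) {
            ASTNode child = (ASTNode) node.getChild(i);
            if (child.getToken().getType() == HiveParser.TOK_TABCOLNAME) {
                return i;
            }
        }
        return -1;
    }
}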

Example 84 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class ConstraintsUtils, method generateConstraintInfos.

/**
 * Get the constraint from the AST and build the list of ConstraintInfos with the required information.
 * @param child the node with the constraint token
 * @param columnNames the names of the columns the constraint applies to
 * @param typeChildForDefault type of the column, used for the DEFAULT value type check
 * @param tokenRewriteStream stream used to recover the original text of CHECK and DEFAULT expressions
 * @return the list of ConstraintInfos built from the AST
 */
private static List<ConstraintInfo> generateConstraintInfos(ASTNode child, List<String> columnNames, ASTNode typeChildForDefault, TokenRewriteStream tokenRewriteStream) throws SemanticException {
    // The ANTLR grammar looks like:
    // 1. KW_CONSTRAINT idfr=identifier KW_PRIMARY KW_KEY pkCols=columnParenthesesList
    // constraintOptsCreate?
    // -> ^(TOK_PRIMARY_KEY $pkCols $idfr constraintOptsCreate?)
    // when the user specifies the constraint name.
    // 2.  KW_PRIMARY KW_KEY columnParenthesesList
    // constraintOptsCreate?
    // -> ^(TOK_PRIMARY_KEY columnParenthesesList constraintOptsCreate?)
    // when the user does not specify the constraint name.
    // Default values
    String constraintName = null;
    // By default, if the user hasn't provided any optional constraint properties,
    // the constraint is considered ENABLE, NOVALIDATE and RELY=true
    boolean enable = true;
    boolean validate = false;
    boolean rely = true;
    String checkOrDefaultValue = null;
    int childType = child.getToken().getType();
    for (int i = 0; i < child.getChildCount(); i++) {
        ASTNode grandChild = (ASTNode) child.getChild(i);
        int type = grandChild.getToken().getType();
        if (type == HiveParser.TOK_CONSTRAINT_NAME) {
            constraintName = BaseSemanticAnalyzer.unescapeIdentifier(grandChild.getChild(0).getText().toLowerCase());
        } else if (type == HiveParser.TOK_ENABLE) {
            enable = true;
            // validate is false by default if we enable the constraint
            // TODO: A constraint like NOT NULL could be enabled using ALTER but VALIDATE remains
            // false in such cases. Ideally VALIDATE should be set to true to validate existing data
            validate = false;
        } else if (type == HiveParser.TOK_DISABLE) {
            enable = false;
            // validate is false by default if we disable the constraint
            validate = false;
            rely = false;
        } else if (type == HiveParser.TOK_VALIDATE) {
            validate = true;
        } else if (type == HiveParser.TOK_NOVALIDATE) {
            validate = false;
        } else if (type == HiveParser.TOK_RELY) {
            rely = true;
        } else if (type == HiveParser.TOK_NORELY) {
            rely = false;
        } else if (childType == HiveParser.TOK_DEFAULT_VALUE) {
            // try to get default value only if this is DEFAULT constraint
            checkOrDefaultValue = getDefaultValue(grandChild, typeChildForDefault, tokenRewriteStream);
        } else if (childType == HiveParser.TOK_CHECK_CONSTRAINT) {
            checkOrDefaultValue = tokenRewriteStream.toOriginalString(grandChild.getTokenStartIndex(), grandChild.getTokenStopIndex());
        }
    }
    // metastore schema only allows maximum 255 for constraint name column
    if (constraintName != null && constraintName.length() > CONSTRAINT_MAX_LENGTH) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Constraint name: " + constraintName + " exceeded maximum allowed length: " + CONSTRAINT_MAX_LENGTH));
    }
    // metastore schema only allows maximum 255 for constraint value column
    if (checkOrDefaultValue != null && checkOrDefaultValue.length() > CONSTRAINT_MAX_LENGTH) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Constraint value: " + checkOrDefaultValue + " exceeded maximum allowed length: " + CONSTRAINT_MAX_LENGTH));
    }
    // only NOT NULL, DEFAULT and CHECK constraints may currently be enabled/enforced
    if (enable && childType != HiveParser.TOK_NOT_NULL && childType != HiveParser.TOK_DEFAULT_VALUE && childType != HiveParser.TOK_CHECK_CONSTRAINT) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("ENABLE/ENFORCED feature not supported yet. " + "Please use DISABLE/NOT ENFORCED instead."));
    }
    if (validate) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("VALIDATE feature not supported yet. " + "Please use NOVALIDATE instead."));
    }
    List<ConstraintInfo> constraintInfos = new ArrayList<>();
    if (columnNames == null) {
        constraintInfos.add(new ConstraintInfo(null, constraintName, enable, validate, rely, checkOrDefaultValue));
    } else {
        for (String columnName : columnNames) {
            constraintInfos.add(new ConstraintInfo(columnName, constraintName, enable, validate, rely, checkOrDefaultValue));
        }
    }
    return constraintInfos;
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ArrayList(java.util.ArrayList) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
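
The flag handling in the loop is easy to misread: ENABLE and DISABLE both reset VALIDATE, and DISABLE clears RELY as well. A hypothetical value holder, deliberately not Hive's ConstraintInfo, that condenses those interactions and the defaults stated in the comments:

/**
 * Hypothetical sketch of the option flags in generateConstraintInfos.
 * Defaults when no optional properties are given: ENABLE, NOVALIDATE, RELY.
 */
public class ConstraintOptions {
    boolean enable = true;
    boolean validate = false;
    boolean rely = true;

    void applyEnable() {
        enable = true;
        validate = false; // enabling does not turn validation on
    }

    void applyDisable() {
        enable = false;
        validate = false; // disabling also turns validation off
        rely = false;     // and clears RELY
    }
}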

Example 85 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class ShowTablesAnalyzer, method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    if (root.getChildCount() > 4) {
        throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg(root.toStringTree()));
    }
    ctx.setResFile(ctx.getLocalTmpPath());
    String dbName = SessionState.get().getCurrentDatabase();
    String tableNames = null;
    TableType tableTypeFilter = null;
    boolean isExtended = false;
    for (int i = 0; i < root.getChildCount(); i++) {
        ASTNode child = (ASTNode) root.getChild(i);
        if (child.getType() == HiveParser.TOK_FROM) {
            // Specifies a DB
            dbName = unescapeIdentifier(root.getChild(++i).getText());
            db.validateDatabaseExists(dbName);
        } else if (child.getType() == HiveParser.TOK_TABLE_TYPE) {
            // Filter on table type
            String tableType = unescapeIdentifier(child.getChild(0).getText());
            if (!"table_type".equalsIgnoreCase(tableType)) {
                throw new SemanticException("SHOW TABLES statement only allows equality filter on table_type value");
            }
            tableTypeFilter = TableType.valueOf(unescapeSQLString(child.getChild(1).getText()));
        } else if (child.getType() == HiveParser.KW_EXTENDED) {
            // Include table type
            isExtended = true;
        } else {
            // Uses a pattern
            tableNames = unescapeSQLString(child.getText());
        }
    }
    inputs.add(new ReadEntity(getDatabase(dbName)));
    ShowTablesDesc desc = new ShowTablesDesc(ctx.getResFile(), dbName, tableNames, tableTypeFilter, isExtended);
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    task.setFetchSource(true);
    setFetchTask(createFetchTask(desc.getSchema()));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) TableType(org.apache.hadoop.hive.metastore.TableType) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
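
As with the other analyzers, the child layout is easiest to see by dumping a few parsed statements. A minimal sketch reusing the ParseDriver assumption from Example 81; the exact SHOW TABLES syntax accepted (LIKE, table_type filters, EXTENDED) varies between Hive releases, so treat the sample statements as illustrative:

import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;

public class ShowTablesAstDemo {
    public static void main(String[] args) throws ParseException {
        ParseDriver pd = new ParseDriver();
        // Each variant yields a different mix of TOK_FROM and pattern children
        // for the loop in analyzeInternal to inspect.
        for (String sql : new String[] {
                "SHOW TABLES",
                "SHOW TABLES FROM db1",
                "SHOW TABLES FROM db1 LIKE 'tmp_*'" }) {
            System.out.println(sql + " -> " + pd.parse(sql).toStringTree());
        }
    }
}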

Aggregations

ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 116
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 37
DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 24
ArrayList (java.util.ArrayList): 21
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 13
HashMap (java.util.HashMap): 11
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 11
Table (org.apache.hadoop.hive.ql.metadata.Table): 10
Node (org.apache.hadoop.hive.ql.lib.Node): 9
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 8
TableName (org.apache.hadoop.hive.common.TableName): 7
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 7
RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver): 7
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 6
RelNode (org.apache.calcite.rel.RelNode): 5
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 5
Context (org.apache.hadoop.hive.ql.Context): 5
ParseDriver (org.apache.hadoop.hive.ql.parse.ParseDriver): 5
SemanticAnalyzer (org.apache.hadoop.hive.ql.parse.SemanticAnalyzer): 5
WindowingException (com.sap.hadoop.windowing.WindowingException): 4