Example 61 with ReadEntity

use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

the class DDLSemanticAnalyzer method analyzeDescDatabase.

/**
 * Describe database.
 *
 * @param ast the DESCRIBE DATABASE AST node
 * @throws SemanticException if the AST has an unexpected shape
 */
private void analyzeDescDatabase(ASTNode ast) throws SemanticException {
    boolean isExtended;
    String dbName;
    if (ast.getChildCount() == 1) {
        dbName = stripQuotes(ast.getChild(0).getText());
        isExtended = false;
    } else if (ast.getChildCount() == 2) {
        dbName = stripQuotes(ast.getChild(0).getText());
        isExtended = true;
    } else {
        throw new SemanticException("Unexpected Tokens at DESCRIBE DATABASE");
    }
    DescDatabaseDesc descDbDesc = new DescDatabaseDesc(ctx.getResFile(), dbName, isExtended);
    inputs.add(new ReadEntity(getDatabase(dbName)));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descDbDesc)));
    setFetchTask(createFetchTask(descDbDesc.getSchema()));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) DescDatabaseDesc(org.apache.hadoop.hive.ql.plan.DescDatabaseDesc)
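
For context, the two branches above correspond to the two forms of the statement: DESCRIBE DATABASE yields one AST child (the database name), while DESCRIBE DATABASE EXTENDED yields two. A minimal, hedged sketch of driving both forms through the compiler; this is not from the Hive codebase, the database name mydb is a placeholder, and a reachable (e.g. local test) metastore is assumed:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

public class DescDatabaseSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();        // assumes hive-site.xml for a test metastore is on the classpath
        SessionState.start(conf);              // SessionState also supplies the current database elsewhere
        Driver driver = new Driver(conf);
        driver.run("DESCRIBE DATABASE mydb");           // one child    -> isExtended = false
        driver.run("DESCRIBE DATABASE EXTENDED mydb");  // two children -> isExtended = true
    }
}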

Example 62 with ReadEntity

use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

the class DDLSemanticAnalyzer method analyzeShowTables.

private void analyzeShowTables(ASTNode ast) throws SemanticException {
    ShowTablesDesc showTblsDesc;
    String dbName = SessionState.get().getCurrentDatabase();
    String tableNames = null;
    if (ast.getChildCount() > 3) {
        throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg(ast.toStringTree()));
    }
    switch (ast.getChildCount()) {
        case 1: // Uses a pattern
            tableNames = unescapeSQLString(ast.getChild(0).getText());
            showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName, tableNames);
            break;
        case 2: // Specifies a DB
            assert (ast.getChild(0).getType() == HiveParser.TOK_FROM);
            dbName = unescapeIdentifier(ast.getChild(1).getText());
            validateDatabase(dbName);
            showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName);
            break;
        case 3: // Uses a pattern and specifies a DB
            assert (ast.getChild(0).getType() == HiveParser.TOK_FROM);
            dbName = unescapeIdentifier(ast.getChild(1).getText());
            tableNames = unescapeSQLString(ast.getChild(2).getText());
            validateDatabase(dbName);
            showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName, tableNames);
            break;
        default: // No pattern or DB
            showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName);
            break;
    }
    inputs.add(new ReadEntity(getDatabase(dbName)));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showTblsDesc)));
    setFetchTask(createFetchTask(showTblsDesc.getSchema()));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) ShowTablesDesc(org.apache.hadoop.hive.ql.plan.ShowTablesDesc) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork)
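
The four switch cases map onto the SHOW TABLES variants. A hedged sketch, reusing the Driver setup from the Example 61 snippet (mydb and the 'src*' pattern are placeholders):

static void showTablesVariants(Driver driver) throws Exception {
    driver.run("SHOW TABLES");                    // no children    -> default case: current db, no pattern
    driver.run("SHOW TABLES 'src*'");             // one child      -> case 1: pattern against current db
    driver.run("SHOW TABLES IN mydb");            // two children   -> case 2: TOK_FROM plus db name
    driver.run("SHOW TABLES IN mydb 'src*'");     // three children -> case 3: db name plus pattern
}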

Example 63 with ReadEntity

use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

the class DDLSemanticAnalyzer method analyzeShowColumns.

private void analyzeShowColumns(ASTNode ast) throws SemanticException {
    // The table name must be present, so the child count is between 1 and 4.
    if (ast.getChildCount() > 4 || ast.getChildCount() < 1) {
        throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg(ast.toStringTree()));
    }
    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
    ShowColumnsDesc showColumnsDesc = null;
    String pattern = null;
    switch (ast.getChildCount()) {
        case 1: // only table name, no pattern or db
            showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), tableName);
            break;
        case 2: // table name and pattern
            pattern = unescapeSQLString(ast.getChild(1).getText());
            showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), tableName, pattern);
            break;
        case 3: // specifies a db
            if (tableName.contains(".")) {
                throw new SemanticException("Duplicates declaration for database name");
            }
            tableName = getUnescapedName((ASTNode) ast.getChild(2)) + "." + tableName;
            showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), tableName);
            break;
        case 4: // specifies a db and a pattern
            if (tableName.contains(".")) {
                throw new SemanticException("Duplicates declaration for database name");
            }
            tableName = getUnescapedName((ASTNode) ast.getChild(2)) + "." + tableName;
            pattern = unescapeSQLString(ast.getChild(3).getText());
            showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), tableName, pattern);
            break;
        default:
            break;
    }
    Table tab = getTable(tableName);
    inputs.add(new ReadEntity(tab));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showColumnsDesc)));
    setFetchTask(createFetchTask(showColumnsDesc.getSchema()));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) Table(org.apache.hadoop.hive.ql.metadata.Table) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) ShowColumnsDesc(org.apache.hadoop.hive.ql.plan.ShowColumnsDesc)
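
Likewise, the child count here tracks the SHOW COLUMNS variants. A hedged sketch under the same Driver setup (t1, mydb, and the 'col*' pattern are placeholders; pattern support requires a Hive version with HIVE-18373):

static void showColumnsVariants(Driver driver) throws Exception {
    driver.run("SHOW COLUMNS IN t1");                     // case 1: table only
    driver.run("SHOW COLUMNS IN t1 LIKE 'col*'");         // case 2: table plus pattern
    driver.run("SHOW COLUMNS IN t1 IN mydb");             // case 3: table qualified by db
    driver.run("SHOW COLUMNS IN t1 IN mydb LIKE 'col*'"); // case 4: db plus pattern
}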

Example 64 with ReadEntity

use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

the class DDLSemanticAnalyzer method analyzeShowCreateDatabase.

private void analyzeShowCreateDatabase(ASTNode ast) throws SemanticException {
    String dbName = getUnescapedName((ASTNode) ast.getChild(0));
    ShowCreateDatabaseDesc showCreateDbDesc = new ShowCreateDatabaseDesc(dbName, ctx.getResFile().toString());
    Database database = getDatabase(dbName);
    inputs.add(new ReadEntity(database));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showCreateDbDesc)));
    setFetchTask(createFetchTask(showCreateDbDesc.getSchema()));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) Database(org.apache.hadoop.hive.metastore.api.Database) ShowCreateDatabaseDesc(org.apache.hadoop.hive.ql.plan.ShowCreateDatabaseDesc)
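
SHOW CREATE DATABASE has only one form, so the analyzer reads a single child. A hedged one-liner under the same setup (mydb is a placeholder):

static void showCreateDatabase(Driver driver) throws Exception {
    driver.run("SHOW CREATE DATABASE mydb");   // single child: the database name
}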

Example 65 with ReadEntity

use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

the class DDLSemanticAnalyzer method analyzeExchangePartition.

private void analyzeExchangePartition(String[] qualified, ASTNode ast) throws SemanticException {
    Table destTable = getTable(qualified);
    Table sourceTable = getTable(getUnescapedName((ASTNode) ast.getChild(1)));
    // Get the partition specs
    Map<String, String> partSpecs = getValidatedPartSpec(sourceTable, (ASTNode) ast.getChild(0), conf, false);
    validatePartitionValues(partSpecs);
    boolean sameColumns = MetaStoreUtils.compareFieldColumns(destTable.getAllCols(), sourceTable.getAllCols());
    boolean samePartitions = MetaStoreUtils.compareFieldColumns(destTable.getPartitionKeys(), sourceTable.getPartitionKeys());
    if (!sameColumns || !samePartitions) {
        throw new SemanticException(ErrorMsg.TABLES_INCOMPATIBLE_SCHEMAS.getMsg());
    }
    // Exchange is disallowed on transactional tables: files with write IDs may not be
    // valid, and it may affect snapshot isolation for on-going txns as well.
    if (AcidUtils.isTransactionalTable(sourceTable) || AcidUtils.isTransactionalTable(destTable)) {
        throw new SemanticException(ErrorMsg.EXCHANGE_PARTITION_NOT_ALLOWED_WITH_TRANSACTIONAL_TABLES.getMsg());
    }
    // check if source partition exists
    getPartitions(sourceTable, partSpecs, true);
    // Verify that the partitions specified are continuous
    // If a subpartition value is specified without specifying a partition's value
    // then we throw an exception
    int counter = isPartitionValueContinuous(sourceTable.getPartitionKeys(), partSpecs);
    if (counter < 0) {
        throw new SemanticException(ErrorMsg.PARTITION_VALUE_NOT_CONTINUOUS.getMsg(partSpecs.toString()));
    }
    List<Partition> destPartitions = null;
    try {
        destPartitions = getPartitions(destTable, partSpecs, true);
    } catch (SemanticException ex) {
        // A SemanticException is expected here, since this partition
        // should not be present.
    }
    if (destPartitions != null) {
        // If any destination partition is present then throw a Semantic Exception.
        throw new SemanticException(ErrorMsg.PARTITION_EXISTS.getMsg(destPartitions.toString()));
    }
    AlterTableExchangePartition alterTableExchangePartition = new AlterTableExchangePartition(sourceTable, destTable, partSpecs);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTableExchangePartition)));
    inputs.add(new ReadEntity(sourceTable));
    outputs.add(new WriteEntity(destTable, WriteType.DDL_SHARED));
}
Also used : Partition(org.apache.hadoop.hive.ql.metadata.Partition) AlterTableExchangePartition(org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition) Table(org.apache.hadoop.hive.ql.metadata.Table) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) NotNullConstraint(org.apache.hadoop.hive.ql.metadata.NotNullConstraint) DefaultConstraint(org.apache.hadoop.hive.ql.metadata.DefaultConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) AlterTableExchangePartition(org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)
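
A hedged sketch of the statement this analyzer handles, under the same Driver setup (t_dest, t_source, and the ds partition column are placeholders). Note how it lines up with the code above: the ALTER TABLE target is the destination, the WITH TABLE operand (ast.getChild(1)) is the source that must already hold the partition, and both tables must have identical column and partition schemas and be non-transactional:

static void exchangePartition(Driver driver) throws Exception {
    // Moves the ds='2018-01-01' partition out of t_source and into t_dest.
    driver.run("ALTER TABLE t_dest EXCHANGE PARTITION (ds='2018-01-01') WITH TABLE t_source");
}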

Aggregations

Classes co-occurring with ReadEntity across the indexed examples, with usage counts:

ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 75
Table (org.apache.hadoop.hive.ql.metadata.Table): 35
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 34
DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork): 24
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 18
Partition (org.apache.hadoop.hive.ql.metadata.Partition): 18
ArrayList (java.util.ArrayList): 15
Referenceable (org.apache.atlas.typesystem.Referenceable): 10
LinkedHashMap (java.util.LinkedHashMap): 9
AlterTableExchangePartition (org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition): 9
HashMap (java.util.HashMap): 8
Test (org.testng.annotations.Test): 8
Path (org.apache.hadoop.fs.Path): 7
FileNotFoundException (java.io.FileNotFoundException): 6
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 5
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 5
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 5
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 5
DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint): 5
InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException): 5