
Example 36 with TableName

Use of org.apache.hadoop.hive.common.TableName in project hive by apache.

From the class CreateMaterializedViewOperation, the method execute:

@Override
public int execute() throws HiveException {
    Table oldview = context.getDb().getTable(desc.getViewName(), false);
    if (oldview != null) {
        if (desc.getIfNotExists()) {
            return 0;
        }
        // The materialized view already exists and IF NOT EXISTS was not specified, so fail
        throw new HiveException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(desc.getViewName()));
    } else {
        // Create the new view
        Table tbl = desc.toTable(context.getConf());
        // We set the signature for the view if it is a materialized view
        if (tbl.isMaterializedView()) {
            Set<SourceTable> sourceTables = new HashSet<>(desc.getTablesUsed().size());
            for (TableName tableName : desc.getTablesUsed()) {
                sourceTables.add(context.getDb().getTable(tableName).createSourceTable());
            }
            MaterializedViewMetadata metadata = new MaterializedViewMetadata(MetaStoreUtils.getDefaultCatalog(context.getConf()), tbl.getDbName(), tbl.getTableName(), sourceTables, context.getConf().get(ValidTxnWriteIdList.VALID_TABLES_WRITEIDS_KEY));
            tbl.setMaterializedViewMetadata(metadata);
        }
        context.getDb().createTable(tbl, desc.getIfNotExists());
        DDLUtils.addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK), context.getWork().getOutputs());
        // set lineage info
        DataContainer dc = new DataContainer(tbl.getTTable());
        Map<String, String> tblProps = tbl.getTTable().getParameters();
        Path tlocation = null;
        try {
            Warehouse wh = new Warehouse(context.getConf());
            tlocation = wh.getDefaultTablePath(context.getDb().getDatabase(tbl.getDbName()), tbl.getTableName(), tblProps == null || !AcidUtils.isTablePropertyTransactional(tblProps));
        } catch (MetaException e) {
            throw new HiveException(e);
        }
        context.getQueryState().getLineageState().setLineage(tlocation, dc, tbl.getCols());
    }
    return 0;
}
Also used: Path (org.apache.hadoop.fs.Path), Warehouse (org.apache.hadoop.hive.metastore.Warehouse), Table (org.apache.hadoop.hive.ql.metadata.Table), SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), TableName (org.apache.hadoop.hive.common.TableName), DataContainer (org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer), MaterializedViewMetadata (org.apache.hadoop.hive.ql.metadata.MaterializedViewMetadata), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), HashSet (java.util.HashSet), MetaException (org.apache.hadoop.hive.metastore.api.MetaException)
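
The loop above resolves each TableName in desc.getTablesUsed() to a SourceTable snapshot before the metadata is written. Below is a minimal standalone sketch of the TableName side of that contract, assuming only hive-common on the classpath and that TableName implements value equality (the set here relies on that); the table names and the "hive"/"default" catalog and database literals are hypothetical.

import java.util.LinkedHashSet;
import java.util.Set;

import org.apache.hadoop.hive.common.TableName;

public class TablesUsedSketch {
    public static void main(String[] args) {
        // Names as they might arrive from a CREATE MATERIALIZED VIEW statement:
        // some fully qualified, some relying on the session's default database.
        String[] raw = {"sales.fact_orders", "dim_customer", "sales.fact_orders"};

        // Mirror the Set in the example: the duplicate collapses to one entry.
        Set<TableName> tablesUsed = new LinkedHashSet<>();
        for (String name : raw) {
            // "hive" and "default" stand in for the catalog and database the
            // operation would take from the session configuration.
            tablesUsed.add(TableName.fromString(name, "hive", "default"));
        }

        for (TableName tableName : tablesUsed) {
            System.out.println(tableName.getDb() + "." + tableName.getTable());
        }
        // Expected:
        // sales.fact_orders
        // default.dim_customer
    }
}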

Example 37 with TableName

Use of org.apache.hadoop.hive.common.TableName in project hive by apache.

From the class AlterMaterializedViewRewriteAnalyzer, the method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    TableName tableName = getQualifiedTableName((ASTNode) root.getChild(0));
    // Determine the value for the rewrite flag
    boolean rewriteEnable;
    switch(root.getChild(1).getType()) {
        case HiveParser.TOK_REWRITE_ENABLED:
            rewriteEnable = true;
            break;
        case HiveParser.TOK_REWRITE_DISABLED:
            rewriteEnable = false;
            break;
        default:
            throw new SemanticException("Invalid alter materialized view expression");
    }
    // The view name can be fully qualified or resolved against the default database
    Table materializedViewTable = getTable(tableName, true);
    // Rewriting can only be enabled if the materialized view uses only transactional (MM and ACID) tables
    if (rewriteEnable) {
        for (SourceTable sourceTable : materializedViewTable.getMVMetadata().getSourceTables()) {
            if (!AcidUtils.isTransactionalTable(sourceTable.getTable())) {
                throw new SemanticException("Automatic rewriting for materialized view cannot be enabled if the " + "materialized view uses non-transactional tables");
            }
        }
    }
    AlterMaterializedViewRewriteDesc desc = new AlterMaterializedViewRewriteDesc(tableName.getNotEmptyDbTable(), rewriteEnable);
    if (AcidUtils.isTransactionalTable(materializedViewTable)) {
        ddlDescWithWriteId = desc;
    }
    inputs.add(new ReadEntity(materializedViewTable));
    outputs.add(new WriteEntity(materializedViewTable, WriteEntity.WriteType.DDL_EXCLUSIVE));
    // Create task for alterMVRewriteDesc
    DDLWork work = new DDLWork(getInputs(), getOutputs(), desc);
    Task<?> targetTask = TaskFactory.get(work);
    // Create a task to update the rewrite flag as a dependent of the previous one
    MaterializedViewUpdateDesc materializedViewUpdateDesc = new MaterializedViewUpdateDesc(tableName.getNotEmptyDbTable(), rewriteEnable, !rewriteEnable, false);
    DDLWork updateDdlWork = new DDLWork(getInputs(), getOutputs(), materializedViewUpdateDesc);
    targetTask.addDependentTask(TaskFactory.get(updateDdlWork, conf));
    // Add root task
    rootTasks.add(targetTask);
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), TableName (org.apache.hadoop.hive.common.TableName), Table (org.apache.hadoop.hive.ql.metadata.Table), SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable), MaterializedViewUpdateDesc (org.apache.hadoop.hive.ql.ddl.view.materialized.update.MaterializedViewUpdateDesc), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
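
The validation loop above rejects ALTER MATERIALIZED VIEW ... ENABLE REWRITE when any source table is non-transactional. Here is a minimal sketch of that per-table check, assuming hive-exec on the classpath; the bare Thrift Table and its "transactional" parameter (the property AcidUtils inspects) are constructed by hand purely for illustration.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.io.AcidUtils;

public class TransactionalCheckSketch {
    public static void main(String[] args) {
        // A hand-built metastore table; in the analyzer this comes from
        // sourceTable.getTable() on the materialized view's metadata.
        Table table = new Table();
        Map<String, String> params = new HashMap<>();
        params.put("transactional", "true"); // hive_metastoreConstants.TABLE_IS_TRANSACTIONAL
        table.setParameters(params);

        // The same predicate the analyzer applies to every source table.
        System.out.println(AcidUtils.isTransactionalTable(table)); // true

        params.put("transactional", "false");
        System.out.println(AcidUtils.isTransactionalTable(table)); // false
    }
}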

Example 38 with TableName

Use of org.apache.hadoop.hive.common.TableName in project hive by apache.

From the class ReplLoadTask, the method createViewTask:

public static Task<?> createViewTask(MetaData metaData, String dbNameToLoadIn, HiveConf conf, String dumpDirectory, ReplicationMetricCollector metricCollector) throws SemanticException {
    Table table = new Table(metaData.getTable());
    String dbName = dbNameToLoadIn == null ? table.getDbName() : dbNameToLoadIn;
    TableName tableName = HiveTableName.ofNullable(table.getTableName(), dbName);
    String dbDotView = tableName.getNotEmptyDbTable();
    String viewOriginalText = table.getViewOriginalText();
    String viewExpandedText = table.getViewExpandedText();
    if (!dbName.equals(table.getDbName())) {
    // TODO: If the DB name doesn't match the metadata from the dump, the original and expanded
    // texts need to be rewritten using the new DB name. Currently they refer to the source database name.
    }
    CreateViewDesc desc = new CreateViewDesc(dbDotView, table.getCols(), null, table.getParameters(), table.getPartColNames(), false, false, viewOriginalText, viewExpandedText, table.getPartCols());
    desc.setReplicationSpec(metaData.getReplicationSpec());
    desc.setOwnerName(table.getOwner());
    return TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), desc, true, dumpDirectory, metricCollector), conf);
}
Also used: TableName (org.apache.hadoop.hive.common.TableName), HiveTableName (org.apache.hadoop.hive.ql.parse.HiveTableName), CreateViewDesc (org.apache.hadoop.hive.ql.ddl.view.create.CreateViewDesc), LoadTable (org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.table.LoadTable), Table (org.apache.hadoop.hive.ql.metadata.Table), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), HashSet (java.util.HashSet)
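
createViewTask relies on HiveTableName.ofNullable(tableName, dbName) to produce the db.view string for the CreateViewDesc. Below is a short sketch of that resolution, assuming hive-exec on the classpath and that the two-argument overload needs no active session state when the database is supplied explicitly; the names are hypothetical, and in a real deployment this runs inside ReplLoadTask with full session state.

import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.parse.HiveTableName;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class ReplViewNameSketch {
    public static void main(String[] args) throws SemanticException {
        // During REPL LOAD the target database may differ from the one the
        // view was dumped from; the second argument supplies that override.
        TableName tableName = HiveTableName.ofNullable("v_orders", "repl_target_db");
        String dbDotView = tableName.getNotEmptyDbTable();
        System.out.println(dbDotView); // expected: repl_target_db.v_orders
    }
}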

Example 39 with TableName

Use of org.apache.hadoop.hive.common.TableName in project hive by apache.

From the class BaseSemanticAnalyzer, the method getColumns:

/**
 * Get the list of FieldSchema out of the ASTNode.
 * Additionally, populate the primary key, foreign key, unique, not null, default and check constraint lists if any are present.
 */
public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase, TokenRewriteStream tokenRewriteStream, List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints, List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints, List<SQLCheckConstraint> checkConstraints, Configuration conf) throws SemanticException {
    List<FieldSchema> colList = new ArrayList<FieldSchema>();
    Tree parent = ast.getParent();
    for (int i = 0; i < ast.getChildCount(); i++) {
        FieldSchema col = new FieldSchema();
        ASTNode child = (ASTNode) ast.getChild(i);
        switch(child.getToken().getType()) {
            case HiveParser.TOK_UNIQUE:
                {
                    final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf));
                    // TODO CAT - for now always use the default catalog.  Eventually will want to see if
                    // the user specified a catalog
                    ConstraintsUtils.processUniqueConstraints(tName, child, uniqueConstraints);
                }
                break;
            case HiveParser.TOK_PRIMARY_KEY:
                {
                    if (!primaryKeys.isEmpty()) {
                        throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg("Cannot exist more than one primary key definition for the same table"));
                    }
                    final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0));
                    ConstraintsUtils.processPrimaryKeys(tName, child, primaryKeys);
                }
                break;
            case HiveParser.TOK_FOREIGN_KEY:
                {
                    final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0));
                    ConstraintsUtils.processForeignKeys(tName, child, foreignKeys);
                }
                break;
            case HiveParser.TOK_CHECK_CONSTRAINT:
                {
                    final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf));
                    // TODO CAT - for now always use the default catalog.  Eventually will want to see if
                    // the user specified a catalog
                    ConstraintsUtils.processCheckConstraints(tName, child, null, checkConstraints, null, tokenRewriteStream);
                }
                break;
            default:
                Tree grandChild = child.getChild(0);
                if (grandChild != null) {
                    String name = grandChild.getText();
                    if (lowerCase) {
                        name = name.toLowerCase();
                    }
                    checkColumnName(name);
                    // child 0 is the name of the column
                    col.setName(unescapeIdentifier(name));
                    // child 1 is the type of the column
                    ASTNode typeChild = (ASTNode) (child.getChild(1));
                    col.setType(getTypeStringFromAST(typeChild));
                    // child 2 is the optional comment of the column
                    // child 3 is the optional constraint
                    ASTNode constraintChild = null;
                    if (child.getChildCount() == 4) {
                        col.setComment(unescapeSQLString(child.getChild(2).getText()));
                        constraintChild = (ASTNode) child.getChild(3);
                    } else if (child.getChildCount() == 3 && ((ASTNode) child.getChild(2)).getToken().getType() == HiveParser.StringLiteral) {
                        col.setComment(unescapeSQLString(child.getChild(2).getText()));
                    } else if (child.getChildCount() == 3) {
                        constraintChild = (ASTNode) child.getChild(2);
                    }
                    if (constraintChild != null) {
                        final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf));
                        // Process column constraint
                        switch(constraintChild.getToken().getType()) {
                            case HiveParser.TOK_CHECK_CONSTRAINT:
                                ConstraintsUtils.processCheckConstraints(tName, constraintChild, ImmutableList.of(col.getName()), checkConstraints, typeChild, tokenRewriteStream);
                                break;
                            case HiveParser.TOK_DEFAULT_VALUE:
                                ConstraintsUtils.processDefaultConstraints(tName, constraintChild, ImmutableList.of(col.getName()), defaultConstraints, typeChild, tokenRewriteStream);
                                break;
                            case HiveParser.TOK_NOT_NULL:
                                ConstraintsUtils.processNotNullConstraints(tName, constraintChild, ImmutableList.of(col.getName()), notNullConstraints);
                                break;
                            case HiveParser.TOK_UNIQUE:
                                ConstraintsUtils.processUniqueConstraints(tName, constraintChild, ImmutableList.of(col.getName()), uniqueConstraints);
                                break;
                            case HiveParser.TOK_PRIMARY_KEY:
                                if (!primaryKeys.isEmpty()) {
                                    throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg("Cannot exist more than one primary key definition for the same table"));
                                }
                                ConstraintsUtils.processPrimaryKeys(tName, constraintChild, ImmutableList.of(col.getName()), primaryKeys);
                                break;
                            case HiveParser.TOK_FOREIGN_KEY:
                                ConstraintsUtils.processForeignKeys(tName, constraintChild, foreignKeys);
                                break;
                            default:
                                throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintChild.getToken().getText()));
                        }
                    }
                }
                colList.add(col);
                break;
        }
    }
    return colList;
}
Also used: TableName (org.apache.hadoop.hive.common.TableName), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), ArrayList (java.util.ArrayList), CommonTree (org.antlr.runtime.tree.CommonTree), Tree (org.antlr.runtime.tree.Tree), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)
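
In the default branch above, each column child of the AST becomes one FieldSchema carrying a name, a type string and an optional comment. As a sketch, this is the list getColumns would produce for a hypothetical CREATE TABLE t (id INT, name STRING COMMENT 'display name'), built here directly from the Thrift-generated FieldSchema to show the output shape:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema;

public class ColumnListSketch {
    public static void main(String[] args) {
        // No constraints are declared, so the primary key, foreign key and
        // other constraint lists passed to getColumns would stay empty.
        List<FieldSchema> colList = new ArrayList<>();
        colList.add(new FieldSchema("id", "int", null));
        colList.add(new FieldSchema("name", "string", "display name"));

        for (FieldSchema col : colList) {
            System.out.println(col.getName() + " : " + col.getType()
                + (col.getComment() == null ? "" : " // " + col.getComment()));
        }
        // id : int
        // name : string // display name
    }
}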

Example 40 with TableName

Use of org.apache.hadoop.hive.common.TableName in project hive by apache.

From the class HiveTableName, the method setFrom:

/**
 * Set a {@link Table} object's table and db names based on the provided string.
 * @param dbTable the dbtable string
 * @param table the table to update
 * @return the table
 * @throws SemanticException if the name cannot be parsed
 */
public static Table setFrom(String dbTable, Table table) throws SemanticException {
    TableName name = ofNullable(dbTable);
    table.setTableName(name.getTable());
    table.setDbName(name.getDb());
    return table;
}
Also used: TableName (org.apache.hadoop.hive.common.TableName)
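
A short usage sketch for setFrom, assuming hive-exec on the classpath; the placeholder Table (built with the two-argument constructor so its underlying Thrift object is initialized) and the qualified name "sales.fact_orders" are hypothetical, and in practice this runs inside an active Hive session.

import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.HiveTableName;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class SetFromSketch {
    public static void main(String[] args) throws SemanticException {
        // Placeholder names; setFrom overwrites both below.
        Table table = new Table("default", "placeholder");

        // "sales.fact_orders" parses into db = "sales", table = "fact_orders".
        HiveTableName.setFrom("sales.fact_orders", table);

        System.out.println(table.getDbName() + " / " + table.getTableName());
        // expected: sales / fact_orders
    }
}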

Aggregations

TableName (org.apache.hadoop.hive.common.TableName): 47
DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 22
ArrayList (java.util.ArrayList): 16
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 15
Table (org.apache.hadoop.hive.ql.metadata.Table): 14
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 11
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 11
HiveTableName (org.apache.hadoop.hive.ql.parse.HiveTableName): 10
HashMap (java.util.HashMap): 9
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 9
HashSet (java.util.HashSet): 8
IOException (java.io.IOException): 7
List (java.util.List): 7
Path (org.apache.hadoop.fs.Path): 7
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 7
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 7
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 7
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 7
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 7
SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable): 6