Example 16 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in the Apache Hive project.

Class ImportSemanticAnalyzer, method addSinglePartition.

private static Task<?> addSinglePartition(URI fromURI, FileSystem fs, ImportTableDesc tblDesc, Table table, Warehouse wh, AddPartitionDesc addPartitionDesc, ReplicationSpec replicationSpec, EximUtil.SemanticAnalyzerWrapperContext x) throws MetaException, IOException, HiveException {
    AddPartitionDesc.OnePartitionDesc partSpec = addPartitionDesc.getPartition(0);
    if (tblDesc.isExternal() && tblDesc.getLocation() == null) {
        x.getLOG().debug("Importing in-place: adding AddPart for partition " + partSpecToString(partSpec.getPartSpec()));
        // addPartitionDesc already has the right partition location
        Task<?> addPartTask = TaskFactory.get(new DDLWork(x.getInputs(), x.getOutputs(), addPartitionDesc), x.getConf());
        return addPartTask;
    } else {
        // capture the source location before fixLocationInPartSpec rewrites
        // the partition spec to point at the target warehouse location
        String srcLocation = partSpec.getLocation();
        fixLocationInPartSpec(fs, tblDesc, table, wh, replicationSpec, partSpec, x);
        x.getLOG().debug("adding dependent CopyWork/AddPart/MoveWork for partition " + partSpecToString(partSpec.getPartSpec()) + " with source location: " + srcLocation);
        Path tgtLocation = new Path(partSpec.getLocation());
        Path tmpPath = x.getCtx().getExternalTmpPath(tgtLocation);
        Task<?> copyTask = ReplCopyTask.getLoadCopyTask(replicationSpec, new Path(srcLocation), tmpPath, x.getConf());
        Task<?> addPartTask = TaskFactory.get(new DDLWork(x.getInputs(), x.getOutputs(), addPartitionDesc), x.getConf());
        LoadTableDesc loadTableWork = new LoadTableDesc(tmpPath, Utilities.getTableDesc(table), partSpec.getPartSpec(), true);
        loadTableWork.setInheritTableSpecs(false);
        Task<?> loadPartTask = TaskFactory.get(new MoveWork(x.getInputs(), x.getOutputs(), loadTableWork, null, false), x.getConf());
        // the Move may run only after both the staged copy and the
        // partition metadata creation have finished
        copyTask.addDependentTask(loadPartTask);
        addPartTask.addDependentTask(loadPartTask);
        x.getTasks().add(copyTask);
        return addPartTask;
    }
}
Also used: Path (org.apache.hadoop.fs.Path), MoveWork (org.apache.hadoop.hive.ql.plan.MoveWork), LoadTableDesc (org.apache.hadoop.hive.ql.plan.LoadTableDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), AddPartitionDesc (org.apache.hadoop.hive.ql.plan.AddPartitionDesc)
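
The dependency wiring at the end of the else branch is the part worth noting: loadPartTask is registered as a dependent of both copyTask and addPartTask, so the MoveWork runs only once the data copy and the partition-metadata DDL have both completed. Below is a minimal sketch of that two-parents-one-child pattern using the same TaskFactory/DDLWork API as above; SwitchDatabaseDesc is chosen only because it is trivially constructible, and all names are placeholders.

// Sketch: a task that must wait for two upstream tasks.
// Imports match the examples on this page (HiveConf, Task, TaskFactory,
// DDLWork, SwitchDatabaseDesc, ReadEntity, WriteEntity, HashSet).
HiveConf conf = new HiveConf();
HashSet<ReadEntity> inputs = new HashSet<>();
HashSet<WriteEntity> outputs = new HashSet<>();
Task<?> parentA = TaskFactory.get(new DDLWork(inputs, outputs, new SwitchDatabaseDesc("a")), conf);
Task<?> parentB = TaskFactory.get(new DDLWork(inputs, outputs, new SwitchDatabaseDesc("b")), conf);
Task<?> child = TaskFactory.get(new DDLWork(inputs, outputs, new SwitchDatabaseDesc("c")), conf);
// child executes only after BOTH parents finish, mirroring
// copyTask/addPartTask -> loadPartTask in addSinglePartition
parentA.addDependentTask(child);
parentB.addDependentTask(child);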

Example 17 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in the Apache Hive project.

Class DDLSemanticAnalyzer, method analyzeSwitchDatabase.

private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException {
    String dbName = unescapeIdentifier(ast.getChild(0).getText());
    Database database = getDatabase(dbName, true);
    ReadEntity dbReadEntity = new ReadEntity(database);
    dbReadEntity.noLockNeeded();
    inputs.add(dbReadEntity);
    SwitchDatabaseDesc switchDatabaseDesc = new SwitchDatabaseDesc(dbName);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), switchDatabaseDesc), conf));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), Database (org.apache.hadoop.hive.metastore.api.Database), SwitchDatabaseDesc (org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc)
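
Two details here are easy to miss: this is the handler behind a plain USE <db> statement, and the ReadEntity it records is explicitly marked noLockNeeded() because switching databases reads only metadata. A minimal sketch of that entity pattern follows, assuming a Thrift metastore Database object and an inputs set typed HashSet<ReadEntity> as in the analyzer; the database name is a placeholder.

// Sketch: record a lock-free, metadata-only read of a database.
Database database = new Database();
database.setName("sales"); // placeholder; the analyzer resolves it via getDatabase(dbName, true)
ReadEntity dbReadEntity = new ReadEntity(database);
dbReadEntity.noLockNeeded(); // USE only reads metadata, so no lock is acquired
inputs.add(dbReadEntity);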

Example 18 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in the Apache Hive project.

Class DDLSemanticAnalyzer, method handleAlterTableDisableStoredAsDirs.

/**
 * Handles ALTER TABLE <name> NOT STORED AS DIRECTORIES.
 *
 * @param tableName name of the table being altered
 * @param tab the table, whose existing skew definition is reused
 * @throws SemanticException if the table has no skewed columns or values
 */
private void handleAlterTableDisableStoredAsDirs(String tableName, Table tab) throws SemanticException {
    List<String> skewedColNames = tab.getSkewedColNames();
    List<List<String>> skewedColValues = tab.getSkewedColValues();
    if ((skewedColNames == null) || (skewedColNames.size() == 0) || (skewedColValues == null) || (skewedColValues.size() == 0)) {
        throw new SemanticException(ErrorMsg.ALTER_TBL_STOREDASDIR_NOT_SKEWED.getMsg(tableName));
    }
    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, false, skewedColNames, skewedColValues);
    alterTblDesc.setStoredAsSubDirectories(false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
}
Also used: AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), ArrayList (java.util.ArrayList), List (java.util.List), LinkedList (java.util.LinkedList)
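
Per its Javadoc, this handler backs ALTER TABLE <name> NOT STORED AS DIRECTORIES, and it rejects tables that were never skewed. A minimal sketch of the desc it builds follows; the table, column, and value names are placeholders, and the false flag matches the call in the handler above.

// Sketch: keep the skew definition but stop storing skewed values
// as subdirectories. Assumes java.util.Arrays and java.util.Collections.
List<String> skewedColNames = Arrays.asList("key");
List<List<String>> skewedColValues = Collections.singletonList(Arrays.asList("1"));
AlterTableDesc alterTblDesc = new AlterTableDesc("default.skewed_tab", false, skewedColNames, skewedColValues);
alterTblDesc.setStoredAsSubDirectories(false);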

Example 19 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in the Apache Hive project.

Class DDLSemanticAnalyzer, method analyzeAlterTableBucketNum.

private void analyzeAlterTableBucketNum(ASTNode ast, String tblName, HashMap<String, String> partSpec) throws SemanticException {
    Table tab = getTable(tblName, true);
    if (tab.getBucketCols() == null || tab.getBucketCols().isEmpty()) {
        throw new SemanticException(ErrorMsg.ALTER_BUCKETNUM_NONBUCKETIZED_TBL.getMsg());
    }
    validateAlterTableType(tab, AlterTableTypes.ALTERBUCKETNUM);
    inputs.add(new ReadEntity(tab));
    int bucketNum = Integer.parseInt(ast.getChild(0).getText());
    AlterTableDesc alterBucketNum = new AlterTableDesc(tblName, partSpec, bucketNum);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterBucketNum), conf));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc), Table (org.apache.hadoop.hive.ql.metadata.Table), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork)
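
This handler appears to back ALTER TABLE <name> [PARTITION (...)] INTO n BUCKETS: the new bucket count is parsed directly from the first AST child, and the table must already be bucketed. A minimal sketch of the resulting desc follows, with placeholder table name, partition spec, and count.

// Sketch: the (tblName, partSpec, bucketNum) constructor used above
// carries the new bucket count for the altered table or partition.
HashMap<String, String> partSpec = new HashMap<>();
partSpec.put("ds", "2017-01-01"); // placeholder partition
AlterTableDesc alterBucketNum = new AlterTableDesc("default.clustered_tab", partSpec, 8);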

Example 20 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in the Apache Hive project.

Class DDLSemanticAnalyzer, method analyzeAlterTableArchive.

private void analyzeAlterTableArchive(String[] qualified, CommonTree ast, boolean isUnArchive) throws SemanticException {
    if (!conf.getBoolVar(HiveConf.ConfVars.HIVEARCHIVEENABLED)) {
        throw new SemanticException(ErrorMsg.ARCHIVE_METHODS_DISABLED.getMsg());
    }
    Table tab = getTable(qualified);
    // partition name to value
    List<Map<String, String>> partSpecs = getPartitionSpecs(tab, ast);
    addTablePartsOutputs(tab, partSpecs, true, WriteEntity.WriteType.DDL_NO_LOCK);
    validateAlterTableType(tab, AlterTableTypes.ARCHIVE);
    inputs.add(new ReadEntity(tab));
    if (partSpecs.size() > 1) {
        throw new SemanticException(isUnArchive ? ErrorMsg.UNARCHIVE_ON_MULI_PARTS.getMsg() : ErrorMsg.ARCHIVE_ON_MULI_PARTS.getMsg());
    }
    if (partSpecs.size() == 0) {
        throw new SemanticException(ErrorMsg.ARCHIVE_ON_TABLE.getMsg());
    }
    Map<String, String> partSpec = partSpecs.get(0);
    try {
        isValidPrefixSpec(tab, partSpec);
    } catch (HiveException e) {
        throw new SemanticException(e.getMessage(), e);
    }
    AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc(getDotName(qualified), partSpec, (isUnArchive ? AlterTableTypes.UNARCHIVE : AlterTableTypes.ARCHIVE));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), archiveDesc), conf));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), Table (org.apache.hadoop.hive.ql.metadata.Table), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), AlterTableSimpleDesc (org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), Map (java.util.Map), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap)
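
The same handler serves both ALTER TABLE ... ARCHIVE PARTITION (...) and its UNARCHIVE counterpart; only the AlterTableTypes constant differs, and exactly one (possibly prefix) partition spec is accepted. A minimal sketch of the desc for the archive direction follows, with placeholder table and partition values.

// Sketch: archive one partition; swap in AlterTableTypes.UNARCHIVE to reverse.
Map<String, String> partSpec = new LinkedHashMap<>();
partSpec.put("ds", "2017-01-01"); // placeholder partition
AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc("default.archived_tab", partSpec, AlterTableTypes.ARCHIVE);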

Aggregations

DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork): 104
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 23
Table (org.apache.hadoop.hive.ql.metadata.Table): 20
AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc): 20
Test (org.junit.Test): 20
PrincipalDesc (org.apache.hadoop.hive.ql.plan.PrincipalDesc): 17
LinkedHashMap (java.util.LinkedHashMap): 15
HashMap (java.util.HashMap): 14
ArrayList (java.util.ArrayList): 13
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 12
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 12
PrivilegeDesc (org.apache.hadoop.hive.ql.plan.PrivilegeDesc): 9
RoleDDLDesc (org.apache.hadoop.hive.ql.plan.RoleDDLDesc): 9
Map (java.util.Map): 8
Path (org.apache.hadoop.fs.Path): 8
FileNotFoundException (java.io.FileNotFoundException): 7
LinkedList (java.util.LinkedList): 7
List (java.util.List): 7
GrantRevokeRoleDDL (org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL): 7
ShowGrantDesc (org.apache.hadoop.hive.ql.plan.ShowGrantDesc): 7