Example 56 with DDLWork

use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

the class DDLSemanticAnalyzer method analyzeDescFunction.

/**
 * Add the task according to the parsed command tree. This is used for the CLI
 * command "DESCRIBE FUNCTION [EXTENDED] <name>;".
 *
 * @param ast
 *          The parsed command tree.
 * @throws SemanticException
 *           Parsing failed
 */
private void analyzeDescFunction(ASTNode ast) throws SemanticException {
    String funcName;
    boolean isExtended;
    if (ast.getChildCount() == 1) {
        funcName = stripQuotes(ast.getChild(0).getText());
        isExtended = false;
    } else if (ast.getChildCount() == 2) {
        funcName = stripQuotes(ast.getChild(0).getText());
        isExtended = true;
    } else {
        throw new SemanticException("Unexpected Tokens at DESCRIBE FUNCTION");
    }
    DescFunctionDesc descFuncDesc = new DescFunctionDesc(ctx.getResFile(), funcName, isExtended);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descFuncDesc)));
    setFetchTask(createFetchTask(descFuncDesc.getSchema()));
}
Also used : DescFunctionDesc(org.apache.hadoop.hive.ql.plan.DescFunctionDesc) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork)
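
For reference, the two branches above map directly onto the two forms of the CLI command; a minimal sketch of the mapping, using the built-in upper function as an illustrative name:

// DESCRIBE FUNCTION upper;           -> ast.getChildCount() == 1, isExtended = false
// DESCRIBE FUNCTION EXTENDED upper;  -> ast.getChildCount() == 2, isExtended = true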

Example 57 with DDLWork

use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

the class ImportSemanticAnalyzer method alterSinglePartition.

private static Task<? extends Serializable> alterSinglePartition(URI fromURI, FileSystem fs, ImportTableDesc tblDesc, Table table, Warehouse wh, AddPartitionDesc addPartitionDesc, ReplicationSpec replicationSpec, org.apache.hadoop.hive.ql.metadata.Partition ptn, EximUtil.SemanticAnalyzerWrapperContext x) throws MetaException, IOException, HiveException {
    addPartitionDesc.setReplaceMode(true);
    if ((replicationSpec != null) && (replicationSpec.isInReplicationScope())) {
        addPartitionDesc.setReplicationSpec(replicationSpec);
    }
    AddPartitionDesc.OnePartitionDesc partSpec = addPartitionDesc.getPartition(0);
    if (ptn == null) {
        fixLocationInPartSpec(fs, tblDesc, table, wh, replicationSpec, partSpec, x);
    } else {
        // use existing location
        partSpec.setLocation(ptn.getLocation());
    }
    return TaskFactory.get(new DDLWork(x.getInputs(), x.getOutputs(), addPartitionDesc));
}
Also used : DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) AddPartitionDesc(org.apache.hadoop.hive.ql.plan.AddPartitionDesc)
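
A hedged sketch of how a caller might consume this helper: copyTask below is a hypothetical upstream task that stages the partition data, while the remaining arguments are the ones already in scope in the example:

// Hypothetical call site: build the alter-partition task and run it after the data copy,
// so the metadata swap only happens once the files are in place.
Task<? extends Serializable> alterTask = alterSinglePartition(
    fromURI, fs, tblDesc, table, wh, addPartitionDesc, replicationSpec, ptn, x);
copyTask.addDependentTask(alterTask);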

Example 58 with DDLWork

use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

the class ReplicationSemanticAnalyzer method dbUpdateReplStateTask.

private Task<? extends Serializable> dbUpdateReplStateTask(String dbName, String replState, Task<? extends Serializable> preCursor) {
    HashMap<String, String> mapProp = new HashMap<>();
    mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID.toString(), replState);
    AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbName, mapProp, new ReplicationSpec(replState, replState));
    Task<? extends Serializable> updateReplIdTask = TaskFactory.get(new DDLWork(inputs, outputs, alterDbDesc), conf);
    // Link the update repl state task with dependency collection task
    if (preCursor != null) {
        preCursor.addDependentTask(updateReplIdTask);
        LOG.debug("Added {}:{} as a precursor of {}:{}", preCursor.getClass(), preCursor.getId(), updateReplIdTask.getClass(), updateReplIdTask.getId());
    }
    return updateReplIdTask;
}
Also used : DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) HashMap(java.util.HashMap) AlterDatabaseDesc(org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc)
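
The preCursor argument is typically the dependency collection task mentioned in the comment; a hedged usage sketch, with the database name and replication state value purely illustrative:

// Hypothetical call site: advance the db-level repl state only after the barrier task has run.
Task<? extends Serializable> barrier = TaskFactory.get(new DependencyCollectionWork(), conf);
Task<? extends Serializable> update = dbUpdateReplStateTask("repl_db", "100", barrier);
// 'update' is now a dependent of 'barrier' and executes after it in the task DAG.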

Example 59 with DDLWork

use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

the class LoadDatabase method createDbTask.

private Task<? extends Serializable> createDbTask(Database dbObj) {
    CreateDatabaseDesc createDbDesc = new CreateDatabaseDesc();
    createDbDesc.setName(dbObj.getName());
    createDbDesc.setComment(dbObj.getDescription());
    /*
    Explicitly remove last.repl.id from the db object's parameters: loadTask runs multiple
    times, and explicit logic elsewhere prevents updates to tables once a db-level last repl
    id is set; the id is instead set by an AlterDatabaseTask created at the end of processing
    a database.
     */
    Map<String, String> parameters = new HashMap<>(dbObj.getParameters());
    parameters.remove(ReplicationSpec.KEY.CURR_STATE_ID.toString());
    createDbDesc.setDatabaseProperties(parameters);
    // note that we do not set location - for repl load, we want that auto-created.
    createDbDesc.setIfNotExists(false);
    // If the database already exists, we want that to be an error condition: REPL LOAD is not
    // intended to replace an existing db.
    // TODO: we might revisit this for create-drop-recreate cases; needs more thought.
    DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), createDbDesc);
    return TaskFactory.get(work, context.hiveConf);
}
Also used : DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) CreateDatabaseDesc(org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc) HashMap(java.util.HashMap)
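
Note that setIfNotExists(false) is what turns an already-existing database into a failure, matching the comment in the method. For contrast, a minimal hedged sketch of the idempotent configuration that REPL LOAD deliberately avoids:

// Hypothetical contrast: with ifNotExists set, an existing database would be tolerated
// (CREATE DATABASE IF NOT EXISTS semantics) instead of failing the load.
CreateDatabaseDesc tolerantDesc = new CreateDatabaseDesc();
tolerantDesc.setName(dbObj.getName());
tolerantDesc.setIfNotExists(true);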

Example 60 with DDLWork

use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

the class AuthorizationTestUtil method analyze.

/**
 * Create DDLWork from the given ast.
 * @param ast the parsed command tree
 * @param queryState the query state carrying the session configuration
 * @param db the Hive metadata handle
 * @return the DDLWork of the single root task produced by the analyzer
 * @throws Exception if parsing or analysis fails
 */
public static DDLWork analyze(ASTNode ast, QueryState queryState, Hive db) throws Exception {
    DDLSemanticAnalyzer analyzer = new DDLSemanticAnalyzer(queryState, db);
    SessionState.start(queryState.getConf());
    analyzer.analyze(ast, new Context(queryState.getConf()));
    List<Task<? extends Serializable>> rootTasks = analyzer.getRootTasks();
    return (DDLWork) inList(rootTasks).ofSize(1).get(0).getWork();
}
Also used : Context(org.apache.hadoop.hive.ql.Context) Task(org.apache.hadoop.hive.ql.exec.Task) Serializable(java.io.Serializable) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) DDLSemanticAnalyzer(org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer)
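
A hedged usage sketch, assuming the same-era ParseDriver whose parse(String) returns an ASTNode; the GRANT statement and the asserted descriptor are illustrative:

// Hypothetical test flow: parse a command string, analyze it, and inspect the DDLWork.
ParseDriver pd = new ParseDriver();
ASTNode ast = pd.parse("GRANT ALL ON TABLE src TO USER hive_test_user");
DDLWork work = AuthorizationTestUtil.analyze(ast, queryState, db);
// For a GRANT, work.getGrantDesc() would then carry the principals and privileges to assert on.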

Aggregations

DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork): 141
AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc): 26
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 24
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 24
Table (org.apache.hadoop.hive.ql.metadata.Table): 22
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 20
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 20
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 20
PrincipalDesc (org.apache.hadoop.hive.ql.plan.PrincipalDesc): 20
Test (org.junit.Test): 20
ArrayList (java.util.ArrayList): 19
DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint): 19
NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint): 19
HashMap (java.util.HashMap): 17
LinkedHashMap (java.util.LinkedHashMap): 16
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 14
Task (org.apache.hadoop.hive.ql.exec.Task): 11
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 11
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 10
Serializable (java.io.Serializable): 9