Example 76 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

In class TestHiveDecimalParse, method getColumnType:

private String getColumnType(String query) {
    Driver driver = createDriver();
    // Compile the query; a non-zero return code means compilation failed.
    int rc = driver.compile(query);
    if (rc != 0) {
        return null;
    }
    // Walk the compiled plan: the root task of a CREATE TABLE statement is a
    // DDLTask whose DDLWork carries the CreateTableDesc.
    QueryPlan plan = driver.getPlan();
    DDLTask task = (DDLTask) plan.getRootTasks().get(0);
    DDLWork work = task.getWork();
    CreateTableDesc spec = work.getCreateTblDesc();
    // Return the declared type of the first column.
    FieldSchema fs = spec.getCols().get(0);
    return fs.getType();
}
Also used: CreateTableDesc (org.apache.hadoop.hive.ql.plan.CreateTableDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), DDLTask (org.apache.hadoop.hive.ql.exec.DDLTask), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), Driver (org.apache.hadoop.hive.ql.Driver), QueryPlan (org.apache.hadoop.hive.ql.QueryPlan)
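
A hedged sketch of how a test inside TestHiveDecimalParse might drive this helper; the query and the expected type string below are illustrative, not taken from the Hive source:

@Test
public void testDecimalDeclaredType() {
    // Illustrative DDL: declare a decimal column with explicit precision/scale.
    String query = "create table dec_test (d decimal(10,2))";
    String colType = getColumnType(query);
    // Assumption: the planner preserves the declared precision/scale in the type string.
    Assert.assertEquals("decimal(10,2)", colType);
}

This relies only on JUnit (org.junit.Test, org.junit.Assert), which the Aggregations list below shows is already in scope for these tests.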

Example 77 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

In class LoadPartitions, method alterSinglePartition:

private Task<? extends Serializable> alterSinglePartition(AddPartitionDesc desc, ReplicationSpec replicationSpec, Partition ptn) {
    // Replace mode turns the ADD PARTITION into an alter of the existing partition.
    desc.setReplaceMode(true);
    if ((replicationSpec != null) && (replicationSpec.isInReplicationScope())) {
        desc.setReplicationSpec(replicationSpec);
    }
    // Keep the partition's existing location rather than computing a new one.
    desc.getPartition(0).setLocation(ptn.getLocation());
    return TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), desc), context.hiveConf);
}
Also used: DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), HashSet (java.util.HashSet)
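
The wiring above, an empty ReadEntity set, an empty WriteEntity set, and a descriptor handed to TaskFactory, recurs in every example below. A minimal self-contained sketch of just that pattern (the class and method names here are illustrative):

import java.io.Serializable;
import java.util.HashSet;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
import org.apache.hadoop.hive.ql.plan.DDLWork;

final class DdlTaskSketch {
    // Wrap a DDL descriptor in a DDLWork and obtain an executable Task.
    // The empty entity sets mean no lock inputs/outputs are declared for the task.
    static Task<? extends Serializable> toTask(AddPartitionDesc desc, HiveConf conf) {
        DDLWork work = new DDLWork(new HashSet<ReadEntity>(), new HashSet<WriteEntity>(), desc);
        return TaskFactory.get(work, conf);
    }
}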

Example 78 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

In class LoadPartitions, method tasksForAddPartition:

/**
 * Returns the root task for adding a partition.
 */
private Task<? extends Serializable> tasksForAddPartition(Table table, AddPartitionDesc addPartitionDesc) throws MetaException, IOException, HiveException {
    AddPartitionDesc.OnePartitionDesc partSpec = addPartitionDesc.getPartition(0);
    Path sourceWarehousePartitionLocation = new Path(partSpec.getLocation());
    // Rewrite the partition location to point at the replica warehouse.
    Path replicaWarehousePartitionLocation = locationOnReplicaWarehouse(table, partSpec);
    partSpec.setLocation(replicaWarehousePartitionLocation.toString());
    LOG.debug("adding dependent CopyWork/AddPart/MoveWork for partition " + partSpecToString(partSpec.getPartSpec()) + " with source location: " + partSpec.getLocation());
    // Stage data into a temporary path, then chain: copy -> add partition -> move.
    Path tmpPath = PathUtils.getExternalTmpPath(replicaWarehousePartitionLocation, context.pathInfo);
    Task<?> copyTask = ReplCopyTask.getLoadCopyTask(event.replicationSpec(), sourceWarehousePartitionLocation, tmpPath, context.hiveConf);
    Task<?> addPartTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), addPartitionDesc), context.hiveConf);
    Task<?> movePartitionTask = movePartitionTask(table, partSpec, tmpPath);
    copyTask.addDependentTask(addPartTask);
    addPartTask.addDependentTask(movePartitionTask);
    // The copy task is the root; its dependents run only after it completes.
    return copyTask;
}
Also used: Path (org.apache.hadoop.fs.Path), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), AddPartitionDesc (org.apache.hadoop.hive.ql.plan.AddPartitionDesc), HashSet (java.util.HashSet)
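
Only the root of the chain needs to be handed back to the caller. A hedged sketch of how the returned task might be consumed; the tracker variable and its addTask call are assumptions for illustration, not confirmed Hive API:

// copy -> addPart -> move: each dependent runs only after its parent succeeds.
Task<? extends Serializable> root = tasksForAddPartition(table, addPartitionDesc);
// Registering just the root is enough; addPartTask and movePartitionTask
// execute as its transitive dependents.
tracker.addTask(root); // hypothetical task-collection call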

Example 79 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

In class LoadDatabase, method setOwnerInfoTask:

private Task<? extends Serializable> setOwnerInfoTask(Database dbObj) {
    // Carry over the source database's owner (principal name and type).
    AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbObj.getName(), new PrincipalDesc(dbObj.getOwnerName(), dbObj.getOwnerType()), null);
    DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc);
    return TaskFactory.get(work, context.hiveConf);
}
Also used: PrincipalDesc (org.apache.hadoop.hive.ql.plan.PrincipalDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), AlterDatabaseDesc (org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc)
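
For context, a hedged sketch of the metastore Database object these LoadDatabase helpers read from; all field values here are illustrative:

import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.PrincipalType;

static Database sampleDatabase() {
    Database dbObj = new Database();
    dbObj.setName("sales");                      // illustrative database name
    dbObj.setOwnerName("etl_user");              // consumed by setOwnerInfoTask
    dbObj.setOwnerType(PrincipalType.USER);      // USER, ROLE, or GROUP
    dbObj.putToParameters("repl.last.id", "42"); // consumed by alterDbTask below
    return dbObj;
}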

Example 80 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

In class LoadDatabase, method alterDbTask:

private static Task<? extends Serializable> alterDbTask(Database dbObj, HiveConf hiveConf) {
    // Sync the replica database's parameters (e.g. replication state) with the source.
    AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbObj.getName(), dbObj.getParameters(), null);
    DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc);
    return TaskFactory.get(work, hiveConf);
}
Also used: DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), AlterDatabaseDesc (org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc)
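
These two LoadDatabase helpers produce independent tasks. A hedged sketch of how a loader could chain them onto a database-creation task; createDbTask is a hypothetical name used for illustration, not confirmed from the Hive source:

Task<? extends Serializable> createDb = createDbTask(dbObj);     // hypothetical creator task
createDb.addDependentTask(alterDbTask(dbObj, context.hiveConf)); // then sync db parameters
createDb.addDependentTask(setOwnerInfoTask(dbObj));              // then sync ownership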

Aggregations

DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork): 141
AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc): 26
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 24
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 24
Table (org.apache.hadoop.hive.ql.metadata.Table): 22
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 20
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 20
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 20
PrincipalDesc (org.apache.hadoop.hive.ql.plan.PrincipalDesc): 20
Test (org.junit.Test): 20
ArrayList (java.util.ArrayList): 19
DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint): 19
NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint): 19
HashMap (java.util.HashMap): 17
LinkedHashMap (java.util.LinkedHashMap): 16
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 14
Task (org.apache.hadoop.hive.ql.exec.Task): 11
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 11
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 10
Serializable (java.io.Serializable): 9