Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class TestHiveDecimalParse, method getColumnType.
private String getColumnType(String query) {
  Driver driver = createDriver();
  int rc = driver.compile(query);
  if (rc != 0) {
    // compilation failed, e.g. an invalid decimal precision/scale
    return null;
  }
  QueryPlan plan = driver.getPlan();
  // the CREATE TABLE statement compiles to a single root DDL task
  DDLTask task = (DDLTask) plan.getRootTasks().get(0);
  DDLWork work = task.getWork();
  CreateTableDesc spec = work.getCreateTblDesc();
  // return the declared type of the first column
  FieldSchema fs = spec.getCols().get(0);
  return fs.getType();
}
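For context, a minimal sketch of how such a helper might be exercised in the surrounding test class; the test name, query strings, and expected values below are illustrative assumptions, not lines from TestHiveDecimalParse:

  // Hypothetical usage; queries and expected values are assumptions.
  @Test
  public void testDecimalColumnType() throws Exception {
    // decimal(10,2) is within Hive's limits, so compilation succeeds
    Assert.assertEquals("decimal(10,2)",
        getColumnType("create table dec_test(d decimal(10,2))"));
    // a precision above Hive's maximum of 38 should fail to compile,
    // so the helper returns null
    Assert.assertNull(getColumnType("create table dec_test2(d decimal(99,2))"));
  }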
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class LoadPartitions, method alterSinglePartition.
private Task<? extends Serializable> alterSinglePartition(AddPartitionDesc desc, ReplicationSpec replicationSpec, Partition ptn) {
  // replace the existing partition metadata instead of failing on "already exists"
  desc.setReplaceMode(true);
  if ((replicationSpec != null) && replicationSpec.isInReplicationScope()) {
    desc.setReplicationSpec(replicationSpec);
  }
  // use the existing partition location
  desc.getPartition(0).setLocation(ptn.getLocation());
  return TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), desc), context.hiveConf);
}
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class LoadPartitions, method tasksForAddPartition.
/**
 * Returns the root task for adding a partition.
 */
private Task<? extends Serializable> tasksForAddPartition(Table table, AddPartitionDesc addPartitionDesc)
    throws MetaException, IOException, HiveException {
  AddPartitionDesc.OnePartitionDesc partSpec = addPartitionDesc.getPartition(0);
  Path sourceWarehousePartitionLocation = new Path(partSpec.getLocation());
  Path replicaWarehousePartitionLocation = locationOnReplicaWarehouse(table, partSpec);
  partSpec.setLocation(replicaWarehousePartitionLocation.toString());
  LOG.debug("adding dependent CopyWork/AddPart/MoveWork for partition "
      + partSpecToString(partSpec.getPartSpec()) + " with source location: " + partSpec.getLocation());
  Path tmpPath = PathUtils.getExternalTmpPath(replicaWarehousePartitionLocation, context.pathInfo);
  // copy the partition data to a temporary path on the replica warehouse
  Task<?> copyTask = ReplCopyTask.getLoadCopyTask(event.replicationSpec(), sourceWarehousePartitionLocation, tmpPath, context.hiveConf);
  // register the partition in the metastore
  Task<?> addPartTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), addPartitionDesc), context.hiveConf);
  // move the copied data from the temporary path into the partition location
  Task<?> movePartitionTask = movePartitionTask(table, partSpec, tmpPath);
  copyTask.addDependentTask(addPartTask);
  addPartTask.addDependentTask(movePartitionTask);
  return copyTask;
}
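The three tasks form a linear chain whose root is returned, so execution order is copy, then add-partition, then move. A hedged sketch of that wiring pattern in isolation follows; TaskChainSketch and chain are made-up names, and a non-empty task list is assumed:

  import java.io.Serializable;
  import java.util.List;
  import org.apache.hadoop.hive.ql.exec.Task;

  // Made-up helper isolating the linear-chain wiring used above; it mirrors
  // copyTask -> addPartTask -> movePartitionTask and assumes a non-empty list.
  final class TaskChainSketch {
    static Task<? extends Serializable> chain(List<Task<? extends Serializable>> tasks) {
      for (int i = 0; i + 1 < tasks.size(); i++) {
        // each successor runs only after its predecessor succeeds
        tasks.get(i).addDependentTask(tasks.get(i + 1));
      }
      // the first task is the root handed to the scheduler
      return tasks.get(0);
    }
  }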
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class LoadDatabase, method setOwnerInfoTask.
private Task<? extends Serializable> setOwnerInfoTask(Database dbObj) {
  // carry the source database's owner over to the replica; the trailing null
  // is the ReplicationSpec argument of the AlterDatabaseDesc constructor
  AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbObj.getName(), new PrincipalDesc(dbObj.getOwnerName(), dbObj.getOwnerType()), null);
  DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc);
  return TaskFactory.get(work, context.hiveConf);
}
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class LoadDatabase, method alterDbTask.
private static Task<? extends Serializable> alterDbTask(Database dbObj, HiveConf hiveConf) {
  // carry the source database's parameters over to the replica; the trailing
  // null is the ReplicationSpec argument
  AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbObj.getName(), dbObj.getParameters(), null);
  DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc);
  return TaskFactory.get(work, hiveConf);
}
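Across all of these call sites the recurring shape is the same: wrap a DDL descriptor in a DDLWork whose read- and write-entity sets are empty, then turn it into an executable task via TaskFactory. A minimal sketch of that shared pattern, assuming the TaskFactory.get(work, conf) overload used throughout this section; DdlTaskSketch and toTask are made-up names:

  import java.io.Serializable;
  import java.util.HashSet;
  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hadoop.hive.ql.exec.Task;
  import org.apache.hadoop.hive.ql.exec.TaskFactory;
  import org.apache.hadoop.hive.ql.hooks.ReadEntity;
  import org.apache.hadoop.hive.ql.hooks.WriteEntity;
  import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
  import org.apache.hadoop.hive.ql.plan.DDLWork;

  // Made-up helper showing the shared DDLWork/TaskFactory pattern; the
  // AlterDatabaseDesc stands in for any of the DDL descriptors used above.
  final class DdlTaskSketch {
    static Task<? extends Serializable> toTask(AlterDatabaseDesc desc, HiveConf conf) {
      // empty input/output entity sets, as in every snippet in this section
      DDLWork work = new DDLWork(new HashSet<ReadEntity>(), new HashSet<WriteEntity>(), desc);
      return TaskFactory.get(work, conf);
    }
  }

The empty sets mean no ReadEntity/WriteEntity bookkeeping is attached to these internally generated tasks, unlike DDL compiled from user queries, where the planner populates them.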