Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache: the analyzeCommand method of the AlterTableSetLocationAnalyzer class.
@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command)
    throws SemanticException {
  String newLocation = unescapeSQLString(command.getChild(0).getText());
  try {
    // Only make sure the host/port pair is valid; the status of the location does not matter
    FileSystem.get(new URI(newLocation), conf).getFileStatus(new Path(newLocation));
  } catch (FileNotFoundException e) {
    // Only check that the host/port pair is valid; whether the file exists or not does not matter
  } catch (Exception e) {
    throw new SemanticException(
        "Cannot connect to namenode, please check if host/port pair for " + newLocation + " is valid", e);
  }
  outputs.add(toWriteEntity(newLocation));
  AlterTableSetLocationDesc desc = new AlterTableSetLocationDesc(tableName, partitionSpec, newLocation);
  Table table = getTable(tableName);
  if (AcidUtils.isTransactionalTable(table)) {
    setAcidDdlDesc(desc);
  }
  addInputsOutputsAlterTable(tableName, partitionSpec, desc, AlterTableType.ALTERLOCATION, false);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
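The try/catch here only verifies that the namenode behind the new location is reachable; a FileNotFoundException is deliberately swallowed because the path itself need not exist yet. A minimal standalone sketch of the same check using the Hadoop FileSystem API (LocationCheck and validateLocation are illustrative names, not part of Hive):

import java.io.FileNotFoundException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

final class LocationCheck {
  // Returns normally when the filesystem answers (even with "not found"),
  // and throws only when the URI or the namenode host/port is invalid or unreachable.
  static void validateLocation(String location, Configuration conf) throws Exception {
    try {
      FileSystem.get(new URI(location), conf).getFileStatus(new Path(location));
    } catch (FileNotFoundException e) {
      // The path does not exist, but the namenode responded, so the location is acceptable.
    }
  }
}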
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache: the analyzeCommand method of the AlterTableSkewedByAnalyzer class.
@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command)
    throws SemanticException {
  Table table = getTable(tableName);
  validateAlterTableType(table, AlterTableType.SKEWED_BY, false);
  inputs.add(new ReadEntity(table));
  outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_EXCLUSIVE));
  DDLDescWithWriteId desc = null;
  if (command.getChildCount() == 0) {
    desc = new AlterTableNotSkewedDesc(tableName);
    setAcidDdlDesc(table, desc);
  } else {
    switch (((ASTNode) command.getChild(0)).getToken().getType()) {
    case HiveParser.TOK_TABLESKEWED:
      desc = handleAlterTableSkewedBy(command, tableName, table);
      setAcidDdlDesc(table, desc);
      break;
    case HiveParser.TOK_STOREDASDIRS:
      desc = handleAlterTableDisableStoredAsDirs(tableName, table);
      setAcidDdlDesc(table, desc);
      break;
    default:
      assert false;
    }
  }
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
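The branching above is driven purely by the token type of the command's first child: no children means the skew is being cleared, TOK_TABLESKEWED defines a new skew, and TOK_STOREDASDIRS disables stored-as-directories. A hedged sketch of that dispatch in isolation (SkewedByDispatch, Kind, and classify are made-up names for illustration):

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.SemanticException;

final class SkewedByDispatch {
  enum Kind { SKEWED_BY, NOT_SKEWED, DISABLE_STORED_AS_DIRS }

  // Mirrors the switch in analyzeCommand: no children means "NOT SKEWED",
  // otherwise the first child's token type selects the variant.
  static Kind classify(ASTNode command) throws SemanticException {
    if (command.getChildCount() == 0) {
      return Kind.NOT_SKEWED;
    }
    switch (((ASTNode) command.getChild(0)).getToken().getType()) {
    case HiveParser.TOK_TABLESKEWED:
      return Kind.SKEWED_BY;
    case HiveParser.TOK_STOREDASDIRS:
      return Kind.DISABLE_STORED_AS_DIRS;
    default:
      throw new SemanticException("Unexpected child token for ALTER TABLE ... SKEWED BY");
    }
  }
}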
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache: the analyzeCommand method of the AlterTableCompactAnalyzer class.
@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command)
    throws SemanticException {
  String type = unescapeSQLString(command.getChild(0).getText()).toLowerCase();
  try {
    CompactionType.valueOf(type.toUpperCase());
  } catch (IllegalArgumentException e) {
    throw new SemanticException(ErrorMsg.INVALID_COMPACTION_TYPE.getMsg());
  }
  Map<String, String> mapProp = null;
  boolean isBlocking = false;
  for (int i = 0; i < command.getChildCount(); i++) {
    switch (command.getChild(i).getType()) {
    case HiveParser.TOK_TABLEPROPERTIES:
      mapProp = getProps((ASTNode) (command.getChild(i)).getChild(0));
      break;
    case HiveParser.TOK_BLOCKING:
      isBlocking = true;
      break;
    default:
      break;
    }
  }
  AlterTableCompactDesc desc = new AlterTableCompactDesc(tableName, partitionSpec, type, isBlocking, mapProp);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
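The user-supplied compaction type is validated by round-tripping it through the enum's valueOf and mapping IllegalArgumentException to ErrorMsg.INVALID_COMPACTION_TYPE. A self-contained sketch of the same idea, using a stand-in enum rather than Hive's CompactionType:

final class CompactionTypeCheck {
  // Stand-in enum for illustration only; not Hive's CompactionType.
  enum CompactionKind { MINOR, MAJOR }

  // Accepts the type case-insensitively and rejects anything valueOf cannot map.
  static CompactionKind parse(String type) {
    try {
      return CompactionKind.valueOf(type.toUpperCase());
    } catch (IllegalArgumentException e) {
      throw new IllegalArgumentException("Invalid compaction type: " + type, e);
    }
  }
}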
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache: the compactAcidTable method of the AlterTableConcatenateAnalyzer class.
private void compactAcidTable(TableName tableName, Map<String, String> partitionSpec) throws SemanticException {
  boolean isBlocking = !HiveConf.getBoolVar(conf, ConfVars.TRANSACTIONAL_CONCATENATE_NOBLOCK, false);
  AlterTableCompactDesc desc = new AlterTableCompactDesc(tableName, partitionSpec, "MAJOR", isBlocking, null);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  setAcidDdlDesc(getTable(tableName), desc);
}
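All of these analyzers end the same way: the operation-specific desc is wrapped, together with the collected read and write entities, in a DDLWork, and TaskFactory turns that into the task added to rootTasks. A sketch of that shared pattern, assuming DDLDesc lives alongside DDLWork in org.apache.hadoop.hive.ql.ddl (DdlTaskScheduling and scheduleDdl are hypothetical names):

import java.util.Set;
import org.apache.hadoop.hive.ql.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;

final class DdlTaskScheduling {
  // Wrap the desc and the read/write entities in a DDLWork and let
  // TaskFactory produce the executable task, as the snippets above do.
  static Task<?> scheduleDdl(Set<ReadEntity> inputs, Set<WriteEntity> outputs, DDLDesc desc) {
    DDLWork work = new DDLWork(inputs, outputs, desc);
    return TaskFactory.get(work);
  }
}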
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache: the analyzeCommand method of the AlterTableSetSkewedLocationAnalyzer class.
@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command)
    throws SemanticException {
  List<Node> locationNodes = command.getChildren();
  if (locationNodes == null) {
    throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
  }
  Map<List<String>, String> locations = new HashMap<>();
  for (Node locationNode : locationNodes) {
    List<Node> locationListNodes = ((ASTNode) locationNode).getChildren();
    if (locationListNodes == null) {
      throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
    }
    for (Node locationListNode : locationListNodes) {
      List<Node> locationMapNodes = ((ASTNode) locationListNode).getChildren();
      if (locationMapNodes == null) {
        throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
      }
      for (Node locationMapNode : locationMapNodes) {
        List<Node> locationMapNodeMaps = ((ASTNode) locationMapNode).getChildren();
        if ((locationMapNodeMaps == null) || (locationMapNodeMaps.size() != 2)) {
          throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_MAP.getMsg());
        }
        List<String> keyList = new LinkedList<String>();
        ASTNode node = (ASTNode) locationMapNodeMaps.get(0);
        if (node.getToken().getType() == HiveParser.TOK_TABCOLVALUES) {
          keyList = SkewedTableUtils.getSkewedValuesFromASTNode(node);
        } else if (isConstant(node)) {
          keyList.add(PlanUtils.stripQuotes(node.getText()));
        } else {
          throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
        }
        String newLocation =
            PlanUtils.stripQuotes(unescapeSQLString(((ASTNode) locationMapNodeMaps.get(1)).getText()));
        validateSkewedLocationString(newLocation);
        locations.put(keyList, newLocation);
        outputs.add(toWriteEntity(newLocation));
      }
    }
  }
  AbstractAlterTableDesc desc = new AlterTableSetSkewedLocationDesc(tableName, partitionSpec, locations);
  setAcidDdlDesc(getTable(tableName), desc);
  addInputsOutputsAlterTable(tableName, partitionSpec, desc, AlterTableType.SET_SKEWED_LOCATION, false);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
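The nested loops above ultimately build a Map<List<String>, String> that maps each list of skewed column values to its directory; list keys work here because List defines value-based equals and hashCode. A tiny illustration with made-up values and paths:

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

final class SkewedLocationMapExample {
  public static void main(String[] args) {
    Map<List<String>, String> locations = new HashMap<>();

    List<String> key = new LinkedList<>();
    key.add("2021");  // hypothetical skewed value for the first skewed column
    locations.put(key, "hdfs://nn:8020/warehouse/t/skew_2021");  // hypothetical location

    // Lookups with an equal list of values find the same entry.
    System.out.println(locations.get(List.of("2021")));
  }
}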