Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.
From class DDLSemanticAnalyzer, method analyzeAlterTableSkewedLocation:
/**
 * Analyze alter table's skewed location
 *
 * @param ast
 * @param tableName
 * @param partSpec
 * @throws SemanticException
 */
private void analyzeAlterTableSkewedLocation(ASTNode ast, String tableName, HashMap<String, String> partSpec) throws SemanticException {
  /**
   * Throw an error if the user tries to use the DDL with
   * hive.internal.ddl.list.bucketing.enable set to false.
   */
  HiveConf hiveConf = SessionState.get().getConf();
  /**
   * Retrieve mappings from parser
   */
  Map<List<String>, String> locations = new HashMap<List<String>, String>();
  ArrayList<Node> locNodes = ast.getChildren();
  if (null == locNodes) {
    throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
  } else {
    for (Node locNode : locNodes) {
      // TOK_SKEWED_LOCATIONS
      ASTNode locAstNode = (ASTNode) locNode;
      ArrayList<Node> locListNodes = locAstNode.getChildren();
      if (null == locListNodes) {
        throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
      } else {
        for (Node locListNode : locListNodes) {
          // TOK_SKEWED_LOCATION_LIST
          ASTNode locListAstNode = (ASTNode) locListNode;
          ArrayList<Node> locMapNodes = locListAstNode.getChildren();
          if (null == locMapNodes) {
            throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
          } else {
            for (Node locMapNode : locMapNodes) {
              // TOK_SKEWED_LOCATION_MAP
              ASTNode locMapAstNode = (ASTNode) locMapNode;
              ArrayList<Node> locMapAstNodeMaps = locMapAstNode.getChildren();
              if ((null == locMapAstNodeMaps) || (locMapAstNodeMaps.size() != 2)) {
                throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_MAP.getMsg());
              } else {
                List<String> keyList = new LinkedList<String>();
                ASTNode node = (ASTNode) locMapAstNodeMaps.get(0);
                if (node.getToken().getType() == HiveParser.TOK_TABCOLVALUES) {
                  keyList = getSkewedValuesFromASTNode(node);
                } else if (isConstant(node)) {
                  keyList.add(PlanUtils.stripQuotes(node.getText()));
                } else {
                  throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
                }
                String newLocation = PlanUtils.stripQuotes(unescapeSQLString(((ASTNode) locMapAstNodeMaps.get(1)).getText()));
                validateSkewedLocationString(newLocation);
                locations.put(keyList, newLocation);
                addLocationToOutputs(newLocation);
              }
            }
          }
        }
      }
    }
  }
  AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, locations, partSpec);
  addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
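For orientation, here is a minimal, self-contained sketch of the shape of the locations map this analyzer builds; the skewed values and paths are invented for illustration and correspond to a statement such as ALTER TABLE t SET SKEWED LOCATION (('v1','v2')='/skewed/loc1', 'v3'='/skewed/loc2').

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SkewedLocationMapSketch {
  public static void main(String[] args) {
    // Keys are lists of skewed column values, values are the directories they map to.
    Map<List<String>, String> locations = new HashMap<List<String>, String>();
    // TOK_TABCOLVALUES branch: a tuple of skewed values maps to one location.
    locations.put(Arrays.asList("v1", "v2"), "/skewed/loc1");
    // Single-constant branch: one skewed value maps to one location.
    locations.put(Arrays.asList("v3"), "/skewed/loc2");
    System.out.println(locations);
  }
}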
Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.
From class DDLSemanticAnalyzer, method analyzeAlterTableLocation:
private void analyzeAlterTableLocation(ASTNode ast, String tableName, HashMap<String, String> partSpec) throws SemanticException {
  String newLocation = unescapeSQLString(ast.getChild(0).getText());
  try {
    // Only verify that the host/port pair is valid; the status of the location itself does not matter.
    FileSystem.get(new URI(newLocation), conf).getFileStatus(new Path(newLocation));
  } catch (FileNotFoundException e) {
    // The host/port pair is valid; whether the file exists or not does not matter.
  } catch (Exception e) {
    throw new SemanticException("Cannot connect to namenode, please check if host/port pair for " + newLocation + " is valid", e);
  }
  addLocationToOutputs(newLocation);
  AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, newLocation, partSpec);
  addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
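The reachability check above can be isolated into a small standalone sketch; the method name and the way the Configuration is supplied are assumptions made for this illustration, while the FileSystem and Path calls mirror the snippet.

import java.io.FileNotFoundException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class LocationReachabilitySketch {
  // Returns normally when the scheme/authority resolve; a missing path is acceptable.
  static void checkReachable(String location, Configuration conf) throws Exception {
    try {
      FileSystem.get(new URI(location), conf).getFileStatus(new Path(location));
    } catch (FileNotFoundException e) {
      // The filesystem answered, so host/port are valid; the path just does not exist yet.
    }
  }

  public static void main(String[] args) throws Exception {
    checkReachable("file:///tmp", new Configuration());
  }
}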
Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.
From class DDLSemanticAnalyzer, method analyzeAlterTableSerde:
private void analyzeAlterTableSerde(ASTNode ast, String tableName, HashMap<String, String> partSpec) throws SemanticException {
  String serdeName = unescapeSQLString(ast.getChild(0).getText());
  AlterTableDesc alterTblDesc = new AlterTableDesc(AlterTableTypes.ADDSERDE);
  if (ast.getChildCount() > 1) {
    HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(1)).getChild(0));
    alterTblDesc.setProps(mapProp);
  }
  alterTblDesc.setOldName(tableName);
  alterTblDesc.setSerdeName(serdeName);
  alterTblDesc.setPartSpec(partSpec);
  addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
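As a hedged sketch of the descriptor this path produces, the snippet below mirrors the constructor and setters used above; the table name, SerDe class, and property are placeholders chosen for illustration, and an unpartitioned table is assumed.

import java.util.HashMap;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;

public class SetSerdeDescSketch {
  public static void main(String[] args) throws Exception {
    // Roughly what analyzeAlterTableSerde builds for a statement like:
    //   ALTER TABLE t SET SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
    //   WITH SERDEPROPERTIES ('separatorChar' = ',');
    HashMap<String, String> props = new HashMap<String, String>();
    props.put("separatorChar", ",");
    AlterTableDesc desc = new AlterTableDesc(AlterTableTypes.ADDSERDE);
    desc.setProps(props);
    desc.setOldName("t");                 // placeholder table name
    desc.setSerdeName("org.apache.hadoop.hive.serde2.OpenCSVSerde");
    desc.setPartSpec(null);               // unpartitioned table in this sketch
  }
}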
Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.
From class DDLSemanticAnalyzer, method handleAlterTableSkewedBy:
/**
 * Process "alter table <name> skewed by .. on .. stored as directories".
 *
 * @param ast
 * @param tableName
 * @param tab
 * @throws SemanticException
 */
private void handleAlterTableSkewedBy(ASTNode ast, String tableName, Table tab) throws SemanticException {
  List<String> skewedColNames = new ArrayList<String>();
  List<List<String>> skewedValues = new ArrayList<List<String>>();
  /* skewed column names. */
  ASTNode skewedNode = (ASTNode) ast.getChild(0);
  skewedColNames = analyzeSkewedTablDDLColNames(skewedColNames, skewedNode);
  /* skewed value. */
  analyzeDDLSkewedValues(skewedValues, skewedNode);
  // stored as directories
  boolean storedAsDirs = analyzeStoredAdDirs(skewedNode);
  AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, false, skewedColNames, skewedValues);
  alterTblDesc.setStoredAsSubDirectories(storedAsDirs);
  /**
   * Validate information about skewed table
   */
  alterTblDesc.setTable(tab);
  alterTblDesc.validate();
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
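To make the intermediate structures concrete, here is an illustrative sketch (column names and values are invented) of the shape of what the analyzer passes to AlterTableDesc for ALTER TABLE t SKEWED BY (c1, c2) ON (('x', '10'), ('y', '20')) STORED AS DIRECTORIES.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class SkewedByShapeSketch {
  public static void main(String[] args) {
    List<String> skewedColNames = Arrays.asList("c1", "c2");    // SKEWED BY (c1, c2)
    List<List<String>> skewedValues = new ArrayList<List<String>>();
    skewedValues.add(Arrays.asList("x", "10"));                 // ON (('x','10'), ('y','20'))
    skewedValues.add(Arrays.asList("y", "20"));
    boolean storedAsDirs = true;                                // STORED AS DIRECTORIES present
    System.out.println(skewedColNames + " -> " + skewedValues + ", storedAsDirs=" + storedAsDirs);
  }
}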
Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache.
From class DDLSemanticAnalyzer, method analyzeAlterTableSerdeProps:
private void analyzeAlterTableSerdeProps(ASTNode ast, String tableName, HashMap<String, String> partSpec) throws SemanticException {
  HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(0)).getChild(0));
  AlterTableDesc alterTblDesc = new AlterTableDesc(AlterTableTypes.ADDSERDEPROPS);
  alterTblDesc.setProps(mapProp);
  alterTblDesc.setOldName(tableName);
  alterTblDesc.setPartSpec(partSpec);
  addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
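A brief sketch of the descriptor built for a statement like ALTER TABLE t SET SERDEPROPERTIES ('field.delim' = ','); the table name, property key, and value are placeholders, and the constructor and setters mirror the snippet above.

import java.util.HashMap;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;

public class SetSerdePropsDescSketch {
  public static void main(String[] args) throws Exception {
    HashMap<String, String> props = new HashMap<String, String>();
    props.put("field.delim", ",");         // placeholder SerDe property
    AlterTableDesc desc = new AlterTableDesc(AlterTableTypes.ADDSERDEPROPS);
    desc.setProps(props);
    desc.setOldName("t");                  // placeholder table name
    desc.setPartSpec(null);                // no partition spec in this sketch
  }
}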