Usage of org.apache.hadoop.hive.ql.plan.AlterTableDesc in the Apache Hive project: class DDLSemanticAnalyzer, method analyzeAlterTableProps.
/**
 * Analyzes ALTER TABLE .. SET/UNSET TBLPROPERTIES (including the
 * ALTER TABLE .. UPDATE STATISTICS variants) and queues the corresponding
 * DDL task.
 *
 * @param qualified  qualified table name parts (db, table)
 * @param partSpec   partition spec, or null for a table-level alter
 * @param ast        the ALTER TABLE AST node; child 0 holds the property list,
 *                   a non-null child 1 indicates IF EXISTS for UNSET
 * @param expectView true when the statement targets a view
 * @param isUnset    true for UNSET TBLPROPERTIES (drop), false for SET (add)
 * @throws SemanticException if a stats property value is not a parsable long,
 *         or an UPDATE STATISTICS command supplies a non-stats key
 */
private void analyzeAlterTableProps(String[] qualified, HashMap<String, String> partSpec,
    ASTNode ast, boolean expectView, boolean isUnset) throws SemanticException {
  String tableName = getDotName(qualified);
  HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(0)).getChild(0));
  EnvironmentContext environmentContext = null;
  // Validate the properties, especially stats-related ones. Stats might be
  // changed via ALTER TABLE .. UPDATE STATISTICS or ALTER TABLE .. SET
  // TBLPROPERTIES; only row_count and raw_data_size may be changed through
  // UPDATE STATISTICS.
  boolean changeStatsSucceeded = false;
  for (Entry<String, String> entry : mapProp.entrySet()) {
    if (entry.getKey().equals(StatsSetupConst.ROW_COUNT)
        || entry.getKey().equals(StatsSetupConst.RAW_DATA_SIZE)) {
      try {
        // Stats values must be numeric. Narrowed from a blanket catch of
        // Exception: Long.parseLong throws only NumberFormatException,
        // including for a null value.
        Long.parseLong(entry.getValue());
        changeStatsSucceeded = true;
      } catch (NumberFormatException e) {
        throw new SemanticException(
            "AlterTable " + entry.getKey() + " failed with value " + entry.getValue());
      }
    } else {
      // Any other key is invalid when the command is UPDATE STATISTICS.
      if (queryState.getCommandType().equals(
              HiveOperation.ALTERTABLE_UPDATETABLESTATS.getOperationName())
          || queryState.getCommandType().equals(
              HiveOperation.ALTERTABLE_UPDATEPARTSTATS.getOperationName())) {
        throw new SemanticException("AlterTable UpdateStats " + entry.getKey()
            + " failed because the only valid keys are " + StatsSetupConst.ROW_COUNT
            + " and " + StatsSetupConst.RAW_DATA_SIZE);
      }
    }
  }
  // Mark the stats as user-generated. Hoisted out of the loop: the original
  // re-created a fresh EnvironmentContext on every iteration once a stats key
  // had validated; the final state is identical.
  if (changeStatsSucceeded) {
    environmentContext = new EnvironmentContext();
    environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED, StatsSetupConst.USER);
  }
  AlterTableDesc alterTblDesc;
  if (isUnset) {
    alterTblDesc = new AlterTableDesc(AlterTableTypes.DROPPROPS, partSpec, expectView);
    // A second child on the AST means IF EXISTS was specified.
    if (ast.getChild(1) != null) {
      alterTblDesc.setDropIfExists(true);
    }
  } else {
    alterTblDesc = new AlterTableDesc(AlterTableTypes.ADDPROPS, partSpec, expectView);
  }
  alterTblDesc.setProps(mapProp);
  alterTblDesc.setEnvironmentContext(environmentContext);
  alterTblDesc.setOldName(tableName);
  addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
}
Usage of org.apache.hadoop.hive.ql.plan.AlterTableDesc in the Apache Hive project: class DDLSemanticAnalyzer, method analyzeAlterTableDropConstraint.
/**
 * Analyzes ALTER TABLE .. DROP CONSTRAINT and queues the DDL task.
 *
 * @param ast       the DROP CONSTRAINT AST node; child 0 is the constraint name
 * @param tableName fully qualified name of the table being altered
 * @throws SemanticException on analysis failure
 */
private void analyzeAlterTableDropConstraint(ASTNode ast, String tableName) throws SemanticException {
  // The constraint name arrives as the first child token; strip any quoting.
  final String constraintName = unescapeIdentifier(ast.getChild(0).getText());
  final AlterTableDesc dropConstraintDesc = new AlterTableDesc(tableName, constraintName);
  final DDLWork work = new DDLWork(getInputs(), getOutputs(), dropConstraintDesc);
  rootTasks.add(TaskFactory.get(work, conf));
}
Usage of org.apache.hadoop.hive.ql.plan.AlterTableDesc in the Apache Hive project: class DDLSemanticAnalyzer, method analyzeAlterTableRename.
/**
 * Analyzes ALTER TABLE .. RENAME TO and queues the DDL task.
 *
 * @param source     qualified name parts of the table being renamed
 * @param ast        the RENAME AST node; child 0 holds the target table name
 * @param expectView true when the statement targets a view
 * @throws SemanticException on analysis failure
 */
private void analyzeAlterTableRename(String[] source, ASTNode ast, boolean expectView) throws SemanticException {
  // Resolve both sides of the rename to dotted db.table form.
  final String sourceName = getDotName(source);
  final String targetName = getDotName(getQualifiedTableName((ASTNode) ast.getChild(0)));
  final AlterTableDesc renameDesc = new AlterTableDesc(sourceName, targetName, expectView);
  // Locks/inputs/outputs are taken against the source table (no partition spec).
  addInputsOutputsAlterTable(sourceName, null, renameDesc);
  final DDLWork work = new DDLWork(getInputs(), getOutputs(), renameDesc);
  rootTasks.add(TaskFactory.get(work, conf));
}
Usage of org.apache.hadoop.hive.ql.plan.AlterTableDesc in the Apache Hive project: class DDLSemanticAnalyzer, method analyzeAlterTableModifyCols.
/**
 * Analyzes ALTER TABLE .. ADD/REPLACE COLUMNS and queues the DDL task.
 *
 * @param qualified qualified table name parts (db, table)
 * @param ast       the AST node; child 0 holds the new column list, and an
 *                  optional TOK_CASCADE child requests cascading to partitions
 * @param partSpec  partition spec, or null for a table-level alter
 * @param alterType which column alteration to perform (e.g. ADDCOLS, REPLACECOLS)
 * @throws SemanticException on analysis failure
 */
private void analyzeAlterTableModifyCols(String[] qualified, ASTNode ast, HashMap<String, String> partSpec, AlterTableTypes alterType) throws SemanticException {
  final String tblName = getDotName(qualified);
  final List<FieldSchema> newCols = getColumns((ASTNode) ast.getChild(0));
  // CASCADE, when present, propagates the column change to all partitions.
  final boolean isCascade = ast.getFirstChildWithType(HiveParser.TOK_CASCADE) != null;
  final AlterTableDesc modifyColsDesc =
      new AlterTableDesc(tblName, partSpec, newCols, alterType, isCascade);
  addInputsOutputsAlterTable(tblName, partSpec, modifyColsDesc);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), modifyColsDesc), conf));
}
Usage of org.apache.hadoop.hive.ql.plan.AlterTableDesc in the Apache Hive project: class SemanticAnalyzer, method setStatsForNonNativeTable.
@SuppressWarnings("unchecked")
/**
 * Queues a DDL task that drops the COLUMN_STATS_ACCURATE property from a
 * non-native table, since accurate basic stats cannot be maintained for it.
 *
 * @param tab the non-native table whose stats marker is cleared
 * @throws SemanticException on analysis failure
 */
private void setStatsForNonNativeTable(Table tab) throws SemanticException {
  final String tableName =
      DDLSemanticAnalyzer.getDotName(new String[] { tab.getDbName(), tab.getTableName() });
  // A null value with dropIfExists removes the property if present.
  final HashMap<String, String> propsToDrop = new HashMap<>();
  propsToDrop.put(StatsSetupConst.COLUMN_STATS_ACCURATE, null);
  final AlterTableDesc dropPropsDesc =
      new AlterTableDesc(AlterTableTypes.DROPPROPS, null, false);
  dropPropsDesc.setOldName(tableName);
  dropPropsDesc.setProps(propsToDrop);
  dropPropsDesc.setDropIfExists(true);
  this.rootTasks.add(
      TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropPropsDesc), conf));
}
Aggregations