Use of org.apache.hadoop.hive.ql.plan.AlterIndexDesc in project hive by apache.
Class DDLSemanticAnalyzer, method analyzeAlterIndexProps:
private void analyzeAlterIndexProps(ASTNode ast) throws SemanticException {
  // AST layout: child 0 is the qualified table name, child 1 the index name,
  // and child 2 wraps the property list read via getProps().
  String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
  String indexName = unescapeIdentifier(ast.getChild(1).getText());
  HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(2)).getChild(0));

  // Build an ADDPROPS descriptor carrying the new properties and schedule it as a DDL task.
  AlterIndexDesc alterIdxDesc = new AlterIndexDesc(AlterIndexTypes.ADDPROPS);
  alterIdxDesc.setProps(mapProp);
  alterIdxDesc.setIndexName(indexName);
  alterIdxDesc.setBaseTableName(getDotName(qualified));

  rootTasks.add(TaskFactory.get(new DDLWork(alterIdxDesc), conf));
}
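For orientation, here is a minimal standalone sketch (not taken from the Hive source) that exercises the same AlterIndexDesc calls the method above uses. The index and table names are made up, and AlterIndexTypes is assumed to be the nested enum AlterIndexDesc.AlterIndexTypes.

import java.util.HashMap;

import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;

public class AlterIndexPropsSketch {
  public static void main(String[] args) {
    // Properties that the analyzer would normally read from the AST via getProps().
    HashMap<String, String> props = new HashMap<String, String>();
    props.put("creator", "etl-job");  // hypothetical property key/value

    // Same construction pattern as analyzeAlterIndexProps above.
    AlterIndexDesc desc = new AlterIndexDesc(AlterIndexTypes.ADDPROPS);
    desc.setProps(props);
    desc.setIndexName("src_index");        // hypothetical index name
    desc.setBaseTableName("default.src");  // hypothetical db.table

    System.out.println("Built ADDPROPS descriptor for src_index on default.src");
  }
}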
Use of org.apache.hadoop.hive.ql.plan.AlterIndexDesc in project hive by apache.
Class DDLSemanticAnalyzer, method analyzeAlterIndexRebuild:
private void analyzeAlterIndexRebuild(ASTNode ast) throws SemanticException {
  // AST layout: child 0 is the qualified table name, child 1 the index name,
  // and an optional child 2 carries the partition spec.
  String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
  String indexName = unescapeIdentifier(ast.getChild(1).getText());
  HashMap<String, String> partSpec = null;
  Tree part = ast.getChild(2);
  if (part != null) {
    partSpec = getValidatedPartSpec(getTable(qualified), (ASTNode) part, conf, false);
  }

  // Generate the map-reduce tasks that rebuild the index data.
  List<Task<?>> indexBuilder = getIndexBuilderMapRed(qualified, indexName, partSpec);
  rootTasks.addAll(indexBuilder);

  // Handle updating index timestamps: run an UPDATETIMESTAMP task after every builder task.
  AlterIndexDesc alterIdxDesc = new AlterIndexDesc(AlterIndexTypes.UPDATETIMESTAMP);
  alterIdxDesc.setIndexName(indexName);
  alterIdxDesc.setBaseTableName(getDotName(qualified));
  alterIdxDesc.setSpec(partSpec);

  Task<?> tsTask = TaskFactory.get(new DDLWork(alterIdxDesc), conf);
  for (Task<?> t : indexBuilder) {
    t.addDependentTask(tsTask);
  }
}
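As a minimal sketch of the dependency wiring at the end of analyzeAlterIndexRebuild (not part of the Hive source), the helper below chains a single UPDATETIMESTAMP task after every index builder task, so the timestamp update only runs once all rebuild tasks finish. The DDLWork(AlterIndexDesc) constructor and the TaskFactory.get(...) call are taken from the snippet above; chainTimestampUpdate is a hypothetical name, and the HiveConf parameter stands in for the analyzer's conf field.

import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
import org.apache.hadoop.hive.ql.plan.DDLWork;

public class IndexRebuildWiringSketch {
  // Chains one UPDATETIMESTAMP task after every index builder task, mirroring
  // the loop at the end of analyzeAlterIndexRebuild above.
  static Task<?> chainTimestampUpdate(List<Task<?>> indexBuilder,
                                      String indexName,
                                      String baseTableName,
                                      HiveConf conf) {
    AlterIndexDesc desc = new AlterIndexDesc(AlterIndexTypes.UPDATETIMESTAMP);
    desc.setIndexName(indexName);
    desc.setBaseTableName(baseTableName);

    Task<?> tsTask = TaskFactory.get(new DDLWork(desc), conf);
    for (Task<?> builder : indexBuilder) {
      builder.addDependentTask(tsTask);  // tsTask waits for each builder task
    }
    return tsTask;
  }
}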