Use of org.apache.hadoop.hive.ql.plan.CreateMacroDesc in project hive by apache.
The class FunctionTask, method execute.
@Override
public int execute(DriverContext driverContext) {
  CreateFunctionDesc createFunctionDesc = work.getCreateFunctionDesc();
  if (createFunctionDesc != null) {
    if (createFunctionDesc.isTemp()) {
      return createTemporaryFunction(createFunctionDesc);
    } else {
      try {
        if (createFunctionDesc.getReplicationSpec().isInReplicationScope()) {
          String[] qualifiedNameParts =
              FunctionUtils.getQualifiedFunctionNameParts(createFunctionDesc.getFunctionName());
          String dbName = qualifiedNameParts[0];
          String funcName = qualifiedNameParts[1];
          Map<String, String> dbProps = Hive.get().getDatabase(dbName).getParameters();
          if (!createFunctionDesc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
            // If the database is newer than the create event, then noop it.
            LOG.debug("FunctionTask: Create Function {} is skipped as database {} is newer than update",
                funcName, dbName);
            return 0;
          }
        }
        return createPermanentFunction(Hive.get(conf), createFunctionDesc);
      } catch (Exception e) {
        setException(e);
        LOG.error("Failed to create function", e);
        return 1;
      }
    }
  }
  DropFunctionDesc dropFunctionDesc = work.getDropFunctionDesc();
  if (dropFunctionDesc != null) {
    if (dropFunctionDesc.isTemp()) {
      return dropTemporaryFunction(dropFunctionDesc);
    } else {
      try {
        if (dropFunctionDesc.getReplicationSpec().isInReplicationScope()) {
          String[] qualifiedNameParts =
              FunctionUtils.getQualifiedFunctionNameParts(dropFunctionDesc.getFunctionName());
          String dbName = qualifiedNameParts[0];
          String funcName = qualifiedNameParts[1];
          Map<String, String> dbProps = Hive.get().getDatabase(dbName).getParameters();
          if (!dropFunctionDesc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
            // If the database is newer than the drop event, then noop it.
            LOG.debug("FunctionTask: Drop Function {} is skipped as database {} is newer than update",
                funcName, dbName);
            return 0;
          }
        }
        return dropPermanentFunction(Hive.get(conf), dropFunctionDesc);
      } catch (Exception e) {
        setException(e);
        LOG.error("Failed to drop function", e);
        return 1;
      }
    }
  }
  if (work.getReloadFunctionDesc() != null) {
    try {
      Hive.get().reloadFunctions();
    } catch (Exception e) {
      setException(e);
      LOG.error("Failed to reload functions", e);
      return 1;
    }
  }
  CreateMacroDesc createMacroDesc = work.getCreateMacroDesc();
  if (createMacroDesc != null) {
    return createMacro(createMacroDesc);
  }
  DropMacroDesc dropMacroDesc = work.getDropMacroDesc();
  if (dropMacroDesc != null) {
    return dropMacro(dropMacroDesc);
  }
  return 0;
}
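Both snippets meet at FunctionWork: the semantic analyzer stores exactly one descriptor in it, and execute() above dispatches on whichever field is non-null. Below is a minimal, illustrative sketch of that wiring, assuming the same Hive version as the code above (where TaskFactory.get takes only the work object); the class name, macro name, parameter list, and constant body are made up for the example, whereas a real body comes from genExprNodeDesc as the next snippet shows.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.plan.CreateMacroDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.FunctionWork;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class CreateMacroWiringSketch {
  public static void main(String[] args) {
    // One int parameter named "x"; names and types are illustrative.
    List<String> colNames = new ArrayList<String>();
    colNames.add("x");
    List<TypeInfo> colTypes = new ArrayList<TypeInfo>();
    colTypes.add(TypeInfoFactory.intTypeInfo);
    // A constant stands in for the macro body here; the real analyzer
    // builds it from the parsed expression via genExprNodeDesc.
    ExprNodeDesc body = new ExprNodeConstantDesc(1);
    CreateMacroDesc desc = new CreateMacroDesc("identity_macro", colNames, colTypes, body);
    // TaskFactory maps FunctionWork to FunctionTask, whose execute(...)
    // shown above ends in the branch that calls createMacro(desc).
    Task<FunctionWork> task = TaskFactory.get(new FunctionWork(desc));
    System.out.println(task.getClass().getSimpleName());
  }
}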
Use of org.apache.hadoop.hive.ql.plan.CreateMacroDesc in project hive by apache.
The class MacroSemanticAnalyzer, method analyzeCreateMacro.
@SuppressWarnings("unchecked")
private void analyzeCreateMacro(ASTNode ast) throws SemanticException {
  String functionName = ast.getChild(0).getText();
  // Temp macros are not allowed to have qualified names.
  if (FunctionUtils.isQualifiedFunctionName(functionName)) {
    throw new SemanticException("Temporary macro cannot be created with a qualified name.");
  }
  List<FieldSchema> arguments = BaseSemanticAnalyzer.getColumns((ASTNode) ast.getChild(1), true);
  boolean isNoArgumentMacro = arguments.size() == 0;
  RowResolver rowResolver = new RowResolver();
  ArrayList<String> macroColNames = new ArrayList<String>(arguments.size());
  ArrayList<TypeInfo> macroColTypes = new ArrayList<TypeInfo>(arguments.size());
  final Set<String> actualColumnNames = new HashSet<String>();
  if (!isNoArgumentMacro) {
    /*
     * Walk down expression to see which arguments are actually used.
     */
    Node expression = (Node) ast.getChild(2);
    PreOrderWalker walker = new PreOrderWalker(new Dispatcher() {
      @Override
      public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException {
        if (nd instanceof ASTNode) {
          ASTNode node = (ASTNode) nd;
          if (node.getType() == HiveParser.TOK_TABLE_OR_COL) {
            actualColumnNames.add(node.getChild(0).getText());
          }
        }
        return null;
      }
    });
    walker.startWalking(Collections.singletonList(expression), null);
  }
  for (FieldSchema argument : arguments) {
    TypeInfo colType = TypeInfoUtils.getTypeInfoFromTypeString(argument.getType());
    rowResolver.put("", argument.getName(), new ColumnInfo(argument.getName(), colType, "", false));
    macroColNames.add(argument.getName());
    macroColTypes.add(colType);
  }
  Set<String> expectedColumnNames = new LinkedHashSet<String>(macroColNames);
  if (!expectedColumnNames.equals(actualColumnNames)) {
    throw new SemanticException("Expected columns " + expectedColumnNames + " but found " + actualColumnNames);
  }
  if (expectedColumnNames.size() != macroColNames.size()) {
    throw new SemanticException("At least one parameter name was used more than once " + macroColNames);
  }
  SemanticAnalyzer sa = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED)
      ? new CalcitePlanner(queryState)
      : new SemanticAnalyzer(queryState);
  ExprNodeDesc body;
  if (isNoArgumentMacro) {
    body = sa.genExprNodeDesc((ASTNode) ast.getChild(1), rowResolver);
  } else {
    body = sa.genExprNodeDesc((ASTNode) ast.getChild(2), rowResolver);
  }
  CreateMacroDesc desc = new CreateMacroDesc(functionName, macroColNames, macroColTypes, body);
  rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
  addEntities();
}
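For reference, these are the statement shapes this method analyzes, per Hive's documented CREATE TEMPORARY MACRO syntax; child 0 of the TOK_CREATEMACRO node is the macro name, child 1 the typed parameter list, and child 2 the body expression:

CREATE TEMPORARY MACRO sigmoid (x DOUBLE) 1.0 / (1.0 + EXP(-x));
CREATE TEMPORARY MACRO fixed_number() 42;

In the zero-argument form the parser emits no parameter-list child, so the body sits at child 1 instead; that is why the isNoArgumentMacro branch calls genExprNodeDesc on ast.getChild(1).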