Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.
The class ExplainSQRewriteTask, method addRewrites.
void addRewrites(TokenRewriteStream stream, QBSubQuery sq, String program,
    PrintStream out, String qbAlias, boolean isWhere, StringBuilder addedJoins) {
  ASTNode sqNode = sq.getOriginalSubQueryASTForRewrite();
  ASTNode tokQry = getQueryASTNode(sqNode);
  ASTNode tokInsert = (ASTNode) tokQry.getChild(1);
  ASTNode tokWhere = null;
  for (int i = 0; i < tokInsert.getChildCount(); i++) {
    if (tokInsert.getChild(i).getType() == HiveParser.TOK_WHERE) {
      tokWhere = (ASTNode) tokInsert.getChild(i);
      break;
    }
  }
  SubQueryDiagnostic.QBSubQueryRewrite diag = sq.getDiagnostic();
  String sqStr = diag.getRewrittenQuery();
  String joinCond = diag.getJoiningCondition();
  /*
   * The SubQuery predicate has been hoisted as a Join. The SubQuery predicate
   * is replaced by a 'true' predicate in the outer QB's where/having clause.
   */
  stream.replace(program, sqNode.getTokenStartIndex(), sqNode.getTokenStopIndex(), "1 = 1");
  String sqJoin = " " + getJoinKeyWord(sq) + " " + sqStr + " " + joinCond;
  addedJoins.append(" ").append(sqJoin);
  String postJoinCond = diag.getOuterQueryPostJoinCond();
  if (postJoinCond != null) {
    // a post-join condition implies the query has a WHERE clause to append to
    stream.insertAfter(program, tokWhere.getTokenStopIndex(), " and " + postJoinCond);
  }
  String qualifier = isWhere ? "Where Clause " : "Having Clause ";
  if (qbAlias != null) {
    qualifier = qualifier + "for Query Block '" + qbAlias + "' ";
  }
  out.println(String.format("\n%s Rewritten SubQuery:\n%s", qualifier, diag.getRewrittenQuery()));
  out.println(String.format("\n%s SubQuery Joining Condition:\n%s", qualifier, diag.getJoiningCondition()));
}
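To make the effect of addRewrites concrete, here is a minimal, self-contained sketch of the textual transformation it performs. The query, the "1 = 1" replacement span, and the appended join text are illustrative stand-ins for what QBSubQueryRewrite actually produces, not Hive's real token-stream machinery.

// Illustrative sketch only: shows the shape of the rewrite (subquery predicate
// replaced by "1 = 1", join text accumulated separately). The SQL is hypothetical.
public class SubQueryRewriteSketch {
    public static void main(String[] args) {
        String original =
            "select * from src a where exists (select 1 from src b where b.key = a.key)";
        // the span covered by the subquery AST is replaced with a 'true' predicate
        String rewrittenOuter = "select * from src a where 1 = 1";
        // the hoisted subquery is appended as a join, as addedJoins accumulates it
        String addedJoin = " left semi join src b on b.key = a.key";
        System.out.println("before: " + original);
        System.out.println("after:  " + rewrittenOuter + addedJoin);
    }
}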
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project SQLWindowing by hbutani.
The class WindowingTypeCheckProcFactory, method processGByExpr.
/**
 * Function to do group-by subexpression elimination. This is called by all
 * the processors initially. As an example, consider the query
 * select a+b, count(1) from T group by a+b; the expression a+b is already
 * precomputed in the group-by operator's key, so we substitute a+b in the
 * select list with the internal column name of the a+b expression that
 * appears in the input row resolver.
 *
 * @param nd
 *            The node that is being inspected.
 * @param procCtx
 *            The processor context.
 *
 * @return exprNodeColumnDesc.
 */
public static ExprNodeDesc processGByExpr(Node nd, Object procCtx)
    throws SemanticException {
  // We recursively create the exprNodeDesc. Base cases: when we encounter a
  // column ref, we convert it into an exprNodeColumnDesc; when we encounter a
  // constant, we convert it into an exprNodeConstantDesc. For everything else
  // we build an exprNodeFuncDesc with recursively built children.
  ASTNode expr = (ASTNode) nd;
  TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
  RowResolver input = ctx.getInputRR();
  ExprNodeDesc desc = null;
  // If the current subexpression is pre-calculated, as in Group-By etc.,
  // reuse its internal column instead of rebuilding the expression.
  ColumnInfo colInfo = input.getExpression(expr);
  if (colInfo != null) {
    desc = new ExprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(),
        colInfo.getTabAlias(), colInfo.getIsVirtualCol());
    return desc;
  }
  // null signals the caller to build the expression tree recursively
  return desc;
}
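A minimal sketch of the short-circuit above, using a plain map as a stand-in for RowResolver (which in reality keys on the AST node, not on a string):

import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-ins for RowResolver/ColumnInfo: if the group-by phase
// already computed "a + b", the select list reuses its internal column
// instead of rebuilding the expression tree.
public class GByEliminationSketch {
    public static void main(String[] args) {
        Map<String, String> rowResolver = new HashMap<>();
        rowResolver.put("(a + b)", "_col0"); // registered by the group-by operator
        String internalName = rowResolver.get("(a + b)");
        if (internalName != null) {
            System.out.println("reuse precomputed column: " + internalName);
        } else {
            System.out.println("build expression tree recursively");
        }
    }
}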
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project SQLWindowing by hbutani.
The class InputTranslation, method translate.
/*
 * <ol>
 * <li> Get the <code>TableFunctionResolver</code> for this Function from the FunctionRegistry.
 * <li> Create the TableFuncDef object.
 * <li> Get the InputInfo for the input to this function.
 * <li> Translate the Arguments to this Function in the context of the InputInfo.
 * <li> Ask the TableFunctionResolver to create a TableFunctionEvaluator based on the Args passed in.
 * <li> Ask the TableFunctionEvaluator to set up the Map-side ObjectInspector. This gives functions that
 * reshape the Input before it is partitioned a chance to define the Shape after the raw data is transformed.
 * <li> Set up the Window Definition for this Function. The Window Definition is resolved w.r.t. the InputDef's
 * Shape, or the MapOI for Functions that reshape the raw input.
 * <li> Ask the TableFunctionEvaluator to set up the Output ObjectInspector for this Function.
 * <li> Set up a SerDe for the Output partition based on the OutputOI.
 * </ol>
 * A sketch of this call order follows the method below.
 */
private static TableFuncDef translate(QueryDef qDef, TableFuncSpec tSpec,
    QueryInputDef inputDef) throws WindowingException {
  QueryTranslationInfo tInfo = qDef.getTranslationInfo();
  TableFunctionResolver tFn = FunctionRegistry.getTableFunctionResolver(tSpec.getName());
  if (tFn == null) {
    throw new WindowingException(sprintf("Unknown Table Function %s", tSpec.getName()));
  }
  TableFuncDef tDef = new TableFuncDef();
  tDef.setSpec(tSpec);
  tDef.setInput(inputDef);
  InputInfo iInfo = tInfo.getInputInfo(inputDef);
  /*
   * translate args
   */
  ArrayList<ASTNode> args = tSpec.getArgs();
  if (args != null) {
    for (ASTNode expr : args) {
      ArgDef argDef = translateTableFunctionArg(qDef, tDef, iInfo, expr);
      tDef.addArg(argDef);
    }
  }
  tFn.initialize(qDef, tDef);
  TableFunctionEvaluator tEval = tFn.getEvaluator();
  tDef.setFunction(tEval);
  tFn.setupRawInputOI();
  tDef.setWindow(WindowSpecTranslation.translateWindow(qDef, tDef));
  tFn.setupOutputOI();
  TranslateUtils.setupSerdeAndOI(tDef, inputDef, tInfo, tEval);
  return tDef;
}
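The resolver/evaluator handshake that translate drives can be reduced to the call order below. The interface is a hypothetical simplification of TableFunctionResolver, not the SQLWindowing API; real signatures take qDef/tDef arguments.

// Hedged sketch: only the ordering matters here.
interface ResolverSketch {
    void initialize();       // bind the function definition and its translated args
    void setupRawInputOI();  // map-side shape, for functions that reshape raw input
    void setupOutputOI();    // output shape, resolved after the window definition
}

public class TranslateOrderSketch {
    static void translate(ResolverSketch resolver) {
        resolver.initialize();
        resolver.setupRawInputOI();
        // the window definition is resolved here, between the two OI setups
        resolver.setupOutputOI();
        // a SerDe for the output partition is then configured from the output OI
    }

    public static void main(String[] args) {
        translate(new ResolverSketch() {
            public void initialize()      { System.out.println("initialize"); }
            public void setupRawInputOI() { System.out.println("setupRawInputOI"); }
            public void setupOutputOI()   { System.out.println("setupOutputOI"); }
        });
    }
}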
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.
The class HiveAuthorizationTaskFactoryImpl, method createRevokeTask.
@Override
public Task<? extends Serializable> createRevokeTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
  List<PrincipalDesc> principalDesc =
      AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
  PrivilegeObjectDesc hiveObj = null;
  boolean grantOption = false;
  if (ast.getChildCount() > 2) {
    ASTNode astChild = (ASTNode) ast.getChild(2);
    hiveObj = analyzePrivilegeObject(astChild, outputs);
    if (null != ast.getFirstChildWithType(HiveParser.TOK_GRANT_OPTION_FOR)) {
      grantOption = true;
    }
  }
  RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, hiveObj, grantOption);
  return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc));
}
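The positional-child convention createRevokeTask relies on can be sketched with plain ANTLR 3 trees (Hive's ASTNode extends CommonTree); the token type numbers below are placeholders, not the real HiveParser constants.

import org.antlr.runtime.CommonToken;
import org.antlr.runtime.tree.CommonTree;

// Sketch of the REVOKE AST layout: child 0 = privileges, child 1 = principals,
// optional child 2 = the object, which may carry TOK_GRANT_OPTION_FOR.
public class RevokeAstSketch {
    public static void main(String[] args) {
        CommonTree revoke = new CommonTree(new CommonToken(1, "TOK_REVOKE"));
        revoke.addChild(new CommonTree(new CommonToken(2, "TOK_PRIV_ALL")));       // child 0
        revoke.addChild(new CommonTree(new CommonToken(3, "TOK_PRINCIPAL_NAME"))); // child 1
        // no child 2 here, so the grant-option branch is never reached
        System.out.println("children: " + revoke.getChildCount());
    }
}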
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.
The class HiveAuthorizationTaskFactoryImpl, method createShowGrantTask.
@Override
public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile,
    HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  PrincipalDesc principalDesc = null;
  PrivilegeObjectDesc privHiveObj = null;
  ASTNode param = null;
  if (ast.getChildCount() > 0) {
    param = (ASTNode) ast.getChild(0);
    principalDesc = AuthorizationParseUtils.getPrincipalDesc(param);
    if (principalDesc != null) {
      // shift one: child 0 was the principal, so the object (if any) is child 1
      param = (ASTNode) ast.getChild(1);
    }
  }
  if (param != null) {
    if (param.getType() == HiveParser.TOK_RESOURCE_ALL) {
      privHiveObj = new PrivilegeObjectDesc();
    } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) {
      privHiveObj = parsePrivObject(param);
    }
  }
  ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc, privHiveObj);
  return TaskFactory.get(new DDLWork(inputs, outputs, showGrant));
}
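The "shift one" step above is the subtle part: SHOW GRANT may name a principal, a privilege object, or both, so the object's child index depends on whether child 0 parsed as a principal. A minimal sketch of that index shifting, with hypothetical string children in place of ASTNode and HiveParser token types:

// Hypothetical sketch of createShowGrantTask's index shifting.
public class ShowGrantShiftSketch {
    static String parse(String[] children, boolean firstIsPrincipal) {
        String principal = null;
        String object = null;
        int i = 0;
        if (children.length > 0 && firstIsPrincipal) {
            principal = children[i++]; // consume child 0, shift to child 1
        }
        if (i < children.length) {
            object = children[i];
        }
        return "principal=" + principal + ", object=" + object;
    }

    public static void main(String[] args) {
        System.out.println(parse(new String[] { "user_x", "table_t" }, true));
        System.out.println(parse(new String[] { "table_t" }, false));
    }
}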