Use of com.sap.hadoop.windowing.query2.definition.QueryInputDef in project SQLWindowing by hbutani — class MRUtils, method initialize().
/**
* Construct the data structures containing ExprNodeDesc for partition
* columns and order columns. Use the input definition to construct the list
* of output columns for the ReduceSinkOperator
*
* @throws WindowingException
*/
/**
 * Builds the ExprNodeDesc structures needed by the ReduceSinkOperator:
 * the partition-column expressions, the order-column expressions together
 * with a '+'/'-' direction string, and the value columns / output column
 * names derived from the input's RowResolver.
 *
 * @throws WindowingException on translation failure
 */
public void initialize() throws WindowingException {
    TableFuncDef tabDef = RuntimeUtils.getFirstTableFunction(qdef);
    hiveTableDef = tabDef.getHiveTableDef();
    TableFunctionEvaluator tEval = tabDef.getFunction();
    /*
     * When the query has a map phase, the InputInfo comes from the table
     * function's map-output info (built from its map output OI). Otherwise
     * it comes from the query's QueryInputDef (HiveTableDef or HiveQueryDef).
     */
    InputInfo inputInfo = tEval.isTransformsRawInput()
            ? qdef.getTranslationInfo().getMapInputInfo(tabDef)
            : qdef.getTranslationInfo().getInputInfo(hiveTableDef);
    for (ColumnDef pCol : tabDef.getWindow().getPartDef().getColumns()) {
        partCols.add(pCol.getExprNode());
    }
    for (OrderColumnDef oCol : tabDef.getWindow().getOrderDef().getColumns()) {
        // Encode the sort direction as '+' for ASC, '-' for anything else (DESC).
        orderString.append(oCol.getOrder().name().equals("ASC") ? '+' : '-');
        orderCols.add(oCol.getExprNode());
        outputColumnNames.add(oCol.getAlias());
    }
    // Every column visible in the input row becomes a value column of the sink.
    for (ColumnInfo cInfo : inputInfo.getRowResolver().getColumnInfos()) {
        valueCols.add(TranslateUtils.getExprDesc(cInfo.getInternalName(), cInfo.getType()));
        outputColumnNames.add(cInfo.getInternalName());
    }
}
Use of com.sap.hadoop.windowing.query2.definition.QueryInputDef in project SQLWindowing by hbutani — class InputTranslation, method translate().
/*
* <ol>
* <li> Get the <code>TableFunctionResolver</code> for this Function from the FunctionRegistry.
* <li> Create the TableFuncDef object.
* <li> Get the InputInfo for the input to this function.
* <li> Translate the Arguments to this Function in the Context of the InputInfo.
* <li> ask the TableFunctionResolver to create a TableFunctionEvaluator based on the Args passed in.
* <li> ask the TableFunctionEvaluator to setup the Map-side ObjectInspector. Gives a chance to functions that
* reshape the Input before it is partitioned to define the Shape after raw data is transformed.
* <li> Setup the Window Definition for this Function. The Window Definition is resolved wrt to the InputDef's
* Shape or the MapOI, for Functions that reshape the raw input.
* <li> ask the TableFunctionEvaluator to setup the Output ObjectInspector for this Function.
* <li> setup a Serde for the Output partition based on the OutputOI.
* </ol>
*/
/*
 * Translates one table-function invocation into a TableFuncDef:
 * <ol>
 * <li> look up the TableFunctionResolver in the FunctionRegistry (fail on unknown name).
 * <li> create the TableFuncDef and attach its spec and input.
 * <li> translate each argument AST in the context of the input's InputInfo.
 * <li> let the resolver build its TableFunctionEvaluator from those args.
 * <li> set up the map-side ObjectInspector, so functions that reshape raw
 *      input can define the post-transform shape before partitioning.
 * <li> resolve the Window Definition against the input's shape (or the map OI).
 * <li> set up the output ObjectInspector, then a Serde for the output partition.
 * </ol>
 */
private static TableFuncDef translate(QueryDef qDef, TableFuncSpec tSpec, QueryInputDef inputDef) throws WindowingException {
    QueryTranslationInfo transInfo = qDef.getTranslationInfo();
    TableFunctionResolver resolver = FunctionRegistry.getTableFunctionResolver(tSpec.getName());
    if (resolver == null) {
        throw new WindowingException(sprintf("Unknown Table Function %s", tSpec.getName()));
    }
    TableFuncDef fnDef = new TableFuncDef();
    fnDef.setSpec(tSpec);
    fnDef.setInput(inputDef);
    InputInfo inInfo = transInfo.getInputInfo(inputDef);
    // Translate the function's argument ASTs, if any, against the input shape.
    ArrayList<ASTNode> argASTs = tSpec.getArgs();
    if (argASTs != null) {
        for (ASTNode argAST : argASTs) {
            fnDef.addArg(translateTableFunctionArg(qDef, fnDef, inInfo, argAST));
        }
    }
    resolver.initialize(qDef, fnDef);
    TableFunctionEvaluator evaluator = resolver.getEvaluator();
    fnDef.setFunction(evaluator);
    resolver.setupRawInputOI();
    fnDef.setWindow(WindowSpecTranslation.translateWindow(qDef, fnDef));
    resolver.setupOutputOI();
    TranslateUtils.setupSerdeAndOI(fnDef, inputDef, transInfo, evaluator);
    return fnDef;
}
Use of com.sap.hadoop.windowing.query2.definition.QueryInputDef in project SQLWindowing by hbutani — class OutputTranslation, method translateSelectExprs().
/**
 * Translates every entry of the query's select list into a ColumnDef on the
 * SelectDef. Each iterator entry is an Object[] of the shape
 * {isWindowFunction (Boolean), alias (String), expr AST (ASTNode, non-fn only)};
 * window-function entries are resolved by alias, plain expressions by AST.
 * Finally the select ObjectInspector is set up from the assembled columns.
 *
 * @throws WindowingException on translation failure
 */
public static void translateSelectExprs(QueryDef qDef) throws WindowingException {
    QueryTranslationInfo transInfo = qDef.getTranslationInfo();
    InputInfo inputInfo = transInfo.getInputInfo(qDef.getInput());
    SelectDef selDef = qDef.getSelectList();
    Iterator<Object> exprsAndAliases = qDef.getSpec().getSelectList().getColumnListAndAlias();
    int idx = 0;
    while (exprsAndAliases.hasNext()) {
        Object[] entry = (Object[]) exprsAndAliases.next();
        boolean isWindowFn = (Boolean) entry[0];
        String alias = (String) entry[1];
        ColumnDef colDef = isWindowFn
                ? translateWindowFnAlias(qDef, inputInfo, idx++, alias)
                : translateSelectExpr(qDef, inputInfo, idx++, alias, (ASTNode) entry[2]);
        selDef.addColumn(colDef);
    }
    TranslateUtils.setupSelectOI(selDef);
}
Use of com.sap.hadoop.windowing.query2.definition.QueryInputDef in project SQLWindowing by hbutani — class WhereTranslation, method translate().
/**
 * Translates the query's WHERE clause, if present, into a WhereDef: builds
 * the ExprNodeDesc from the AST against the input's type-check context,
 * wires up an evaluator, and verifies up front that the expression's
 * ObjectInspector can be converted to boolean.
 *
 * @throws WindowingException if the WHERE expression cannot be converted
 *         to a boolean value
 */
public static void translate(QueryDef qDef) throws WindowingException {
    QueryTranslationInfo tInfo = qDef.getTranslationInfo();
    QuerySpec spec = qDef.getSpec();
    ASTNode wExpr = spec.getWhereExpr();
    if (wExpr == null) {
        return; // no WHERE clause to translate
    }
    WhereDef whDef = new WhereDef();
    whDef.setExpression(wExpr);
    QueryInputDef iDef = qDef.getInput();
    InputInfo iInfo = tInfo.getInputInfo(iDef);
    ExprNodeDesc exprNode = TranslateUtils.buildExprNode(wExpr, iInfo.getTypeCheckCtx());
    ExprNodeEvaluator exprEval = WindowingExprNodeEvaluatorFactory.get(tInfo, exprNode);
    ObjectInspector oi = TranslateUtils.initExprNodeEvaluator(qDef, exprNode, exprEval, iInfo);
    // Probe boolean convertibility now so the failure surfaces at translation
    // time rather than mid-query. Catch Exception, not Throwable: JVM Errors
    // (OutOfMemoryError etc.) must propagate instead of being wrapped as a
    // query-translation failure.
    try {
        ObjectInspectorConverters.getConverter(oi, PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
    } catch (Exception e) {
        throw new WindowingException("Where Expr must be convertible to a boolean value", e);
    }
    whDef.setExprNode(exprNode);
    whDef.setExprEvaluator(exprEval);
    whDef.setOI(oi);
    qDef.setWhere(whDef);
}
Aggregations