Use of com.sap.hadoop.windowing.functions2.TableFunctionEvaluator in the project SQLWindowing by hbutani:
the class WindowSpecTranslation, method translateWindow.
/*
 * Build the WindowDef for a table-function invocation.
 *
 * The language currently only allows the Partition and Order clauses to be
 * written explicitly on the function; allowing references to a global window
 * spec would be an easy extension. The window spec is resolved against the
 * map-side InputInfo when the function reshapes its raw input, otherwise
 * against the InputInfo of the function's input definition.
 *
 * Returns null when no partition clause is present.
 */
static WindowDef translateWindow(QueryDef qDef, TableFuncDef tFnDef) throws WindowingException {
    QueryTranslationInfo transInfo = qDef.getTranslationInfo();
    TableFuncSpec fnSpec = tFnDef.getTableFuncSpec();

    WindowSpec windowSpec = new WindowSpec();
    windowSpec.setPartition(fnSpec.getPartition());
    windowSpec.setOrder(fnSpec.getOrder());

    QueryInputDef inputDef = tFnDef.getInput();

    // No partition clause means there is no window to translate.
    if (windowSpec.getPartition() == null) {
        return null;
    }

    String inputDesc = getInputDescription(qDef, tFnDef);
    TableFunctionEvaluator evaluator = tFnDef.getFunction();

    // Functions with a map phase resolve the window against the map-side shape.
    InputInfo inputInfo = evaluator.isTransformsRawInput()
            ? transInfo.getMapInputInfo(tFnDef)
            : transInfo.getInputInfo(inputDef);

    return translateWindowSpecOnInput(qDef, windowSpec, inputInfo, inputDesc);
}
Use of com.sap.hadoop.windowing.functions2.TableFunctionEvaluator in the project SQLWindowing by hbutani:
the class QueryDefDeserializer, method preVisit.
/*
 * Resolve the InputInfo for this table-function definition before its
 * children are visited. When the function transforms its raw input (i.e.
 * the query has a map phase), the resolver first re-creates the raw-input
 * ObjectInspector and the map-side InputInfo of the function definition is
 * used. Otherwise the InputInfo comes from the query's QueryInputDef
 * (either a HiveTableDef or a HiveQueryDef).
 */
@Override
public void preVisit(TableFuncDef tblFuncDef) throws WindowingException {
    TableFunctionEvaluator evaluator = tblFuncDef.getFunction();
    currentTFnResolver = FunctionRegistry.getTableFunctionResolver(evaluator.getTableDef().getName());
    currentTFnResolver.initialize(qDef, tblFuncDef, evaluator);

    QueryTranslationInfo transInfo = qDef.getTranslationInfo();
    if (evaluator.isTransformsRawInput()) {
        currentTFnResolver.setupRawInputOI();
        inputInfo = transInfo.getMapInputInfo(tblFuncDef);
    } else {
        inputInfo = transInfo.getInputInfo(qInDef);
    }
}
Use of com.sap.hadoop.windowing.functions2.TableFunctionEvaluator in the project SQLWindowing by hbutani:
the class QueryTranslationInfo, method getMapInputInfo.
/*
 * Return the map-side InputInfo for a table-function definition, creating
 * and caching it (keyed by the function's alias) on first request. Returns
 * null for functions that do not reshape their raw input, since those have
 * no map-side shape.
 */
public InputInfo getMapInputInfo(TableFuncDef tDef) throws WindowingException {
    TableFunctionEvaluator evaluator = tDef.getFunction();
    if (!evaluator.isTransformsRawInput()) {
        return null;
    }

    // Lazily create the per-alias cache of map-side InputInfos.
    if (mapReshapeInfoMap == null) {
        mapReshapeInfoMap = new HashMap<String, InputInfo>();
    }

    String alias = tDef.getAlias();
    InputInfo info = mapReshapeInfoMap.get(alias);
    if (info == null) {
        info = new InputInfo(this, tDef, evaluator.getRawInputOI());
        mapReshapeInfoMap.put(alias, info);
    }
    return info;
}
Use of com.sap.hadoop.windowing.functions2.TableFunctionEvaluator in the project SQLWindowing by hbutani:
the class InputTranslation, method translate.
/*
 * Translate a table-function invocation into a TableFuncDef:
 * <ol>
 * <li> Look up the <code>TableFunctionResolver</code> for this function in
 *      the FunctionRegistry; fail if it is unknown.
 * <li> Create the TableFuncDef and attach its spec and input.
 * <li> Obtain the InputInfo of the function's input.
 * <li> Translate each argument expression in the context of that InputInfo.
 * <li> Have the resolver create a TableFunctionEvaluator from the args.
 * <li> Have the resolver set up the map-side ObjectInspector, giving
 *      functions that reshape their raw input a chance to define the shape
 *      after transformation.
 * <li> Set up the Window Definition, resolved against the input shape (or
 *      the map-side OI for reshaping functions).
 * <li> Have the resolver set up the output ObjectInspector.
 * <li> Set up a SerDe for the output partition based on the output OI.
 * </ol>
 */
private static TableFuncDef translate(QueryDef qDef, TableFuncSpec tSpec, QueryInputDef inputDef) throws WindowingException {
    QueryTranslationInfo transInfo = qDef.getTranslationInfo();

    TableFunctionResolver resolver = FunctionRegistry.getTableFunctionResolver(tSpec.getName());
    if (resolver == null) {
        throw new WindowingException(sprintf("Unknown Table Function %s", tSpec.getName()));
    }

    TableFuncDef funcDef = new TableFuncDef();
    funcDef.setSpec(tSpec);
    funcDef.setInput(inputDef);

    InputInfo inputInfo = transInfo.getInputInfo(inputDef);

    // Translate the argument expressions against the input's shape.
    ArrayList<ASTNode> argExprs = tSpec.getArgs();
    if (argExprs != null) {
        for (ASTNode argExpr : argExprs) {
            funcDef.addArg(translateTableFunctionArg(qDef, funcDef, inputInfo, argExpr));
        }
    }

    resolver.initialize(qDef, funcDef);
    TableFunctionEvaluator evaluator = resolver.getEvaluator();
    funcDef.setFunction(evaluator);

    // Map-side shape first, then the window (which may resolve against it),
    // then the output shape and its SerDe.
    resolver.setupRawInputOI();
    funcDef.setWindow(WindowSpecTranslation.translateWindow(qDef, funcDef));
    resolver.setupOutputOI();
    TranslateUtils.setupSerdeAndOI(funcDef, inputDef, transInfo, evaluator);

    return funcDef;
}
Use of com.sap.hadoop.windowing.functions2.TableFunctionEvaluator in the project SQLWindowing by hbutani:
the class MRUtils, method initialize.
/**
 * Populate the ExprNodeDesc lists for the partition and order columns, the
 * order-direction string, and the value (output) columns feeding the
 * ReduceSinkOperator, all derived from the query's first table function.
 *
 * @throws WindowingException
 */
public void initialize() throws WindowingException {
    TableFuncDef tabDef = RuntimeUtils.getFirstTableFunction(qdef);
    hiveTableDef = tabDef.getHiveTableDef();
    TableFunctionEvaluator evaluator = tabDef.getFunction();

    /*
     * If the query has a map phase, the inputInfo is retrieved from the map
     * output info of the table function definition (constructed from the
     * function's map output OI). Otherwise it is retrieved from the query's
     * QueryInputDef (either HiveTableDef or HiveQueryDef).
     */
    InputInfo inputInfo = evaluator.isTransformsRawInput()
            ? qdef.getTranslationInfo().getMapInputInfo(tabDef)
            : qdef.getTranslationInfo().getInputInfo(hiveTableDef);

    // Partition columns.
    for (ColumnDef partCol : tabDef.getWindow().getPartDef().getColumns()) {
        partCols.add(partCol.getExprNode());
    }

    // Order columns: '+' marks ascending, '-' descending.
    for (OrderColumnDef orderCol : tabDef.getWindow().getOrderDef().getColumns()) {
        orderString.append(orderCol.getOrder().name().equals("ASC") ? '+' : '-');
        orderCols.add(orderCol.getExprNode());
        outputColumnNames.add(orderCol.getAlias());
    }

    // Value columns: every column of the input's row, after the order columns.
    for (ColumnInfo colInfo : inputInfo.getRowResolver().getColumnInfos()) {
        valueCols.add(TranslateUtils.getExprDesc(colInfo.getInternalName(), colInfo.getType()));
        outputColumnNames.add(colInfo.getInternalName());
    }
}
Aggregations