Use of com.sap.hadoop.windowing.query2.definition.OrderColumnDef in project SQLWindowing by hbutani.
Class WindowSpecTranslation, method translateOrderColumn:
static OrderColumnDef translateOrderColumn(QueryDef qDef, InputInfo iInfo, OrderColumnSpec oSpec) throws WindowingException {
    OrderColumnDef ocDef = new OrderColumnDef(oSpec);
    translateColumn(qDef, ocDef, iInfo, oSpec);
    TranslateUtils.validateComparable(ocDef.getOI(), sprintf("Order Column %s is not comparable", oSpec));
    return ocDef;
}
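For context, here is a minimal sketch of the contract the validateComparable call above relies on: the translated column's ObjectInspector must describe a comparable (primitive) type, otherwise translation fails with the supplied message. This is an illustrative sketch built only on Hive's ObjectInspector API and the WindowingException(String) constructor visible in these snippets, not SQLWindowing's actual TranslateUtils implementation.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

// Hypothetical sketch: an ORDER BY column must expose a primitive (comparable) ObjectInspector.
static void validateComparable(ObjectInspector oi, String errMsg) throws WindowingException {
    if (oi.getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new WindowingException(errMsg);
    }
}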
Use of com.sap.hadoop.windowing.query2.definition.OrderColumnDef in project SQLWindowing by hbutani.
Class WindowFunctionTranslation, method setupRankingArgs:
static void setupRankingArgs(QueryDef qDef, TableFuncDef windowTableFnDef, WindowFunctionDef wFnDef, WindowFunctionSpec wSpec) throws WindowingException {
    if (wSpec.getArgs().size() > 0) {
        throw new WindowingException("Ranking Functions can take no arguments");
    }
    QueryInputDef inpDef = windowTableFnDef.getInput();
    InputInfo inpInfo = qDef.getTranslationInfo().getInputInfo(inpDef);
    OrderDef oDef = getTableFuncOrderDef(windowTableFnDef);
    ArrayList<OrderColumnDef> oCols = oDef.getColumns();
    for (OrderColumnDef oCol : oCols) {
        wFnDef.addArg(TranslateUtils.buildArgDef(qDef, inpInfo, oCol.getExpression()));
    }
}
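A hedged sketch of where setupRankingArgs would typically be called: during window-function translation, a function recognized as a ranking function must carry no explicit arguments, and the ORDER BY columns of the enclosing window table function become its implicit arguments. The RANKING_FUNCTIONS set and the wSpec.getName() accessor below are assumptions introduced for illustration, not SQLWindowing's actual API.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Hypothetical dispatch sketch; the function-name spellings and getName() are assumptions.
static final Set<String> RANKING_FUNCTIONS =
        new HashSet<String>(Arrays.asList("rank", "denserank", "cumedist", "percentrank"));

static void translateFunctionArgs(QueryDef qDef, TableFuncDef windowTableFnDef,
        WindowFunctionDef wFnDef, WindowFunctionSpec wSpec) throws WindowingException {
    if (RANKING_FUNCTIONS.contains(wSpec.getName().toLowerCase())) {
        // Ranking functions: no explicit args; the table function's order columns are added as args.
        setupRankingArgs(qDef, windowTableFnDef, wFnDef, wSpec);
    } else {
        // Non-ranking functions would translate their explicit argument expressions here (omitted).
    }
}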
Use of com.sap.hadoop.windowing.query2.definition.OrderColumnDef in project SQLWindowing by hbutani.
Class WindowSpecTranslation, method translateOrder:
static OrderDef translateOrder(QueryDef qDef, String inputDesc, InputInfo iInfo, OrderSpec spec, PartitionDef pDef) throws WindowingException {
    if (spec == null || spec.getColumns() == null || spec.getColumns().size() == 0) {
        if (pDef == null)
            return null;
        return new OrderDef(pDef);
    }
    if (pDef == null) {
        throw new WindowingException(sprintf("Input %s cannot have an Order spec w/o a Partition spec", inputDesc));
    }
    OrderDef oDef = new OrderDef(spec);
    for (OrderColumnSpec colSpec : spec.getColumns()) {
        OrderColumnDef cDef = translateOrderColumn(qDef, iInfo, colSpec);
        oDef.addColumn(cDef);
    }
    /*
     * Either all partition columns must be in the Order list or none must be specified.
     * If none are specified, then add them all.
     */
    int numOfPartColumns = 0;
    List<OrderColumnDef> orderCols = oDef.getColumns();
    List<ColumnDef> partCols = pDef.getColumns();
    int chkSize = Math.min(partCols.size(), orderCols.size());
    for (int i = 0; i < chkSize; i++) {
        if (orderCols.get(i).getSpec().getColumnName().equals(partCols.get(i).getSpec().getColumnName())) {
            numOfPartColumns++;
        } else {
            break;
        }
    }
    if (numOfPartColumns != 0 && numOfPartColumns != partCols.size()) {
        throw new WindowingException(sprintf("For Input %s:\n all partition columns must be in order clause or none should be specified", inputDesc));
    }
    ArrayList<OrderColumnDef> combinedOrderCols = new ArrayList<OrderColumnDef>();
    if (numOfPartColumns == 0) {
        for (ColumnDef cDef : partCols) {
            OrderColumnDef ocDef = new OrderColumnDef(cDef);
            combinedOrderCols.add(ocDef);
        }
        combinedOrderCols.addAll(orderCols);
        oDef.setColumns(combinedOrderCols);
    }
    return oDef;
}
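The partition/order prefix rule enforced above can be stated in isolation: either the ORDER BY list starts with all of the partition columns, in partition order, or it names none of them (in which case translateOrder prepends them, so the physical sort always begins with the partition key). The sketch below restates the check over plain column-name lists; it is an illustration of the rule, not project code.

import java.util.List;

// Returns true when the order list names some, but not all, of the partition columns as its prefix.
static boolean violatesPartitionPrefixRule(List<String> partCols, List<String> orderCols) {
    int matched = 0;
    int limit = Math.min(partCols.size(), orderCols.size());
    for (int i = 0; i < limit; i++) {
        if (orderCols.get(i).equals(partCols.get(i))) {
            matched++;
        } else {
            break;
        }
    }
    return matched != 0 && matched != partCols.size();
}

For example, with partition columns [deptno, job], the order lists [deptno, job, sal] and [sal] pass, while [deptno, sal] is rejected.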
Use of com.sap.hadoop.windowing.query2.definition.OrderColumnDef in project SQLWindowing by hbutani.
Class QueryDefWalker, method walk:
/**
 * Visit the partition columns and order columns.
 * Visit the window frame definitions.
 * @param window
 * @throws WindowingException
 */
protected void walk(WindowDef window) throws WindowingException {
    if (window == null)
        return;
    PartitionDef pDef = window.getPartDef();
    if (pDef != null) {
        ArrayList<ColumnDef> cols = pDef.getColumns();
        for (ColumnDef col : cols) {
            visitor.visit(col);
        }
        visitor.visit(pDef);
    }
    OrderDef oDef = window.getOrderDef();
    if (oDef != null) {
        ArrayList<OrderColumnDef> ocols = oDef.getColumns();
        for (OrderColumnDef ocol : ocols) {
            visitor.visit(ocol);
        }
        visitor.visit(oDef);
    }
    WindowFrameDef wFrmDef = window.getWindow();
    if (wFrmDef != null) {
        walk(wFrmDef.getStart());
        walk(wFrmDef.getEnd());
        visitor.visit(wFrmDef);
    }
    visitor.visit(window);
}
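A hedged sketch of a visitor that the walk above could drive; it simply records the alias of every OrderColumnDef handed to it. The visitor base type and the exact set of visit overloads are assumptions inferred from the calls in walk; only OrderColumnDef.getAlias() is confirmed by these snippets (it is used in MRUtils.initialize below).

import java.util.ArrayList;
import java.util.List;

// Hypothetical visitor sketch; the interface it would implement is an assumption.
class OrderColumnAliasCollector /* implements the project's QueryDefVisitor */ {
    private final List<String> aliases = new ArrayList<String>();

    public void visit(OrderColumnDef ocol) throws WindowingException {
        aliases.add(ocol.getAlias());
    }

    // visit overloads for ColumnDef, PartitionDef, OrderDef, WindowFrameDef and
    // WindowDef would be no-ops in this sketch.

    public List<String> getAliases() {
        return aliases;
    }
}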
Use of com.sap.hadoop.windowing.query2.definition.OrderColumnDef in project SQLWindowing by hbutani.
Class MRUtils, method initialize:
/**
 * Construct the data structures containing ExprNodeDesc for partition
 * columns and order columns. Use the input definition to construct the list
 * of output columns for the ReduceSinkOperator.
 *
 * @throws WindowingException
 */
public void initialize() throws WindowingException {
    TableFuncDef tabDef = RuntimeUtils.getFirstTableFunction(qdef);
    hiveTableDef = tabDef.getHiveTableDef();
    InputInfo inputInfo;
    ArrayList<ColumnDef> partColList = tabDef.getWindow().getPartDef().getColumns();
    TableFunctionEvaluator tEval = tabDef.getFunction();
    /*
     * If the query has a map phase, the inputInfo is retrieved from the map
     * output info of the table function definition. This is constructed
     * using the map output OI of the table function definition. If the
     * query does not have a map phase, the inputInfo is retrieved from the
     * QueryInputDef (either HiveTableDef or HiveQueryDef) of the query.
     */
    if (tEval.isTransformsRawInput()) {
        inputInfo = qdef.getTranslationInfo().getMapInputInfo(tabDef);
    } else {
        inputInfo = qdef.getTranslationInfo().getInputInfo(hiveTableDef);
    }
    for (ColumnDef colDef : partColList) {
        partCols.add(colDef.getExprNode());
    }
    ArrayList<OrderColumnDef> orderColList = tabDef.getWindow().getOrderDef().getColumns();
    for (OrderColumnDef colDef : orderColList) {
        Order order = colDef.getOrder();
        if (order.name().equals("ASC")) {
            orderString.append('+');
        } else {
            orderString.append('-');
        }
        orderCols.add(colDef.getExprNode());
        outputColumnNames.add(colDef.getAlias());
    }
    RowResolver rr = inputInfo.getRowResolver();
    ArrayList<ColumnInfo> colInfoList = rr.getColumnInfos();
    for (ColumnInfo colInfo : colInfoList) {
        String internalName = colInfo.getInternalName();
        TypeInfo type = colInfo.getType();
        valueCols.add(TranslateUtils.getExprDesc(internalName, type));
        outputColumnNames.add(internalName);
    }
}
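The '+'/'-' characters appended to orderString above follow Hive's ReduceSink sort-order convention: one character per sort column, '+' for ascending and '-' for descending. Below is a minimal, self-contained sketch of that encoding, using a stand-in Order enum (an assumption; the project's Order type only needs name() to return "ASC" or "DESC").

import java.util.List;

// Stand-in for the project's Order type (assumption).
enum Order { ASC, DESC }

// Builds the sort-order string consumed downstream: '+' = ascending, '-' = descending.
static String buildOrderString(List<Order> orders) {
    StringBuilder sb = new StringBuilder();
    for (Order o : orders) {
        sb.append(o == Order.ASC ? '+' : '-');
    }
    return sb.toString();
}

For instance, an ORDER BY with one ascending and one descending column yields the string "+-".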