Use of com.sap.hadoop.windowing.query2.definition.ColumnDef in project SQLWindowing by hbutani.
In the class WindowSpecTranslation, the method translatePartitionColumn:
static ColumnDef translatePartitionColumn(QueryDef qDef, InputInfo iInfo, ColumnSpec cSpec) throws WindowingException {
    ColumnDef cDef = new ColumnDef(cSpec);
    translateColumn(qDef, cDef, iInfo, cSpec);
    TranslateUtils.validateComparable(cDef.getOI(), sprintf("Partition Column %s is not comparable", cSpec));
    return cDef;
}
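The only validation here is that the partition column's ObjectInspector supports comparison. The project's TranslateUtils.validateComparable is not shown in this snippet; a minimal sketch of such a check, assuming it wraps Hive's ObjectInspectorUtils.compareSupported, could look like this:

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;

    // Hypothetical sketch, not the project's actual implementation: reject any
    // ObjectInspector whose underlying type cannot be compared, since partition
    // columns are used to group rows.
    static void validateComparable(ObjectInspector oi, String errMsg) throws WindowingException {
        if (!ObjectInspectorUtils.compareSupported(oi)) {
            throw new WindowingException(errMsg);
        }
    }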
Use of com.sap.hadoop.windowing.query2.definition.ColumnDef in project SQLWindowing by hbutani.
In the class WindowSpecTranslation, the method translatePartition:
static PartitionDef translatePartition(QueryDef qDef, InputInfo iInfo, PartitionSpec spec) throws WindowingException {
    if (spec == null || spec.getColumns() == null || spec.getColumns().size() == 0)
        return null;
    PartitionDef pDef = new PartitionDef(spec);
    for (ColumnSpec colSpec : spec.getColumns()) {
        ColumnDef cDef = translatePartitionColumn(qDef, iInfo, colSpec);
        pDef.addColumn(cDef);
    }
    return pDef;
}
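Note that a missing or empty PartitionSpec yields null rather than an empty PartitionDef, so callers must treat null as "no PARTITION BY clause". A hypothetical call site (windowSpec and its accessor are assumed names, not taken from the snippet) might look like:

    // Hypothetical call site for illustration only.
    PartitionDef pDef = WindowSpecTranslation.translatePartition(qDef, iInfo, windowSpec.getPartition());
    if (pDef == null) {
        // No partitioning requested: downstream code treats the
        // entire input as a single partition.
    }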
Use of com.sap.hadoop.windowing.query2.definition.ColumnDef in project SQLWindowing by hbutani.
In the class OutputTranslation, the method translateSelectExpr:
public static ColumnDef translateSelectExpr(QueryDef qDef, InputInfo iInfo, int colIdx, String alias, ASTNode expr) throws WindowingException {
    ColumnDef cDef = new ColumnDef((ColumnSpec) null);
    ExprNodeDesc exprNode = TranslateUtils.buildExprNode(expr, iInfo.getTypeCheckCtx());
    ExprNodeEvaluator exprEval = WindowingExprNodeEvaluatorFactory.get(qDef.getTranslationInfo(), exprNode);
    ObjectInspector oi = TranslateUtils.initExprNodeEvaluator(qDef, exprNode, exprEval, iInfo);
    cDef.setExpression(expr);
    cDef.setExprNode(exprNode);
    cDef.setExprEvaluator(exprEval);
    cDef.setOI(oi);
    cDef.setAlias(getAlias(alias, expr, colIdx));
    return cDef;
}
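The alias resolution in getAlias is not part of this snippet. A plausible fallback, assuming the conventional "use the explicit alias if present, otherwise synthesize one from the column index" behavior, is sketched below; the synthesized name format is an assumption:

    // Hypothetical sketch of the alias fallback; the real getAlias may also
    // derive a name from the expression AST. The "_col" prefix is assumed.
    static String getAlias(String alias, ASTNode expr, int colIdx) {
        return alias != null ? alias : "_col" + colIdx;
    }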
Use of com.sap.hadoop.windowing.query2.definition.ColumnDef in project SQLWindowing by hbutani.
In the class WindowSpecTranslation, the method translateOrder:
static OrderDef translateOrder(QueryDef qDef, String inputDesc, InputInfo iInfo, OrderSpec spec, PartitionDef pDef) throws WindowingException {
    if (spec == null || spec.getColumns() == null || spec.getColumns().size() == 0) {
        if (pDef == null)
            return null;
        return new OrderDef(pDef);
    }
    if (pDef == null) {
        throw new WindowingException(sprintf("Input %s cannot have an Order spec w/o a Partition spec", inputDesc));
    }
    OrderDef oDef = new OrderDef(spec);
    for (OrderColumnSpec colSpec : spec.getColumns()) {
        OrderColumnDef cDef = translateOrderColumn(qDef, iInfo, colSpec);
        oDef.addColumn(cDef);
    }
    /*
     * Either all partition columns must be in the Order list or none must be
     * specified. If none are specified then add them all.
     */
    int numOfPartColumns = 0;
    List<OrderColumnDef> orderCols = oDef.getColumns();
    List<ColumnDef> partCols = pDef.getColumns();
    int chkSize = partCols.size();
    chkSize = chkSize > orderCols.size() ? orderCols.size() : chkSize;
    for (int i = 0; i < chkSize; i++) {
        if (orderCols.get(i).getSpec().getColumnName().equals(partCols.get(i).getSpec().getColumnName())) {
            numOfPartColumns++;
        } else
            break;
    }
    if (numOfPartColumns != 0 && numOfPartColumns != partCols.size()) {
        throw new WindowingException(sprintf("For Input %s:\n all partition columns must be in order clause or none should be specified", inputDesc));
    }
    ArrayList<OrderColumnDef> combinedOrderCols = new ArrayList<OrderColumnDef>();
    if (numOfPartColumns == 0) {
        for (ColumnDef cDef : partCols) {
            OrderColumnDef ocDef = new OrderColumnDef(cDef);
            combinedOrderCols.add(ocDef);
        }
        combinedOrderCols.addAll(orderCols);
        oDef.setColumns(combinedOrderCols);
    }
    return oDef;
}
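The prefix check above enforces that either every partition column appears, in order, at the front of the ORDER BY list, or none do (in which case they are all prepended). The same rule, isolated from the surrounding definitions and expressed over plain column-name strings (a hypothetical helper for illustration):

    import java.util.List;

    // Standalone restatement of the prefix check above. Returns how many
    // partition columns appear as a leading prefix of the order columns;
    // translateOrder rejects any count strictly between 0 and partCols.size().
    static int matchingPrefixLength(List<String> orderCols, List<String> partCols) {
        int chk = Math.min(orderCols.size(), partCols.size());
        int n = 0;
        while (n < chk && orderCols.get(n).equals(partCols.get(n))) {
            n++;
        }
        return n;
    }

So PARTITION BY a, b with ORDER BY a, b, c is accepted as-is; ORDER BY c is accepted after a and b are prepended; and ORDER BY a, c is rejected, since only part of the partition list matches.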
Use of com.sap.hadoop.windowing.query2.definition.ColumnDef in project SQLWindowing by hbutani.
In the class MRUtils, the method initialize:
/**
 * Construct the data structures containing ExprNodeDesc for partition
 * columns and order columns. Use the input definition to construct the list
 * of output columns for the ReduceSinkOperator.
 *
 * @throws WindowingException
 */
public void initialize() throws WindowingException {
    TableFuncDef tabDef = RuntimeUtils.getFirstTableFunction(qdef);
    hiveTableDef = tabDef.getHiveTableDef();
    InputInfo inputInfo;
    ArrayList<ColumnDef> partColList = tabDef.getWindow().getPartDef().getColumns();
    TableFunctionEvaluator tEval = tabDef.getFunction();
    /*
     * If the query has a map phase, the inputInfo is retrieved from the map
     * output info of the table function definition. This is constructed
     * using the map output OI of the table function definition. If the
     * query does not have a map phase, the inputInfo is retrieved from the
     * QueryInputDef (either HiveTableDef or HiveQueryDef) of the query.
     */
    if (tEval.isTransformsRawInput()) {
        inputInfo = qdef.getTranslationInfo().getMapInputInfo(tabDef);
    } else {
        inputInfo = qdef.getTranslationInfo().getInputInfo(hiveTableDef);
    }
    for (ColumnDef colDef : partColList) {
        partCols.add(colDef.getExprNode());
    }
    ArrayList<OrderColumnDef> orderColList = tabDef.getWindow().getOrderDef().getColumns();
    for (OrderColumnDef colDef : orderColList) {
        Order order = colDef.getOrder();
        if (order.name().equals("ASC")) {
            orderString.append('+');
        } else {
            orderString.append('-');
        }
        orderCols.add(colDef.getExprNode());
        outputColumnNames.add(colDef.getAlias());
    }
    RowResolver rr = inputInfo.getRowResolver();
    ArrayList<ColumnInfo> colInfoList = rr.getColumnInfos();
    for (ColumnInfo colInfo : colInfoList) {
        String internalName = colInfo.getInternalName();
        TypeInfo type = colInfo.getType();
        valueCols.add(TranslateUtils.getExprDesc(internalName, type));
        outputColumnNames.add(internalName);
    }
}
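The '+'/'-' characters appended to orderString follow the one-character-per-key sort-order convention used when configuring the ReduceSinkOperator. A minimal standalone sketch of that encoding, assuming an Order enum with ASC/DESC values as in the snippet:

    import java.util.List;

    enum Order { ASC, DESC }

    // One character per order column: '+' for ascending, '-' for descending.
    // For example, ORDER BY a ASC, b DESC encodes as "+-".
    static String sortOrderString(List<Order> orders) {
        StringBuilder sb = new StringBuilder();
        for (Order o : orders) {
            sb.append(o == Order.ASC ? '+' : '-');
        }
        return sb.toString();
    }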