
Example 1 with Order

Use of com.sap.hadoop.metadata.Order in project SQLWindowing by hbutani.

Class MRUtils, method initialize().

/**
 * Construct the data structures containing ExprNodeDesc for partition
 * columns and order columns. Use the input definition to construct the list
 * of output columns for the ReduceSinkOperator
 *
 * @throws WindowingException
 */
public void initialize() throws WindowingException {
    TableFuncDef tabDef = RuntimeUtils.getFirstTableFunction(qdef);
    hiveTableDef = tabDef.getHiveTableDef();
    InputInfo inputInfo;
    ArrayList<ColumnDef> partColList = tabDef.getWindow().getPartDef().getColumns();
    TableFunctionEvaluator tEval = tabDef.getFunction();
    /*
     * If the query has a map phase, the inputInfo is retrieved from the map
     * output info of the table function definition. This is constructed
     * using the map output oi of the table function definition. If the
     * query does not have a map phase, the inputInfo is retrieved from the
     * QueryInputDef (either HiveTableDef or HiveQueryDef) of the query.
     */
    if (tEval.isTransformsRawInput()) {
        inputInfo = qdef.getTranslationInfo().getMapInputInfo(tabDef);
    } else {
        inputInfo = qdef.getTranslationInfo().getInputInfo(hiveTableDef);
    }
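    // Collect the ExprNodeDesc of each partition column of the window.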
    for (ColumnDef colDef : partColList) {
        partCols.add(colDef.getExprNode());
    }
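    // For each order column: encode its direction as '+' (ASC) or '-' (DESC),
    // and record its expression and output alias.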
    ArrayList<OrderColumnDef> orderColList = tabDef.getWindow().getOrderDef().getColumns();
    for (OrderColumnDef colDef : orderColList) {
        Order order = colDef.getOrder();
        if (order.name().equals("ASC")) {
            orderString.append('+');
        } else {
            orderString.append('-');
        }
        orderCols.add(colDef.getExprNode());
        outputColumnNames.add(colDef.getAlias());
    }
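    // Forward every input column as a value column, keyed by its internal name.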
    RowResolver rr = inputInfo.getRowResolver();
    ArrayList<ColumnInfo> colInfoList = rr.getColumnInfos();
    for (ColumnInfo colInfo : colInfoList) {
        String internalName = colInfo.getInternalName();
        TypeInfo type = colInfo.getType();
        valueCols.add(TranslateUtils.getExprDesc(internalName, type));
        outputColumnNames.add(internalName);
    }
}
Also used:

- Order (com.sap.hadoop.metadata.Order)
- OrderColumnDef (com.sap.hadoop.windowing.query2.definition.OrderColumnDef)
- ColumnDef (com.sap.hadoop.windowing.query2.definition.ColumnDef)
- ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo)
- RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver)
- TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
- TableFuncDef (com.sap.hadoop.windowing.query2.definition.TableFuncDef)
- InputInfo (com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo)
- TableFunctionEvaluator (com.sap.hadoop.windowing.functions2.TableFunctionEvaluator)
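For reference, the snippet below is a minimal standalone sketch (not taken from the SQLWindowing sources) of the direction-to-order-string mapping used in initialize(): each order column contributes '+' for ascending or '-' for descending. The Order enum here is a stand-in assumption for com.sap.hadoop.metadata.Order.

import java.util.List;

public class OrderStringSketch {

    // Stand-in for com.sap.hadoop.metadata.Order; assumed to expose ASC and DESC values.
    enum Order { ASC, DESC }

    // Append '+' for an ascending column and '-' for a descending one,
    // mirroring the loop over orderColList in initialize().
    static String buildOrderString(List<Order> directions) {
        StringBuilder orderString = new StringBuilder();
        for (Order o : directions) {
            orderString.append(o == Order.ASC ? '+' : '-');
        }
        return orderString.toString();
    }

    public static void main(String[] args) {
        // Two ascending columns followed by one descending column -> "++-"
        System.out.println(buildOrderString(List.of(Order.ASC, Order.ASC, Order.DESC)));
    }
}

In Hive, a '+'/'-' string of this shape is the conventional way to specify per-column sort direction for a ReduceSink, which is why initialize() accumulates it alongside the order column expressions.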

Aggregations

- Order (com.sap.hadoop.metadata.Order): 1 usage
- TableFunctionEvaluator (com.sap.hadoop.windowing.functions2.TableFunctionEvaluator): 1 usage
- ColumnDef (com.sap.hadoop.windowing.query2.definition.ColumnDef): 1 usage
- OrderColumnDef (com.sap.hadoop.windowing.query2.definition.OrderColumnDef): 1 usage
- TableFuncDef (com.sap.hadoop.windowing.query2.definition.TableFuncDef): 1 usage
- InputInfo (com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo): 1 usage
- ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 1 usage
- RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver): 1 usage
- TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 1 usage