use of com.sap.hadoop.windowing.query2.definition.ColumnDef in project SQLWindowing by hbutani.
Usage in the class OutputTranslation, method translateSelectExpr.
/**
 * Translate one select-list expression into a ColumnDef carrying its AST,
 * ExprNodeDesc, evaluator, ObjectInspector and resolved alias.
 */
public static ColumnDef translateSelectExpr(QueryDef qDef, InputInfo iInfo, int colIdx, String alias, ASTNode expr) throws WindowingException {
    // Build the expression node and wire up its evaluator against the input.
    ExprNodeDesc node = TranslateUtils.buildExprNode(expr, iInfo.getTypeCheckCtx());
    ExprNodeEvaluator evaluator = WindowingExprNodeEvaluatorFactory.get(qDef.getTranslationInfo(), node);
    ObjectInspector inspector = TranslateUtils.initExprNodeEvaluator(qDef, node, evaluator, iInfo);

    // Assemble the column definition; no ColumnSpec exists at this point.
    ColumnDef colDef = new ColumnDef((ColumnSpec) null);
    colDef.setAlias(getAlias(alias, expr, colIdx));
    colDef.setExpression(expr);
    colDef.setExprNode(node);
    colDef.setExprEvaluator(evaluator);
    colDef.setOI(inspector);
    return colDef;
}
use of com.sap.hadoop.windowing.query2.definition.ColumnDef in project SQLWindowing by hbutani.
Usage in the class MRUtils, method initialize.
/**
 * Construct the data structures containing ExprNodeDesc for partition
 * columns and order columns. Use the input definition to construct the list
 * of output columns for the ReduceSinkOperator.
 *
 * @throws WindowingException if the query's first table function cannot be resolved
 */
public void initialize() throws WindowingException {
    TableFuncDef tabDef = RuntimeUtils.getFirstTableFunction(qdef);
    hiveTableDef = tabDef.getHiveTableDef();
    InputInfo inputInfo;
    ArrayList<ColumnDef> partColList = tabDef.getWindow().getPartDef().getColumns();
    TableFunctionEvaluator tEval = tabDef.getFunction();

    /*
     * If the query has a map phase, the inputInfo is retrieved from the map
     * output info of the table function definition. This is constructed
     * using the map output oi of the table function definition. If the
     * query does not have a map phase, the inputInfo is retrieved from the
     * QueryInputDef (either HiveTableDef or HiveQueryDef) of the query.
     */
    if (tEval.isTransformsRawInput()) {
        inputInfo = qdef.getTranslationInfo().getMapInputInfo(tabDef);
    } else {
        inputInfo = qdef.getTranslationInfo().getInputInfo(hiveTableDef);
    }

    // Partition columns drive the ReduceSink key partitioning.
    for (ColumnDef colDef : partColList) {
        partCols.add(colDef.getExprNode());
    }

    // Order columns: '+' marks ascending, '-' descending, one char per column.
    ArrayList<OrderColumnDef> orderColList = tabDef.getWindow().getOrderDef().getColumns();
    for (OrderColumnDef colDef : orderColList) {
        // Compare the enum constant directly rather than via the fragile
        // order.name().equals("ASC") string comparison.
        orderString.append(colDef.getOrder() == Order.ASC ? '+' : '-');
        orderCols.add(colDef.getExprNode());
        outputColumnNames.add(colDef.getAlias());
    }

    // Every input column is forwarded as a value column of the ReduceSink.
    RowResolver rr = inputInfo.getRowResolver();
    ArrayList<ColumnInfo> colInfoList = rr.getColumnInfos();
    for (ColumnInfo colInfo : colInfoList) {
        String internalName = colInfo.getInternalName();
        TypeInfo type = colInfo.getType();
        valueCols.add(TranslateUtils.getExprDesc(internalName, type));
        outputColumnNames.add(internalName);
    }
}
use of com.sap.hadoop.windowing.query2.definition.ColumnDef in project SQLWindowing by hbutani.
Usage in the class OutputTranslation, method setupOutputSerDe.
/**
 * Instantiate and initialize the output SerDe named in the query output spec,
 * feeding it the select list's column names and types, then attach it to oDef.
 *
 * @throws WindowingException if the SerDe class cannot be loaded/instantiated
 *         or its initialization fails
 */
@SuppressWarnings("unchecked")
public static void setupOutputSerDe(HiveConf hCfg, SelectDef selectDef, QueryOutputDef oDef) throws WindowingException {
    String serDeClassName = oDef.getSpec().getSerDeClass();
    Properties serDeProps = oDef.getSpec().getSerDeProps();
    Class<? extends SerDe> serDeClass;
    SerDe serde;
    try {
        serDeClass = (Class<? extends SerDe>) Class.forName(serDeClassName);
        // getDeclaredConstructor().newInstance() instead of the deprecated
        // Class.newInstance(), which silently propagates checked exceptions
        // from the constructor. Both reflective exceptions are caught below.
        serde = serDeClass.getDeclaredConstructor().newInstance();
    } catch (Exception e) {
        throw new WindowingException("Internal error, initializing output SerDe", e);
    }

    // Build the comma-separated column name/type lists the SerDe expects.
    StringBuilder colNames = new StringBuilder();
    StringBuilder colTypes = new StringBuilder();
    boolean first = true;
    for (ColumnDef cDef : selectDef.getColumns()) {
        if (!first) {
            colNames.append(",");
            colTypes.append(",");
        } else {
            first = false;
        }
        colNames.append(cDef.getAlias());
        colTypes.append(TypeInfoUtils.getTypeInfoFromObjectInspector(cDef.getOI()).getTypeName());
    }
    serDeProps.setProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMNS, colNames.toString());
    serDeProps.setProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMN_TYPES, colTypes.toString());

    try {
        serde.initialize(hCfg, serDeProps);
    } catch (SerDeException se) {
        throw new WindowingException("Failed to initialize output SerDe", se);
    }
    oDef.setSerDe(serde);
}
use of com.sap.hadoop.windowing.query2.definition.ColumnDef in project SQLWindowing by hbutani.
Usage in the class OutputTranslation, method translateSelectExprs.
/**
 * Translate every entry of the query's select list — plain expressions and
 * window-function aliases alike — into ColumnDefs, then rebuild the select OI.
 */
public static void translateSelectExprs(QueryDef qDef) throws WindowingException {
    QueryTranslationInfo tInfo = qDef.getTranslationInfo();
    InputInfo iInfo = tInfo.getInputInfo(qDef.getInput());
    SelectDef selectDef = qDef.getSelectList();
    SelectSpec selectSpec = qDef.getSpec().getSelectList();

    // Each entry is an Object[] tuple: {isWindowFn, alias, [ASTNode expr]}.
    Iterator<Object> entries = selectSpec.getColumnListAndAlias();
    int colIdx = 0;
    while (entries.hasNext()) {
        Object[] entry = (Object[]) entries.next();
        boolean isWindowFn = ((Boolean) entry[0]).booleanValue();
        String alias = (String) entry[1];
        ColumnDef cDef;
        if (isWindowFn) {
            cDef = translateWindowFnAlias(qDef, iInfo, colIdx++, alias);
        } else {
            cDef = translateSelectExpr(qDef, iInfo, colIdx++, alias, (ASTNode) entry[2]);
        }
        selectDef.addColumn(cDef);
    }
    TranslateUtils.setupSelectOI(selectDef);
}
use of com.sap.hadoop.windowing.query2.definition.ColumnDef in project SQLWindowing by hbutani.
Usage in the class TranslateUtils, method setupSelectOI.
/**
 * Recreate the ObjectInspector of the select list from the alias and
 * ObjectInspector already recorded on each ColumnDef.
 *
 * @param sDef the select definition whose struct OI is (re)built
 */
public static void setupSelectOI(SelectDef sDef) {
    ArrayList<ColumnDef> columns = sDef.getColumns();
    ArrayList<String> aliases = new ArrayList<String>(columns.size());
    ArrayList<ObjectInspector> inspectors = new ArrayList<ObjectInspector>(columns.size());
    for (ColumnDef column : columns) {
        aliases.add(column.getAlias());
        inspectors.add(column.getOI());
    }
    sDef.setOI(ObjectInspectorFactory.getStandardStructObjectInspector(aliases, inspectors));
}
Aggregations