Use of com.sap.hadoop.windowing.query2.definition.HiveQueryDef in project SQLWindowing by hbutani.
The class WindowingShell, method execute.
protected void execute(QueryDef q, QueryOutputPrinter outP) throws WindowingException {
    QuerySpec qSpec = q.getSpec();
    try {
        executor.execute(q, this);
    } finally {
        // If the query's input was an embedded Hive query, it was materialized
        // into a temporary table during translation; drop that table now.
        HiveTableDef hiveTable = q.getHiveTableDef();
        if (hiveTable instanceof HiveQueryDef) {
            String tableName = hiveTable.getHiveTableSpec().getTableName();
            hiveQryExec.dropTable(tableName);
        }
    }
    if (qSpec.getOutput().getHiveTable() != null) {
        loadToOutputTable(q);
    }
    if (outP != null) {
        outP.printQueryOutput(q, cfg);
    }
}
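To show the cleanup decision in isolation, here is a minimal, self-contained Java sketch of the same try/finally pattern. The types TableInput, MaterializedQueryInput, ExistingTableInput and the runAndCleanUp helper are hypothetical stand-ins for illustration only, not SQLWindowing classes.

import java.util.function.Consumer;

// Hypothetical stand-ins: a plain input table needs no cleanup, whereas an input
// that was materialized from an embedded query owns a temp table to drop.
interface TableInput { }
final class ExistingTableInput implements TableInput { }
final class MaterializedQueryInput implements TableInput {
    final String tempTableName;
    MaterializedQueryInput(String tempTableName) { this.tempTableName = tempTableName; }
}

final class CleanupSketch {
    static void runAndCleanUp(TableInput input, Runnable body, Consumer<String> dropTable) {
        try {
            body.run();
        } finally {
            // Only inputs materialized from an embedded query own a temp table.
            if (input instanceof MaterializedQueryInput) {
                dropTable.accept(((MaterializedQueryInput) input).tempTableName);
            }
        }
    }

    public static void main(String[] args) {
        runAndCleanUp(new MaterializedQueryInput("wdw_tmp_1"),
                () -> System.out.println("executing windowing query"),
                name -> System.out.println("DROP TABLE " + name));
    }
}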
Use of com.sap.hadoop.windowing.query2.definition.HiveQueryDef in project SQLWindowing by hbutani.
The class InputTranslation, method translate.
private static HiveQueryDef translate(QueryDef qDef, HiveQuerySpec spec) throws WindowingException {
    HiveQueryDef def = new HiveQueryDef();
    HiveQueryExecutor hiveQryExec = qDef.getTranslationInfo().getHiveQueryExecutor();
    Hive hive = qDef.getTranslationInfo().getHive();
    // Materialize the embedded Hive query into a temporary table.
    String tableName = hiveQryExec.createTableAsQuery(spec.getHiveQuery());
    // Wrap the temp table in a HiveTableSpec carrying the partition and order columns,
    // then reuse the ordinary HiveTableDef translation path.
    HiveTableSpec tSpec = new HiveTableSpec();
    tSpec.setDbName(hive.getCurrentDatabase());
    tSpec.setTableName(tableName);
    tSpec.setPartition(spec.getPartition());
    tSpec.setOrder(spec.getOrder());
    def = (HiveQueryDef) InputTranslation.translate(qDef, tSpec, (HiveTableDef) def);
    return def;
}
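The method above materializes the embedded query as a table and then translates it like any pre-existing table. A rough, self-contained sketch of that materialize-then-translate idea follows; createTableAsQuery here is a local stand-in that merely builds a CTAS string, and TempTableSpec is a hypothetical holder, neither is the SQLWindowing or Hive API.

import java.util.List;
import java.util.UUID;

final class TranslateSketch {
    // Stand-in for the "create table as query" step: wrap the embedded query in a CTAS.
    static String createTableAsQuery(String hiveQuery) {
        String tableName = "wdw_tmp_" + UUID.randomUUID().toString().replace('-', '_');
        System.out.println("CREATE TABLE " + tableName + " AS " + hiveQuery); // submitted to Hive in the real flow
        return tableName;
    }

    // Hypothetical holder mirroring what the table spec carries: db, table, partition and order columns.
    static final class TempTableSpec {
        final String dbName, tableName;
        final List<String> partitionCols, orderCols;
        TempTableSpec(String dbName, String tableName, List<String> partitionCols, List<String> orderCols) {
            this.dbName = dbName; this.tableName = tableName;
            this.partitionCols = partitionCols; this.orderCols = orderCols;
        }
    }

    public static void main(String[] args) {
        String tableName = createTableAsQuery("SELECT * FROM part WHERE p_size > 10");
        TempTableSpec spec = new TempTableSpec("default", tableName, List.of("p_mfgr"), List.of("p_name"));
        // From here the spec would be translated exactly like a pre-existing Hive table,
        // and the temp table dropped once the windowing query finishes.
        System.out.println(spec.dbName + "." + spec.tableName);
    }
}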
Use of com.sap.hadoop.windowing.query2.definition.HiveQueryDef in project SQLWindowing by hbutani.
The class MRUtils, method initialize.
/**
 * Construct the data structures containing ExprNodeDesc for the partition
 * columns and order columns. Use the input definition to construct the list
 * of output columns for the ReduceSinkOperator.
 *
 * @throws WindowingException
 */
public void initialize() throws WindowingException {
    TableFuncDef tabDef = RuntimeUtils.getFirstTableFunction(qdef);
    hiveTableDef = tabDef.getHiveTableDef();
    InputInfo inputInfo;
    ArrayList<ColumnDef> partColList = tabDef.getWindow().getPartDef().getColumns();
    TableFunctionEvaluator tEval = tabDef.getFunction();
    /*
     * If the query has a map phase, the inputInfo is retrieved from the map
     * output info of the table function definition, which is constructed from
     * the map output OI of the table function. If the query has no map phase,
     * the inputInfo is retrieved from the QueryInputDef (either a HiveTableDef
     * or a HiveQueryDef) of the query.
     */
    if (tEval.isTransformsRawInput()) {
        inputInfo = qdef.getTranslationInfo().getMapInputInfo(tabDef);
    } else {
        inputInfo = qdef.getTranslationInfo().getInputInfo(hiveTableDef);
    }
    // Partition columns become the leading ReduceSink key columns.
    for (ColumnDef colDef : partColList) {
        partCols.add(colDef.getExprNode());
    }
    // Order columns: one '+' (ascending) or '-' (descending) per column,
    // following Hive's ReduceSink order-string convention.
    ArrayList<OrderColumnDef> orderColList = tabDef.getWindow().getOrderDef().getColumns();
    for (OrderColumnDef colDef : orderColList) {
        Order order = colDef.getOrder();
        if (order.name().equals("ASC")) {
            orderString.append('+');
        } else {
            orderString.append('-');
        }
        orderCols.add(colDef.getExprNode());
        outputColumnNames.add(colDef.getAlias());
    }
    // Every column of the input row becomes a value column of the ReduceSink.
    RowResolver rr = inputInfo.getRowResolver();
    ArrayList<ColumnInfo> colInfoList = rr.getColumnInfos();
    for (ColumnInfo colInfo : colInfoList) {
        String internalName = colInfo.getInternalName();
        TypeInfo type = colInfo.getType();
        valueCols.add(TranslateUtils.getExprDesc(internalName, type));
        outputColumnNames.add(internalName);
    }
}
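The order-string logic above appends one character per order column. Below is a minimal sketch of that convention in isolation; OrderSpec and orderString are hypothetical names, and only the one '+'/'-' per sort column convention comes from the snippet.

import java.util.List;

final class OrderStringSketch {
    enum Order { ASC, DESC }
    static final class OrderSpec {
        final String column; final Order order;
        OrderSpec(String column, Order order) { this.column = column; this.order = order; }
    }

    // Build the sort-direction string: one '+' (asc) or '-' (desc) per key column.
    static String orderString(List<OrderSpec> specs) {
        StringBuilder sb = new StringBuilder();
        for (OrderSpec s : specs) {
            sb.append(s.order == Order.ASC ? '+' : '-');
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(orderString(List.of(
                new OrderSpec("p_mfgr", Order.ASC),
                new OrderSpec("p_name", Order.DESC)))); // prints "+-"
    }
}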