Use of com.sap.hadoop.windowing.runtime2.Partition in the SQLWindowing project by hbutani, as shown in the processInputPartition method of the PTFOperator class.
/**
 * Runs the table-function chain over the current input partition and then
 * evaluates the select list, forwarding its output via {@code ForwardPTF}.
 *
 * @throws HiveException if the windowing chain or select-list evaluation fails
 */
protected void processInputPartition() throws HiveException {
	try {
		Partition outPart = Executor.executeChain(qDef, inputPart);
		Executor.executeSelectList(qDef, outPart, new ForwardPTF());
	} catch (WindowingException we) {
		// Name the actual failing operation; the previous message
		// ("Cannot close PTFOperator.") was copy-pasted from the close path.
		throw new HiveException("Cannot process input partition in PTFOperator.", we);
	}
}
Use of com.sap.hadoop.windowing.runtime2.Partition in the SQLWindowing project by hbutani, as shown in the createPartition method of the IOUtils class.
/**
 * Builds a {@link Partition} by draining every row from the given
 * {@link WindowingInput}.
 *
 * @param partitionClass   name of the Partition implementation to instantiate
 * @param partitionMemSize memory budget passed to the Partition
 * @param wIn              row source; read until exhausted
 * @return the fully populated Partition
 * @throws WindowingException on any failure while reading or appending rows
 */
public static Partition createPartition(String partitionClass, int partitionMemSize, WindowingInput wIn) throws WindowingException {
	try {
		SerDe rowSerDe = (SerDe) wIn.getDeserializer();
		StructObjectInspector rowInspector = (StructObjectInspector) rowSerDe.getObjectInspector();
		Partition partition = new Partition(partitionClass, partitionMemSize, rowSerDe, rowInspector);
		// A single reusable Writable is filled in by next(); -1 signals end of input.
		Writable reusableRow = wIn.createRow();
		while (wIn.next(reusableRow) != -1) {
			partition.append(reusableRow);
		}
		return partition;
	} catch (WindowingException we) {
		// Already the expected type — propagate unchanged.
		throw we;
	} catch (Exception e) {
		throw new WindowingException(e);
	}
}
Use of com.sap.hadoop.windowing.runtime2.Partition in the SQLWindowing project by hbutani, as shown in the execute method of the TableFunctionEvaluator class.
/**
 * Evaluates this table function over the given input partition and returns
 * a freshly built output partition.
 *
 * @param iPart input partition to transform
 * @return the output partition produced by this function
 * @throws WindowingException if evaluation fails
 */
public Partition execute(Partition iPart) throws WindowingException {
	PartitionIterator<Object> rows = iPart.iterator();
	// Lead/lag UDFs need direct access to the partition iterator to look
	// ahead/behind the current row.
	RuntimeUtils.connectLeadLagFunctionsToPartition(qDef, rows);
	Partition result = new Partition(getPartitionClass(), getPartitionMemSize(), tDef.getSerde(), OI);
	execute(rows, result);
	return result;
}
Use of com.sap.hadoop.windowing.runtime2.Partition in the SQLWindowing project by hbutani, as shown in the execute method of the WindowingTableFunction class.
/**
 * Computes every window function over the input partition, then appends one
 * output row per input row to {@code outP}: the input columns first, followed
 * by one value from each window-function result column.
 *
 * @param pItr iterator over the input partition; reset before each function
 * @param outP partition that receives the assembled output rows
 * @throws WindowingException if object inspection or UDAF evaluation fails
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public void execute(PartitionIterator<Object> pItr, Partition outP) throws WindowingException {
	// One entry per window function: the column of values it produced,
	// indexed by row position within the input partition.
	ArrayList<List<?>> oColumns = new ArrayList<List<?>>();
	Partition iPart = pItr.getPartition();
	StructObjectInspector inputOI;
	try {
		inputOI = (StructObjectInspector) iPart.getSerDe().getObjectInspector();
	} catch (SerDeException se) {
		throw new WindowingException(se);
	}
	try {
		for (WindowFunctionDef wFn : wFnDefs) {
			boolean processWindow = wFn.getWindow() != null;
			pItr.reset();
			if (!processWindow) {
				// No window clause: run the UDAF once over the whole partition.
				GenericUDAFEvaluator fEval = wFn.getEvaluator();
				Object[] args = new Object[wFn.getArgs().size()];
				AggregationBuffer aggBuffer = fEval.getNewAggregationBuffer();
				while (pItr.hasNext()) {
					Object row = pItr.next();
					int i = 0;
					for (ArgDef arg : wFn.getArgs()) {
						args[i++] = arg.getExprEvaluator().evaluate(row);
					}
					fEval.aggregate(aggBuffer, args);
				}
				Object out = fEval.evaluate(aggBuffer);
				WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(wFn.getSpec().getName());
				if (!wFnInfo.isPivotResult()) {
					// Scalar result: replicate it for every row of the partition.
					out = new SameList(iPart.size(), out);
				}
				oColumns.add((List<?>) out);
			} else {
				oColumns.add(executeFnwithWindow(getQueryDef(), wFn, iPart));
			}
		}
		// Hoisted out of the row loop: the struct-field list is invariant
		// across rows, and its size lets us presize each output row exactly.
		List<? extends StructField> inputFields = inputOI.getAllStructFieldRefs();
		int oRowSize = inputFields.size() + oColumns.size();
		for (int i = 0; i < iPart.size(); i++) {
			// Typed (was a raw ArrayList) and presized to its final length.
			ArrayList<Object> oRow = new ArrayList<Object>(oRowSize);
			Object iRow = iPart.getAt(i);
			for (StructField f : inputFields) {
				oRow.add(inputOI.getStructFieldData(iRow, f));
			}
			for (int j = 0; j < oColumns.size(); j++) {
				oRow.add(oColumns.get(j).get(i));
			}
			outP.append(oRow);
		}
	} catch (HiveException he) {
		throw new WindowingException(he);
	}
}
Use of com.sap.hadoop.windowing.runtime2.Partition in the SQLWindowing project by hbutani, as shown in the processMapFunction method of the PTFOperator class.
/**
 * Map-side processing: applies only the raw-input transformation of the first
 * table function to the current input partition, then forwards each resulting
 * row downstream.
 *
 * @throws HiveException if the raw-input transformation fails
 */
protected void processMapFunction() throws HiveException {
	try {
		TableFuncDef tDef = RuntimeUtils.getFirstTableFunction(qDef);
		Partition outPart = tDef.getFunction().transformRawInput(inputPart);
		PartitionIterator<Object> pItr = outPart.iterator();
		while (pItr.hasNext()) {
			Object oRow = pItr.next();
			forward(oRow, outputObjInspector);
		}
	} catch (WindowingException we) {
		// Name the actual failing operation; the previous message
		// ("Cannot close PTFOperator.") was copy-pasted from the close path.
		throw new HiveException("Cannot process map function in PTFOperator.", we);
	}
}
Aggregations