Use of org.apache.hadoop.hive.ql.exec.PTFPartition in project hive by apache.
The class WindowingTableFunction, method execute.
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public void execute(PTFPartitionIterator<Object> pItr, PTFPartition outP) throws HiveException {
  ArrayList<List<?>> oColumns = new ArrayList<List<?>>();
  PTFPartition iPart = pItr.getPartition();
  StructObjectInspector inputOI = iPart.getOutputOI();
  WindowTableFunctionDef wTFnDef = (WindowTableFunctionDef) getTableDef();
  // Evaluate each window function over the partition, producing one output column
  // (a List with one entry per row) per function.
  for (WindowFunctionDef wFn : wTFnDef.getWindowFunctions()) {
    boolean processWindow = processWindow(wFn.getWindowFrame());
    pItr.reset();
    if (!processWindow) {
      Object out = evaluateFunctionOnPartition(wFn, iPart);
      if (!wFn.isPivotResult()) {
        // A single partition-level value: wrap it so every row sees the same result.
        out = new SameList(iPart.size(), out);
      }
      oColumns.add((List<?>) out);
    } else {
      // The function has a window frame: evaluate it row by row over the frame.
      oColumns.add(executeFnwithWindow(wFn, iPart));
    }
  }
  // Assemble output rows: window function results first, then all input columns.
  for (int i = 0; i < iPart.size(); i++) {
    ArrayList oRow = new ArrayList();
    Object iRow = iPart.getAt(i);
    for (int j = 0; j < oColumns.size(); j++) {
      oRow.add(oColumns.get(j).get(i));
    }
    for (StructField f : inputOI.getAllStructFieldRefs()) {
      oRow.add(inputOI.getStructFieldData(iRow, f));
    }
    outP.append(oRow);
  }
}
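When a function has no window frame, evaluateFunctionOnPartition returns one partition-level value, and SameList exposes it as a read-only column that repeats that value for every row index instead of materializing iPart.size() copies. Below is a minimal, self-contained sketch of that idea; ConstantList is a hypothetical stand-in used here for illustration, not Hive's SameList inner class.

  import java.util.AbstractList;
  import java.util.List;

  // Hypothetical sketch: a read-only List view that reports a fixed size and
  // returns one shared value for every index, so a partition-level aggregate
  // can be exposed as a per-row column without copying it per row.
  public class ConstantList<E> extends AbstractList<E> {

    private final int size;  // number of rows in the partition
    private final E value;   // the single aggregate value to repeat

    public ConstantList(int size, E value) {
      this.size = size;
      this.value = value;
    }

    @Override
    public E get(int index) {
      if (index < 0 || index >= size) {
        throw new IndexOutOfBoundsException("index: " + index + ", size: " + size);
      }
      return value;  // every row sees the same partition-level result
    }

    @Override
    public int size() {
      return size;
    }

    public static void main(String[] args) {
      // e.g. a COUNT(*) over the whole partition, broadcast to 5 output rows
      List<Long> column = new ConstantList<>(5, 42L);
      System.out.println(column.get(0) + " " + column.get(4)); // 42 42
    }
  }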
Use of org.apache.hadoop.hive.ql.exec.PTFPartition in project hive by apache.
The class WindowingTableFunction, method iterator.
@SuppressWarnings("rawtypes")
@Override
public Iterator<Object> iterator(PTFPartitionIterator<Object> pItr) throws HiveException {
  WindowTableFunctionDef wTFnDef = (WindowTableFunctionDef) getTableDef();
  ArrayList<Object> output = new ArrayList<Object>();
  List<?>[] outputFromPivotFunctions = new List<?>[wTFnDef.getWindowFunctions().size()];
  ArrayList<Integer> wFnsWithWindows = new ArrayList<Integer>();
  PTFPartition iPart = pItr.getPartition();
  int i = 0;
  for (WindowFunctionDef wFn : wTFnDef.getWindowFunctions()) {
    boolean processWindow = processWindow(wFn.getWindowFrame());
    pItr.reset();
    if (!processWindow && !wFn.isPivotResult()) {
      // No frame and no pivot result: a single partition-level value can be computed up front.
      Object out = evaluateFunctionOnPartition(wFn, iPart);
      output.add(out);
    } else if (wFn.isPivotResult()) {
      // Prefer a streaming evaluator when the UDAF supports windowing in streaming mode
      // and emits each row's result immediately.
      GenericUDAFEvaluator streamingEval = wFn.getWFnEval().getWindowingEvaluator(wFn.getWindowFrame());
      if (streamingEval != null && streamingEval instanceof ISupportStreamingModeForWindowing) {
        ISupportStreamingModeForWindowing strEval = (ISupportStreamingModeForWindowing) streamingEval;
        if (strEval.getRowsRemainingAfterTerminate() == 0) {
          wFn.setWFnEval(streamingEval);
          if (wFn.getOI() instanceof ListObjectInspector) {
            ListObjectInspector listOI = (ListObjectInspector) wFn.getOI();
            wFn.setOI(listOI.getListElementObjectInspector());
          }
          output.add(null);
          wFnsWithWindows.add(i);
        } else {
          // Streaming not usable: evaluate the pivot function over the whole partition now.
          outputFromPivotFunctions[i] = (List) evaluateFunctionOnPartition(wFn, iPart);
          output.add(null);
        }
      } else {
        outputFromPivotFunctions[i] = (List) evaluateFunctionOnPartition(wFn, iPart);
        output.add(null);
      }
    } else {
      // A window frame is present: defer evaluation to the WindowingIterator.
      output.add(null);
      wFnsWithWindows.add(i);
    }
    i++;
  }
  // Reserve slots for the pass-through input columns; the iterator fills them per row.
  for (i = 0; i < iPart.getOutputOI().getAllStructFieldRefs().size(); i++) {
    output.add(null);
  }
  if (wTFnDef.getRankLimit() != -1) {
    rnkLimitDef = new RankLimit(wTFnDef.getRankLimit(), wTFnDef.getRankLimitFunction(), wTFnDef.getWindowFunctions());
  }
  return new WindowingIterator(iPart, output, outputFromPivotFunctions,
      ArrayUtils.toPrimitive(wFnsWithWindows.toArray(new Integer[wFnsWithWindows.size()])));
}
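The indexes collected in wFnsWithWindows mark the functions the returned WindowingIterator still has to evaluate row by row; they are unboxed from Integer objects into an int[] with Commons Lang's ArrayUtils.toPrimitive. A small standalone illustration of that conversion follows, assuming the Commons Lang 3 package name (org.apache.commons.lang3); the actual import used by Hive is not shown in the snippet above, and WindowingIterator itself is Hive-internal and not reproduced here.

  import java.util.ArrayList;
  import java.util.Arrays;
  import org.apache.commons.lang3.ArrayUtils;

  public class ToPrimitiveDemo {
    public static void main(String[] args) {
      // Indexes of window functions that still require per-row framing,
      // accumulated the same way wFnsWithWindows is in iterator().
      ArrayList<Integer> wFnsWithWindows = new ArrayList<>();
      wFnsWithWindows.add(0);
      wFnsWithWindows.add(2);

      // Unbox Integer[] -> int[] before handing the indexes to the iterator.
      int[] indexes = ArrayUtils.toPrimitive(
          wFnsWithWindows.toArray(new Integer[wFnsWithWindows.size()]));

      System.out.println(Arrays.toString(indexes)); // [0, 2]
    }
  }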