Use of org.apache.hadoop.hive.ql.plan.ptf.PartitionDef in project hive by apache.
In class PTFTranslator, method translate:
private PartitionDef translate(ShapeDetails inpShape, PartitionSpec spec) throws SemanticException {
  if (spec == null || spec.getExpressions() == null || spec.getExpressions().size() == 0) {
    return null;
  }
  PartitionDef pDef = new PartitionDef();
  // translate each PARTITION BY expression into a PTFExpressionDef and collect them
  for (PartitionExpression pExpr : spec.getExpressions()) {
    PTFExpressionDef expDef = translate(inpShape, pExpr);
    pDef.addExpression(expDef);
  }
  return pDef;
}
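The PTFExpressionDef values collected here are plain carriers for the translated PARTITION BY expressions. A minimal, hedged sketch of how a caller might inspect the result, using only the getExpressions() and getExprNode() accessors that also appear in the PTFOperator snippet below; the printing itself is illustrative, not part of Hive:

  // Sketch: listing the translated partition keys. Assumes pDef came from the
  // translate(...) above; getExprString() is ExprNodeDesc's textual rendering.
  PartitionDef pDef = translate(inpShape, spec);
  if (pDef != null) {
    for (PTFExpressionDef eDef : pDef.getExpressions()) {
      System.out.println("partition key: " + eDef.getExprNode().getExprString());
    }
  }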
Use of org.apache.hadoop.hive.ql.plan.ptf.PartitionDef in project hive by apache.
In class PTFOperator, method setupKeysWrapper:
protected void setupKeysWrapper(ObjectInspector inputOI) throws HiveException {
  PartitionDef pDef = conf.getStartOfChain().getPartition();
  List<PTFExpressionDef> exprs = pDef.getExpressions();
  int numExprs = exprs.size();
  ExprNodeEvaluator[] keyFields = new ExprNodeEvaluator[numExprs];
  ObjectInspector[] keyOIs = new ObjectInspector[numExprs];
  ObjectInspector[] currentKeyOIs = new ObjectInspector[numExprs];
  for (int i = 0; i < numExprs; i++) {
    PTFExpressionDef exprDef = exprs.get(i);
    /*
     * Why can't we just reuse the ExprNodeEvaluator on the column?
     * Because on the reduce side it is initialized based on the rowOI of the
     * HiveTable, not on the OI of this Operator's parent on the reduce side.
     */
    keyFields[i] = ExprNodeEvaluatorFactory.get(exprDef.getExprNode());
    keyOIs[i] = keyFields[i].initialize(inputOI);
    currentKeyOIs[i] = ObjectInspectorUtils.getStandardObjectInspector(keyOIs[i],
        ObjectInspectorCopyOption.WRITABLE);
  }
  keyWrapperFactory = new KeyWrapperFactory(keyFields, keyOIs, currentKeyOIs);
  newKeys = keyWrapperFactory.getKeyWrapper();
}
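Once setupKeysWrapper has run, newKeys can evaluate the partition-key expressions against each incoming row, which is how the operator detects partition boundaries. A condensed, hedged sketch of that per-row pattern follows; it is adapted, not the verbatim PTFOperator.process code, and currentKeys is assumed to be an operator field holding the previous row's copied keys:

  // Sketch: per-row partition-boundary detection using the KeyWrapper built above.
  private void checkPartitionBoundary(Object row, ObjectInspector inputOI) throws HiveException {
    newKeys.getNewKey(row, inputOI);          // evaluate keyFields for this row
    if (currentKeys == null || !newKeys.equals(currentKeys)) {
      // a new partition starts here; the previous partition would be flushed first
      currentKeys = newKeys.copyKey();        // standalone copy, safe to hold across rows
    }
  }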
Use of org.apache.hadoop.hive.ql.plan.ptf.PartitionDef in project hive by apache.
In class PTFTranslator, method translatePartitioning:
private void translatePartitioning(PartitionedTableFunctionDef def, PartitionedTableFunctionSpec spec)
    throws SemanticException {
  applyConstantPartition(spec);
  if (spec.getPartition() == null) {
    return;
  }
  PartitionDef partDef = translate(def.getRawInputShape(), spec.getPartition());
  OrderDef orderDef = translate(def.getRawInputShape(), spec.getOrder(), partDef);
  def.setPartition(partDef);
  def.setOrder(orderDef);
}
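After translatePartitioning runs, the definitions can be read back from the PartitionedTableFunctionDef; getPartition() is exactly what PTFOperator.setupKeysWrapper above reaches via conf.getStartOfChain(). A small hedged sketch, where getOrder() is assumed to mirror the setOrder() call used here:

  // Sketch: consuming the translated definitions. getPartition() matches the
  // accessor used in setupKeysWrapper; getOrder() is assumed from setOrder().
  PartitionDef partDef = def.getPartition();
  if (partDef != null) {
    int numKeys = partDef.getExpressions().size();  // drives the array sizing above
  }
  OrderDef orderDef = def.getOrder();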
Use of org.apache.hadoop.hive.ql.plan.ptf.PartitionDef in project hive by apache.
In class PTFTranslator, method translate:
public PTFDesc translate(WindowingSpec wdwSpec, SemanticAnalyzer semAly, HiveConf hCfg,
    RowResolver inputRR, UnparseTranslator unparseT) throws SemanticException {
  init(semAly, hCfg, inputRR, unparseT);
  windowingSpec = wdwSpec;
  ptfDesc = new PTFDesc();
  ptfDesc.setCfg(hCfg);
  ptfDesc.setLlInfo(llInfo);
  WindowTableFunctionDef wdwTFnDef = new WindowTableFunctionDef();
  ptfDesc.setFuncDef(wdwTFnDef);
  PTFQueryInputSpec inpSpec = new PTFQueryInputSpec();
  inpSpec.setType(PTFQueryInputType.WINDOWING);
  wdwTFnDef.setInput(translate(inpSpec, 0));
  ShapeDetails inpShape = wdwTFnDef.getInput().getOutputShape();
  WindowingTableFunctionResolver tFn = (WindowingTableFunctionResolver)
      FunctionRegistry.getTableFunctionResolver(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
  if (tFn == null) {
    throw new SemanticException(String.format("Internal Error: Unknown Table Function %s",
        FunctionRegistry.WINDOWING_TABLE_FUNCTION));
  }
  wdwTFnDef.setName(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
  wdwTFnDef.setResolverClassName(tFn.getClass().getName());
  wdwTFnDef.setAlias("ptf_" + 1);
  wdwTFnDef.setExpressionTreeString(null);
  wdwTFnDef.setTransformsRawInput(false);
  tFn.initialize(hCfg, ptfDesc, wdwTFnDef);
  TableFunctionEvaluator tEval = tFn.getEvaluator();
  wdwTFnDef.setTFunction(tEval);
  wdwTFnDef.setCarryForwardNames(tFn.carryForwardNames());
  wdwTFnDef.setRawInputShape(inpShape);
  PartitioningSpec partiSpec = wdwSpec.getQueryPartitioningSpec();
  if (partiSpec == null) {
    throw new SemanticException(
        "Invalid use of Windowing: there is no Partitioning associated with Windowing");
  }
  PartitionDef partDef = translate(inpShape, wdwSpec.getQueryPartitionSpec());
  OrderDef ordDef = translate(inpShape, wdwSpec.getQueryOrderSpec(), partDef);
  wdwTFnDef.setPartition(partDef);
  wdwTFnDef.setOrder(ordDef);
  /*
   * process Wdw functions
   */
  ArrayList<WindowFunctionDef> windowFunctions = new ArrayList<WindowFunctionDef>();
  if (wdwSpec.getWindowExpressions() != null) {
    for (WindowExpressionSpec expr : wdwSpec.getWindowExpressions()) {
      if (expr instanceof WindowFunctionSpec) {
        WindowFunctionDef wFnDef = translate(wdwTFnDef, (WindowFunctionSpec) expr);
        windowFunctions.add(wFnDef);
      }
    }
    wdwTFnDef.setWindowFunctions(windowFunctions);
  }
  /*
   * set outputFromWdwFnProcessing
   */
  ArrayList<String> aliases = new ArrayList<String>();
  ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
  for (WindowFunctionDef wFnDef : windowFunctions) {
    aliases.add(wFnDef.getAlias());
    if (wFnDef.isPivotResult()) {
      fieldOIs.add(((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector());
    } else {
      fieldOIs.add(wFnDef.getOI());
    }
  }
  PTFTranslator.addInputColumnsToList(inpShape, aliases, fieldOIs);
  StructObjectInspector wdwOutOI =
      ObjectInspectorFactory.getStandardStructObjectInspector(aliases, fieldOIs);
  tFn.setWdwProcessingOutputOI(wdwOutOI);
  RowResolver wdwOutRR = buildRowResolverForWindowing(wdwTFnDef);
  ShapeDetails wdwOutShape = setupShape(wdwOutOI, null, wdwOutRR);
  wdwTFnDef.setOutputShape(wdwOutShape);
  tFn.setupOutputOI();
  PTFDeserializer.alterOutputOIForStreaming(ptfDesc);
  return ptfDesc;
}
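For context, this overload is the entry point the semantic analyzer goes through when it lowers a WindowingSpec into a PTFDesc. A hedged sketch of such a call site; the variable names wSpec, rr, hiveConf, and unparseTranslator are illustrative, while the argument order matches the signature above:

  // Sketch: invoking the translator from analyzer-style code. The returned
  // ptfDesc carries the WindowTableFunctionDef with partition, order, window
  // functions, and output shape wired up as shown above.
  PTFTranslator translator = new PTFTranslator();
  PTFDesc ptfDesc = translator.translate(wSpec, semanticAnalyzer, hiveConf, rr, unparseTranslator);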