Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.
The class PTFTranslator, method copyShape: builds a field-by-field copy of a ShapeDetails.
private ShapeDetails copyShape(ShapeDetails src) {
  ShapeDetails dest = new ShapeDetails();
  dest.setSerdeClassName(src.getSerdeClassName());
  dest.setSerdeProps(src.getSerdeProps());
  dest.setColumnNames(src.getColumnNames());
  dest.setOI(src.getOI());
  dest.setSerde(src.getSerde());
  dest.setRr(src.getRr());
  dest.setTypeCheckCtx(src.getTypeCheckCtx());
  return dest;
}
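copyShape builds a shallow copy: the serialization metadata, column names, ObjectInspector, SerDe, RowResolver and type-check context of the destination all refer to the same objects as the source. Below is a minimal sketch (not from the Hive source) of populating the serializable part of a fresh ShapeDetails by hand; the setter signatures are inferred from the getters used above, and the SerDe class name and properties are placeholders only.

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails;

public class ShapeDetailsSketch {
  public static void main(String[] args) {
    ShapeDetails shape = new ShapeDetails();
    // Serialization metadata carried in the plan: SerDe class plus its properties.
    shape.setSerdeClassName("org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe");
    Map<String, String> serdeProps = new HashMap<String, String>();
    serdeProps.put("columns", "a,b");
    shape.setSerdeProps(serdeProps);
    // Logical column names exposed by this shape.
    shape.setColumnNames(Arrays.asList("a", "b"));
    // The OI, SerDe, RowResolver and TypeCheckCtx are normally re-established
    // at execution time by the PTFDeserializer.initialize methods shown below.
  }
}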
Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.
The class PTFDeserializer, method initialize(PTFQueryInputDef, StructObjectInspector): initializes the query input's output shape against the given ObjectInspector.
protected void initialize(PTFQueryInputDef def, StructObjectInspector OI) throws HiveException {
  ShapeDetails outShape = def.getOutputShape();
  initialize(outShape, OI);
}
Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.
The class PTFDeserializer, method initializeWindowing: connects the windowing resolver, initializes each window function against the input shape, and builds the windowing output ObjectInspector.
public void initializeWindowing(WindowTableFunctionDef def) throws HiveException {
  ShapeDetails inpShape = def.getInput().getOutputShape();
  /*
   * 1. setup resolve, make connections
   */
  TableFunctionEvaluator tEval = def.getTFunction();
  WindowingTableFunctionResolver tResolver =
      (WindowingTableFunctionResolver) constructResolver(def.getResolverClassName());
  tResolver.initialize(ptfDesc, def, tEval);
  /*
   * 2. initialize WFns.
   */
  for (WindowFunctionDef wFnDef : def.getWindowFunctions()) {
    if (wFnDef.getArgs() != null) {
      for (PTFExpressionDef arg : wFnDef.getArgs()) {
        initialize(arg, inpShape);
      }
    }
    if (wFnDef.getWindowFrame() != null) {
      WindowFrameDef wFrmDef = wFnDef.getWindowFrame();
      initialize(wFrmDef, inpShape);
    }
    setupWdwFnEvaluator(wFnDef);
  }
  ArrayList<String> aliases = new ArrayList<String>();
  ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
  for (WindowFunctionDef wFnDef : def.getWindowFunctions()) {
    aliases.add(wFnDef.getAlias());
    if (wFnDef.isPivotResult()) {
      fieldOIs.add(((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector());
    } else {
      fieldOIs.add(wFnDef.getOI());
    }
  }
  PTFDeserializer.addInputColumnsToList(inpShape, aliases, fieldOIs);
  StructObjectInspector wdwOutOI = ObjectInspectorFactory.getStandardStructObjectInspector(aliases, fieldOIs);
  tResolver.setWdwProcessingOutputOI(wdwOutOI);
  initialize(def.getOutputShape(), wdwOutOI);
  tResolver.initializeOutputOI();
}
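The windowing output inspector is just a standard struct ObjectInspector assembled from the window-function aliases plus the carried-forward input columns added by addInputColumnsToList. A self-contained sketch of that assembly using the same factory call; the column names and writable primitive inspectors here are illustrative stand-ins, not the values Hive would produce.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class WindowingOutputOISketch {
  public static void main(String[] args) {
    // Field names: window-function aliases first, then forwarded input columns.
    List<String> aliases = new ArrayList<String>();
    List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    aliases.add("rank_col");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    aliases.add("input_col");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    // Same factory call as in initializeWindowing: one struct OI over all fields.
    StructObjectInspector wdwOutOI =
        ObjectInspectorFactory.getStandardStructObjectInspector(aliases, fieldOIs);
    System.out.println(wdwOutOI.getTypeName());
  }
}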
Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.
The class PTFDeserializer, method initialize(PartitionedTableFunctionDef): initializes argument expressions, the resolver, and the raw-input and output shapes of a partitioned table function.
protected void initialize(PartitionedTableFunctionDef def) throws HiveException {
  ShapeDetails inpShape = def.getInput().getOutputShape();
  /*
   * 1. initialize args
   */
  if (def.getArgs() != null) {
    for (PTFExpressionDef arg : def.getArgs()) {
      initialize(arg, inpShape);
    }
  }
  /*
   * 2. setup resolve, make connections
   */
  TableFunctionEvaluator tEval = def.getTFunction();
  // TableFunctionResolver tResolver = FunctionRegistry.getTableFunctionResolver(def.getName());
  TableFunctionResolver tResolver = constructResolver(def.getResolverClassName());
  tResolver.initialize(ptfDesc, def, tEval);
  /*
   * 3. give Evaluator chance to setup for RawInput execution; setup RawInput shape
   */
  if (tEval.isTransformsRawInput()) {
    tResolver.initializeRawInputOI();
    initialize(def.getRawInputShape(), tEval.getRawInputOI());
  } else {
    def.setRawInputShape(inpShape);
  }
  inpShape = def.getRawInputShape();
  /*
   * 4. give Evaluator chance to setup for Output execution; setup Output shape.
   */
  tResolver.initializeOutputOI();
  initialize(def.getOutputShape(), tEval.getOutputOI());
}
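The constructResolver helper called above is not part of this listing; it instantiates the resolver class recorded in the plan by name. A rough sketch of that kind of by-name construction follows; it is an approximation under stated assumptions, not the actual PTFDeserializer implementation, and the error handling is illustrative.

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver;

public class ResolverConstructionSketch {
  // Instantiate a TableFunctionResolver from the class name stored in the plan,
  // analogous in spirit to the constructResolver call used above.
  static TableFunctionResolver constructResolver(String className) throws HiveException {
    try {
      @SuppressWarnings("unchecked")
      Class<? extends TableFunctionResolver> rCls =
          (Class<? extends TableFunctionResolver>) Class.forName(className);
      return rCls.getDeclaredConstructor().newInstance();
    } catch (Exception e) {
      throw new HiveException(e);
    }
  }
}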
Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.
The class PTFTranslator, method translate: builds a PartitionedTableFunctionDef from its spec, setting up the raw-input and output ShapeDetails along the way.
private PartitionedTableFunctionDef translate(PartitionedTableFunctionSpec spec, PTFInputDef inpDef, int inpNum)
    throws SemanticException {
  TableFunctionResolver tFn = FunctionRegistry.getTableFunctionResolver(spec.getName());
  if (tFn == null) {
    throw new SemanticException(String.format("Unknown Table Function %s", spec.getName()));
  }
  PartitionedTableFunctionDef def = new PartitionedTableFunctionDef();
  def.setInput(inpDef);
  def.setName(spec.getName());
  def.setResolverClassName(tFn.getClass().getName());
  def.setAlias(spec.getAlias() == null ? "ptf_" + inpNum : spec.getAlias());
  def.setExpressionTreeString(spec.getAstNode().toStringTree());
  def.setTransformsRawInput(tFn.transformsRawInput());
  /*
   * translate args
   */
  List<ASTNode> args = spec.getArgs();
  if (args != null) {
    for (ASTNode expr : args) {
      PTFExpressionDef argDef = null;
      try {
        argDef = buildExpressionDef(inpDef.getOutputShape(), expr);
      } catch (HiveException he) {
        throw new SemanticException(he);
      }
      def.addArg(argDef);
    }
  }
  tFn.initialize(hCfg, ptfDesc, def);
  TableFunctionEvaluator tEval = tFn.getEvaluator();
  def.setTFunction(tEval);
  def.setCarryForwardNames(tFn.carryForwardNames());
  tFn.setupRawInputOI();
  if (tFn.transformsRawInput()) {
    StructObjectInspector rawInOutOI = tEval.getRawInputOI();
    List<String> rawInOutColNames = tFn.getRawInputColumnNames();
    RowResolver rawInRR = buildRowResolverForPTF(def.getName(), spec.getAlias(), rawInOutOI,
        rawInOutColNames, inpDef.getOutputShape().getRr());
    ShapeDetails rawInpShape = setupTableFnShape(def.getName(), inpDef.getOutputShape(), rawInOutOI,
        rawInOutColNames, rawInRR);
    def.setRawInputShape(rawInpShape);
  } else {
    def.setRawInputShape(inpDef.getOutputShape());
  }
  translatePartitioning(def, spec);
  tFn.setupOutputOI();
  StructObjectInspector outputOI = tEval.getOutputOI();
  List<String> outColNames = tFn.getOutputColumnNames();
  RowResolver outRR = buildRowResolverForPTF(def.getName(), spec.getAlias(), outputOI, outColNames,
      def.getRawInputShape().getRr());
  ShapeDetails outputShape = setupTableFnShape(def.getName(), inpDef.getOutputShape(), outputOI,
      outColNames, outRR);
  def.setOutputShape(outputShape);
  def.setReferencedColumns(tFn.getReferencedColumns());
  return def;
}
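The null check on the resolver lookup is the only place the function name itself is validated during translation. A minimal sketch of that lookup pattern in isolation; the wrapper method name is made up for illustration, and whether getTableFunctionResolver itself declares SemanticException may vary by Hive version.

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver;

public class ResolverLookupSketch {
  // Resolve a PTF name to its resolver, failing fast on unknown names,
  // mirroring the check at the top of PTFTranslator.translate.
  static TableFunctionResolver resolveOrFail(String name) throws SemanticException {
    TableFunctionResolver tFn = FunctionRegistry.getTableFunctionResolver(name);
    if (tFn == null) {
      throw new SemanticException(String.format("Unknown Table Function %s", name));
    }
    return tFn;
  }
}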