Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.
The class PTFTranslator, method translate.
private PTFQueryInputDef translate(PTFQueryInputSpec spec, int inpNum) throws SemanticException {
  PTFQueryInputDef def = new PTFQueryInputDef();
  StructObjectInspector oi = PTFTranslator.getStandardStructOI(inputRR);
  ShapeDetails shp = setupShape(oi, null, inputRR);
  def.setOutputShape(shp);
  def.setType(spec.getType());
  def.setAlias(spec.getSource() == null ? "ptf_" + inpNum : spec.getSource());
  return def;
}
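For reference, the output shape built here is exactly what the windowing translation further down consumes as its raw input. A minimal sketch of that hand-off, with hypothetical local variable names:

PTFQueryInputDef inputDef = translate(inpSpec, 0); // the method above
ShapeDetails inpShape = inputDef.getOutputShape();
wdwTFnDef.setInput(inputDef);
wdwTFnDef.setRawInputShape(inpShape); // the windowing function reads its rows through this shape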
Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.
The class PTFTranslator, method setupShape.
private ShapeDetails setupShape(StructObjectInspector OI, List<String> columnNames, RowResolver rr) throws SemanticException {
  Map<String, String> serdePropsMap = new LinkedHashMap<String, String>();
  AbstractSerDe serde = null;
  ShapeDetails shp = new ShapeDetails();
  try {
    serde = PTFTranslator.createLazyBinarySerDe(hCfg, OI, serdePropsMap);
    StructObjectInspector outOI = PTFPartition.setupPartitionOutputOI(serde, OI);
    shp.setOI(outOI);
  } catch (SerDeException se) {
    throw new SemanticException(se);
  }
  shp.setRr(rr);
  shp.setSerde(serde);
  shp.setSerdeClassName(serde.getClass().getName());
  shp.setSerdeProps(serdePropsMap);
  shp.setColumnNames(columnNames);
  TypeCheckCtx tCtx = new TypeCheckCtx(rr);
  tCtx.setUnparseTranslator(unparseT);
  shp.setTypeCheckCtx(tCtx);
  return shp;
}
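As a rough sketch of what a consumer of the resulting ShapeDetails sees, assuming the getter counterparts of the setters used above (this mirrors how PTFDeserializer reads a shape back):

ShapeDetails shp = setupShape(oi, null, rr);
StructObjectInspector outOI = shp.getOI();        // row OI as produced by the serde
AbstractSerDe serde = shp.getSerde();             // LazyBinary serde for this shape
Map<String, String> props = shp.getSerdeProps();  // properties needed to re-create the serde
List<String> cols = shp.getColumnNames();         // may be null, as in the call above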
Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.
The class PTFTranslator, method translate.
public PTFDesc translate(WindowingSpec wdwSpec, SemanticAnalyzer semAly, HiveConf hCfg, RowResolver inputRR, UnparseTranslator unparseT) throws SemanticException {
  init(semAly, hCfg, inputRR, unparseT);
  windowingSpec = wdwSpec;
  ptfDesc = new PTFDesc();
  ptfDesc.setCfg(hCfg);
  ptfDesc.setLlInfo(llInfo);
  WindowTableFunctionDef wdwTFnDef = new WindowTableFunctionDef();
  ptfDesc.setFuncDef(wdwTFnDef);
  PTFQueryInputSpec inpSpec = new PTFQueryInputSpec();
  inpSpec.setType(PTFQueryInputType.WINDOWING);
  wdwTFnDef.setInput(translate(inpSpec, 0));
  ShapeDetails inpShape = wdwTFnDef.getInput().getOutputShape();
  WindowingTableFunctionResolver tFn = (WindowingTableFunctionResolver)
      FunctionRegistry.getTableFunctionResolver(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
  if (tFn == null) {
    throw new SemanticException(String.format("Internal Error: Unknown Table Function %s", FunctionRegistry.WINDOWING_TABLE_FUNCTION));
  }
  wdwTFnDef.setName(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
  wdwTFnDef.setResolverClassName(tFn.getClass().getName());
  wdwTFnDef.setAlias("ptf_" + 1);
  wdwTFnDef.setExpressionTreeString(null);
  wdwTFnDef.setTransformsRawInput(false);
  tFn.initialize(hCfg, ptfDesc, wdwTFnDef);
  TableFunctionEvaluator tEval = tFn.getEvaluator();
  wdwTFnDef.setTFunction(tEval);
  wdwTFnDef.setCarryForwardNames(tFn.carryForwardNames());
  wdwTFnDef.setRawInputShape(inpShape);
  PartitioningSpec partiSpec = wdwSpec.getQueryPartitioningSpec();
  if (partiSpec == null) {
    throw new SemanticException("Invalid use of Windowing: there is no Partitioning associated with Windowing");
  }
  PartitionDef partDef = translate(inpShape, wdwSpec.getQueryPartitionSpec());
  OrderDef ordDef = translate(inpShape, wdwSpec.getQueryOrderSpec(), partDef);
  wdwTFnDef.setPartition(partDef);
  wdwTFnDef.setOrder(ordDef);
  /*
   * process Wdw functions
   */
  ArrayList<WindowFunctionDef> windowFunctions = new ArrayList<WindowFunctionDef>();
  if (wdwSpec.getWindowExpressions() != null) {
    for (WindowExpressionSpec expr : wdwSpec.getWindowExpressions()) {
      if (expr instanceof WindowFunctionSpec) {
        WindowFunctionDef wFnDef = translate(wdwTFnDef, (WindowFunctionSpec) expr);
        windowFunctions.add(wFnDef);
      }
    }
    wdwTFnDef.setWindowFunctions(windowFunctions);
  }
  /*
   * set outputFromWdwFnProcessing
   */
  ArrayList<String> aliases = new ArrayList<String>();
  ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
  for (WindowFunctionDef wFnDef : windowFunctions) {
    aliases.add(wFnDef.getAlias());
    if (wFnDef.isPivotResult()) {
      fieldOIs.add(((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector());
    } else {
      fieldOIs.add(wFnDef.getOI());
    }
  }
  PTFTranslator.addInputColumnsToList(inpShape, aliases, fieldOIs);
  StructObjectInspector wdwOutOI = ObjectInspectorFactory.getStandardStructObjectInspector(aliases, fieldOIs);
  tFn.setWdwProcessingOutputOI(wdwOutOI);
  RowResolver wdwOutRR = buildRowResolverForWindowing(wdwTFnDef);
  ShapeDetails wdwOutShape = setupShape(wdwOutOI, null, wdwOutRR);
  wdwTFnDef.setOutputShape(wdwOutShape);
  tFn.setupOutputOI();
  PTFDeserializer.alterOutputOIForStreaming(ptfDesc);
  return ptfDesc;
}
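The output-shape assembly above pairs one alias and one ObjectInspector per window function (unwrapping the list-element OI for pivoted results), then appends the input columns. A minimal standalone sketch of that assembly; the column name and the int OI are hypothetical stand-ins:

List<String> aliases = new ArrayList<String>();
List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
aliases.add("rank_col"); // hypothetical window-function alias
fieldOIs.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
StructObjectInspector outOI =
    ObjectInspectorFactory.getStandardStructObjectInspector(aliases, fieldOIs);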
Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.
The class PTFTranslator, method translate.
private WindowFunctionDef translate(WindowTableFunctionDef wdwTFnDef, WindowFunctionSpec spec) throws SemanticException {
  WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(spec.getName());
  if (wFnInfo == null) {
    throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(spec.getName()));
  }
  WindowFunctionDef def = new WindowFunctionDef();
  def.setName(spec.getName());
  def.setAlias(spec.getAlias());
  def.setDistinct(spec.isDistinct());
  def.setExpressionTreeString(spec.getExpression().toStringTree());
  def.setStar(spec.isStar());
  def.setPivotResult(wFnInfo.isPivotResult());
  ShapeDetails inpShape = wdwTFnDef.getRawInputShape();
  /*
   * translate args
   */
  ArrayList<ASTNode> args = spec.getArgs();
  if (args != null) {
    for (ASTNode expr : args) {
      PTFExpressionDef argDef = null;
      try {
        argDef = buildExpressionDef(inpShape, expr);
      } catch (HiveException he) {
        throw new SemanticException(he);
      }
      def.addArg(argDef);
    }
  }
  if (FunctionRegistry.isRankingFunction(spec.getName())) {
    setupRankingArgs(wdwTFnDef, def, spec);
  }
  WindowSpec wdwSpec = spec.getWindowSpec();
  if (wdwSpec != null) {
    String desc = spec.toString();
    WindowFrameDef wdwFrame = translate(spec.getName(), inpShape, wdwSpec);
    if (!wFnInfo.isSupportsWindow()) {
      BoundarySpec start = wdwSpec.getWindowFrame().getStart();
      if (start.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT) {
        throw new SemanticException(String.format("Expecting left window frame boundary for " + "function %s to be unbounded. Found : %d", desc, start.getAmt()));
      }
      BoundarySpec end = wdwSpec.getWindowFrame().getEnd();
      if (end.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT) {
        throw new SemanticException(String.format("Expecting right window frame boundary for " + "function %s to be unbounded. Found : %d", desc, end.getAmt()));
      }
    }
    def.setWindowFrame(wdwFrame);
  }
  try {
    setupWdwFnEvaluator(def);
  } catch (HiveException he) {
    throw new SemanticException(he);
  }
  return def;
}
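Note the frame validation above: a function whose WindowFunctionInfo reports isSupportsWindow() == false only accepts an UNBOUNDED PRECEDING .. UNBOUNDED FOLLOWING frame. Condensed into a single predicate, with the names from the snippet:

boolean frameOk = start.getAmt() == BoundarySpec.UNBOUNDED_AMOUNT
    && end.getAmt() == BoundarySpec.UNBOUNDED_AMOUNT; // anything else is rejected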
Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.
The class PTFDeserializer, method initialize.
protected void initialize(PartitionedTableFunctionDef def) throws HiveException {
  ShapeDetails inpShape = def.getInput().getOutputShape();
  /*
   * 1. initialize args
   */
  if (def.getArgs() != null) {
    for (PTFExpressionDef arg : def.getArgs()) {
      initialize(arg, inpShape);
    }
  }
  /*
   * 2. set up the resolver, make connections
   */
  TableFunctionEvaluator tEval = def.getTFunction();
  // TableFunctionResolver tResolver = FunctionRegistry.getTableFunctionResolver(def.getName());
  TableFunctionResolver tResolver = constructResolver(def.getResolverClassName());
  tResolver.initialize(ptfDesc, def, tEval);
  /*
   * 3. give the Evaluator a chance to set up for RawInput execution; set up the RawInput shape
   */
  if (tEval.isTransformsRawInput()) {
    tResolver.initializeRawInputOI();
    initialize(def.getRawInputShape(), tEval.getRawInputOI());
  } else {
    def.setRawInputShape(inpShape);
  }
  inpShape = def.getRawInputShape();
  /*
   * 4. give the Evaluator a chance to set up for Output execution; set up the Output shape.
   */
  tResolver.initializeOutputOI();
  initialize(def.getOutputShape(), tEval.getOutputOI());
}
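The resolver is rebuilt from its recorded class name at deserialization time (note the commented-out registry lookup above). A hypothetical sketch of such a by-name factory; the real constructResolver may differ in class loading and error handling:

private TableFunctionResolver constructResolver(String className) throws HiveException {
  try {
    // load the resolver class recorded by the translator and instantiate it
    Class<? extends TableFunctionResolver> rCls =
        Class.forName(className).asSubclass(TableFunctionResolver.class);
    return rCls.newInstance();
  } catch (Exception e) {
    throw new HiveException(e);
  }
}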