
Example 6 with ShapeDetails

Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.

The class PTFTranslator, method translate.

private PTFQueryInputDef translate(PTFQueryInputSpec spec, int inpNum) throws SemanticException {
    PTFQueryInputDef def = new PTFQueryInputDef();
    // Build a standard struct ObjectInspector over the input RowResolver and wrap it in a ShapeDetails.
    StructObjectInspector oi = PTFTranslator.getStandardStructOI(inputRR);
    ShapeDetails shp = setupShape(oi, null, inputRR);
    def.setOutputShape(shp);
    def.setType(spec.getType());
    // Fall back to a generated alias ("ptf_<n>") when the input spec carries no source alias.
    def.setAlias(spec.getSource() == null ? "ptf_" + inpNum : spec.getSource());
    return def;
}
Also used: ShapeDetails (org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails), PTFQueryInputDef (org.apache.hadoop.hive.ql.plan.ptf.PTFQueryInputDef), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
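
setupShape (shown in Example 7) is what fills in the ShapeDetails attached here. Below is a minimal sketch of reading back the translated input, assuming conventional getters that mirror the setters used above (getOutputShape(), getAlias(), ShapeDetails.getOI()); none of this is part of the method itself.

// Illustrative only: inspect what translate(..) produced for the PTF query input.
PTFQueryInputDef inputDef = translate(inpSpec, 0);   // inpSpec: a PTFQueryInputSpec as built in Example 8
ShapeDetails outShape = inputDef.getOutputShape();   // shape created by setupShape
ObjectInspector outOI = outShape.getOI();            // assumed getter mirroring setOI(..) in setupShape
System.out.println(inputDef.getAlias() + " -> " + outOI.getTypeName());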

Example 7 with ShapeDetails

Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.

The class PTFTranslator, method setupShape.

private ShapeDetails setupShape(StructObjectInspector OI, List<String> columnNames, RowResolver rr) throws SemanticException {
    Map<String, String> serdePropsMap = new LinkedHashMap<String, String>();
    AbstractSerDe serde = null;
    ShapeDetails shp = new ShapeDetails();
    try {
        // Create a LazyBinary serde for the given OI and derive the partition output OI from it.
        serde = PTFTranslator.createLazyBinarySerDe(hCfg, OI, serdePropsMap);
        StructObjectInspector outOI = PTFPartition.setupPartitionOutputOI(serde, OI);
        shp.setOI(outOI);
    } catch (SerDeException se) {
        throw new SemanticException(se);
    }
    // Record everything needed to re-create the shape at runtime: RowResolver, serde,
    // serde class name and properties, and the (possibly null) column name list.
    shp.setRr(rr);
    shp.setSerde(serde);
    shp.setSerdeClassName(serde.getClass().getName());
    shp.setSerdeProps(serdePropsMap);
    shp.setColumnNames(columnNames);
    // Expressions are type-checked against this shape through its RowResolver.
    TypeCheckCtx tCtx = new TypeCheckCtx(rr);
    tCtx.setUnparseTranslator(unparseT);
    shp.setTypeCheckCtx(tCtx);
    return shp;
}
Also used: ShapeDetails (org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails), AbstractSerDe (org.apache.hadoop.hive.serde2.AbstractSerDe), SerDeException (org.apache.hadoop.hive.serde2.SerDeException), LinkedHashMap (java.util.LinkedHashMap), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
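
The serde and ObjectInspector that setupShape pairs inside a ShapeDetails are what later materialize PTF partition rows. The following is a minimal, hypothetical round trip through such a pair; serialize and deserialize are the standard AbstractSerDe contract, while roundTrip itself is only an illustration and not a PTFTranslator method.

// Illustrative only: push a row through a serde created by createLazyBinarySerDe
// and read it back. The row object must match the layout described by 'oi'.
static Object roundTrip(AbstractSerDe serde, StructObjectInspector oi, Object row) throws SerDeException {
    // The serialized form is a Writable (a LazyBinary-encoded blob for this serde).
    org.apache.hadoop.io.Writable blob = serde.serialize(row, oi);
    // The deserialized object is readable through serde.getObjectInspector().
    return serde.deserialize(blob);
}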

Example 8 with ShapeDetails

Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.

The class PTFTranslator, method translate.

public PTFDesc translate(WindowingSpec wdwSpec, SemanticAnalyzer semAly, HiveConf hCfg, RowResolver inputRR, UnparseTranslator unparseT) throws SemanticException {
    init(semAly, hCfg, inputRR, unparseT);
    windowingSpec = wdwSpec;
    ptfDesc = new PTFDesc();
    ptfDesc.setCfg(hCfg);
    ptfDesc.setLlInfo(llInfo);
    WindowTableFunctionDef wdwTFnDef = new WindowTableFunctionDef();
    ptfDesc.setFuncDef(wdwTFnDef);
    PTFQueryInputSpec inpSpec = new PTFQueryInputSpec();
    inpSpec.setType(PTFQueryInputType.WINDOWING);
    wdwTFnDef.setInput(translate(inpSpec, 0));
    ShapeDetails inpShape = wdwTFnDef.getInput().getOutputShape();
    WindowingTableFunctionResolver tFn = (WindowingTableFunctionResolver) FunctionRegistry.getTableFunctionResolver(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
    if (tFn == null) {
        throw new SemanticException(String.format("Internal Error: Unknown Table Function %s", FunctionRegistry.WINDOWING_TABLE_FUNCTION));
    }
    wdwTFnDef.setName(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
    wdwTFnDef.setResolverClassName(tFn.getClass().getName());
    wdwTFnDef.setAlias("ptf_" + 1);
    wdwTFnDef.setExpressionTreeString(null);
    wdwTFnDef.setTransformsRawInput(false);
    tFn.initialize(hCfg, ptfDesc, wdwTFnDef);
    TableFunctionEvaluator tEval = tFn.getEvaluator();
    wdwTFnDef.setTFunction(tEval);
    wdwTFnDef.setCarryForwardNames(tFn.carryForwardNames());
    wdwTFnDef.setRawInputShape(inpShape);
    PartitioningSpec partiSpec = wdwSpec.getQueryPartitioningSpec();
    if (partiSpec == null) {
        throw new SemanticException("Invalid use of Windowing: there is no Partitioning associated with Windowing");
    }
    PartitionDef partDef = translate(inpShape, wdwSpec.getQueryPartitionSpec());
    OrderDef ordDef = translate(inpShape, wdwSpec.getQueryOrderSpec(), partDef);
    wdwTFnDef.setPartition(partDef);
    wdwTFnDef.setOrder(ordDef);
    /*
     * process Wdw functions
     */
    ArrayList<WindowFunctionDef> windowFunctions = new ArrayList<WindowFunctionDef>();
    if (wdwSpec.getWindowExpressions() != null) {
        for (WindowExpressionSpec expr : wdwSpec.getWindowExpressions()) {
            if (expr instanceof WindowFunctionSpec) {
                WindowFunctionDef wFnDef = translate(wdwTFnDef, (WindowFunctionSpec) expr);
                windowFunctions.add(wFnDef);
            }
        }
        wdwTFnDef.setWindowFunctions(windowFunctions);
    }
    /*
     * set outputFromWdwFnProcessing
     */
    ArrayList<String> aliases = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    for (WindowFunctionDef wFnDef : windowFunctions) {
        aliases.add(wFnDef.getAlias());
        if (wFnDef.isPivotResult()) {
            fieldOIs.add(((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector());
        } else {
            fieldOIs.add(wFnDef.getOI());
        }
    }
    PTFTranslator.addInputColumnsToList(inpShape, aliases, fieldOIs);
    StructObjectInspector wdwOutOI = ObjectInspectorFactory.getStandardStructObjectInspector(aliases, fieldOIs);
    tFn.setWdwProcessingOutputOI(wdwOutOI);
    RowResolver wdwOutRR = buildRowResolverForWindowing(wdwTFnDef);
    ShapeDetails wdwOutShape = setupShape(wdwOutOI, null, wdwOutRR);
    wdwTFnDef.setOutputShape(wdwOutShape);
    tFn.setupOutputOI();
    PTFDeserializer.alterOutputOIForStreaming(ptfDesc);
    return ptfDesc;
}
Also used: WindowingTableFunctionResolver (org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), WindowTableFunctionDef (org.apache.hadoop.hive.ql.plan.ptf.WindowTableFunctionDef), ArrayList (java.util.ArrayList), WindowFunctionSpec (org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFunctionSpec), PTFQueryInputSpec (org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFQueryInputSpec), ShapeDetails (org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails), PartitioningSpec (org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec), TableFunctionEvaluator (org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator), PTFDesc (org.apache.hadoop.hive.ql.plan.PTFDesc), PartitionDef (org.apache.hadoop.hive.ql.plan.ptf.PartitionDef), WindowExpressionSpec (org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowExpressionSpec), OrderDef (org.apache.hadoop.hive.ql.plan.ptf.OrderDef), WindowFunctionDef (org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef)
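
The windowing output shape is assembled from the window function aliases plus their ObjectInspectors, followed by the input columns. A small standalone sketch of the ObjectInspectorFactory call used for wdwOutOI above, with made-up column names and primitive OIs standing in for real window function outputs:

// Illustrative only: build a standard struct OI from parallel name/OI lists.
// "rnk" and "sum_sal" are invented names, not produced by PTFTranslator.
List<String> names = new ArrayList<String>();
List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
names.add("rnk");
fieldOIs.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);
names.add("sum_sal");
fieldOIs.add(PrimitiveObjectInspectorFactory.javaLongObjectInspector);
StructObjectInspector sketchOI =
        ObjectInspectorFactory.getStandardStructObjectInspector(names, fieldOIs);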

Example 9 with ShapeDetails

Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.

The class PTFTranslator, method translate.

private WindowFunctionDef translate(WindowTableFunctionDef wdwTFnDef, WindowFunctionSpec spec) throws SemanticException {
    WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(spec.getName());
    if (wFnInfo == null) {
        throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(spec.getName()));
    }
    WindowFunctionDef def = new WindowFunctionDef();
    def.setName(spec.getName());
    def.setAlias(spec.getAlias());
    def.setDistinct(spec.isDistinct());
    def.setExpressionTreeString(spec.getExpression().toStringTree());
    def.setStar(spec.isStar());
    def.setPivotResult(wFnInfo.isPivotResult());
    ShapeDetails inpShape = wdwTFnDef.getRawInputShape();
    /*
     * translate args
     */
    ArrayList<ASTNode> args = spec.getArgs();
    if (args != null) {
        for (ASTNode expr : args) {
            PTFExpressionDef argDef = null;
            try {
                argDef = buildExpressionDef(inpShape, expr);
            } catch (HiveException he) {
                throw new SemanticException(he);
            }
            def.addArg(argDef);
        }
    }
    if (FunctionRegistry.isRankingFunction(spec.getName())) {
        setupRankingArgs(wdwTFnDef, def, spec);
    }
    WindowSpec wdwSpec = spec.getWindowSpec();
    if (wdwSpec != null) {
        String desc = spec.toString();
        WindowFrameDef wdwFrame = translate(spec.getName(), inpShape, wdwSpec);
        if (!wFnInfo.isSupportsWindow()) {
            BoundarySpec start = wdwSpec.getWindowFrame().getStart();
            if (start.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT) {
                throw new SemanticException(String.format("Expecting left window frame boundary for " + "function %s to be unbounded. Found : %d", desc, start.getAmt()));
            }
            BoundarySpec end = wdwSpec.getWindowFrame().getEnd();
            if (end.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT) {
                throw new SemanticException(String.format("Expecting right window frame boundary for " + "function %s to be unbounded. Found : %d", desc, start.getAmt()));
            }
        }
        def.setWindowFrame(wdwFrame);
    }
    try {
        setupWdwFnEvaluator(def);
    } catch (HiveException he) {
        throw new SemanticException(he);
    }
    return def;
}
Also used: WindowFunctionInfo (org.apache.hadoop.hive.ql.exec.WindowFunctionInfo), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), WindowFrameDef (org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef), PTFExpressionDef (org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef), WindowFunctionDef (org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef), ShapeDetails (org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails), BoundarySpec (org.apache.hadoop.hive.ql.parse.WindowingSpec.BoundarySpec), WindowSpec (org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowSpec)
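
The frame check in the middle of this method enforces that functions whose WindowFunctionInfo reports isSupportsWindow() == false (ranking functions, for example) only run over an unbounded frame. Restated as a hypothetical helper; requireUnboundedFrame is not an actual PTFTranslator method, just the same logic collapsed into one check.

// Illustrative only: reject any explicit ROWS/RANGE extent for a no-window function.
static void requireUnboundedFrame(WindowSpec wdwSpec, String fnDesc) throws SemanticException {
    BoundarySpec start = wdwSpec.getWindowFrame().getStart();
    BoundarySpec end = wdwSpec.getWindowFrame().getEnd();
    // UNBOUNDED_AMOUNT is the sentinel meaning UNBOUNDED PRECEDING/FOLLOWING.
    if (start.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT || end.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT) {
        throw new SemanticException(String.format(
                "Expecting window frame of function %s to be unbounded on both sides", fnDesc));
    }
}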

Example 10 with ShapeDetails

Use of org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails in project hive by apache.

The class PTFDeserializer, method initialize.

protected void initialize(PartitionedTableFunctionDef def) throws HiveException {
    ShapeDetails inpShape = def.getInput().getOutputShape();
    /*
     * 1. initialize args
     */
    if (def.getArgs() != null) {
        for (PTFExpressionDef arg : def.getArgs()) {
            initialize(arg, inpShape);
        }
    }
    /*
     * 2. setup resolve, make connections
     */
    TableFunctionEvaluator tEval = def.getTFunction();
    // TableFunctionResolver tResolver = FunctionRegistry.getTableFunctionResolver(def.getName());
    TableFunctionResolver tResolver = constructResolver(def.getResolverClassName());
    tResolver.initialize(ptfDesc, def, tEval);
    /*
     * 3. give Evaluator chance to setup for RawInput execution; setup RawInput shape
     */
    if (tEval.isTransformsRawInput()) {
        tResolver.initializeRawInputOI();
        initialize(def.getRawInputShape(), tEval.getRawInputOI());
    } else {
        def.setRawInputShape(inpShape);
    }
    inpShape = def.getRawInputShape();
    /*
     * 4. give Evaluator chance to setup for Output execution; setup Output shape.
     */
    tResolver.initializeOutputOI();
    initialize(def.getOutputShape(), tEval.getOutputOI());
}
Also used: WindowingTableFunctionResolver (org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver), TableFunctionResolver (org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver), TableFunctionEvaluator (org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator), PTFExpressionDef (org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef), ShapeDetails (org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails)
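
constructResolver(..) is not shown in this snippet. A plausible sketch, assuming it simply instantiates the resolver class recorded at translation time via reflection with a no-arg constructor; the real implementation in PTFDeserializer may differ.

// Hypothetical sketch of constructResolver: instantiate the resolver by class name.
static TableFunctionResolver constructResolverSketch(String className) throws HiveException {
    try {
        Class<? extends TableFunctionResolver> cls =
                Class.forName(className).asSubclass(TableFunctionResolver.class);
        return cls.newInstance();
    } catch (Exception e) {
        throw new HiveException("Cannot instantiate table function resolver " + className, e);
    }
}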

Aggregations

ShapeDetails (org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails): 10 usages
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 5 usages
PTFExpressionDef (org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef): 4 usages
TableFunctionEvaluator (org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator): 4 usages
WindowingTableFunctionResolver (org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver): 4 usages
WindowFunctionDef (org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef): 3 usages
ArrayList (java.util.ArrayList): 2 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 2 usages
WindowFrameDef (org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef): 2 usages
TableFunctionResolver (org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver): 2 usages
ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector): 2 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 2 usages
LinkedHashMap (java.util.LinkedHashMap): 1 usage
WindowFunctionInfo (org.apache.hadoop.hive.ql.exec.WindowFunctionInfo): 1 usage
PTFQueryInputSpec (org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFQueryInputSpec): 1 usage
PartitioningSpec (org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec): 1 usage
BoundarySpec (org.apache.hadoop.hive.ql.parse.WindowingSpec.BoundarySpec): 1 usage
WindowExpressionSpec (org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowExpressionSpec): 1 usage
WindowFunctionSpec (org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFunctionSpec): 1 usage
WindowSpec (org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowSpec): 1 usage