
Example 16 with PTFExpressionDef

Use of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in the Apache Hive project.

From the class PTFOperator, method setupKeysWrapper:

protected void setupKeysWrapper(ObjectInspector inputOI) throws HiveException {
    PartitionDef pDef = conf.getStartOfChain().getPartition();
    List<PTFExpressionDef> exprs = pDef.getExpressions();
    int numExprs = exprs.size();
    ExprNodeEvaluator[] keyFields = new ExprNodeEvaluator[numExprs];
    ObjectInspector[] keyOIs = new ObjectInspector[numExprs];
    ObjectInspector[] currentKeyOIs = new ObjectInspector[numExprs];
    for (int i = 0; i < numExprs; i++) {
        PTFExpressionDef exprDef = exprs.get(i);
        /*
         * Why can't we just use the ExprNodeEvaluator on the column?
         * Because on the reduce side it is initialized based on the rowOI of the HiveTable,
         * not on the OI of this Operator's parent on the reduce side.
         */
        keyFields[i] = ExprNodeEvaluatorFactory.get(exprDef.getExprNode());
        keyOIs[i] = keyFields[i].initialize(inputOI);
        currentKeyOIs[i] = ObjectInspectorUtils.getStandardObjectInspector(keyOIs[i], ObjectInspectorCopyOption.WRITABLE);
    }
    keyWrapperFactory = new KeyWrapperFactory(keyFields, keyOIs, currentKeyOIs);
    newKeys = keyWrapperFactory.getKeyWrapper();
}
Also used : StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PartitionDef(org.apache.hadoop.hive.ql.plan.ptf.PartitionDef) PTFExpressionDef(org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef)
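
The evaluators built here are later used to materialize each row's partition key and compare it to the previous one, so the operator can detect partition boundaries on the reduce side. A minimal, self-contained sketch of that idea follows; the types and names are invented for illustration and are not Hive's KeyWrapper API.

import java.util.Arrays;
import java.util.List;
import java.util.function.Function;

// Hypothetical stand-in for the key-wrapper pattern: evaluate the partition-key
// expressions on each row, keep a copy of the current key, and report when it changes.
final class PartitionKeyTracker<R> {

    private final List<Function<R, Object>> keyEvaluators; // one per partition expression
    private Object[] currentKey;                            // copied key of the current partition

    PartitionKeyTracker(List<Function<R, Object>> keyEvaluators) {
        this.keyEvaluators = keyEvaluators;
    }

    /** Returns true when the given row starts a new partition. */
    boolean isNewPartition(R row) {
        Object[] key = new Object[keyEvaluators.size()];
        for (int i = 0; i < key.length; i++) {
            key[i] = keyEvaluators.get(i).apply(row);
        }
        boolean changed = currentKey == null || !Arrays.deepEquals(currentKey, key);
        if (changed) {
            currentKey = key; // analogous to copying the key with the WRITABLE standard OI
        }
        return changed;
    }
}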

Example 17 with PTFExpressionDef

Use of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in the Apache Hive project.

From the class Vectorizer, method fillInPTFEvaluators:

private static void fillInPTFEvaluators(List<WindowFunctionDef> windowsFunctions, String[] evaluatorFunctionNames, boolean[] evaluatorsAreDistinct, WindowFrameDef[] evaluatorWindowFrameDefs, List<ExprNodeDesc>[] evaluatorInputExprNodeDescLists) throws HiveException {
    final int functionCount = windowsFunctions.size();
    for (int i = 0; i < functionCount; i++) {
        WindowFunctionDef winFunc = windowsFunctions.get(i);
        evaluatorFunctionNames[i] = winFunc.getName();
        evaluatorsAreDistinct[i] = winFunc.isDistinct();
        evaluatorWindowFrameDefs[i] = winFunc.getWindowFrame();
        List<PTFExpressionDef> args = winFunc.getArgs();
        if (args != null) {
            List<ExprNodeDesc> exprNodeDescList = new ArrayList<ExprNodeDesc>();
            for (PTFExpressionDef arg : args) {
                exprNodeDescList.add(arg.getExprNode());
            }
            evaluatorInputExprNodeDescLists[i] = exprNodeDescList;
        }
    }
}
Also used : ArrayList(java.util.ArrayList) PTFExpressionDef(org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) WindowFunctionDef(org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef)
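
This is a straightforward flattening of per-function metadata into the parallel arrays the vectorized PTF operator expects. A simplified, self-contained sketch of the same pattern, with invented types that are not Hive classes:

import java.util.ArrayList;
import java.util.List;

// Hypothetical descriptor, standing in for WindowFunctionDef.
final class WindowFnInfo {
    final String name;
    final boolean distinct;
    final List<String> argExpressions; // stand-in for the List<PTFExpressionDef> args

    WindowFnInfo(String name, boolean distinct, List<String> argExpressions) {
        this.name = name;
        this.distinct = distinct;
        this.argExpressions = argExpressions;
    }
}

final class ParallelArrays {
    // Fill pre-sized parallel arrays, one slot per window function, mirroring
    // fillInPTFEvaluators above (argument lists may be null, as in the original).
    static void fill(List<WindowFnInfo> fns, String[] names, boolean[] distincts,
                     List<String>[] argLists) {
        for (int i = 0; i < fns.size(); i++) {
            WindowFnInfo fn = fns.get(i);
            names[i] = fn.name;
            distincts[i] = fn.distinct;
            argLists[i] = fn.argExpressions == null ? null : new ArrayList<>(fn.argExpressions);
        }
    }
}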

Example 18 with PTFExpressionDef

Use of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in the Apache Hive project.

From the class PTFTranslator, method setupWdwFnEvaluator:

static void setupWdwFnEvaluator(WindowFunctionDef def) throws HiveException {
    List<PTFExpressionDef> args = def.getArgs();
    List<ObjectInspector> argOIs = new ArrayList<ObjectInspector>();
    ObjectInspector[] funcArgOIs = null;
    if (args != null) {
        for (PTFExpressionDef arg : args) {
            argOIs.add(arg.getOI());
        }
        funcArgOIs = new ObjectInspector[args.size()];
        funcArgOIs = argOIs.toArray(funcArgOIs);
    }
    GenericUDAFEvaluator wFnEval = FunctionRegistry.getGenericWindowingEvaluator(def.getName(), argOIs, def.isDistinct(), def.isStar(), def.respectNulls());
    ObjectInspector OI = wFnEval.init(GenericUDAFEvaluator.Mode.COMPLETE, funcArgOIs);
    def.setWFnEval(wFnEval);
    def.setOI(OI);
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) GenericUDAFEvaluator(org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator) ArrayList(java.util.ArrayList) PTFExpressionDef(org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef)
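
The essence here: collect the argument ObjectInspectors from the PTFExpressionDefs, resolve a windowing evaluator by function name, initialize it in COMPLETE mode, and record its output ObjectInspector on the definition. A simplified, self-contained analogue follows; the interface and names are invented, not Hive's FunctionRegistry or GenericUDAFEvaluator.

import java.util.List;
import java.util.Map;

// Hypothetical stand-in for a windowing aggregate evaluator.
interface Aggregator {
    /** Returns the output type for the given argument types (null when there are no args). */
    String init(String[] argTypes);
}

final class EvaluatorSetup {
    // Mirrors setupWdwFnEvaluator: resolve the evaluator by name, then initialize it
    // with the argument types and return the resulting output type.
    static String setup(String fnName, List<String> argTypes, Map<String, Aggregator> registry) {
        Aggregator eval = registry.get(fnName);          // stand-in for the FunctionRegistry lookup
        String[] argTypeArr = argTypes == null ? null    // stays null when the function has no args
                : argTypes.toArray(new String[0]);
        return eval.init(argTypeArr);                    // like wFnEval.init(Mode.COMPLETE, funcArgOIs)
    }
}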

Example 19 with PTFExpressionDef

Use of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in the Apache Hive project.

From the class PTFTranslator, method translate (WindowFunctionSpec overload):

private WindowFunctionDef translate(WindowTableFunctionDef wdwTFnDef, WindowFunctionSpec spec) throws SemanticException {
    WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(spec.getName());
    if (wFnInfo == null) {
        throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(spec.getName()));
    }
    WindowFunctionDef def = new WindowFunctionDef();
    def.setName(spec.getName());
    def.setAlias(spec.getAlias());
    def.setDistinct(spec.isDistinct());
    def.setExpressionTreeString(spec.getExpression().toStringTree());
    def.setStar(spec.isStar());
    def.setPivotResult(wFnInfo.isPivotResult());
    def.setRespectNulls(spec.isRespectNulls());
    ShapeDetails inpShape = wdwTFnDef.getRawInputShape();
    /*
     * translate args
     */
    ArrayList<ASTNode> args = spec.getArgs();
    if (args != null) {
        for (ASTNode expr : args) {
            PTFExpressionDef argDef = null;
            try {
                argDef = buildExpressionDef(inpShape, expr);
            } catch (HiveException he) {
                throw new SemanticException(he);
            }
            def.addArg(argDef);
        }
    }
    if (FunctionRegistry.isRankingFunction(spec.getName())) {
        setupRankingArgs(wdwTFnDef, def, spec);
    }
    WindowSpec wdwSpec = spec.getWindowSpec();
    if (wdwSpec != null) {
        String desc = spec.toString();
        WindowFrameDef wdwFrame = translate(spec.getName(), inpShape, wdwSpec);
        if (!wFnInfo.isSupportsWindow()) {
            BoundarySpec start = wdwSpec.getWindowFrame().getStart();
            if (start.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT) {
                throw new SemanticException(String.format("Expecting left window frame boundary for " + "function %s to be unbounded. Found : %d", desc, start.getAmt()));
            }
            BoundarySpec end = wdwSpec.getWindowFrame().getEnd();
            if (end.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT) {
                throw new SemanticException(String.format("Expecting right window frame boundary for " + "function %s to be unbounded. Found : %d", desc, end.getAmt()));
            }
        }
        def.setWindowFrame(wdwFrame);
    }
    try {
        setupWdwFnEvaluator(def);
    } catch (HiveException he) {
        throw new SemanticException(he);
    }
    return def;
}
Also used : WindowFunctionInfo(org.apache.hadoop.hive.ql.exec.WindowFunctionInfo) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) WindowFrameDef(org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef) PTFExpressionDef(org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef) WindowFunctionDef(org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef) ShapeDetails(org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails) BoundarySpec(org.apache.hadoop.hive.ql.parse.WindowingSpec.BoundarySpec) WindowSpec(org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowSpec)
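
Besides wiring the arguments and the evaluator, the interesting step is the validation: a function whose WindowFunctionInfo reports isSupportsWindow() == false must be given an unbounded frame on both sides. A self-contained sketch of that check; the constant and exception type are stand-ins, not Hive's BoundarySpec.

// Hypothetical analogue of the frame validation above.
final class FrameCheck {

    static final int UNBOUNDED = Integer.MAX_VALUE; // stand-in for BoundarySpec.UNBOUNDED_AMOUNT

    // Functions that cannot be evaluated over a sliding window must see an
    // UNBOUNDED PRECEDING .. UNBOUNDED FOLLOWING frame; anything else is rejected.
    static void requireUnbounded(String fnDesc, int startAmt, int endAmt) {
        if (startAmt != UNBOUNDED) {
            throw new IllegalArgumentException(String.format(
                    "Expecting left window frame boundary for function %s to be unbounded. Found: %d",
                    fnDesc, startAmt));
        }
        if (endAmt != UNBOUNDED) {
            throw new IllegalArgumentException(String.format(
                    "Expecting right window frame boundary for function %s to be unbounded. Found: %d",
                    fnDesc, endAmt));
        }
    }
}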

Example 20 with PTFExpressionDef

Use of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in the Apache Hive project.

From the class PTFTranslator, method translate (PartitionedTableFunctionSpec overload):

private PartitionedTableFunctionDef translate(PartitionedTableFunctionSpec spec, PTFInputDef inpDef, int inpNum) throws SemanticException {
    TableFunctionResolver tFn = FunctionRegistry.getTableFunctionResolver(spec.getName());
    if (tFn == null) {
        throw new SemanticException(String.format("Unknown Table Function %s", spec.getName()));
    }
    PartitionedTableFunctionDef def = new PartitionedTableFunctionDef();
    def.setInput(inpDef);
    def.setName(spec.getName());
    def.setResolverClassName(tFn.getClass().getName());
    def.setAlias(spec.getAlias() == null ? "ptf_" + inpNum : spec.getAlias());
    def.setExpressionTreeString(spec.getAstNode().toStringTree());
    def.setTransformsRawInput(tFn.transformsRawInput());
    /*
     * translate args
     */
    List<ASTNode> args = spec.getArgs();
    if (args != null) {
        for (ASTNode expr : args) {
            PTFExpressionDef argDef = null;
            try {
                argDef = buildExpressionDef(inpDef.getOutputShape(), expr);
            } catch (HiveException he) {
                throw new SemanticException(he);
            }
            def.addArg(argDef);
        }
    }
    tFn.initialize(hCfg, ptfDesc, def);
    TableFunctionEvaluator tEval = tFn.getEvaluator();
    def.setTFunction(tEval);
    def.setCarryForwardNames(tFn.carryForwardNames());
    tFn.setupRawInputOI();
    if (tFn.transformsRawInput()) {
        StructObjectInspector rawInOutOI = tEval.getRawInputOI();
        List<String> rawInOutColNames = tFn.getRawInputColumnNames();
        RowResolver rawInRR = buildRowResolverForPTF(def.getName(), spec.getAlias(), rawInOutOI, rawInOutColNames, inpDef.getOutputShape().getRr());
        ShapeDetails rawInpShape = setupTableFnShape(def.getName(), inpDef.getOutputShape(), rawInOutOI, rawInOutColNames, rawInRR);
        def.setRawInputShape(rawInpShape);
    } else {
        def.setRawInputShape(inpDef.getOutputShape());
    }
    translatePartitioning(def, spec);
    tFn.setupOutputOI();
    StructObjectInspector outputOI = tEval.getOutputOI();
    List<String> outColNames = tFn.getOutputColumnNames();
    RowResolver outRR = buildRowResolverForPTF(def.getName(), spec.getAlias(), outputOI, outColNames, def.getRawInputShape().getRr());
    ShapeDetails outputShape = setupTableFnShape(def.getName(), inpDef.getOutputShape(), outputOI, outColNames, outRR);
    def.setOutputShape(outputShape);
    def.setReferencedColumns(tFn.getReferencedColumns());
    return def;
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) PTFExpressionDef(org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef) ShapeDetails(org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails) PartitionedTableFunctionDef(org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef) WindowingTableFunctionResolver(org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver) TableFunctionResolver(org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver) TableFunctionEvaluator(org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
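
The translation resolves the table function by name, fails fast on unknown functions, and only builds a dedicated raw-input shape when the resolver reports transformsRawInput(); otherwise the upstream output shape is reused. A compact, self-contained sketch of that control flow with invented types:

import java.util.Map;
import java.util.function.Predicate;
import java.util.function.Supplier;

// Hypothetical analogue of the resolver lookup and raw-input-shape decision above;
// R stands in for the resolver type, S for a ShapeDetails-like shape description.
final class TableFnShapeWiring {
    static <R, S> S rawInputShape(String fnName, Map<String, R> resolvers,
                                  Predicate<R> transformsRawInput,
                                  Supplier<S> buildRawInputShape, S upstreamOutputShape) {
        R resolver = resolvers.get(fnName);
        if (resolver == null) {
            throw new IllegalArgumentException(String.format("Unknown Table Function %s", fnName));
        }
        // Only functions that rewrite their raw input get a shape of their own.
        return transformsRawInput.test(resolver) ? buildRawInputShape.get() : upstreamOutputShape;
    }
}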

Aggregations

PTFExpressionDef (org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef): 26 uses
OrderExpressionDef (org.apache.hadoop.hive.ql.plan.ptf.OrderExpressionDef): 12 uses
ArrayList (java.util.ArrayList): 7 uses
Test (org.junit.Test): 7 uses
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 6 uses
WindowFunctionDef (org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef): 6 uses
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 6 uses
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 5 uses
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 5 uses
ShapeDetails (org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails): 4 uses
ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector): 4 uses
WindowFrameDef (org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef): 3 uses
GenericUDAFEvaluator (org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator): 3 uses
TableFunctionEvaluator (org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator): 3 uses
WindowingTableFunctionResolver (org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver): 3 uses
Timestamp (org.apache.hadoop.hive.common.type.Timestamp): 2 uses
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 2 uses
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 2 uses
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 2 uses
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 2 uses