Example 1 with PartitionDef

Use of org.apache.hadoop.hive.ql.plan.ptf.PartitionDef in project hive by apache.

From the class PTFTranslator, method translate.

private PartitionDef translate(ShapeDetails inpShape, PartitionSpec spec) throws SemanticException {
    if (spec == null || spec.getExpressions() == null || spec.getExpressions().size() == 0) {
        return null;
    }
    PartitionDef pDef = new PartitionDef();
    for (PartitionExpression pExpr : spec.getExpressions()) {
        PTFExpressionDef expDef = translate(inpShape, pExpr);
        pDef.addExpression(expDef);
    }
    return pDef;
}
Also used: PartitionExpression(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionExpression), PartitionDef(org.apache.hadoop.hive.ql.plan.ptf.PartitionDef), PTFExpressionDef(org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef)
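
The PartitionDef built here is a plain container: translate returns null when there is no PARTITION BY (or it has no expressions), and otherwise wraps one PTFExpressionDef per partition expression. Below is a minimal stand-alone sketch of that container API, built directly from an ExprNodeColumnDesc instead of going through the translator; the column name, table alias, and type are made-up values, and the bean-style setExprNode setter on PTFExpressionDef is assumed.

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef;
import org.apache.hadoop.hive.ql.plan.ptf.PartitionDef;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PartitionDefSketch {
    public static void main(String[] args) {
        // Hypothetical partition column: p_mfgr of table alias "part".
        ExprNodeColumnDesc col =
            new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "p_mfgr", "part", false);

        // Wrap the column expression, mirroring what PTFTranslator.translate produces.
        PTFExpressionDef exprDef = new PTFExpressionDef();
        exprDef.setExprNode(col);

        PartitionDef pDef = new PartitionDef();
        pDef.addExpression(exprDef);

        System.out.println("partition expressions: " + pDef.getExpressions().size());
    }
}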

Example 2 with PartitionDef

Use of org.apache.hadoop.hive.ql.plan.ptf.PartitionDef in project hive by apache.

From the class PTFOperator, method setupKeysWrapper.

protected void setupKeysWrapper(ObjectInspector inputOI) throws HiveException {
    PartitionDef pDef = conf.getStartOfChain().getPartition();
    List<PTFExpressionDef> exprs = pDef.getExpressions();
    int numExprs = exprs.size();
    ExprNodeEvaluator[] keyFields = new ExprNodeEvaluator[numExprs];
    ObjectInspector[] keyOIs = new ObjectInspector[numExprs];
    ObjectInspector[] currentKeyOIs = new ObjectInspector[numExprs];
    for (int i = 0; i < numExprs; i++) {
        PTFExpressionDef exprDef = exprs.get(i);
        /*
         * Why cannot we just use the ExprNodeEvaluator on the column?
         * - because on the reduce-side it is initialized based on the rowOI of the HiveTable
         *   and not the OI of the parent of this Operator on the reduce-side
         */
        keyFields[i] = ExprNodeEvaluatorFactory.get(exprDef.getExprNode());
        keyOIs[i] = keyFields[i].initialize(inputOI);
        currentKeyOIs[i] = ObjectInspectorUtils.getStandardObjectInspector(keyOIs[i], ObjectInspectorCopyOption.WRITABLE);
    }
    keyWrapperFactory = new KeyWrapperFactory(keyFields, keyOIs, currentKeyOIs);
    newKeys = keyWrapperFactory.getKeyWrapper();
}
Also used: StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), PartitionDef(org.apache.hadoop.hive.ql.plan.ptf.PartitionDef), PTFExpressionDef(org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef)
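
The key detail in this example is that the partition-key evaluators are initialized against the operator's own input ObjectInspector, and a writable standard copy of each key OI is kept for the current key. Here is a small stand-alone sketch of that initialize-and-wrap sequence, using a hypothetical single string column "p_mfgr" and a standard struct ObjectInspector in place of the operator's real input shape.

import java.util.Arrays;

import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.exec.KeyWrapper;
import org.apache.hadoop.hive.ql.exec.KeyWrapperFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class KeyWrapperSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical input shape: a single string column named p_mfgr.
        StructObjectInspector inputOI = ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("p_mfgr"),
            Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.javaStringObjectInspector));

        // One partition expression over that column, as a PartitionDef would carry it.
        ExprNodeColumnDesc keyExpr =
            new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "p_mfgr", "part", false);

        ExprNodeEvaluator[] keyFields = { ExprNodeEvaluatorFactory.get(keyExpr) };
        // Initialize against the operator's input OI, not the table rowOI.
        ObjectInspector[] keyOIs = { keyFields[0].initialize(inputOI) };
        // Keep a writable standard copy for the "current key".
        ObjectInspector[] currentKeyOIs = {
            ObjectInspectorUtils.getStandardObjectInspector(keyOIs[0], ObjectInspectorCopyOption.WRITABLE) };

        KeyWrapperFactory factory = new KeyWrapperFactory(keyFields, keyOIs, currentKeyOIs);
        KeyWrapper newKeys = factory.getKeyWrapper();
        System.out.println("key wrapper class: " + newKeys.getClass().getSimpleName());
    }
}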

Example 3 with PartitionDef

Use of org.apache.hadoop.hive.ql.plan.ptf.PartitionDef in project hive by apache.

From the class PTFTranslator, method translatePartitioning.

private void translatePartitioning(PartitionedTableFunctionDef def, PartitionedTableFunctionSpec spec) throws SemanticException {
    applyConstantPartition(spec);
    if (spec.getPartition() == null) {
        return;
    }
    PartitionDef partDef = translate(def.getRawInputShape(), spec.getPartition());
    OrderDef orderDef = translate(def.getRawInputShape(), spec.getOrder(), partDef);
    def.setPartition(partDef);
    def.setOrder(orderDef);
}
Also used: PartitionDef(org.apache.hadoop.hive.ql.plan.ptf.PartitionDef), OrderDef(org.apache.hadoop.hive.ql.plan.ptf.OrderDef)
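
Partitioning is optional on a PTF invocation: when the spec carries no PARTITION BY, the method returns without attaching anything; otherwise the translated PartitionDef and OrderDef are set on the PartitionedTableFunctionDef. The sketch below illustrates that wiring with a made-up partition column; the OrderDef(PartitionDef) constructor, used here to derive a default ordering from the partition columns, is an assumption about Hive's plan classes rather than something shown in the snippet.

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ptf.OrderDef;
import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef;
import org.apache.hadoop.hive.ql.plan.ptf.PartitionDef;
import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PartitioningSketch {
    public static void main(String[] args) {
        // Hypothetical partition column p_mfgr, as in the earlier sketch.
        PTFExpressionDef exprDef = new PTFExpressionDef();
        exprDef.setExprNode(
            new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "p_mfgr", "part", false));

        PartitionDef partDef = new PartitionDef();
        partDef.addExpression(exprDef);

        // Assumed OrderDef(PartitionDef) constructor: default ordering from the partition columns.
        OrderDef orderDef = new OrderDef(partDef);

        // Attach both to the function definition, as translatePartitioning does.
        PartitionedTableFunctionDef def = new PartitionedTableFunctionDef();
        def.setPartition(partDef);
        def.setOrder(orderDef);
        System.out.println("partitioning and ordering attached to " + def.getClass().getSimpleName());
    }
}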

Example 4 with PartitionDef

Use of org.apache.hadoop.hive.ql.plan.ptf.PartitionDef in project hive by apache.

From the class PTFTranslator, method translate (windowing entry point).

public PTFDesc translate(WindowingSpec wdwSpec, SemanticAnalyzer semAly, HiveConf hCfg, RowResolver inputRR, UnparseTranslator unparseT) throws SemanticException {
    init(semAly, hCfg, inputRR, unparseT);
    windowingSpec = wdwSpec;
    ptfDesc = new PTFDesc();
    ptfDesc.setCfg(hCfg);
    ptfDesc.setLlInfo(llInfo);
    WindowTableFunctionDef wdwTFnDef = new WindowTableFunctionDef();
    ptfDesc.setFuncDef(wdwTFnDef);
    PTFQueryInputSpec inpSpec = new PTFQueryInputSpec();
    inpSpec.setType(PTFQueryInputType.WINDOWING);
    wdwTFnDef.setInput(translate(inpSpec, 0));
    ShapeDetails inpShape = wdwTFnDef.getInput().getOutputShape();
    WindowingTableFunctionResolver tFn = (WindowingTableFunctionResolver) FunctionRegistry.getTableFunctionResolver(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
    if (tFn == null) {
        throw new SemanticException(String.format("Internal Error: Unknown Table Function %s", FunctionRegistry.WINDOWING_TABLE_FUNCTION));
    }
    wdwTFnDef.setName(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
    wdwTFnDef.setResolverClassName(tFn.getClass().getName());
    wdwTFnDef.setAlias("ptf_" + 1);
    wdwTFnDef.setExpressionTreeString(null);
    wdwTFnDef.setTransformsRawInput(false);
    tFn.initialize(hCfg, ptfDesc, wdwTFnDef);
    TableFunctionEvaluator tEval = tFn.getEvaluator();
    wdwTFnDef.setTFunction(tEval);
    wdwTFnDef.setCarryForwardNames(tFn.carryForwardNames());
    wdwTFnDef.setRawInputShape(inpShape);
    PartitioningSpec partiSpec = wdwSpec.getQueryPartitioningSpec();
    if (partiSpec == null) {
        throw new SemanticException("Invalid use of Windowing: there is no Partitioning associated with Windowing");
    }
    PartitionDef partDef = translate(inpShape, wdwSpec.getQueryPartitionSpec());
    OrderDef ordDef = translate(inpShape, wdwSpec.getQueryOrderSpec(), partDef);
    wdwTFnDef.setPartition(partDef);
    wdwTFnDef.setOrder(ordDef);
    /*
     * process Wdw functions
     */
    ArrayList<WindowFunctionDef> windowFunctions = new ArrayList<WindowFunctionDef>();
    if (wdwSpec.getWindowExpressions() != null) {
        for (WindowExpressionSpec expr : wdwSpec.getWindowExpressions()) {
            if (expr instanceof WindowFunctionSpec) {
                WindowFunctionDef wFnDef = translate(wdwTFnDef, (WindowFunctionSpec) expr);
                windowFunctions.add(wFnDef);
            }
        }
        wdwTFnDef.setWindowFunctions(windowFunctions);
    }
    /*
     * set outputFromWdwFnProcessing
     */
    ArrayList<String> aliases = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    for (WindowFunctionDef wFnDef : windowFunctions) {
        aliases.add(wFnDef.getAlias());
        if (wFnDef.isPivotResult()) {
            fieldOIs.add(((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector());
        } else {
            fieldOIs.add(wFnDef.getOI());
        }
    }
    PTFTranslator.addInputColumnsToList(inpShape, aliases, fieldOIs);
    StructObjectInspector wdwOutOI = ObjectInspectorFactory.getStandardStructObjectInspector(aliases, fieldOIs);
    tFn.setWdwProcessingOutputOI(wdwOutOI);
    RowResolver wdwOutRR = buildRowResolverForWindowing(wdwTFnDef);
    ShapeDetails wdwOutShape = setupShape(wdwOutOI, null, wdwOutRR);
    wdwTFnDef.setOutputShape(wdwOutShape);
    tFn.setupOutputOI();
    PTFDeserializer.alterOutputOIForStreaming(ptfDesc);
    return ptfDesc;
}
Also used: WindowingTableFunctionResolver(org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver), ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector), PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), WindowTableFunctionDef(org.apache.hadoop.hive.ql.plan.ptf.WindowTableFunctionDef), ArrayList(java.util.ArrayList), WindowFunctionSpec(org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFunctionSpec), PTFQueryInputSpec(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFQueryInputSpec), ShapeDetails(org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails), PartitioningSpec(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec), TableFunctionEvaluator(org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator), PTFDesc(org.apache.hadoop.hive.ql.plan.PTFDesc), PartitionDef(org.apache.hadoop.hive.ql.plan.ptf.PartitionDef), WindowExpressionSpec(org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowExpressionSpec), OrderDef(org.apache.hadoop.hive.ql.plan.ptf.OrderDef), WindowFunctionDef(org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef)
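
After translating partitioning, ordering, and the window functions, the method assembles the windowing output shape: one struct field per window function (unwrapping the element ObjectInspector when a function pivots its result into a list), followed by the input columns. The stand-alone sketch below shows just that struct assembly; the aliases and primitive ObjectInspectors are made-up stand-ins for the real function OIs, and the input-column step is only indicated in a comment rather than calling PTFTranslator.addInputColumnsToList.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class WindowingOutputOISketch {
    public static void main(String[] args) {
        List<String> aliases = new ArrayList<String>();
        List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();

        // Hypothetical non-pivot window function, e.g. a row-number style int result.
        aliases.add("rn");
        fieldOIs.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);

        // Hypothetical pivot-result function: its OI is a list, and the element OI
        // becomes the output column type (mirroring the isPivotResult branch above).
        ListObjectInspector pivotOI = ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.javaDoubleObjectInspector);
        aliases.add("r");
        fieldOIs.add(pivotOI.getListElementObjectInspector());

        // Input columns would be appended here (PTFTranslator.addInputColumnsToList in the real code).
        aliases.add("p_mfgr");
        fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);

        StructObjectInspector wdwOutOI =
            ObjectInspectorFactory.getStandardStructObjectInspector(aliases, fieldOIs);
        System.out.println(wdwOutOI.getTypeName());
    }
}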

Aggregations

PartitionDef (org.apache.hadoop.hive.ql.plan.ptf.PartitionDef): 4 uses
OrderDef (org.apache.hadoop.hive.ql.plan.ptf.OrderDef): 2 uses
PTFExpressionDef (org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef): 2 uses
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 2 uses
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 2 uses
ArrayList (java.util.ArrayList): 1 use
PTFQueryInputSpec (org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFQueryInputSpec): 1 use
PartitionExpression (org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionExpression): 1 use
PartitioningSpec (org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec): 1 use
WindowExpressionSpec (org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowExpressionSpec): 1 use
WindowFunctionSpec (org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFunctionSpec): 1 use
PTFDesc (org.apache.hadoop.hive.ql.plan.PTFDesc): 1 use
ShapeDetails (org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails): 1 use
WindowFunctionDef (org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef): 1 use
WindowTableFunctionDef (org.apache.hadoop.hive.ql.plan.ptf.WindowTableFunctionDef): 1 use
TableFunctionEvaluator (org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator): 1 use
WindowingTableFunctionResolver (org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver): 1 use
ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector): 1 use
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 1 use