Example 56 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class ExplainSQRewriteTask, method execute.

@Override
public int execute(DriverContext driverContext) {
    PrintStream out = null;
    try {
        Path resFile = new Path(work.getResFile());
        OutputStream outS = resFile.getFileSystem(conf).create(resFile);
        out = new PrintStream(outS);
        QB qb = work.getQb();
        TokenRewriteStream stream = work.getCtx().getTokenRewriteStream();
        String program = "sq rewrite";
        ASTNode ast = work.getAst();
        try {
            addRewrites(stream, qb, program, out);
            out.println("\nRewritten Query:\n" + stream.toString(program, ast.getTokenStartIndex(), ast.getTokenStopIndex()));
        } finally {
            stream.deleteProgram(program);
        }
        out.close();
        out = null;
        return (0);
    } catch (Exception e) {
        console.printError("Failed with exception " + e.getMessage(), "\n" + StringUtils.stringifyException(e));
        return (1);
    } finally {
        IOUtils.closeStream(out);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) PrintStream(java.io.PrintStream) QB(org.apache.hadoop.hive.ql.parse.QB) TokenRewriteStream(org.antlr.runtime.TokenRewriteStream) OutputStream(java.io.OutputStream) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode)
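
For context, the work.getAst() and getTokenRewriteStream() used above both originate from the query's compilation Context. Below is a minimal sketch of how those pieces fit together outside a Task, assuming a Hive version in which ParseDriver.parse(String, Context) returns the ASTNode directly and registers the TokenRewriteStream on the Context (newer releases wrap the tree in a ParseResult); the class name and query string are illustrative:

import org.antlr.runtime.TokenRewriteStream;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;

public class AstSourceTextSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        // The Context keeps the TokenRewriteStream produced while parsing.
        Context ctx = new Context(conf);
        ParseDriver pd = new ParseDriver();
        ASTNode ast = pd.parse("SELECT key, count(1) FROM src GROUP BY key", ctx);

        TokenRewriteStream stream = ctx.getTokenRewriteStream();
        // The node's token indices delimit the original query text it was parsed from.
        String originalText = stream.toString(ast.getTokenStartIndex(), ast.getTokenStopIndex());
        System.out.println(originalText);
        System.out.println(ast.toStringTree());
    }
}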

Example 57 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class RewriteParseContextGenerator, method doSemanticAnalysis.

/**
 * For the input ASTNode tree, perform semantic analysis and check metadata.
 * Generate an operator tree and return it.
 *
 * @param sem the SemanticAnalyzer that drives the analysis
 * @param ast the input AST
 * @param ctx the compilation Context to set on the parse context
 * @return the root Operator of the generated plan
 * @throws SemanticException if any phase of the analysis fails
 */
private static Operator<?> doSemanticAnalysis(SemanticAnalyzer sem, ASTNode ast, Context ctx) throws SemanticException {
    QB qb = new QB(null, null, false);
    ASTNode child = ast;
    ParseContext subPCtx = sem.getParseContext();
    subPCtx.setContext(ctx);
    sem.initParseCtx(subPCtx);
    LOG.info("Starting Sub-query Semantic Analysis");
    sem.doPhase1(child, qb, sem.initPhase1Ctx(), null);
    LOG.info("Completed phase 1 of Sub-query Semantic Analysis");
    sem.getMetaData(qb);
    LOG.info("Completed getting MetaData in Sub-query Semantic Analysis");
    LOG.info("Sub-query Abstract syntax tree: " + ast.toStringTree());
    Operator<?> operator = sem.genPlan(qb);
    LOG.info("Sub-query Completed plan generation");
    return operator;
}
Also used : QB(org.apache.hadoop.hive.ql.parse.QB) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ParseContext(org.apache.hadoop.hive.ql.parse.ParseContext)
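
doSemanticAnalysis drives the analyzer's phase 1, metadata resolution and plan generation over the AST. Independently of full analysis, an ASTNode tree can also be inspected with the ANTLR tree API it inherits. A small illustrative walker (not part of Hive; the helper class is made up) that collects the names found under TOK_TABNAME nodes, e.g. to list the tables a parsed query references:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;

public class TableNameCollector {
    public static List<String> collectTableNames(ASTNode root) {
        List<String> tables = new ArrayList<>();
        collect(root, tables);
        return tables;
    }

    private static void collect(ASTNode node, List<String> tables) {
        if (node.getType() == HiveParser.TOK_TABNAME) {
            // TOK_TABNAME children are the (optional) database name and the table name.
            StringBuilder name = new StringBuilder();
            for (int i = 0; i < node.getChildCount(); i++) {
                if (i > 0) {
                    name.append('.');
                }
                name.append(node.getChild(i).getText());
            }
            tables.add(name.toString());
        }
        for (int i = 0; i < node.getChildCount(); i++) {
            collect((ASTNode) node.getChild(i), tables);
        }
    }
}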

Example 58 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project SQLWindowing by hbutani.

The class ResultExpressionParser, method buildSelectListEvaluators.

private void buildSelectListEvaluators() throws WindowingException {
    selectListExprEvaluators = new ArrayList<ExprNodeEvaluator>();
    selectListExprOIs = new ArrayList<ObjectInspector>();
    ArrayList<String> selectListExprNames = new ArrayList<String>();
    int i = 0;
    Iterator<Object> it = selectSpec.getColumnListAndAlias();
    while (it.hasNext()) {
        Object[] selectColDetails = (Object[]) it.next();
        String selectColName = (String) selectColDetails[1];
        ASTNode selectColumnNode = (ASTNode) selectColDetails[2];
        ExprNodeDesc selectColumnExprNode = TranslateUtils.buildExprNode(selectColumnNode, selectListInputTypeCheckCtx);
        ExprNodeEvaluator selectColumnExprEval = ExprNodeEvaluatorFactory.get(selectColumnExprNode);
        ObjectInspector selectColumnOI = null;
        try {
            selectColumnOI = selectColumnExprEval.initialize(selectListInputOI);
        } catch (HiveException he) {
            throw new WindowingException(he);
        }
        selectColName = getColumnName(selectColName, selectColumnExprNode, i);
        selectListExprEvaluators.add(selectColumnExprEval);
        selectListExprOIs.add(selectColumnOI);
        selectListExprNames.add(selectColName);
        i++;
    }
    selectListOutputOI = ObjectInspectorFactory.getStandardStructObjectInspector(selectListExprNames, selectListExprOIs);
}
Also used : StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator) ArrayList(java.util.ArrayList) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) WindowingException(com.sap.hadoop.windowing.WindowingException) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
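
The core pattern in buildSelectListEvaluators is: translate the select column's ASTNode into an ExprNodeDesc, obtain an ExprNodeEvaluator for it, and initialize that evaluator against the input ObjectInspector. A self-contained sketch of the same get/initialize/evaluate cycle, using a hand-built column expression instead of one translated from an ASTNode (the class name, struct layout and column name "key" are illustrative):

import java.util.Arrays;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class EvaluatorSketch {
    public static void main(String[] args) throws Exception {
        // Input rows are structs with a single int column named "key".
        StructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("key"),
            Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.javaIntObjectInspector));

        // The select expression: just the column "key".
        ExprNodeDesc keyExpr = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "key", null, false);

        ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(keyExpr);
        ObjectInspector outOI = eval.initialize(rowOI);   // as in the example above

        Object row = Arrays.<Object>asList(42);
        System.out.println(eval.evaluate(row) + " : " + outOI.getTypeName());
    }
}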

Example 59 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project SQLWindowing by hbutani.

The class OutputTranslation, method translateSelectExprs.

public static void translateSelectExprs(QueryDef qDef) throws WindowingException {
    QueryTranslationInfo tInfo = qDef.getTranslationInfo();
    QueryInputDef iDef = qDef.getInput();
    InputInfo iInfo = tInfo.getInputInfo(iDef);
    SelectDef selectDef = qDef.getSelectList();
    SelectSpec selectSpec = qDef.getSpec().getSelectList();
    Iterator<Object> selectExprsAndAliases = selectSpec.getColumnListAndAlias();
    int i = 0;
    ColumnDef cDef = null;
    while (selectExprsAndAliases.hasNext()) {
        Object[] o = (Object[]) selectExprsAndAliases.next();
        boolean isWnFn = ((Boolean) o[0]).booleanValue();
        if (isWnFn) {
            cDef = translateWindowFnAlias(qDef, iInfo, i++, (String) o[1]);
        } else {
            cDef = translateSelectExpr(qDef, iInfo, i++, (String) o[1], (ASTNode) o[2]);
        }
        selectDef.addColumn(cDef);
    }
    TranslateUtils.setupSelectOI(selectDef);
}
Also used : SelectDef(com.sap.hadoop.windowing.query2.definition.SelectDef) ColumnDef(com.sap.hadoop.windowing.query2.definition.ColumnDef) SelectSpec(com.sap.hadoop.windowing.query2.specification.SelectSpec) InputInfo(com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo) QueryInputDef(com.sap.hadoop.windowing.query2.definition.QueryInputDef) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode)

Example 60 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class HiveGBOpConvUtil, method getGBInfo.

// For each GB op in the logical GB this should be called separately;
// otherwise GB evaluators and expr nodes may get shared among multiple GB ops
private static GBInfo getGBInfo(HiveAggregate aggRel, OpAttr inputOpAf, HiveConf hc) throws SemanticException {
    GBInfo gbInfo = new GBInfo();
    // 0. Collect AggRel output col Names
    gbInfo.outputColNames.addAll(aggRel.getRowType().getFieldNames());
    // 1. Collect GB Keys
    RelNode aggInputRel = aggRel.getInput();
    ExprNodeConverter exprConv = new ExprNodeConverter(inputOpAf.tabAlias, aggInputRel.getRowType(), new HashSet<Integer>(), aggRel.getCluster().getTypeFactory(), true);
    ExprNodeDesc tmpExprNodeDesc;
    for (int i : aggRel.getGroupSet()) {
        RexInputRef iRef = new RexInputRef(i, aggInputRel.getRowType().getFieldList().get(i).getType());
        tmpExprNodeDesc = iRef.accept(exprConv);
        gbInfo.gbKeys.add(tmpExprNodeDesc);
        gbInfo.gbKeyColNamesInInput.add(aggInputRel.getRowType().getFieldNames().get(i));
        gbInfo.gbKeyTypes.add(tmpExprNodeDesc.getTypeInfo());
    }
    // 2. Collect Grouping Set info
    if (aggRel.getGroupType() != Group.SIMPLE) {
        // 2.1 Translate Grouping set col bitset
        ImmutableList<ImmutableBitSet> lstGrpSet = aggRel.getGroupSets();
        long bitmap = 0;
        for (ImmutableBitSet grpSet : lstGrpSet) {
            bitmap = 0;
            for (Integer bitIdx : grpSet.asList()) {
                bitmap = SemanticAnalyzer.setBit(bitmap, bitIdx);
            }
            gbInfo.grpSets.add(bitmap);
        }
        Collections.sort(gbInfo.grpSets);
        // 2.2 Check if GRpSet require additional MR Job
        gbInfo.grpSetRqrAdditionalMRJob = gbInfo.grpSets.size() > hc.getIntVar(HiveConf.ConfVars.HIVE_NEW_JOB_GROUPING_SET_CARDINALITY);
        // 2.3 Check if GROUPING_ID needs to be projected out
        if (!aggRel.getAggCallList().isEmpty() && (aggRel.getAggCallList().get(aggRel.getAggCallList().size() - 1).getAggregation() == HiveGroupingID.INSTANCE)) {
            gbInfo.grpIdFunctionNeeded = true;
        }
    }
    // 3. Walk through UDAF & Collect Distinct Info
    Set<Integer> distinctRefs = new HashSet<Integer>();
    Map<Integer, Integer> distParamInRefsToOutputPos = new HashMap<Integer, Integer>();
    for (AggregateCall aggCall : aggRel.getAggCallList()) {
        if ((aggCall.getAggregation() == HiveGroupingID.INSTANCE) || !aggCall.isDistinct()) {
            continue;
        }
        List<Integer> argLst = new ArrayList<Integer>(aggCall.getArgList());
        List<String> argNames = HiveCalciteUtil.getFieldNames(argLst, aggInputRel);
        ExprNodeDesc distinctExpr;
        for (int i = 0; i < argLst.size(); i++) {
            if (!distinctRefs.contains(argLst.get(i))) {
                distinctRefs.add(argLst.get(i));
                distinctExpr = HiveCalciteUtil.getExprNode(argLst.get(i), aggInputRel, exprConv);
                // Only distinct nodes that are NOT part of the key should be added to distExprNodes
                if (ExprNodeDescUtils.indexOf(distinctExpr, gbInfo.gbKeys) < 0) {
                    distParamInRefsToOutputPos.put(argLst.get(i), gbInfo.distExprNodes.size());
                    gbInfo.distExprNodes.add(distinctExpr);
                    gbInfo.distExprNames.add(argNames.get(i));
                    gbInfo.distExprTypes.add(distinctExpr.getTypeInfo());
                }
            }
        }
    }
    // 4. Walk through UDAF & Collect UDAF Info
    Set<Integer> deDupedNonDistIrefsSet = new HashSet<Integer>();
    for (AggregateCall aggCall : aggRel.getAggCallList()) {
        if (aggCall.getAggregation() == HiveGroupingID.INSTANCE) {
            continue;
        }
        UDAFAttrs udafAttrs = new UDAFAttrs();
        List<ExprNodeDesc> argExps = HiveCalciteUtil.getExprNodes(aggCall.getArgList(), aggInputRel, inputOpAf.tabAlias);
        udafAttrs.udafParams.addAll(argExps);
        udafAttrs.udafName = aggCall.getAggregation().getName();
        udafAttrs.argList = aggCall.getArgList();
        udafAttrs.isDistinctUDAF = aggCall.isDistinct();
        List<Integer> argLst = new ArrayList<Integer>(aggCall.getArgList());
        List<Integer> distColIndicesOfUDAF = new ArrayList<Integer>();
        List<Integer> distUDAFParamsIndxInDistExprs = new ArrayList<Integer>();
        for (int i = 0; i < argLst.size(); i++) {
            // NOTE: distinct expr can be part of the GB key
            if (udafAttrs.isDistinctUDAF) {
                ExprNodeDesc argExpr = argExps.get(i);
                Integer found = ExprNodeDescUtils.indexOf(argExpr, gbInfo.gbKeys);
                distColIndicesOfUDAF.add(found < 0 ? distParamInRefsToOutputPos.get(argLst.get(i)) + gbInfo.gbKeys.size() + (gbInfo.grpSets.size() > 0 ? 1 : 0) : found);
                distUDAFParamsIndxInDistExprs.add(distParamInRefsToOutputPos.get(argLst.get(i)));
            } else {
                // TODO: this seems wrong (following what Hive Regular does)
                if (!distParamInRefsToOutputPos.containsKey(argLst.get(i)) && !deDupedNonDistIrefsSet.contains(argLst.get(i))) {
                    deDupedNonDistIrefsSet.add(argLst.get(i));
                    gbInfo.deDupedNonDistIrefs.add(udafAttrs.udafParams.get(i));
                }
            }
        }
        if (udafAttrs.isDistinctUDAF) {
            gbInfo.containsDistinctAggr = true;
            udafAttrs.udafParamsIndxInGBInfoDistExprs = distUDAFParamsIndxInDistExprs;
            gbInfo.distColIndices.add(distColIndicesOfUDAF);
        }
        // special handling for count, similar to PlanModifierForASTConv::replaceEmptyGroupAggr()
        udafAttrs.udafEvaluator = SemanticAnalyzer.getGenericUDAFEvaluator(udafAttrs.udafName, new ArrayList<ExprNodeDesc>(udafAttrs.udafParams), new ASTNode(), udafAttrs.isDistinctUDAF, udafAttrs.udafParams.size() == 0 && "count".equalsIgnoreCase(udafAttrs.udafName) ? true : false);
        gbInfo.udafAttrs.add(udafAttrs);
    }
    // 5. Gather GB Memory threshold
    gbInfo.groupByMemoryUsage = HiveConf.getFloatVar(hc, HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
    gbInfo.memoryThreshold = HiveConf.getFloatVar(hc, HiveConf.ConfVars.HIVEMAPAGGRMEMORYTHRESHOLD);
    // 6. Gather GB Physical pipeline (based on user config & Grouping Sets size)
    gbInfo.gbPhysicalPipelineMode = getAggOPMode(hc, gbInfo);
    return gbInfo;
}
Also used : ImmutableBitSet(org.apache.calcite.util.ImmutableBitSet) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) AggregateCall(org.apache.calcite.rel.core.AggregateCall) RelNode(org.apache.calcite.rel.RelNode) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) RexInputRef(org.apache.calcite.rex.RexInputRef) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) HashSet(java.util.HashSet)
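
In step 2.1 above each grouping set is folded into a long bitmap with SemanticAnalyzer.setBit. A standalone sketch of that folding (illustrative only, not the Hive implementation; note that the actual bit convention, whether a set bit marks a key as present or as aggregated away, differs between Hive versions):

import java.util.Arrays;
import java.util.List;

public class GroupingSetBitmapSketch {
    static long toBitmap(List<Integer> keyPositions) {
        long bitmap = 0L;
        for (int pos : keyPositions) {
            bitmap |= 1L << pos;   // mark this grouping-key position in the mask
        }
        return bitmap;
    }

    public static void main(String[] args) {
        // GROUPING SETS ((a, b), (a), ()) over keys a=0, b=1
        System.out.println(toBitmap(Arrays.asList(0, 1)));       // 3
        System.out.println(toBitmap(Arrays.asList(0)));          // 1
        System.out.println(toBitmap(Arrays.<Integer>asList()));  // 0
    }
}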

Aggregations

ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 116 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 37 usages
DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 24 usages
ArrayList (java.util.ArrayList): 21 usages
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 13 usages
HashMap (java.util.HashMap): 11 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 11 usages
Table (org.apache.hadoop.hive.ql.metadata.Table): 10 usages
Node (org.apache.hadoop.hive.ql.lib.Node): 9 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 8 usages
TableName (org.apache.hadoop.hive.common.TableName): 7 usages
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 7 usages
RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver): 7 usages
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 6 usages
RelNode (org.apache.calcite.rel.RelNode): 5 usages
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 5 usages
Context (org.apache.hadoop.hive.ql.Context): 5 usages
ParseDriver (org.apache.hadoop.hive.ql.parse.ParseDriver): 5 usages
SemanticAnalyzer (org.apache.hadoop.hive.ql.parse.SemanticAnalyzer): 5 usages
WindowingException (com.sap.hadoop.windowing.WindowingException): 4 usages