
Example 36 with OperatorDesc

Use of org.apache.hadoop.hive.ql.plan.OperatorDesc in project hive by apache.

Class GenMRRedSink2, method process:

/**
 * Reduce Scan encountered.
 *
 * @param nd
 *          the reduce sink operator encountered
 * @param opProcCtx
 *          context
 */
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx opProcCtx, Object... nodeOutputs) throws SemanticException {
    ReduceSinkOperator op = (ReduceSinkOperator) nd;
    GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
    Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx.getMapCurrCtx();
    GenMapRedCtx mapredCtx = mapCurrCtx.get(op.getParentOperators().get(0));
    Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
    String currAliasId = mapredCtx.getCurrAliasId();
    Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);
    Map<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap = ctx.getOpTaskMap();
    Task<? extends Serializable> oldTask = opTaskMap.get(reducer);
    ctx.setCurrAliasId(currAliasId);
    ctx.setCurrTask(currTask);
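    // First time this reducer is seen: split the plan here to start a new
    // map-reduce task. Otherwise merge this branch into the task that
    // already owns the reducer.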
    if (oldTask == null) {
        GenMapRedUtils.splitPlan(op, ctx);
    } else {
        GenMapRedUtils.splitPlan(op, currTask, oldTask, ctx);
        currTask = oldTask;
        ctx.setCurrTask(currTask);
    }
    mapCurrCtx.put(op, new GenMapRedCtx(ctx.getCurrTask(), ctx.getCurrAliasId()));
    if (GenMapRedUtils.hasBranchFinished(nodeOutputs)) {
        ctx.addRootIfPossible(currTask);
        return false;
    }
    return true;
}
Also used: Operator (org.apache.hadoop.hive.ql.exec.Operator), ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator), Task (org.apache.hadoop.hive.ql.exec.Task), Serializable (java.io.Serializable), GenMapRedCtx (org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx), OperatorDesc (org.apache.hadoop.hive.ql.plan.OperatorDesc)
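The heart of this processor is the reuse-or-split decision keyed on the reducer: the first reduce sink feeding a given reducer starts a new map-reduce task, and any later sink reaching the same reducer is merged into the task recorded in opTaskMap. The sketch below distills just that bookkeeping; Op and MrTask are hypothetical stand-ins rather than Hive classes, and the real plan surgery lives in GenMapRedUtils.splitPlan.

import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-ins for Hive's Operator and Task types.
class Op {}
class MrTask {}

public class ReuseOrSplitSketch {
    private final Map<Op, MrTask> opTaskMap = new HashMap<>();

    MrTask taskFor(Op reducer) {
        MrTask oldTask = opTaskMap.get(reducer);
        if (oldTask == null) {
            // First encounter: start a fresh task (stands in for splitPlan(op, ctx)).
            MrTask fresh = new MrTask();
            opTaskMap.put(reducer, fresh);
            return fresh;
        }
        // Seen before: reuse the owning task (stands in for the merge branch).
        return oldTask;
    }

    public static void main(String[] args) {
        ReuseOrSplitSketch s = new ReuseOrSplitSketch();
        Op reducer = new Op();
        System.out.println(s.taskFor(reducer) == s.taskFor(reducer)); // true: task reused
    }
}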

Example 37 with OperatorDesc

Use of org.apache.hadoop.hive.ql.plan.OperatorDesc in project hive by apache.

Class GenMRRedSink3, method process:

/**
 * Reduce Scan encountered.
 *
 * @param nd
 *          the reduce sink operator encountered
 * @param opProcCtx
 *          context
 */
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx opProcCtx, Object... nodeOutputs) throws SemanticException {
    ReduceSinkOperator op = (ReduceSinkOperator) nd;
    GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
    // The union consisted of a bunch of map-reduce jobs, and the plan has been
    // split at the union
    Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);
    UnionOperator union = Utils.findNode(stack, UnionOperator.class);
    assert union != null;
    Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx.getMapCurrCtx();
    GenMapRedCtx mapredCtx = mapCurrCtx.get(union);
    Task<? extends Serializable> unionTask = null;
    if (mapredCtx != null) {
        unionTask = mapredCtx.getCurrTask();
    } else {
        unionTask = ctx.getCurrTask();
    }
    MapredWork plan = (MapredWork) unionTask.getWork();
    HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap = ctx.getOpTaskMap();
    Task<? extends Serializable> reducerTask = opTaskMap.get(reducer);
    ctx.setCurrTask(unionTask);
    // If the plan for this reducer does not exist, initialize the plan
    if (reducerTask == null) {
        // When the reducer is encountered for the first time
        if (plan.getReduceWork() == null) {
            GenMapRedUtils.initUnionPlan(op, union, ctx, unionTask);
        // When union is followed by a multi-table insert
        } else {
            GenMapRedUtils.splitPlan(op, ctx);
        }
    } else if (plan.getReduceWork() != null && plan.getReduceWork().getReducer() == reducer) {
        // The union is already initialized. However, the union is walked from
        // another input
        // initUnionPlan is idempotent
        GenMapRedUtils.initUnionPlan(op, union, ctx, unionTask);
    } else {
        GenMapRedUtils.joinUnionPlan(ctx, union, unionTask, reducerTask, false);
        ctx.setCurrTask(reducerTask);
    }
    mapCurrCtx.put(op, new GenMapRedCtx(ctx.getCurrTask(), ctx.getCurrAliasId()));
    // the union operator has been processed
    ctx.setCurrUnionOp(null);
    return true;
}
Also used: Operator (org.apache.hadoop.hive.ql.exec.Operator), ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator), UnionOperator (org.apache.hadoop.hive.ql.exec.UnionOperator), Task (org.apache.hadoop.hive.ql.exec.Task), Serializable (java.io.Serializable), MapredWork (org.apache.hadoop.hive.ql.plan.MapredWork), GenMapRedCtx (org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx), OperatorDesc (org.apache.hadoop.hive.ql.plan.OperatorDesc)
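The three branches above hinge on initUnionPlan being idempotent: the graph walker can reach the same union from each of its inputs, so initializing the union's reduce-side plan must be safe to repeat. A minimal sketch of that property, with hypothetical names that are not Hive's API:

public class IdempotentInitSketch {
    private boolean initialized = false;
    private String reducer;

    // Called once per input branch that reaches the union; repeat calls are no-ops.
    void initUnionPlan(String reducer) {
        if (initialized) {
            return; // already initialized while walking another input
        }
        this.reducer = reducer;
        this.initialized = true;
    }

    public static void main(String[] args) {
        IdempotentInitSketch plan = new IdempotentInitSketch();
        plan.initUnionPlan("reducer-1"); // first input branch initializes
        plan.initUnionPlan("reducer-1"); // second branch is a harmless no-op
        System.out.println(plan.reducer);
    }
}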

Example 38 with OperatorDesc

Use of org.apache.hadoop.hive.ql.plan.OperatorDesc in project hive by apache.

Class TestTezTask, method setUp:

@SuppressWarnings("unchecked")
@Before
public void setUp() throws Exception {
    utils = mock(DagUtils.class);
    fs = mock(FileSystem.class);
    path = mock(Path.class);
    when(path.getFileSystem(any(Configuration.class))).thenReturn(fs);
    when(utils.getTezDir(any(Path.class))).thenReturn(path);
    when(utils.createVertex(any(JobConf.class), any(BaseWork.class), any(Path.class), any(FileSystem.class), any(Context.class), anyBoolean(), any(TezWork.class), any(VertexType.class), any(Map.class))).thenAnswer(new Answer<Vertex>() {

        @Override
        public Vertex answer(InvocationOnMock invocation) throws Throwable {
            Object[] args = invocation.getArguments();
            return Vertex.create(((BaseWork) args[1]).getName(), mock(ProcessorDescriptor.class), 0, mock(Resource.class));
        }
    });
    when(utils.createEdge(any(JobConf.class), any(Vertex.class), any(Vertex.class), any(TezEdgeProperty.class), any(BaseWork.class), any(TezWork.class))).thenAnswer(new Answer<Edge>() {

        @Override
        public Edge answer(InvocationOnMock invocation) throws Throwable {
            Object[] args = invocation.getArguments();
            return Edge.create((Vertex) args[1], (Vertex) args[2], mock(EdgeProperty.class));
        }
    });
    work = new TezWork("", null);
    mws = new MapWork[] { new MapWork(), new MapWork() };
    rws = new ReduceWork[] { new ReduceWork(), new ReduceWork() };
    work.addAll(mws);
    work.addAll(rws);
    int i = 0;
    for (BaseWork w : work.getAllWork()) {
        w.setName("Work " + (++i));
    }
    op = mock(Operator.class);
    LinkedHashMap<String, Operator<? extends OperatorDesc>> map = new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
    map.put("foo", op);
    mws[0].setAliasToWork(map);
    mws[1].setAliasToWork(map);
    LinkedHashMap<Path, ArrayList<String>> pathMap = new LinkedHashMap<>();
    ArrayList<String> aliasList = new ArrayList<String>();
    aliasList.add("foo");
    pathMap.put(new Path("foo"), aliasList);
    mws[0].setPathToAliases(pathMap);
    mws[1].setPathToAliases(pathMap);
    rws[0].setReducer(op);
    rws[1].setReducer(op);
    TezEdgeProperty edgeProp = new TezEdgeProperty(EdgeType.SIMPLE_EDGE);
    work.connect(mws[0], rws[0], edgeProp);
    work.connect(mws[1], rws[0], edgeProp);
    work.connect(rws[0], rws[1], edgeProp);
    task = new TezTask(utils);
    task.setWork(work);
    task.setConsole(mock(LogHelper.class));
    QueryPlan mockQueryPlan = mock(QueryPlan.class);
    doReturn(UUID.randomUUID().toString()).when(mockQueryPlan).getQueryId();
    task.setQueryPlan(mockQueryPlan);
    conf = new JobConf();
    appLr = createResource("foo.jar");
    HiveConf hiveConf = new HiveConf();
    hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    SessionState.start(hiveConf);
    session = mock(TezClient.class);
    sessionState = mock(TezSessionState.class);
    when(sessionState.getSession()).thenReturn(session);
    when(sessionState.reopen()).thenReturn(sessionState);
    when(session.submitDAG(any(DAG.class))).thenThrow(new SessionNotRunning("")).thenReturn(mock(DAGClient.class));
}
Also used: Operator (org.apache.hadoop.hive.ql.exec.Operator), Vertex (org.apache.tez.dag.api.Vertex), Configuration (org.apache.hadoop.conf.Configuration), LogHelper (org.apache.hadoop.hive.ql.session.SessionState.LogHelper), TezEdgeProperty (org.apache.hadoop.hive.ql.plan.TezEdgeProperty), ArrayList (java.util.ArrayList), QueryPlan (org.apache.hadoop.hive.ql.QueryPlan), LinkedHashMap (java.util.LinkedHashMap), TezClient (org.apache.tez.client.TezClient), SessionNotRunning (org.apache.tez.dag.api.SessionNotRunning), FileSystem (org.apache.hadoop.fs.FileSystem), HiveConf (org.apache.hadoop.hive.conf.HiveConf), JobConf (org.apache.hadoop.mapred.JobConf), BaseWork (org.apache.hadoop.hive.ql.plan.BaseWork), Path (org.apache.hadoop.fs.Path), Context (org.apache.hadoop.hive.ql.Context), ReduceWork (org.apache.hadoop.hive.ql.plan.ReduceWork), MapWork (org.apache.hadoop.hive.ql.plan.MapWork), InvocationOnMock (org.mockito.invocation.InvocationOnMock), DAGClient (org.apache.tez.dag.api.client.DAGClient), Map (java.util.Map), HashMap (java.util.HashMap), Edge (org.apache.tez.dag.api.Edge), OperatorDesc (org.apache.hadoop.hive.ql.plan.OperatorDesc), TezWork (org.apache.hadoop.hive.ql.plan.TezWork), VertexType (org.apache.hadoop.hive.ql.plan.TezWork.VertexType), Before (org.junit.Before)
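Two Mockito idioms carry most of this setup. thenAnswer computes the stubbed return value from the invocation's arguments (used above to build each Vertex from the BaseWork's name), and a chained thenThrow(...).thenReturn(...) makes the first submitDAG call fail with SessionNotRunning while the retry succeeds, which lets the test exercise session re-open. A self-contained sketch of both idioms against a plain java.util.List mock:

import static org.mockito.Mockito.*;

import java.util.List;

public class MockitoIdiomsSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        // (1) Derive the stubbed result from the call's arguments.
        List<String> list = mock(List.class);
        when(list.get(anyInt()))
            .thenAnswer(inv -> "item-" + inv.getArguments()[0]);
        System.out.println(list.get(7)); // item-7

        // (2) Fail on the first call, succeed on the second.
        List<String> flaky = mock(List.class);
        when(flaky.size())
            .thenThrow(new IllegalStateException("not running"))
            .thenReturn(42);
        try {
            flaky.size();
        } catch (IllegalStateException expected) {
            System.out.println("first call failed as stubbed");
        }
        System.out.println(flaky.size()); // 42
    }
}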

Example 39 with OperatorDesc

Use of org.apache.hadoop.hive.ql.plan.OperatorDesc in project hive by apache.

Class TestOperators, method testMapOperator:

public void testMapOperator() throws Throwable {
    try {
        System.out.println("Testing Map Operator");
        // initialize configuration
        JobConf hconf = new JobConf(TestOperators.class);
        hconf.set(MRJobConfig.MAP_INPUT_FILE, "hdfs:///testDir/testFile");
        IOContextMap.get(hconf).setInputPath(new Path("hdfs:///testDir/testFile"));
        // initialize pathToAliases
        ArrayList<String> aliases = new ArrayList<String>();
        aliases.add("a");
        aliases.add("b");
        LinkedHashMap<Path, ArrayList<String>> pathToAliases = new LinkedHashMap<>();
        pathToAliases.put(new Path("hdfs:///testDir"), aliases);
        // initialize pathToTableInfo
        // Default: treat the table as a single column "col"
        TableDesc td = Utilities.defaultTd;
        PartitionDesc pd = new PartitionDesc(td, null);
        LinkedHashMap<Path, org.apache.hadoop.hive.ql.plan.PartitionDesc> pathToPartitionInfo = new LinkedHashMap<>();
        pathToPartitionInfo.put(new Path("hdfs:///testDir"), pd);
        // initialize aliasToWork
        CompilationOpContext ctx = new CompilationOpContext();
        CollectDesc cd = new CollectDesc(Integer.valueOf(1));
        CollectOperator cdop1 = (CollectOperator) OperatorFactory.get(ctx, CollectDesc.class);
        cdop1.setConf(cd);
        CollectOperator cdop2 = (CollectOperator) OperatorFactory.get(ctx, CollectDesc.class);
        cdop2.setConf(cd);
        LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork = new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
        aliasToWork.put("a", cdop1);
        aliasToWork.put("b", cdop2);
        // initialize mapredWork
        MapredWork mrwork = new MapredWork();
        mrwork.getMapWork().setPathToAliases(pathToAliases);
        mrwork.getMapWork().setPathToPartitionInfo(pathToPartitionInfo);
        mrwork.getMapWork().setAliasToWork(aliasToWork);
        // get map operator and initialize it
        MapOperator mo = new MapOperator(new CompilationOpContext());
        mo.initializeAsRoot(hconf, mrwork.getMapWork());
        Text tw = new Text();
        InspectableObject io1 = new InspectableObject();
        InspectableObject io2 = new InspectableObject();
        for (int i = 0; i < 5; i++) {
            String answer = "[[" + i + ", " + (i + 1) + ", " + (i + 2) + "]]";
            tw.set("" + i + "\u0001" + (i + 1) + "\u0001" + (i + 2));
            mo.process(tw);
            cdop1.retrieve(io1);
            cdop2.retrieve(io2);
            System.out.println("io1.o.toString() = " + io1.o.toString());
            System.out.println("io2.o.toString() = " + io2.o.toString());
            System.out.println("answer.toString() = " + answer.toString());
            assertEquals(answer.toString(), io1.o.toString());
            assertEquals(answer.toString(), io2.o.toString());
        }
        System.out.println("Map Operator ok");
    } catch (Throwable e) {
        e.printStackTrace();
        throw (e);
    }
}
Also used: ArrayList (java.util.ArrayList), LinkedHashMap (java.util.LinkedHashMap), InspectableObject (org.apache.hadoop.hive.serde2.objectinspector.InspectableObject), MapredWork (org.apache.hadoop.hive.ql.plan.MapredWork), JobConf (org.apache.hadoop.mapred.JobConf), Path (org.apache.hadoop.fs.Path), CollectDesc (org.apache.hadoop.hive.ql.plan.CollectDesc), Text (org.apache.hadoop.io.Text), CompilationOpContext (org.apache.hadoop.hive.ql.CompilationOpContext), PartitionDesc (org.apache.hadoop.hive.ql.plan.PartitionDesc), TableDesc (org.apache.hadoop.hive.ql.plan.TableDesc), OperatorDesc (org.apache.hadoop.hive.ql.plan.OperatorDesc)
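The rows pushed through mo.process are framed with '\u0001' (Ctrl-A), Hive's default field delimiter for text tables, which is why each single Text line comes back from the collect operators as the three-column row [[i, i+1, i+2]]. A minimal, Hive-free illustration of that framing:

import java.util.Arrays;

public class CtrlADelimitedSketch {
    public static void main(String[] args) {
        int i = 0;
        // Same framing as the test: three fields separated by Ctrl-A.
        String row = "" + i + "\u0001" + (i + 1) + "\u0001" + (i + 2);
        String[] fields = row.split("\u0001");
        System.out.println(Arrays.toString(fields)); // [0, 1, 2]
    }
}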

Example 40 with OperatorDesc

Use of org.apache.hadoop.hive.ql.plan.OperatorDesc in project hive by apache.

Class ExplainTask, method outputPlan:

@VisibleForTesting
JSONObject outputPlan(Object work, PrintStream out, boolean extended, boolean jsonOutput, int indent, String appendToHeader) throws Exception {
    // Are we running tests?
    final boolean inTest = queryState.getConf().getBoolVar(ConfVars.HIVE_IN_TEST);
    // Check if work has an explain annotation
    Annotation note = AnnotationUtils.getAnnotation(work.getClass(), Explain.class);
    String keyJSONObject = null;
    if (note instanceof Explain) {
        Explain xpl_note = (Explain) note;
        boolean invokeFlag = false;
        if (this.work != null && this.work.isUserLevelExplain()) {
            invokeFlag = Level.USER.in(xpl_note.explainLevels());
        } else {
            if (extended) {
                invokeFlag = Level.EXTENDED.in(xpl_note.explainLevels());
            } else {
                invokeFlag = Level.DEFAULT.in(xpl_note.explainLevels());
            }
        }
        if (invokeFlag) {
            Vectorization vectorization = xpl_note.vectorization();
            if (this.work != null && this.work.isVectorization()) {
                // The EXPLAIN VECTORIZATION option was specified.
                final boolean desireOnly = this.work.isVectorizationOnly();
                final VectorizationDetailLevel desiredVecDetailLevel = this.work.isVectorizationDetailLevel();
                switch(vectorization) {
                    case NON_VECTORIZED:
                        // Display all non-vectorized leaf objects unless ONLY.
                        if (desireOnly) {
                            invokeFlag = false;
                        }
                        break;
                    case SUMMARY:
                    case OPERATOR:
                    case EXPRESSION:
                    case DETAIL:
                        if (vectorization.rank < desiredVecDetailLevel.rank) {
                            // This detail not desired.
                            invokeFlag = false;
                        }
                        break;
                    case SUMMARY_PATH:
                    case OPERATOR_PATH:
                        if (desireOnly) {
                            if (vectorization.rank < desiredVecDetailLevel.rank) {
                                // Suppress headers and all objects below.
                                invokeFlag = false;
                            }
                        }
                        break;
                    default:
                        throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization);
                }
            } else {
                // Do not display vectorization objects.
                switch(vectorization) {
                    case SUMMARY:
                    case OPERATOR:
                    case EXPRESSION:
                    case DETAIL:
                        invokeFlag = false;
                        break;
                    case NON_VECTORIZED:
                        // No action.
                        break;
                    case SUMMARY_PATH:
                    case OPERATOR_PATH:
                        // Always include headers since they contain non-vectorized objects, too.
                        break;
                    default:
                        throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization);
                }
            }
        }
        if (invokeFlag) {
            keyJSONObject = xpl_note.displayName();
            if (out != null) {
                out.print(indentString(indent));
                if (appendToHeader != null && !appendToHeader.isEmpty()) {
                    out.println(xpl_note.displayName() + appendToHeader);
                } else {
                    out.println(xpl_note.displayName());
                }
            }
        }
    }
    JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null;
    // If this is an operator, output its conf and then recurse into its children
    if (work instanceof Operator) {
        Operator<? extends OperatorDesc> operator = (Operator<? extends OperatorDesc>) work;
        if (operator.getConf() != null) {
            String appender = isLogical ? " (" + operator.getOperatorId() + ")" : "";
            JSONObject jsonOut = outputPlan(operator.getConf(), out, extended, jsonOutput, jsonOutput ? 0 : indent, appender);
            if (this.work != null && (this.work.isUserLevelExplain() || this.work.isFormatted())) {
                if (jsonOut != null && jsonOut.length() > 0) {
                    ((JSONObject) jsonOut.get(JSONObject.getNames(jsonOut)[0])).put("OperatorId:", operator.getOperatorId());
                }
                if (!this.work.isUserLevelExplain() && this.work.isFormatted() && operator.getConf() instanceof ReduceSinkDesc) {
                    ((JSONObject) jsonOut.get(JSONObject.getNames(jsonOut)[0])).put("outputname:", ((ReduceSinkDesc) operator.getConf()).getOutputName());
                }
            }
            if (jsonOutput) {
                json = jsonOut;
            }
        }
        if (!visitedOps.contains(operator) || !isLogical) {
            visitedOps.add(operator);
            if (operator.getChildOperators() != null) {
                int cindent = jsonOutput ? 0 : indent + 2;
                for (Operator<? extends OperatorDesc> op : operator.getChildOperators()) {
                    JSONObject jsonOut = outputPlan(op, out, extended, jsonOutput, cindent);
                    if (jsonOutput) {
                        ((JSONObject) json.get(JSONObject.getNames(json)[0])).accumulate("children", jsonOut);
                    }
                }
            }
        }
        if (jsonOutput) {
            return json;
        }
        return null;
    }
    // We look at all methods that generate values for explain
    Method[] methods = work.getClass().getMethods();
    Arrays.sort(methods, new MethodComparator());
    for (Method m : methods) {
        int prop_indents = jsonOutput ? 0 : indent + 2;
        note = AnnotationUtils.getAnnotation(m, Explain.class);
        if (note instanceof Explain) {
            Explain xpl_note = (Explain) note;
            boolean invokeFlag = false;
            if (this.work != null && this.work.isUserLevelExplain()) {
                invokeFlag = Level.USER.in(xpl_note.explainLevels());
            } else {
                if (extended) {
                    invokeFlag = Level.EXTENDED.in(xpl_note.explainLevels());
                } else {
                    invokeFlag = Level.DEFAULT.in(xpl_note.explainLevels());
                }
            }
            if (invokeFlag) {
                Vectorization vectorization = xpl_note.vectorization();
                if (this.work != null && this.work.isVectorization()) {
                    // The EXPLAIN VECTORIZATION option was specified.
                    final boolean desireOnly = this.work.isVectorizationOnly();
                    final VectorizationDetailLevel desiredVecDetailLevel = this.work.isVectorizationDetailLevel();
                    switch(vectorization) {
                        case NON_VECTORIZED:
                            // Display all non-vectorized leaf objects unless ONLY.
                            if (desireOnly) {
                                invokeFlag = false;
                            }
                            break;
                        case SUMMARY:
                        case OPERATOR:
                        case EXPRESSION:
                        case DETAIL:
                            if (vectorization.rank < desiredVecDetailLevel.rank) {
                                // This detail not desired.
                                invokeFlag = false;
                            }
                            break;
                        case SUMMARY_PATH:
                        case OPERATOR_PATH:
                            if (desireOnly) {
                                if (vectorization.rank < desiredVecDetailLevel.rank) {
                                    // Suppress headers and all objects below.
                                    invokeFlag = false;
                                }
                            }
                            break;
                        default:
                            throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization);
                    }
                } else {
                    // Do not display vectorization objects.
                    switch(vectorization) {
                        case SUMMARY:
                        case OPERATOR:
                        case EXPRESSION:
                        case DETAIL:
                            invokeFlag = false;
                            break;
                        case NON_VECTORIZED:
                            // No action.
                            break;
                        case SUMMARY_PATH:
                        case OPERATOR_PATH:
                            // Always include headers since they contain non-vectorized objects, too.
                            break;
                        default:
                            throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization);
                    }
                }
            }
            if (invokeFlag) {
                Object val = null;
                try {
                    if (postProcess(xpl_note)) {
                        val = m.invoke(work, inTest);
                    } else {
                        val = m.invoke(work);
                    }
                } catch (InvocationTargetException ex) {
                    // Ignore the exception, this may be caused by external jars
                    val = null;
                }
                if (val == null) {
                    continue;
                }
                if (xpl_note.jsonOnly() && !jsonOutput) {
                    continue;
                }
                String header = null;
                boolean skipHeader = xpl_note.skipHeader();
                boolean emptyHeader = false;
                if (!xpl_note.displayName().equals("")) {
                    header = indentString(prop_indents) + xpl_note.displayName() + ":";
                } else {
                    emptyHeader = true;
                    prop_indents = indent;
                    header = indentString(prop_indents);
                }
                // Try the output as a primitive object
                if (isPrintable(val)) {
                    if (out != null && shouldPrint(xpl_note, val)) {
                        if (!skipHeader) {
                            out.print(header);
                            out.print(" ");
                        }
                        out.println(val);
                    }
                    if (jsonOutput && shouldPrint(xpl_note, val)) {
                        json.put(header, val.toString());
                    }
                    continue;
                }
                int ind = 0;
                if (!jsonOutput) {
                    if (!skipHeader) {
                        ind = prop_indents + 2;
                    } else {
                        ind = indent;
                    }
                }
                // Try this as a map
                if (val instanceof Map) {
                    // Go through the map and print out the stuff
                    Map<?, ?> mp = (Map<?, ?>) val;
                    if (out != null && !skipHeader && mp != null && !mp.isEmpty()) {
                        out.print(header);
                    }
                    JSONObject jsonOut = outputMap(mp, !skipHeader && !emptyHeader, out, extended, jsonOutput, ind);
                    if (jsonOutput && !mp.isEmpty()) {
                        json.put(header, jsonOut);
                    }
                    continue;
                }
                // Try this as a list
                if (val instanceof List || val instanceof Set) {
                    List l = val instanceof List ? (List) val : new ArrayList((Set) val);
                    if (out != null && !skipHeader && l != null && !l.isEmpty()) {
                        out.print(header);
                    }
                    JSONArray jsonOut = outputList(l, out, !skipHeader && !emptyHeader, extended, jsonOutput, ind);
                    if (jsonOutput && !l.isEmpty()) {
                        json.put(header, jsonOut);
                    }
                    continue;
                }
                // Finally check if it is serializable
                try {
                    if (!skipHeader && out != null) {
                        out.println(header);
                    }
                    JSONObject jsonOut = outputPlan(val, out, extended, jsonOutput, ind);
                    if (jsonOutput && jsonOut != null && jsonOut.length() != 0) {
                        if (!skipHeader) {
                            json.put(header, jsonOut);
                        } else {
                            for (String k : JSONObject.getNames(jsonOut)) {
                                json.put(k, jsonOut.get(k));
                            }
                        }
                    }
                    continue;
                } catch (ClassCastException ce) {
                // Ignore
                }
            }
        }
    }
    if (jsonOutput) {
        if (keyJSONObject != null) {
            JSONObject ret = new JSONObject(new LinkedHashMap<>());
            ret.put(keyJSONObject, json);
            return ret;
        }
        return json;
    }
    return null;
}
Also used: Set (java.util.Set), HashSet (java.util.HashSet), ArrayList (java.util.ArrayList), Vectorization (org.apache.hadoop.hive.ql.plan.Explain.Vectorization), LinkedHashMap (java.util.LinkedHashMap), List (java.util.List), ReduceSinkDesc (org.apache.hadoop.hive.ql.plan.ReduceSinkDesc), Explain (org.apache.hadoop.hive.ql.plan.Explain), JSONArray (org.json.JSONArray), Method (java.lang.reflect.Method), Annotation (java.lang.annotation.Annotation), InvocationTargetException (java.lang.reflect.InvocationTargetException), VectorizationDetailLevel (org.apache.hadoop.hive.ql.parse.ExplainConfiguration.VectorizationDetailLevel), JSONObject (org.json.JSONObject), OperatorDesc (org.apache.hadoop.hive.ql.plan.OperatorDesc), Map (java.util.Map), TreeMap (java.util.TreeMap), VisibleForTesting (com.google.common.annotations.VisibleForTesting)
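Away from the Operator branch, outputPlan is driven by reflection: it sorts the work object's methods, and each method carrying an @Explain annotation whose level and vectorization settings match is invoked, with the result rendered under the annotation's displayName. A stripped-down sketch of that walk, using a hypothetical @Describe annotation and plain java.lang.reflect rather than Hive's @Explain and AnnotationUtils:

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Method;

// Hypothetical annotation standing in for Hive's @Explain.
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@interface Describe {
    String displayName();
}

class DemoWork {
    @Describe(displayName = "alias")
    public String getAlias() { return "t1"; }

    public String getInternal() { return "hidden"; } // no annotation: skipped
}

public class AnnotationWalkSketch {
    public static void main(String[] args) throws Exception {
        Object work = new DemoWork();
        for (Method m : work.getClass().getMethods()) {
            Describe note = m.getAnnotation(Describe.class);
            if (note != null) {
                // Invoke the annotated getter and render it under its display name.
                System.out.println(note.displayName() + ": " + m.invoke(work));
            }
        }
    }
}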

Aggregations

OperatorDesc (org.apache.hadoop.hive.ql.plan.OperatorDesc): 87
Operator (org.apache.hadoop.hive.ql.exec.Operator): 70
ArrayList (java.util.ArrayList): 50
ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator): 44
TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator): 41
MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator): 36
JoinOperator (org.apache.hadoop.hive.ql.exec.JoinOperator): 31
FileSinkOperator (org.apache.hadoop.hive.ql.exec.FileSinkOperator): 30
UnionOperator (org.apache.hadoop.hive.ql.exec.UnionOperator): 27
Path (org.apache.hadoop.fs.Path): 21
SMBMapJoinOperator (org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator): 21
LinkedHashMap (java.util.LinkedHashMap): 18
Serializable (java.io.Serializable): 17
Task (org.apache.hadoop.hive.ql.exec.Task): 17
MapWork (org.apache.hadoop.hive.ql.plan.MapWork): 17
HashMap (java.util.HashMap): 16
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 16
TableDesc (org.apache.hadoop.hive.ql.plan.TableDesc): 16
List (java.util.List): 15
Map (java.util.Map): 14