
Example 1 with GroupInputSpecProto

Use of org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto in project hive by apache.

From the class Converters, method getTaskSpecfromProto:

public static TaskSpec getTaskSpecfromProto(SignableVertexSpec vectorProto, int fragmentNum, int attemptNum, TezTaskAttemptID attemptId) {
    // Reuse the caller-supplied attempt id when present, otherwise derive one from the vertex spec.
    TezTaskAttemptID taskAttemptID = attemptId != null ? attemptId : createTaskAttemptId(vectorProto.getQueryIdentifier(), vectorProto.getVertexIndex(), fragmentNum, attemptNum);
    ProcessorDescriptor processorDescriptor = null;
    if (vectorProto.hasProcessorDescriptor()) {
        processorDescriptor = convertProcessorDescriptorFromProto(vectorProto.getProcessorDescriptor());
    }
    // Convert each serialized input spec into a Tez InputSpec.
    List<InputSpec> inputSpecList = new ArrayList<InputSpec>(vectorProto.getInputSpecsCount());
    if (vectorProto.getInputSpecsCount() > 0) {
        for (IOSpecProto inputSpecProto : vectorProto.getInputSpecsList()) {
            inputSpecList.add(getInputSpecFromProto(inputSpecProto));
        }
    }
    // Convert each serialized output spec into a Tez OutputSpec.
    List<OutputSpec> outputSpecList = new ArrayList<OutputSpec>(vectorProto.getOutputSpecsCount());
    if (vectorProto.getOutputSpecsCount() > 0) {
        for (IOSpecProto outputSpecProto : vectorProto.getOutputSpecsList()) {
            outputSpecList.add(getOutputSpecFromProto(outputSpecProto));
        }
    }
    // Convert each GroupInputSpecProto into a Tez GroupInputSpec.
    List<GroupInputSpec> groupInputSpecs = new ArrayList<GroupInputSpec>(vectorProto.getGroupedInputSpecsCount());
    if (vectorProto.getGroupedInputSpecsCount() > 0) {
        for (GroupInputSpecProto groupInputSpecProto : vectorProto.getGroupedInputSpecsList()) {
            groupInputSpecs.add(getGroupInputSpecFromProto(groupInputSpecProto));
        }
    }
    // Assemble the Tez TaskSpec from the converted pieces.
    TaskSpec taskSpec = new TaskSpec(taskAttemptID, vectorProto.getDagName(), vectorProto.getVertexName(), vectorProto.getVertexParallelism(), processorDescriptor, inputSpecList, outputSpecList, groupInputSpecs);
    return taskSpec;
}
Also used:
IOSpecProto (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto)
TaskSpec (org.apache.tez.runtime.api.impl.TaskSpec)
ArrayList (java.util.ArrayList)
ProcessorDescriptor (org.apache.tez.dag.api.ProcessorDescriptor)
InputSpec (org.apache.tez.runtime.api.impl.InputSpec)
GroupInputSpec (org.apache.tez.runtime.api.impl.GroupInputSpec)
GroupInputSpecProto (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto)
TezTaskAttemptID (org.apache.tez.dag.records.TezTaskAttemptID)
OutputSpec (org.apache.tez.runtime.api.impl.OutputSpec)
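The helper getGroupInputSpecFromProto called above is not shown in this example. Below is a minimal sketch of what that conversion could look like; the GroupInputSpec constructor arguments, the getMergedInputDescriptor() accessor, and the convertInputDescriptorFromProto helper are assumptions inferred from the surrounding code, not the verbatim Hive implementation.

private static GroupInputSpec getGroupInputSpecFromProto(GroupInputSpecProto groupInputSpecProto) {
    // Copy the repeated group_vertices field into a plain list of member vertex names.
    List<String> groupVertices = new ArrayList<String>(groupInputSpecProto.getGroupVerticesList());
    // Rebuild the merged input descriptor from its serialized proto form
    // (assumed helper, analogous to convertProcessorDescriptorFromProto above).
    InputDescriptor mergedInputDescriptor = convertInputDescriptorFromProto(groupInputSpecProto.getMergedInputDescriptor());
    // Assumed Tez constructor: GroupInputSpec(String groupName, List<String> groupVertices, InputDescriptor mergedInputDescriptor).
    return new GroupInputSpec(groupInputSpecProto.getGroupName(), groupVertices, mergedInputDescriptor);
}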

Example 2 with GroupInputSpecProto

Use of org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto in project hive by apache.

From the class ContainerRunnerImpl, method stringifySubmitRequest:

public static String stringifySubmitRequest(SubmitWorkRequestProto request, SignableVertexSpec vertex) {
    StringBuilder sb = new StringBuilder();
    sb.append("am_details=").append(request.getAmHost()).append(":").append(request.getAmPort());
    sb.append(", taskInfo=").append(" fragment ").append(request.getFragmentNumber()).append(" attempt ").append(request.getAttemptNumber());
    sb.append(", user=").append(vertex.getUser());
    sb.append(", queryId=").append(vertex.getHiveQueryId());
    sb.append(", appIdString=").append(vertex.getQueryIdentifier().getApplicationIdString());
    sb.append(", appAttemptNum=").append(vertex.getQueryIdentifier().getAppAttemptNumber());
    sb.append(", containerIdString=").append(request.getContainerIdString());
    sb.append(", dagName=").append(vertex.getDagName());
    sb.append(", vertexName=").append(vertex.getVertexName());
    sb.append(", processor=").append(vertex.getProcessorDescriptor().getClassName());
    sb.append(", numInputs=").append(vertex.getInputSpecsCount());
    sb.append(", numOutputs=").append(vertex.getOutputSpecsCount());
    sb.append(", numGroupedInputs=").append(vertex.getGroupedInputSpecsCount());
    sb.append(", Inputs={");
    if (vertex.getInputSpecsCount() > 0) {
        for (IOSpecProto ioSpec : vertex.getInputSpecsList()) {
            sb.append("{").append(ioSpec.getConnectedVertexName()).append(",").append(ioSpec.getIoDescriptor().getClassName()).append(",").append(ioSpec.getPhysicalEdgeCount()).append("}");
        }
    }
    sb.append("}");
    sb.append(", Outputs={");
    if (vertex.getOutputSpecsCount() > 0) {
        for (IOSpecProto ioSpec : vertex.getOutputSpecsList()) {
            sb.append("{").append(ioSpec.getConnectedVertexName()).append(",").append(ioSpec.getIoDescriptor().getClassName()).append(",").append(ioSpec.getPhysicalEdgeCount()).append("}");
        }
    }
    sb.append("}");
    sb.append(", GroupedInputs={");
    if (vertex.getGroupedInputSpecsCount() > 0) {
        for (GroupInputSpecProto group : vertex.getGroupedInputSpecsList()) {
            sb.append("{").append("groupName=").append(group.getGroupName()).append(", elements=").append(group.getGroupVerticesList()).append("}");
            sb.append(group.getGroupVerticesList());
        }
    }
    sb.append("}");
    FragmentRuntimeInfo fragmentRuntimeInfo = request.getFragmentRuntimeInfo();
    sb.append(", FragmentRuntimeInfo={");
    sb.append("taskCount=").append(fragmentRuntimeInfo.getNumSelfAndUpstreamTasks());
    sb.append(", completedTaskCount=").append(fragmentRuntimeInfo.getNumSelfAndUpstreamCompletedTasks());
    sb.append(", dagStartTime=").append(fragmentRuntimeInfo.getDagStartTime());
    sb.append(", firstAttemptStartTime=").append(fragmentRuntimeInfo.getFirstAttemptStartTime());
    sb.append(", currentAttemptStartTime=").append(fragmentRuntimeInfo.getCurrentAttemptStartTime());
    sb.append("}");
    return sb.toString();
}
Also used:
IOSpecProto (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto)
FragmentRuntimeInfo (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo)
GroupInputSpecProto (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto)
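For context, a GroupInputSpecProto like the ones logged above would normally be produced through the protobuf-generated builder. The sketch below uses only the standard builder methods implied by the getGroupName() and getGroupVerticesList() accessors; the group and vertex names are made up for illustration, and remaining fields (such as the merged input descriptor) are assumed optional and omitted for brevity.

// Hypothetical example of building the proto that the GroupedInputs loop above stringifies.
GroupInputSpecProto group = GroupInputSpecProto.newBuilder()
    .setGroupName("unionGroup")
    .addGroupVertices("Map 1")
    .addGroupVertices("Map 2")
    .build();
// With the loop above, this renders as: {groupName=unionGroup, elements=[Map 1, Map 2]}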

Aggregations

GroupInputSpecProto (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto): 2 usages
IOSpecProto (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto): 2 usages
ArrayList (java.util.ArrayList): 1 usage
FragmentRuntimeInfo (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo): 1 usage
ProcessorDescriptor (org.apache.tez.dag.api.ProcessorDescriptor): 1 usage
TezTaskAttemptID (org.apache.tez.dag.records.TezTaskAttemptID): 1 usage
GroupInputSpec (org.apache.tez.runtime.api.impl.GroupInputSpec): 1 usage
InputSpec (org.apache.tez.runtime.api.impl.InputSpec): 1 usage
OutputSpec (org.apache.tez.runtime.api.impl.OutputSpec): 1 usage
TaskSpec (org.apache.tez.runtime.api.impl.TaskSpec): 1 usage