
Example 6 with IntermediateDataSet

Use of org.apache.flink.runtime.jobgraph.IntermediateDataSet in project flink by apache.

From the class DefaultLogicalVertexTest, method buildVerticesAndResults.

private void buildVerticesAndResults() {
    resultMap = new HashMap<>();
    results = new HashSet<>();
    final int parallelism = 3;
    upstreamJobVertex = createNoOpVertex(parallelism);
    downstreamJobVertex = createNoOpVertex(parallelism);
    // each connection creates a fresh IntermediateDataSet on the upstream vertex
    for (int i = 0; i < 5; i++) {
        final JobEdge edge = downstreamJobVertex.connectNewDataSetAsInput(upstreamJobVertex, ALL_TO_ALL, PIPELINED);
        final IntermediateDataSet consumedDataSet = edge.getSource();
        results.add(consumedDataSet);
        resultMap.put(consumedDataSet.getId(), consumedDataSet);
    }
}
Also used: IntermediateDataSet (org.apache.flink.runtime.jobgraph.IntermediateDataSet), JobEdge (org.apache.flink.runtime.jobgraph.JobEdge)
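For context, a hypothetical follow-up assertion over the fixture built above (getProducer() and getId() are the standard accessors on IntermediateDataSet; the assertion itself is not part of the original test):

for (IntermediateDataSet result : results) {
    // every collected data set must be retrievable by its ID...
    assertEquals(result, resultMap.get(result.getId()));
    // ...and must report the upstream vertex as its producer
    assertEquals(upstreamJobVertex.getID(), result.getProducer().getID());
}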

Example 7 with IntermediateDataSet

Use of org.apache.flink.runtime.jobgraph.IntermediateDataSet in project flink by apache.

From the class ExecutionGraphConstructionTest, method testAttachViaDataSets.

@Test
public void testAttachViaDataSets() throws Exception {
    final JobID jobId = new JobID();
    final String jobName = "Test Job Sample Name";
    final Configuration cfg = new Configuration();
    // construct part one of the execution graph
    JobVertex v1 = new JobVertex("vertex1");
    JobVertex v2 = new JobVertex("vertex2");
    JobVertex v3 = new JobVertex("vertex3");
    v1.setParallelism(5);
    v2.setParallelism(7);
    v3.setParallelism(2);
    v1.setInvokableClass(AbstractInvokable.class);
    v2.setInvokableClass(AbstractInvokable.class);
    v3.setInvokableClass(AbstractInvokable.class);
    // this creates an intermediate result for v1
    v2.connectNewDataSetAsInput(v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
    // create results for v2 and v3
    IntermediateDataSet v2result = v2.createAndAddResultDataSet(ResultPartitionType.PIPELINED);
    IntermediateDataSet v3result_1 = v3.createAndAddResultDataSet(ResultPartitionType.PIPELINED);
    IntermediateDataSet v3result_2 = v3.createAndAddResultDataSet(ResultPartitionType.PIPELINED);
    List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2, v3));
    ExecutionGraph eg = new ExecutionGraph(
            TestingUtils.defaultExecutor(),
            TestingUtils.defaultExecutor(),
            jobId,
            jobName,
            cfg,
            new SerializedValue<>(new ExecutionConfig()),
            AkkaUtils.getDefaultTimeout(),
            new NoRestartStrategy(),
            new Scheduler(TestingUtils.defaultExecutionContext()));
    try {
        eg.attachJobGraph(ordered);
    } catch (JobException e) {
        e.printStackTrace();
        fail("Job failed with exception: " + e.getMessage());
    }
    // attach the second part of the graph
    JobVertex v4 = new JobVertex("vertex4");
    JobVertex v5 = new JobVertex("vertex5");
    v4.setParallelism(11);
    v5.setParallelism(4);
    v4.setInvokableClass(AbstractInvokable.class);
    v5.setInvokableClass(AbstractInvokable.class);
    v4.connectDataSetAsInput(v2result, DistributionPattern.ALL_TO_ALL);
    v4.connectDataSetAsInput(v3result_1, DistributionPattern.ALL_TO_ALL);
    v5.connectNewDataSetAsInput(v4, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
    v5.connectDataSetAsInput(v3result_2, DistributionPattern.ALL_TO_ALL);
    List<JobVertex> ordered2 = new ArrayList<JobVertex>(Arrays.asList(v4, v5));
    try {
        eg.attachJobGraph(ordered2);
    } catch (JobException e) {
        e.printStackTrace();
        fail("Job failed with exception: " + e.getMessage());
    }
    // verify
    verifyTestGraph(eg, jobId, v1, v2, v3, v4, v5);
}
Also used: IntermediateDataSet (org.apache.flink.runtime.jobgraph.IntermediateDataSet), Configuration (org.apache.flink.configuration.Configuration), Scheduler (org.apache.flink.runtime.jobmanager.scheduler.Scheduler), ArrayList (java.util.ArrayList), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), NoRestartStrategy (org.apache.flink.runtime.executiongraph.restart.NoRestartStrategy), JobException (org.apache.flink.runtime.JobException), JobVertex (org.apache.flink.runtime.jobgraph.JobVertex), JobID (org.apache.flink.api.common.JobID), Test (org.junit.Test)
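Two wiring styles appear in this test; as a minimal side-by-side sketch, using only calls already shown above:

// Style 1: create a new IntermediateDataSet and connect the consumer in one call.
v2.connectNewDataSetAsInput(v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
// Style 2: create the data set first, keep the reference, and connect a consumer
// later; this is what lets the second half of the graph attach to the first.
IntermediateDataSet result = v3.createAndAddResultDataSet(ResultPartitionType.PIPELINED);
v4.connectDataSetAsInput(result, DistributionPattern.ALL_TO_ALL);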

Example 8 with IntermediateDataSet

Use of org.apache.flink.runtime.jobgraph.IntermediateDataSet in project flink by apache.

From the class SsgNetworkMemoryCalculationUtils, method getMaxSubpartitionNums.

private static Map<IntermediateDataSetID, Integer> getMaxSubpartitionNums(
        ExecutionJobVertex ejv, Function<JobVertexID, ExecutionJobVertex> ejvs) {
    Map<IntermediateDataSetID, Integer> ret = new HashMap<>();
    List<IntermediateDataSet> producedDataSets = ejv.getJobVertex().getProducedDataSets();
    // for each produced data set, derive the maximum subpartition count from the
    // consumer's parallelism and the edge's distribution pattern
    for (int i = 0; i < producedDataSets.size(); i++) {
        IntermediateDataSet producedDataSet = producedDataSets.get(i);
        JobEdge outputEdge = checkNotNull(producedDataSet.getConsumer());
        ExecutionJobVertex consumerJobVertex = ejvs.apply(outputEdge.getTarget().getID());
        int maxNum =
                EdgeManagerBuildUtil.computeMaxEdgesToTargetExecutionVertex(
                        ejv.getParallelism(),
                        consumerJobVertex.getParallelism(),
                        outputEdge.getDistributionPattern());
        ret.put(producedDataSet.getId(), maxNum);
    }
    return ret;
}
Also used: IntermediateDataSet (org.apache.flink.runtime.jobgraph.IntermediateDataSet), HashMap (java.util.HashMap), ExecutionJobVertex (org.apache.flink.runtime.executiongraph.ExecutionJobVertex), JobEdge (org.apache.flink.runtime.jobgraph.JobEdge), IntermediateDataSetID (org.apache.flink.runtime.jobgraph.IntermediateDataSetID)
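A hypothetical call site for this private helper (executionGraph is an assumed variable here; ExecutionGraph.getJobVertex(JobVertexID) is the standard lookup used to resolve consumer vertices):

// Assumed: ejv and executionGraph already exist in scope.
Map<IntermediateDataSetID, Integer> maxSubpartitions =
        getMaxSubpartitionNums(ejv, id -> executionGraph.getJobVertex(id));
maxSubpartitions.forEach((dataSetId, num) ->
        System.out.println(dataSetId + " -> at most " + num + " subpartitions"));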

Example 9 with IntermediateDataSet

Use of org.apache.flink.runtime.jobgraph.IntermediateDataSet in project flink by apache.

From the class StateAssignmentOperation, method reDistributeResultSubpartitionStates.

public <I, T extends AbstractChannelStateHandle<I>> void reDistributeResultSubpartitionStates(TaskStateAssignment assignment) {
    if (!assignment.hasOutputState) {
        return;
    }
    checkForUnsupportedToplogyChanges(assignment.oldState, OperatorSubtaskState::getResultSubpartitionState, assignment.outputOperatorID);
    final OperatorState outputState = assignment.oldState.get(assignment.outputOperatorID);
    final List<List<ResultSubpartitionStateHandle>> outputOperatorState = splitBySubtasks(outputState, OperatorSubtaskState::getResultSubpartitionState);
    final ExecutionJobVertex executionJobVertex = assignment.executionJobVertex;
    final List<IntermediateDataSet> outputs = executionJobVertex.getJobVertex().getProducedDataSets();
    if (outputState.getParallelism() == executionJobVertex.getParallelism()) {
        // parallelism unchanged: old subtask states map one-to-one to new subtasks
        assignment.resultSubpartitionStates.putAll(toInstanceMap(assignment.outputOperatorID, outputOperatorState));
        return;
    }
    // The parallelism changed, so re-split each produced data set's state
    // according to its output mapping.
    for (int partitionIndex = 0; partitionIndex < outputs.size(); partitionIndex++) {
        final List<List<ResultSubpartitionStateHandle>> partitionState =
                outputs.size() == 1
                        ? outputOperatorState
                        : getPartitionState(
                                outputOperatorState,
                                ResultSubpartitionInfo::getPartitionIdx,
                                partitionIndex);
        final MappingBasedRepartitioner<ResultSubpartitionStateHandle> repartitioner =
                new MappingBasedRepartitioner<>(
                        assignment.getOutputMapping(partitionIndex).getRescaleMappings());
        final Map<OperatorInstanceID, List<ResultSubpartitionStateHandle>> repartitioned =
                applyRepartitioner(
                        assignment.outputOperatorID,
                        repartitioner,
                        partitionState,
                        outputOperatorState.size(),
                        executionJobVertex.getParallelism());
        addToSubtasks(assignment.resultSubpartitionStates, repartitioned);
    }
}
Also used: OperatorInstanceID (org.apache.flink.runtime.jobgraph.OperatorInstanceID), IntermediateDataSet (org.apache.flink.runtime.jobgraph.IntermediateDataSet), ExecutionJobVertex (org.apache.flink.runtime.executiongraph.ExecutionJobVertex), ResultSubpartitionStateHandle (org.apache.flink.runtime.state.ResultSubpartitionStateHandle), ArrayList (java.util.ArrayList), Collections.emptyList (java.util.Collections.emptyList), List (java.util.List)
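The rescale branch is easiest to see on a toy example. The sketch below is plain Java, not Flink API: old parallelism 2, new parallelism 3, and a hard-coded old-to-new mapping stands in for the RescaleMappings used above.

// Toy illustration only: redistribute old subtask states to new subtasks.
List<List<String>> oldSubtaskStates =
        List.of(List.of("p0-s0", "p0-s1"), List.of("p1-s0"));
int[][] newToOld = {{0}, {0, 1}, {1}}; // new subtask i consumes these old subtasks
for (int i = 0; i < newToOld.length; i++) {
    List<String> assigned = new ArrayList<>();
    for (int oldIndex : newToOld[i]) {
        assigned.addAll(oldSubtaskStates.get(oldIndex));
    }
    System.out.println("new subtask " + i + " gets " + assigned);
}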

Example 10 with IntermediateDataSet

Use of org.apache.flink.runtime.jobgraph.IntermediateDataSet in project flink by apache.

From the class DefaultLogicalTopology, method buildVerticesAndResults.

private void buildVerticesAndResults(final Iterable<JobVertex> topologicallySortedJobVertices) {
    final Function<JobVertexID, DefaultLogicalVertex> vertexRetriever = this::getVertex;
    final Function<IntermediateDataSetID, DefaultLogicalResult> resultRetriever = this::getResult;
    for (JobVertex jobVertex : topologicallySortedJobVertices) {
        final DefaultLogicalVertex logicalVertex = new DefaultLogicalVertex(jobVertex, resultRetriever);
        this.verticesSorted.add(logicalVertex);
        this.idToVertexMap.put(logicalVertex.getId(), logicalVertex);
        for (IntermediateDataSet intermediateDataSet : jobVertex.getProducedDataSets()) {
            final DefaultLogicalResult logicalResult = new DefaultLogicalResult(intermediateDataSet, vertexRetriever);
            idToResultMap.put(logicalResult.getId(), logicalResult);
        }
    }
}
Also used: JobVertex (org.apache.flink.runtime.jobgraph.JobVertex), IntermediateDataSet (org.apache.flink.runtime.jobgraph.IntermediateDataSet), JobVertexID (org.apache.flink.runtime.jobgraph.JobVertexID), IntermediateDataSetID (org.apache.flink.runtime.jobgraph.IntermediateDataSetID)
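A minimal sketch of the input this method iterates over, using only calls already shown in the examples above: connecting two vertices creates one IntermediateDataSet, produced by the upstream vertex.

JobVertex source = new JobVertex("source");
JobVertex sink = new JobVertex("sink");
// one IntermediateDataSet is created, produced by source and consumed by sink
sink.connectNewDataSetAsInput(source, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
for (IntermediateDataSet dataSet : source.getProducedDataSets()) {
    System.out.println(dataSet.getId());
}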

Aggregations

IntermediateDataSet (org.apache.flink.runtime.jobgraph.IntermediateDataSet): 11 usages
JobVertex (org.apache.flink.runtime.jobgraph.JobVertex): 6 usages
ArrayList (java.util.ArrayList): 5 usages
JobException (org.apache.flink.runtime.JobException): 4 usages
Test (org.junit.Test): 4 usages
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 3 usages
JobID (org.apache.flink.api.common.JobID): 3 usages
Configuration (org.apache.flink.configuration.Configuration): 3 usages
NoRestartStrategy (org.apache.flink.runtime.executiongraph.restart.NoRestartStrategy): 3 usages
IntermediateDataSetID (org.apache.flink.runtime.jobgraph.IntermediateDataSetID): 3 usages
JobEdge (org.apache.flink.runtime.jobgraph.JobEdge): 3 usages
Scheduler (org.apache.flink.runtime.jobmanager.scheduler.Scheduler): 3 usages
HashMap (java.util.HashMap): 2 usages
ExecutionJobVertex (org.apache.flink.runtime.executiongraph.ExecutionJobVertex): 2 usages
IOException (java.io.IOException): 1 usage
String.format (java.lang.String.format): 1 usage
Collection (java.util.Collection): 1 usage
Collections.emptyList (java.util.Collections.emptyList): 1 usage
List (java.util.List): 1 usage
ListIterator (java.util.ListIterator): 1 usage