
Example 11 with SerializedValue

Use of org.apache.flink.util.SerializedValue in project flink by apache.

From the class TaskTest, method createTask:

private Task createTask(Class<? extends AbstractInvokable> invokable, LibraryCacheManager libCache, NetworkEnvironment networkEnvironment, ResultPartitionConsumableNotifier consumableNotifier, PartitionProducerStateChecker partitionProducerStateChecker, Executor executor, Configuration taskManagerConfig, ExecutionConfig execConfig) throws IOException {
    JobID jobId = new JobID();
    JobVertexID jobVertexId = new JobVertexID();
    ExecutionAttemptID executionAttemptId = new ExecutionAttemptID();
    InputSplitProvider inputSplitProvider = new TaskInputSplitProvider(jobManagerGateway, jobId, jobVertexId, executionAttemptId, new FiniteDuration(60, TimeUnit.SECONDS));
    CheckpointResponder checkpointResponder = new ActorGatewayCheckpointResponder(jobManagerGateway);
    SerializedValue<ExecutionConfig> serializedExecutionConfig = new SerializedValue<>(execConfig);
    JobInformation jobInformation = new JobInformation(jobId, "Test Job", serializedExecutionConfig, new Configuration(), Collections.<BlobKey>emptyList(), Collections.<URL>emptyList());
    TaskInformation taskInformation = new TaskInformation(jobVertexId, "Test Task", 1, 1, invokable.getName(), new Configuration());
    return new Task(
        jobInformation, taskInformation, executionAttemptId, new AllocationID(), 0, 0,
        Collections.<ResultPartitionDeploymentDescriptor>emptyList(),
        Collections.<InputGateDeploymentDescriptor>emptyList(),
        0, null,
        mock(MemoryManager.class), mock(IOManager.class), networkEnvironment,
        mock(BroadcastVariableManager.class), taskManagerConnection, inputSplitProvider,
        checkpointResponder, libCache, mock(FileCache.class),
        new TestingTaskManagerRuntimeInfo(taskManagerConfig), mock(TaskMetricGroup.class),
        consumableNotifier, partitionProducerStateChecker, executor);
}
Also used : JobInformation(org.apache.flink.runtime.executiongraph.JobInformation) ExecutionAttemptID(org.apache.flink.runtime.executiongraph.ExecutionAttemptID) TaskInformation(org.apache.flink.runtime.executiongraph.TaskInformation) Configuration(org.apache.flink.configuration.Configuration) IOManager(org.apache.flink.runtime.io.disk.iomanager.IOManager) TaskMetricGroup(org.apache.flink.runtime.metrics.groups.TaskMetricGroup) JobVertexID(org.apache.flink.runtime.jobgraph.JobVertexID) AllocationID(org.apache.flink.runtime.clusterframework.types.AllocationID) FiniteDuration(scala.concurrent.duration.FiniteDuration) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) SerializedValue(org.apache.flink.util.SerializedValue) MemoryManager(org.apache.flink.runtime.memory.MemoryManager) FileCache(org.apache.flink.runtime.filecache.FileCache) TestingTaskManagerRuntimeInfo(org.apache.flink.runtime.util.TestingTaskManagerRuntimeInfo) BroadcastVariableManager(org.apache.flink.runtime.broadcast.BroadcastVariableManager) InputSplitProvider(org.apache.flink.runtime.jobgraph.tasks.InputSplitProvider) JobID(org.apache.flink.api.common.JobID)
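
A note on what the serialized config above is for: the JobInformation carries the ExecutionConfig to the TaskManager as bytes, and the task side unwraps it again with a class loader that can see the user code. The following is a minimal, hedged sketch of that round trip (the class name SerializedValueRoundTrip and the parallelism value are illustrative and not part of the test above):

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.util.SerializedValue;

public class SerializedValueRoundTrip {

    public static void main(String[] args) throws Exception {
        ExecutionConfig original = new ExecutionConfig();
        original.setParallelism(4);
        // JobManager side: SerializedValue eagerly serializes the config into a byte array.
        SerializedValue<ExecutionConfig> serialized = new SerializedValue<>(original);
        // Task side: deserialize with a class loader that can resolve the user classes.
        ExecutionConfig restored = serialized.deserializeValue(SerializedValueRoundTrip.class.getClassLoader());
        // prints 4
        System.out.println(restored.getParallelism());
    }
}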

Example 12 with SerializedValue

Use of org.apache.flink.util.SerializedValue in project flink by apache.

From the class ExecutionGraphSignalsTest, method prepare:

@Before
public void prepare() throws Exception {
    final JobID jobId = new JobID();
    final String jobName = "Test Job Sample Name";
    final Configuration cfg = new Configuration();
    assert (mockEJV.length == 5);
    JobVertex v1 = new JobVertex("vertex1");
    JobVertex v2 = new JobVertex("vertex2");
    JobVertex v3 = new JobVertex("vertex3");
    JobVertex v4 = new JobVertex("vertex4");
    JobVertex v5 = new JobVertex("vertex5");
    for (int i = 0; i < mockEJV.length; ++i) {
        mockEJV[i] = mock(ExecutionJobVertex.class);
        this.mockEV[i] = new ExecutionVertex[dop[i]];
        for (int j = 0; j < dop[i]; ++j) {
            this.mockEV[i][j] = mock(ExecutionVertex.class);
        }
        when(mockEJV[i].getProducedDataSets()).thenReturn(new IntermediateResult[0]);
        when(mockEJV[i].getTaskVertices()).thenReturn(this.mockEV[i]);
    }
    PowerMockito.whenNew(ExecutionJobVertex.class).withArguments(any(ExecutionGraph.class), same(v1), any(Integer.class).intValue(), any(FiniteDuration.class), any(Long.class).longValue()).thenReturn(mockEJV[0]);
    PowerMockito.whenNew(ExecutionJobVertex.class).withArguments(any(ExecutionGraph.class), same(v2), any(Integer.class).intValue(), any(FiniteDuration.class), any(Long.class).longValue()).thenReturn(mockEJV[1]);
    PowerMockito.whenNew(ExecutionJobVertex.class).withArguments(any(ExecutionGraph.class), same(v3), any(Integer.class).intValue(), any(FiniteDuration.class), any(Long.class).longValue()).thenReturn(mockEJV[2]);
    PowerMockito.whenNew(ExecutionJobVertex.class).withArguments(any(ExecutionGraph.class), same(v4), any(Integer.class).intValue(), any(FiniteDuration.class), any(Long.class).longValue()).thenReturn(mockEJV[3]);
    PowerMockito.whenNew(ExecutionJobVertex.class).withArguments(any(ExecutionGraph.class), same(v5), any(Integer.class).intValue(), any(FiniteDuration.class), any(Long.class).longValue()).thenReturn(mockEJV[4]);
    v1.setParallelism(dop[0]);
    v2.setParallelism(dop[1]);
    v3.setParallelism(dop[2]);
    v4.setParallelism(dop[3]);
    v5.setParallelism(dop[4]);
    v2.connectNewDataSetAsInput(v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
    mockNumberOfInputs(1, 0);
    v4.connectNewDataSetAsInput(v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
    mockNumberOfInputs(3, 1);
    v4.connectNewDataSetAsInput(v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
    mockNumberOfInputs(3, 2);
    v5.connectNewDataSetAsInput(v4, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
    mockNumberOfInputs(4, 3);
    v5.connectNewDataSetAsInput(v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
    mockNumberOfInputs(4, 2);
    List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2, v3, v4, v5));
    eg = new ExecutionGraph(
        TestingUtils.defaultExecutor(), TestingUtils.defaultExecutor(),
        jobId, jobName, cfg,
        new SerializedValue<>(new ExecutionConfig()), AkkaUtils.getDefaultTimeout(),
        new NoRestartStrategy(), new Scheduler(TestingUtils.defaultExecutionContext()));
    eg.attachJobGraph(ordered);
    f = eg.getClass().getDeclaredField("state");
    f.setAccessible(true);
}
Also used : Configuration(org.apache.flink.configuration.Configuration) Scheduler(org.apache.flink.runtime.jobmanager.scheduler.Scheduler) ArrayList(java.util.ArrayList) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) SerializedValue(org.apache.flink.util.SerializedValue) NoRestartStrategy(org.apache.flink.runtime.executiongraph.restart.NoRestartStrategy) JobVertex(org.apache.flink.runtime.jobgraph.JobVertex) JobID(org.apache.flink.api.common.JobID) Before(org.junit.Before)
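
The PowerMockito plumbing above replaces the real ExecutionJobVertex instances, but the JobVertex wiring itself is ordinary Flink API. A minimal sketch of the same topology-building calls without mocks (the class name TwoVertexTopology is illustrative):

import org.apache.flink.runtime.io.network.partition.ResultPartitionType;
import org.apache.flink.runtime.jobgraph.DistributionPattern;
import org.apache.flink.runtime.jobgraph.JobVertex;

public class TwoVertexTopology {

    public static void main(String[] args) {
        JobVertex source = new JobVertex("source");
        JobVertex sink = new JobVertex("sink");
        source.setParallelism(2);
        sink.setParallelism(2);
        // Every sink subtask consumes every source subtask over a pipelined intermediate
        // result, the same pattern the test uses for v1 -> v2 -> v4 -> v5.
        sink.connectNewDataSetAsInput(source, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
    }
}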

Example 13 with SerializedValue

Use of org.apache.flink.util.SerializedValue in project flink by apache.

From the class ArchivedExecutionGraphTest, method setupExecutionGraph:

@BeforeClass
public static void setupExecutionGraph() throws Exception {
    // -------------------------------------------------------------------------------------------------------------
    // Setup
    // -------------------------------------------------------------------------------------------------------------
    v1ID = new JobVertexID();
    v2ID = new JobVertexID();
    JobVertex v1 = new JobVertex("v1", v1ID);
    JobVertex v2 = new JobVertex("v2", v2ID);
    v1.setParallelism(1);
    v2.setParallelism(2);
    v1.setInvokableClass(AbstractInvokable.class);
    v2.setInvokableClass(AbstractInvokable.class);
    List<JobVertex> vertices = new ArrayList<JobVertex>(Arrays.asList(v1, v2));
    ExecutionConfig config = new ExecutionConfig();
    config.setExecutionMode(ExecutionMode.BATCH_FORCED);
    config.setRestartStrategy(new RestartStrategies.NoRestartStrategyConfiguration());
    config.setParallelism(4);
    config.enableObjectReuse();
    config.setGlobalJobParameters(new TestJobParameters());
    runtimeGraph = new ExecutionGraph(
        TestingUtils.defaultExecutor(), TestingUtils.defaultExecutor(),
        new JobID(), "test job", new Configuration(),
        new SerializedValue<>(config), AkkaUtils.getDefaultTimeout(),
        new NoRestartStrategy(), mock(SlotProvider.class));
    runtimeGraph.attachJobGraph(vertices);
    List<ExecutionJobVertex> jobVertices = new ArrayList<>();
    jobVertices.add(runtimeGraph.getJobVertex(v1ID));
    jobVertices.add(runtimeGraph.getJobVertex(v2ID));
    CheckpointStatsTracker statsTracker = new CheckpointStatsTracker(0, jobVertices, mock(JobSnapshottingSettings.class), new UnregisteredMetricsGroup());
    runtimeGraph.enableCheckpointing(100, 100, 100, 1, ExternalizedCheckpointSettings.none(), Collections.<ExecutionJobVertex>emptyList(), Collections.<ExecutionJobVertex>emptyList(), Collections.<ExecutionJobVertex>emptyList(), new StandaloneCheckpointIDCounter(), new StandaloneCompletedCheckpointStore(1), null, null, statsTracker);
    Map<String, Accumulator<?, ?>> userAccumulators = new HashMap<>();
    userAccumulators.put("userAcc", new LongCounter(64));
    Execution executionWithAccumulators = runtimeGraph.getJobVertex(v1ID).getTaskVertices()[0].getCurrentExecutionAttempt();
    runtimeGraph.getJobVertex(v2ID).getTaskVertices()[0].getCurrentExecutionAttempt().fail(new RuntimeException("This exception was thrown on purpose."));
}
Also used : Accumulator(org.apache.flink.api.common.accumulators.Accumulator) UnregisteredMetricsGroup(org.apache.flink.metrics.groups.UnregisteredMetricsGroup) Configuration(org.apache.flink.configuration.Configuration) CheckpointStatsTracker(org.apache.flink.runtime.checkpoint.CheckpointStatsTracker) HashMap(java.util.HashMap) JobVertexID(org.apache.flink.runtime.jobgraph.JobVertexID) ArrayList(java.util.ArrayList) ArchivedExecutionConfig(org.apache.flink.api.common.ArchivedExecutionConfig) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) LongCounter(org.apache.flink.api.common.accumulators.LongCounter) RestartStrategies(org.apache.flink.api.common.restartstrategy.RestartStrategies) JobSnapshottingSettings(org.apache.flink.runtime.jobgraph.tasks.JobSnapshottingSettings) SerializedValue(org.apache.flink.util.SerializedValue) NoRestartStrategy(org.apache.flink.runtime.executiongraph.restart.NoRestartStrategy) JobVertex(org.apache.flink.runtime.jobgraph.JobVertex) StandaloneCompletedCheckpointStore(org.apache.flink.runtime.checkpoint.StandaloneCompletedCheckpointStore) JobID(org.apache.flink.api.common.JobID) StandaloneCheckpointIDCounter(org.apache.flink.runtime.checkpoint.StandaloneCheckpointIDCounter) BeforeClass(org.junit.BeforeClass)
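
The TestJobParameters class used above is not part of the excerpt; it is a user-defined ExecutionConfig.GlobalJobParameters implementation that travels inside the SerializedValue<ExecutionConfig>. A hedged, minimal stand-in might look like the following (the real class in the test may differ):

import java.util.Collections;
import java.util.Map;

import org.apache.flink.api.common.ExecutionConfig;

public class TestJobParameters extends ExecutionConfig.GlobalJobParameters {

    private static final long serialVersionUID = 1L;

    @Override
    public Map<String, String> toMap() {
        // Exposed as plain key-value pairs when the execution graph is archived.
        return Collections.singletonMap("testKey", "testValue");
    }
}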

Aggregations

SerializedValue (org.apache.flink.util.SerializedValue) 13
JobID (org.apache.flink.api.common.JobID) 11
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig) 9
Configuration (org.apache.flink.configuration.Configuration) 9
JobVertexID (org.apache.flink.runtime.jobgraph.JobVertexID) 8
ExecutionAttemptID (org.apache.flink.runtime.executiongraph.ExecutionAttemptID) 7
Test (org.junit.Test) 7
TaskDeploymentDescriptor (org.apache.flink.runtime.deployment.TaskDeploymentDescriptor) 6
AllocationID (org.apache.flink.runtime.clusterframework.types.AllocationID) 5
JobInformation (org.apache.flink.runtime.executiongraph.JobInformation) 5
TaskInformation (org.apache.flink.runtime.executiongraph.TaskInformation) 5
ArrayList (java.util.ArrayList) 4
BlobKey (org.apache.flink.runtime.blob.BlobKey) 4
ActorGateway (org.apache.flink.runtime.instance.ActorGateway) 4
JavaTestKit (akka.testkit.JavaTestKit) 3
BroadcastVariableManager (org.apache.flink.runtime.broadcast.BroadcastVariableManager) 3
FileCache (org.apache.flink.runtime.filecache.FileCache) 3
IOManager (org.apache.flink.runtime.io.disk.iomanager.IOManager) 3
ActorRef (akka.actor.ActorRef) 2
IOException (java.io.IOException) 2