
Example 1 with ResourceSpec

Use of org.apache.flink.api.common.operators.ResourceSpec in the Apache Flink project.

From the class JobGraphGeneratorTest, method testResourcesForChainedOperators:

/**
 * Verifies that the resources are merged correctly for chained operators when generating the
 * job graph.
 */
@Test
public void testResourcesForChainedOperators() throws Exception {
    ResourceSpec resource1 = ResourceSpec.newBuilder(0.1, 100).build();
    ResourceSpec resource2 = ResourceSpec.newBuilder(0.2, 200).build();
    ResourceSpec resource3 = ResourceSpec.newBuilder(0.3, 300).build();
    ResourceSpec resource4 = ResourceSpec.newBuilder(0.4, 400).build();
    ResourceSpec resource5 = ResourceSpec.newBuilder(0.5, 500).build();
    ResourceSpec resource6 = ResourceSpec.newBuilder(0.6, 600).build();
    ResourceSpec resource7 = ResourceSpec.newBuilder(0.7, 700).build();
    Method opMethod = Operator.class.getDeclaredMethod("setResources", ResourceSpec.class);
    opMethod.setAccessible(true);
    Method sinkMethod = DataSink.class.getDeclaredMethod("setResources", ResourceSpec.class);
    sinkMethod.setAccessible(true);
    MapFunction<Long, Long> mapFunction = new MapFunction<Long, Long>() {

        @Override
        public Long map(Long value) throws Exception {
            return value;
        }
    };
    FilterFunction<Long> filterFunction = new FilterFunction<Long>() {

        @Override
        public boolean filter(Long value) throws Exception {
            return false;
        }
    };
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Long> input = env.fromElements(1L, 2L, 3L);
    opMethod.invoke(input, resource1);
    DataSet<Long> map1 = input.map(mapFunction);
    opMethod.invoke(map1, resource2);
    // CHAIN(Source -> Map -> Filter)
    DataSet<Long> filter1 = map1.filter(filterFunction);
    opMethod.invoke(filter1, resource3);
    IterativeDataSet<Long> startOfIteration = filter1.iterate(10);
    opMethod.invoke(startOfIteration, resource4);
    DataSet<Long> map2 = startOfIteration.map(mapFunction);
    opMethod.invoke(map2, resource5);
    // CHAIN(Map -> Filter)
    DataSet<Long> feedback = map2.filter(filterFunction);
    opMethod.invoke(feedback, resource6);
    DataSink<Long> sink = startOfIteration.closeWith(feedback).output(new DiscardingOutputFormat<Long>());
    sinkMethod.invoke(sink, resource7);
    JobGraph jobGraph = compileJob(env);
    JobVertex sourceMapFilterVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(0);
    JobVertex iterationHeadVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(1);
    JobVertex feedbackVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(2);
    JobVertex sinkVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(3);
    JobVertex iterationSyncVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(4);
    assertTrue(sourceMapFilterVertex.getMinResources().equals(resource1.merge(resource2).merge(resource3)));
    assertTrue(iterationHeadVertex.getPreferredResources().equals(resource4));
    assertTrue(feedbackVertex.getMinResources().equals(resource5.merge(resource6)));
    assertTrue(sinkVertex.getPreferredResources().equals(resource7));
    assertTrue(iterationSyncVertex.getMinResources().equals(resource4));
}
Also used: FilterFunction (org.apache.flink.api.common.functions.FilterFunction), ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment), ResourceSpec (org.apache.flink.api.common.operators.ResourceSpec), Method (java.lang.reflect.Method), MapFunction (org.apache.flink.api.common.functions.MapFunction), JobGraph (org.apache.flink.runtime.jobgraph.JobGraph), JobVertex (org.apache.flink.runtime.jobgraph.JobVertex), Test (org.junit.Test)
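
A minimal standalone sketch (not part of the test above) of the merge step the chained-vertex assertions rely on. It assumes, as those assertions imply, that ResourceSpec.merge combines two specs by adding their CPU cores and task heap memory:

ResourceSpec mapSpec = ResourceSpec.newBuilder(0.1, 100).build();    // 0.1 CPU cores, 100 MB task heap
ResourceSpec filterSpec = ResourceSpec.newBuilder(0.2, 200).build(); // 0.2 CPU cores, 200 MB task heap

// Operators chained into a single JobVertex contribute one merged spec; under the
// summing assumption above, this describes 0.3 CPU cores and 300 MB of task heap.
ResourceSpec chainSpec = mapSpec.merge(filterSpec);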

Example 2 with ResourceSpec

Use of org.apache.flink.api.common.operators.ResourceSpec in the Apache Flink project.

From the class DispatcherTest, method testJobSubmissionWithPartialResourceConfigured:

/**
 * Tests that a job submission in which only some vertices have resources configured is rejected
 * with a JobSubmissionException.
 */
@Test
public void testJobSubmissionWithPartialResourceConfigured() throws Exception {
    ResourceSpec resourceSpec = ResourceSpec.newBuilder(2.0, 10).build();
    final JobVertex firstVertex = new JobVertex("firstVertex");
    firstVertex.setInvokableClass(NoOpInvokable.class);
    firstVertex.setResources(resourceSpec, resourceSpec);
    final JobVertex secondVertex = new JobVertex("secondVertex");
    secondVertex.setInvokableClass(NoOpInvokable.class);
    JobGraph jobGraphWithTwoVertices = JobGraphTestUtils.streamingJobGraph(firstVertex, secondVertex);
    dispatcher = createAndStartDispatcher(heartbeatServices, haServices, new ExpectedJobIdJobManagerRunnerFactory(jobId, createdJobManagerRunnerLatch));
    DispatcherGateway dispatcherGateway = dispatcher.getSelfGateway(DispatcherGateway.class);
    CompletableFuture<Acknowledge> acknowledgeFuture = dispatcherGateway.submitJob(jobGraphWithTwoVertices, TIMEOUT);
    try {
        acknowledgeFuture.get();
        fail("job submission should have failed");
    } catch (ExecutionException e) {
        assertTrue(ExceptionUtils.findThrowable(e, JobSubmissionException.class).isPresent());
    }
}
Also used: JobGraph (org.apache.flink.runtime.jobgraph.JobGraph), JobVertex (org.apache.flink.runtime.jobgraph.JobVertex), Acknowledge (org.apache.flink.runtime.messages.Acknowledge), ResourceSpec (org.apache.flink.api.common.operators.ResourceSpec), JobSubmissionException (org.apache.flink.runtime.client.JobSubmissionException), DuplicateJobSubmissionException (org.apache.flink.runtime.client.DuplicateJobSubmissionException), ExecutionException (java.util.concurrent.ExecutionException), Test (org.junit.Test)
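
For contrast, here is a hypothetical sketch (not from the test suite) of the fully configured counterpart, which the Dispatcher would be expected to accept rather than reject for partially configured resources:

ResourceSpec resourceSpec = ResourceSpec.newBuilder(2.0, 10).build();

JobVertex firstVertex = new JobVertex("firstVertex");
firstVertex.setInvokableClass(NoOpInvokable.class);
firstVertex.setResources(resourceSpec, resourceSpec);

JobVertex secondVertex = new JobVertex("secondVertex");
secondVertex.setInvokableClass(NoOpInvokable.class);
// The failing test omits this call, leaving secondVertex without resources.
secondVertex.setResources(resourceSpec, resourceSpec);

JobGraph fullyConfigured = JobGraphTestUtils.streamingJobGraph(firstVertex, secondVertex);
// Submitting fullyConfigured through dispatcherGateway.submitJob(...) should not
// fail with a JobSubmissionException caused by partial resource configuration.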

Example 3 with ResourceSpec

Use of org.apache.flink.api.common.operators.ResourceSpec in the Apache Flink project.

From the class ResourceProfileTest, method testGet:

@Test
public void testGet() {
    ResourceSpec rs = ResourceSpec.newBuilder(1.0, 100).setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.6)).build();
    ResourceProfile rp = ResourceProfile.fromResourceSpec(rs, MemorySize.ofMebiBytes(50));
    assertEquals(new CPUResource(1.0), rp.getCpuCores());
    assertEquals(150, rp.getTotalMemory().getMebiBytes());
    assertEquals(100, rp.getOperatorsMemory().getMebiBytes());
    assertEquals(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.6), rp.getExtendedResources().get(EXTERNAL_RESOURCE_NAME));
}
Also used: ResourceSpec (org.apache.flink.api.common.operators.ResourceSpec), CPUResource (org.apache.flink.api.common.resources.CPUResource), ExternalResource (org.apache.flink.api.common.resources.ExternalResource), Test (org.junit.Test)
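
The memory figures asserted above follow from how fromResourceSpec combines the spec with the extra network memory. A short sketch of that arithmetic, assuming operators memory is the spec's task heap plus off-heap plus managed memory, and total memory adds the given network memory on top:

ResourceSpec spec = ResourceSpec.newBuilder(1.0, 100).build();   // 100 MB task heap, no off-heap/managed
ResourceProfile profile = ResourceProfile.fromResourceSpec(spec, MemorySize.ofMebiBytes(50));

// Under the assumption above:
//   operators memory = 100 MB (task heap only)
//   total memory     = 100 MB + 50 MB network = 150 MB
// which matches profile.getOperatorsMemory() and profile.getTotalMemory() in the test.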

Example 4 with ResourceSpec

Use of org.apache.flink.api.common.operators.ResourceSpec in the Apache Flink project.

From the class ResourceProfileTest, method testEquals:

@Test
public void testEquals() {
    ResourceSpec rs1 = ResourceSpec.newBuilder(1.0, 100).build();
    ResourceSpec rs2 = ResourceSpec.newBuilder(1.0, 100).build();
    assertEquals(ResourceProfile.fromResourceSpec(rs1), ResourceProfile.fromResourceSpec(rs2));
    ResourceSpec rs3 = ResourceSpec.newBuilder(1.0, 100).setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 2.2)).build();
    ResourceSpec rs4 = ResourceSpec.newBuilder(1.0, 100).setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.1)).build();
    assertNotEquals(ResourceProfile.fromResourceSpec(rs3), ResourceProfile.fromResourceSpec(rs4));
    ResourceSpec rs5 = ResourceSpec.newBuilder(1.0, 100).setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 2.2)).build();
    MemorySize networkMemory = MemorySize.ofMebiBytes(100);
    assertEquals(ResourceProfile.fromResourceSpec(rs3, networkMemory), ResourceProfile.fromResourceSpec(rs5, networkMemory));
    final ResourceProfile rp1 = ResourceProfile.newBuilder()
            .setCpuCores(1.0).setTaskHeapMemoryMB(100).setTaskOffHeapMemoryMB(100)
            .setManagedMemoryMB(100).setNetworkMemoryMB(100).build();
    final ResourceProfile rp2 = ResourceProfile.newBuilder()
            .setCpuCores(1.1).setTaskHeapMemoryMB(100).setTaskOffHeapMemoryMB(100)
            .setManagedMemoryMB(100).setNetworkMemoryMB(100).build();
    final ResourceProfile rp3 = ResourceProfile.newBuilder()
            .setCpuCores(1.0).setTaskHeapMemoryMB(110).setTaskOffHeapMemoryMB(100)
            .setManagedMemoryMB(100).setNetworkMemoryMB(100).build();
    final ResourceProfile rp4 = ResourceProfile.newBuilder()
            .setCpuCores(1.0).setTaskHeapMemoryMB(100).setTaskOffHeapMemoryMB(110)
            .setManagedMemoryMB(100).setNetworkMemoryMB(100).build();
    final ResourceProfile rp5 = ResourceProfile.newBuilder()
            .setCpuCores(1.0).setTaskHeapMemoryMB(100).setTaskOffHeapMemoryMB(100)
            .setManagedMemoryMB(110).setNetworkMemoryMB(100).build();
    final ResourceProfile rp6 = ResourceProfile.newBuilder()
            .setCpuCores(1.0).setTaskHeapMemoryMB(100).setTaskOffHeapMemoryMB(100)
            .setManagedMemoryMB(110).setNetworkMemoryMB(100).build();
    final ResourceProfile rp7 = ResourceProfile.newBuilder()
            .setCpuCores(1.0).setTaskHeapMemoryMB(100).setTaskOffHeapMemoryMB(100)
            .setManagedMemoryMB(100).setNetworkMemoryMB(110).build();
    final ResourceProfile rp8 = ResourceProfile.newBuilder()
            .setCpuCores(1.0).setTaskHeapMemoryMB(100).setTaskOffHeapMemoryMB(100)
            .setManagedMemoryMB(100).setNetworkMemoryMB(100).build();
    assertNotEquals(rp1, rp2);
    assertNotEquals(rp1, rp3);
    assertNotEquals(rp1, rp4);
    assertNotEquals(rp1, rp5);
    assertNotEquals(rp1, rp6);
    assertNotEquals(rp1, rp7);
    assertEquals(rp1, rp8);
}
Also used: MemorySize (org.apache.flink.configuration.MemorySize), ResourceSpec (org.apache.flink.api.common.operators.ResourceSpec), ExternalResource (org.apache.flink.api.common.resources.ExternalResource), Test (org.junit.Test)
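
A hypothetical follow-on sketch, assuming hashCode() is consistent with the field-wise equals() behaviour exercised above (the test itself does not assert this): equal profiles then collapse to a single key when counting slots per profile.

Map<ResourceProfile, Integer> slotCountsByProfile = new HashMap<>();
slotCountsByProfile.merge(ResourceProfile.fromResourceSpec(rs1), 1, Integer::sum);
slotCountsByProfile.merge(ResourceProfile.fromResourceSpec(rs2), 1, Integer::sum);
// rs1 and rs2 describe identical resources, so under the assumption above the
// map ends up with a single entry whose value is 2.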

Example 5 with ResourceSpec

Use of org.apache.flink.api.common.operators.ResourceSpec in the Apache Flink project.

From the class StreamGraphGeneratorTest, method testIteration:

/**
 * Tests an iteration job, checking the slot sharing group, co-location group, and resources of
 * the iteration source/sink pair.
 */
@Test
public void testIteration() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<Integer> source = env.fromElements(1, 2, 3).name("source");
    IterativeStream<Integer> iteration = source.iterate(3000);
    iteration.name("iteration").setParallelism(2);
    DataStream<Integer> map = iteration.map(x -> x + 1).name("map").setParallelism(2);
    DataStream<Integer> filter = map.filter((x) -> false).name("filter").setParallelism(2);
    iteration.closeWith(filter).print();
    final ResourceSpec resources = ResourceSpec.newBuilder(1.0, 100).build();
    iteration.getTransformation().setResources(resources, resources);
    StreamGraph streamGraph = env.getStreamGraph();
    for (Tuple2<StreamNode, StreamNode> iterationPair : streamGraph.getIterationSourceSinkPairs()) {
        assertNotNull(iterationPair.f0.getCoLocationGroup());
        assertEquals(iterationPair.f0.getCoLocationGroup(), iterationPair.f1.getCoLocationGroup());
        assertEquals(StreamGraphGenerator.DEFAULT_SLOT_SHARING_GROUP, iterationPair.f0.getSlotSharingGroup());
        assertEquals(iterationPair.f0.getSlotSharingGroup(), iterationPair.f1.getSlotSharingGroup());
        final ResourceSpec sourceMinResources = iterationPair.f0.getMinResources();
        final ResourceSpec sinkMinResources = iterationPair.f1.getMinResources();
        final ResourceSpec iterationResources = sourceMinResources.merge(sinkMinResources);
        assertThat(iterationResources, equalsResourceSpec(resources));
    }
}
Also used: ResourceSpec (org.apache.flink.api.common.operators.ResourceSpec), StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment), Test (org.junit.Test)
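
A hypothetical variant (not part of the test): the same two-argument setResources call also allows distinct minimum and preferred resources on a transformation, for example on the map operator from the job above.

ResourceSpec minResources = ResourceSpec.newBuilder(0.5, 50).build();
ResourceSpec preferredResources = ResourceSpec.newBuilder(1.0, 100).build();
map.getTransformation().setResources(minResources, preferredResources);
// The corresponding StreamNode would then report minResources through
// getMinResources(), as the loop over the iteration source/sink pairs does above.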

Aggregations

ResourceSpec (org.apache.flink.api.common.operators.ResourceSpec): 17 usages
Test (org.junit.Test): 15 usages
Method (java.lang.reflect.Method): 7 usages
JobGraph (org.apache.flink.runtime.jobgraph.JobGraph): 7 usages
JobVertex (org.apache.flink.runtime.jobgraph.JobVertex): 7 usages
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 5 usages
FilterFunction (org.apache.flink.api.common.functions.FilterFunction): 4 usages
ExternalResource (org.apache.flink.api.common.resources.ExternalResource): 4 usages
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 4 usages
MapFunction (org.apache.flink.api.common.functions.MapFunction): 3 usages
ArrayList (java.util.ArrayList): 2 usages
HashMap (java.util.HashMap): 2 usages
Map (java.util.Map): 2 usages
Set (java.util.Set): 2 usages
FlatMapFunction (org.apache.flink.api.common.functions.FlatMapFunction): 2 usages
Transformation (org.apache.flink.api.dag.Transformation): 2 usages
MultipleInputTransformation (org.apache.flink.streaming.api.transformations.MultipleInputTransformation): 2 usages
OneInputTransformation (org.apache.flink.streaming.api.transformations.OneInputTransformation): 2 usages
PartitionTransformation (org.apache.flink.streaming.api.transformations.PartitionTransformation): 2 usages
Arrays (java.util.Arrays): 1 usage