Example usage of org.apache.flink.api.common.operators.ResourceSpec in the Apache Flink project, taken from the class OperatorTest, method testConfigurationOfResource.
@Test
public void testConfigurationOfResource() throws Exception {
    Operator operator = new MockOperator();

    // setResources(min, preferred) is not public, so reach it via reflection.
    Method setResources =
            Operator.class.getDeclaredMethod("setResources", ResourceSpec.class, ResourceSpec.class);
    setResources.setAccessible(true);

    // Apply an explicit pair of resource specs and check both getters reflect them.
    ResourceSpec min = ResourceSpec.newBuilder(1.0, 100).build();
    ResourceSpec preferred = ResourceSpec.newBuilder(2.0, 200).build();
    setResources.invoke(operator, min, preferred);

    assertEquals(min, operator.getMinResources());
    assertEquals(preferred, operator.getPreferredResources());
}
Example usage of org.apache.flink.api.common.operators.ResourceSpec in the Apache Flink project, taken from the class JobGraphGeneratorTest, method testResourcesForDeltaIteration.
/**
 * Verifies that the resources are set onto each job vertex correctly when generating a job graph
 * that contains a delta iteration.
 *
 * <p>Resources are assigned to the individual operators via reflection (the setters are not
 * public API), and then the generated job vertices are checked in topological order.
 */
@Test
public void testResourcesForDeltaIteration() throws Exception {
    ResourceSpec resource1 = ResourceSpec.newBuilder(0.1, 100).build();
    ResourceSpec resource2 = ResourceSpec.newBuilder(0.2, 200).build();
    ResourceSpec resource3 = ResourceSpec.newBuilder(0.3, 300).build();
    ResourceSpec resource4 = ResourceSpec.newBuilder(0.4, 400).build();
    ResourceSpec resource5 = ResourceSpec.newBuilder(0.5, 500).build();
    ResourceSpec resource6 = ResourceSpec.newBuilder(0.6, 600).build();

    // The setResources methods are not public; access them reflectively.
    Method opMethod = Operator.class.getDeclaredMethod("setResources", ResourceSpec.class);
    opMethod.setAccessible(true);
    Method deltaMethod = DeltaIteration.class.getDeclaredMethod("setResources", ResourceSpec.class);
    deltaMethod.setAccessible(true);
    Method sinkMethod = DataSink.class.getDeclaredMethod("setResources", ResourceSpec.class);
    sinkMethod.setAccessible(true);

    MapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> mapFunction =
            new MapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>>() {
                @Override
                public Tuple2<Long, Long> map(Tuple2<Long, Long> value) throws Exception {
                    return value;
                }
            };
    FilterFunction<Tuple2<Long, Long>> filterFunction =
            new FilterFunction<Tuple2<Long, Long>>() {
                @Override
                public boolean filter(Tuple2<Long, Long> value) throws Exception {
                    return false;
                }
            };

    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple2<Long, Long>> input = env.fromElements(new Tuple2<>(1L, 2L));
    opMethod.invoke(input, resource1);

    // CHAIN(Map -> Filter)
    DataSet<Tuple2<Long, Long>> map = input.map(mapFunction);
    opMethod.invoke(map, resource2);

    DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iteration =
            map.iterateDelta(map, 100, 0).registerAggregator("test", new LongSumAggregator());
    deltaMethod.invoke(iteration, resource3);

    DataSet<Tuple2<Long, Long>> delta = iteration.getWorkset().map(mapFunction);
    opMethod.invoke(delta, resource4);

    DataSet<Tuple2<Long, Long>> feedback = delta.filter(filterFunction);
    opMethod.invoke(feedback, resource5);

    DataSink<Tuple2<Long, Long>> sink =
            iteration.closeWith(delta, feedback)
                    .output(new DiscardingOutputFormat<Tuple2<Long, Long>>());
    sinkMethod.invoke(sink, resource6);

    JobGraph jobGraph = compileJob(env);

    // Fetch the topologically sorted vertices once: each call re-sorts the graph,
    // so the original per-lookup calls did redundant work.
    final java.util.List<JobVertex> sortedVertices =
            jobGraph.getVerticesSortedTopologicallyFromSources();
    JobVertex sourceMapVertex = sortedVertices.get(0);
    JobVertex iterationHeadVertex = sortedVertices.get(1);
    JobVertex deltaVertex = sortedVertices.get(2);
    JobVertex iterationTailVertex = sortedVertices.get(3);
    JobVertex feedbackVertex = sortedVertices.get(4);
    JobVertex sinkVertex = sortedVertices.get(5);
    JobVertex iterationSyncVertex = sortedVertices.get(6);

    // assertEquals (instead of assertTrue(a.equals(b))) yields a diagnostic
    // message showing both values when the check fails.
    assertEquals(resource1.merge(resource2), sourceMapVertex.getMinResources());
    assertEquals(resource3, iterationHeadVertex.getPreferredResources());
    assertEquals(resource4, deltaVertex.getMinResources());
    // The iteration tail task will be scheduled in the same instance as the iteration
    // head, and currently has no resources set.
    assertEquals(ResourceSpec.DEFAULT, iterationTailVertex.getPreferredResources());
    assertEquals(resource5, feedbackVertex.getMinResources());
    assertEquals(resource6, sinkVertex.getPreferredResources());
    assertEquals(resource3, iterationSyncVertex.getMinResources());
}
Example usage of org.apache.flink.api.common.operators.ResourceSpec in the Apache Flink project, taken from the class ResourceProfileTest, method testFromSpecWithSerializationCopy.
@Test
public void testFromSpecWithSerializationCopy() throws Exception {
    // Round-trip ResourceSpec.UNKNOWN through Java serialization; the profile derived
    // from the copy must equal the profile derived from the original singleton.
    final ResourceSpec deserializedSpec = CommonTestUtils.createCopySerializable(ResourceSpec.UNKNOWN);
    final ResourceProfile profileFromCopy = ResourceProfile.fromResourceSpec(deserializedSpec);
    final ResourceProfile profileFromOriginal = ResourceProfile.fromResourceSpec(ResourceSpec.UNKNOWN);
    assertEquals(profileFromOriginal, profileFromCopy);
}
Example usage of org.apache.flink.api.common.operators.ResourceSpec in the Apache Flink project, taken from the class ResourceProfileTest, method testAllFieldsNoLessThanProfile.
@Test
public void testAllFieldsNoLessThanProfile() {
    // Four profiles spanning the cpu x memory lattice:
    //   base      = (1 cpu, 100 mem)   moreMemory = (1 cpu, 200 mem)
    //   moreCpu   = (2 cpu, 100 mem)   moreBoth   = (2 cpu, 200 mem)
    final ResourceProfile base =
            ResourceProfile.newBuilder().setCpuCores(1.0).setTaskHeapMemoryMB(100).setTaskOffHeapMemoryMB(100).setManagedMemoryMB(100).build();
    final ResourceProfile moreMemory =
            ResourceProfile.newBuilder().setCpuCores(1.0).setTaskHeapMemoryMB(200).setTaskOffHeapMemoryMB(200).setManagedMemoryMB(200).build();
    final ResourceProfile moreCpu =
            ResourceProfile.newBuilder().setCpuCores(2.0).setTaskHeapMemoryMB(100).setTaskOffHeapMemoryMB(100).setManagedMemoryMB(100).build();
    final ResourceProfile moreBoth =
            ResourceProfile.newBuilder().setCpuCores(2.0).setTaskHeapMemoryMB(200).setTaskOffHeapMemoryMB(200).setManagedMemoryMB(200).build();

    // Strictly smaller in some field => not "no less than".
    assertFalse(base.allFieldsNoLessThan(moreMemory));
    assertTrue(moreMemory.allFieldsNoLessThan(base));
    assertFalse(base.allFieldsNoLessThan(moreCpu));
    assertTrue(moreCpu.allFieldsNoLessThan(base));

    // Incomparable profiles: each is larger in one dimension, smaller in another.
    assertFalse(moreMemory.allFieldsNoLessThan(moreCpu));
    assertFalse(moreCpu.allFieldsNoLessThan(moreMemory));

    // The top element dominates everything, including itself (reflexivity).
    assertTrue(moreBoth.allFieldsNoLessThan(base));
    assertTrue(moreBoth.allFieldsNoLessThan(moreMemory));
    assertTrue(moreBoth.allFieldsNoLessThan(moreCpu));
    assertTrue(moreBoth.allFieldsNoLessThan(moreBoth));

    // Adding network memory makes a profile that moreBoth no longer dominates.
    final ResourceProfile withNetwork =
            ResourceProfile.newBuilder().setCpuCores(2.0).setTaskHeapMemoryMB(100).setTaskOffHeapMemoryMB(100).setManagedMemoryMB(100).setNetworkMemoryMB(100).build();
    assertFalse(moreBoth.allFieldsNoLessThan(withNetwork));

    // Extended (external) resources participate in the comparison as well.
    ResourceSpec largeExtSpec =
            ResourceSpec.newBuilder(1.0, 100).setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 2.2)).build();
    ResourceSpec smallExtSpec =
            ResourceSpec.newBuilder(1.0, 100).setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1.1)).build();
    assertFalse(base.allFieldsNoLessThan(ResourceProfile.fromResourceSpec(largeExtSpec)));
    assertTrue(ResourceProfile.fromResourceSpec(largeExtSpec).allFieldsNoLessThan(ResourceProfile.fromResourceSpec(smallExtSpec)));
    assertFalse(ResourceProfile.fromResourceSpec(smallExtSpec).allFieldsNoLessThan(ResourceProfile.fromResourceSpec(largeExtSpec)));
}
Example usage of org.apache.flink.api.common.operators.ResourceSpec in the Apache Flink project, taken from the class StreamingJobGraphGeneratorTest, method testResourcesForIteration.
/**
 * Verifies that the resources are merged correctly for chained operators (covers middle
 * chaining and iteration cases) when generating a job graph.
 *
 * <p>Resources are assigned via reflection (the setters are not public API); the generated
 * vertices are then located by name and their resources checked.
 */
@Test
public void testResourcesForIteration() throws Exception {
    ResourceSpec resource1 = ResourceSpec.newBuilder(0.1, 100).build();
    ResourceSpec resource2 = ResourceSpec.newBuilder(0.2, 200).build();
    ResourceSpec resource3 = ResourceSpec.newBuilder(0.3, 300).build();
    ResourceSpec resource4 = ResourceSpec.newBuilder(0.4, 400).build();
    ResourceSpec resource5 = ResourceSpec.newBuilder(0.5, 500).build();

    Method opMethod = getSetResourcesMethodAndSetAccessible(SingleOutputStreamOperator.class);
    Method sinkMethod = getSetResourcesMethodAndSetAccessible(DataStreamSink.class);

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<Integer> source = env.addSource(new ParallelSourceFunction<Integer>() {
        @Override
        public void run(SourceContext<Integer> ctx) throws Exception {
        }

        @Override
        public void cancel() {
        }
    }).name("test_source");
    opMethod.invoke(source, resource1);

    IterativeStream<Integer> iteration = source.iterate(3000);
    opMethod.invoke(iteration, resource2);

    DataStream<Integer> flatMap = iteration.flatMap(new FlatMapFunction<Integer, Integer>() {
        @Override
        public void flatMap(Integer value, Collector<Integer> out) throws Exception {
            out.collect(value);
        }
    }).name("test_flatMap");
    opMethod.invoke(flatMap, resource3);

    // CHAIN(flatMap -> Filter)
    DataStream<Integer> increment = flatMap.filter(new FilterFunction<Integer>() {
        @Override
        public boolean filter(Integer value) throws Exception {
            return false;
        }
    }).name("test_filter");
    opMethod.invoke(increment, resource4);

    DataStreamSink<Integer> sink = iteration.closeWith(increment).addSink(new SinkFunction<Integer>() {
        @Override
        public void invoke(Integer value) throws Exception {
        }
    }).disableChaining().name("test_sink");
    sinkMethod.invoke(sink, resource5);

    JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph());

    // assertEquals (instead of assertTrue(a.equals(b))) reports both values on failure,
    // which makes a broken resource assignment far easier to diagnose.
    for (JobVertex jobVertex : jobGraph.getVertices()) {
        if (jobVertex.getName().contains("test_source")) {
            assertEquals(resource1, jobVertex.getMinResources());
        } else if (jobVertex.getName().contains("Iteration_Source")) {
            assertEquals(resource2, jobVertex.getPreferredResources());
        } else if (jobVertex.getName().contains("test_flatMap")) {
            // flatMap chains with the filter, so their resources must be merged.
            assertEquals(resource3.merge(resource4), jobVertex.getMinResources());
        } else if (jobVertex.getName().contains("Iteration_Tail")) {
            assertEquals(ResourceSpec.DEFAULT, jobVertex.getPreferredResources());
        } else if (jobVertex.getName().contains("test_sink")) {
            assertEquals(resource5, jobVertex.getMinResources());
        }
    }
}
Aggregations