Example 71 with DataStream

Use of org.apache.flink.streaming.api.datastream.DataStream in project flink by apache.

The class TypeFillTest, method test.

@Test
public void test() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    try {
        env.addSource(new TestSource<Integer>()).print();
        fail();
    } catch (Exception ignored) {
    }
    DataStream<Long> source = env.generateSequence(1, 10);
    try {
        source.map(new TestMap<Long, Long>()).print();
        fail();
    } catch (Exception ignored) {
    }
    try {
        source.flatMap(new TestFlatMap<Long, Long>()).print();
        fail();
    } catch (Exception ignored) {
    }
    try {
        source.connect(source).map(new TestCoMap<Long, Long, Integer>()).print();
        fail();
    } catch (Exception ignored) {
    }
    try {
        source.connect(source).flatMap(new TestCoFlatMap<Long, Long, Integer>()).print();
        fail();
    } catch (Exception ignored) {
    }
    try {
        source.keyBy(new TestKeySelector<Long, String>()).print();
        fail();
    } catch (Exception ignored) {
    }
    try {
        source.connect(source).keyBy(new TestKeySelector<Long, String>(), new TestKeySelector<>());
        fail();
    } catch (Exception ignored) {
    }
    try {
        source.coGroup(source).where(new TestKeySelector<>()).equalTo(new TestKeySelector<>());
        fail();
    } catch (Exception ignored) {
    }
    try {
        source.join(source).where(new TestKeySelector<>()).equalTo(new TestKeySelector<>());
        fail();
    } catch (Exception ignored) {
    }
    try {
        source.keyBy((in) -> in).intervalJoin(source.keyBy((in) -> in)).between(Time.milliseconds(10L), Time.milliseconds(10L)).process(new TestProcessJoinFunction<>()).print();
        fail();
    } catch (Exception ignored) {
    }
    env.addSource(new TestSource<Integer>()).returns(Integer.class);
    source.map(new TestMap<Long, Long>()).returns(Long.class).print();
    source.flatMap(new TestFlatMap<Long, Long>()).returns(new TypeHint<Long>() {
    }).print();
    source.connect(source).map(new TestCoMap<Long, Long, Integer>()).returns(BasicTypeInfo.INT_TYPE_INFO).print();
    source.connect(source).flatMap(new TestCoFlatMap<Long, Long, Integer>()).returns(BasicTypeInfo.INT_TYPE_INFO).print();
    source.connect(source).keyBy(new TestKeySelector<>(), new TestKeySelector<>(), Types.STRING);
    source.coGroup(source).where(new TestKeySelector<>(), Types.STRING).equalTo(new TestKeySelector<>(), Types.STRING);
    source.join(source).where(new TestKeySelector<>(), Types.STRING).equalTo(new TestKeySelector<>(), Types.STRING);
    source.keyBy((in) -> in).intervalJoin(source.keyBy((in) -> in)).between(Time.milliseconds(10L), Time.milliseconds(10L)).process(new TestProcessJoinFunction<Long, Long, String>()).returns(Types.STRING);
    source.keyBy((in) -> in).intervalJoin(source.keyBy((in) -> in)).between(Time.milliseconds(10L), Time.milliseconds(10L)).process(new TestProcessJoinFunction<>(), Types.STRING);
    assertEquals(BasicTypeInfo.LONG_TYPE_INFO, source.map(new TestMap<Long, Long>()).returns(Long.class).getType());
    SingleOutputStreamOperator<String> map = source.map(new MapFunction<Long, String>() {

        @Override
        public String map(Long value) throws Exception {
            return null;
        }
    });
    map.print();
    try {
        map.returns(String.class);
        fail();
    } catch (Exception ignored) {
    }
}
Also used : Types(org.apache.flink.api.common.typeinfo.Types) CoMapFunction(org.apache.flink.streaming.api.functions.co.CoMapFunction) ProcessJoinFunction(org.apache.flink.streaming.api.functions.co.ProcessJoinFunction) Time(org.apache.flink.streaming.api.windowing.time.Time) KeySelector(org.apache.flink.api.java.functions.KeySelector) SingleOutputStreamOperator(org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator) CoFlatMapFunction(org.apache.flink.streaming.api.functions.co.CoFlatMapFunction) Test(org.junit.Test) TypeHint(org.apache.flink.api.common.typeinfo.TypeHint) MapFunction(org.apache.flink.api.common.functions.MapFunction) FlatMapFunction(org.apache.flink.api.common.functions.FlatMapFunction) BasicTypeInfo(org.apache.flink.api.common.typeinfo.BasicTypeInfo) DataStream(org.apache.flink.streaming.api.datastream.DataStream) SourceFunction(org.apache.flink.streaming.api.functions.source.SourceFunction) Collector(org.apache.flink.util.Collector) Assert.fail(org.junit.Assert.fail) Assert.assertEquals(org.junit.Assert.assertEquals) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) TypeHint(org.apache.flink.api.common.typeinfo.TypeHint) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) Test(org.junit.Test)
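
All of the failing calls above stem from type erasure: a generic function such as TestMap<Long, Long> exposes no output type at runtime, so the result stream stays untyped until returns(...) supplies it. Below is a minimal standalone sketch of the same idea using only the public DataStream API; the class names ReturnsHintSketch and CastingMap are illustrative and not part of the Flink test.

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class ReturnsHintSketch {

    // A generic function whose OUT type variable cannot be recovered from the input alone.
    public static class CastingMap<IN, OUT> implements MapFunction<IN, OUT> {
        @SuppressWarnings("unchecked")
        @Override
        public OUT map(IN value) {
            return (OUT) value;
        }
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<Long> source = env.fromElements(1L, 2L, 3L);

        // Without a hint the map output type is a MissingTypeInfo, and the downstream
        // print() would fail, exactly as the try/fail blocks in the test expect.
        // Supplying the type explicitly makes the pipeline valid:
        source.map(new CastingMap<Long, Long>()).returns(Types.LONG).print();

        // A TypeHint works the same way and also captures nested generic types.
        source.map(new CastingMap<Long, Long>()).returns(new TypeHint<Long>() {}).print();

        env.execute("returns() hint sketch");
    }
}

As the last block of the test shows, the hint has to be given before the output type is consumed: once print() has used the type, a later returns(String.class) is rejected.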

Example 72 with DataStream

Use of org.apache.flink.streaming.api.datastream.DataStream in project flink by apache.

The class StreamGraphGeneratorTest, method testMaxParallelismForwarding.

/**
 * Tests that the global and operator-wide max parallelism settings are respected.
 */
@Test
public void testMaxParallelismForwarding() {
    int globalMaxParallelism = 42;
    int keyedResult2MaxParallelism = 17;
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setMaxParallelism(globalMaxParallelism);
    DataStream<Integer> source = env.fromElements(1, 2, 3);
    DataStream<Integer> keyedResult1 = source.keyBy(value -> value).map(new NoOpIntMap());
    DataStream<Integer> keyedResult2 = keyedResult1.keyBy(value -> value).map(new NoOpIntMap()).setMaxParallelism(keyedResult2MaxParallelism);
    keyedResult2.addSink(new DiscardingSink<>());
    StreamGraph graph = env.getStreamGraph();
    StreamNode keyedResult1Node = graph.getStreamNode(keyedResult1.getId());
    StreamNode keyedResult2Node = graph.getStreamNode(keyedResult2.getId());
    assertEquals(globalMaxParallelism, keyedResult1Node.getMaxParallelism());
    assertEquals(keyedResult2MaxParallelism, keyedResult2Node.getMaxParallelism());
}
Also used : Arrays(java.util.Arrays) Tuple2(org.apache.flink.api.java.tuple.Tuple2) BroadcastPartitioner(org.apache.flink.streaming.runtime.partitioner.BroadcastPartitioner) SlotSharingGroup(org.apache.flink.api.common.operators.SlotSharingGroup) KeyedBroadcastProcessFunction(org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction) BasicTypeInfo(org.apache.flink.api.common.typeinfo.BasicTypeInfo) ShufflePartitioner(org.apache.flink.streaming.runtime.partitioner.ShufflePartitioner) ChainingStrategy(org.apache.flink.streaming.api.operators.ChainingStrategy) ResourceSpec(org.apache.flink.api.common.operators.ResourceSpec) ManagedMemoryUseCase(org.apache.flink.core.memory.ManagedMemoryUseCase) Map(java.util.Map) TestLogger(org.apache.flink.util.TestLogger) Function(org.apache.flink.api.common.functions.Function) Assertions(org.assertj.core.api.Assertions) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) CoMapFunction(org.apache.flink.streaming.api.functions.co.CoMapFunction) PartitionTransformation(org.apache.flink.streaming.api.transformations.PartitionTransformation) SinkFunction(org.apache.flink.streaming.api.functions.sink.SinkFunction) StreamTask(org.apache.flink.streaming.runtime.tasks.StreamTask) Collection(java.util.Collection) ConnectedStreams(org.apache.flink.streaming.api.datastream.ConnectedStreams) TypeSafeMatcher(org.hamcrest.TypeSafeMatcher) ResourceProfile(org.apache.flink.runtime.clusterframework.types.ResourceProfile) GlobalPartitioner(org.apache.flink.streaming.runtime.partitioner.GlobalPartitioner) List(java.util.List) NoOpIntMap(org.apache.flink.streaming.util.NoOpIntMap) Matchers.equalTo(org.hamcrest.Matchers.equalTo) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) CheckpointConfig(org.apache.flink.streaming.api.environment.CheckpointConfig) Matchers.is(org.hamcrest.Matchers.is) OneInputStreamOperator(org.apache.flink.streaming.api.operators.OneInputStreamOperator) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) MultipleInputTransformation(org.apache.flink.streaming.api.transformations.MultipleInputTransformation) IterativeStream(org.apache.flink.streaming.api.datastream.IterativeStream) BroadcastStream(org.apache.flink.streaming.api.datastream.BroadcastStream) AbstractUdfStreamOperator(org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator) StreamOperatorFactory(org.apache.flink.streaming.api.operators.StreamOperatorFactory) Watermark(org.apache.flink.streaming.api.watermark.Watermark) SavepointConfigOptions(org.apache.flink.runtime.jobgraph.SavepointConfigOptions) HashMap(java.util.HashMap) MapStateDescriptor(org.apache.flink.api.common.state.MapStateDescriptor) ArrayList(java.util.ArrayList) StreamPartitioner(org.apache.flink.streaming.runtime.partitioner.StreamPartitioner) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) Collector(org.apache.flink.util.Collector) Matchers.iterableWithSize(org.hamcrest.Matchers.iterableWithSize) Output(org.apache.flink.streaming.api.operators.Output) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) TestExpandingSink(org.apache.flink.streaming.util.TestExpandingSink) RebalancePartitioner(org.apache.flink.streaming.runtime.partitioner.RebalancePartitioner) Description(org.hamcrest.Description) TwoInputStreamOperator(org.apache.flink.streaming.api.operators.TwoInputStreamOperator) 
DiscardingSink(org.apache.flink.streaming.api.functions.sink.DiscardingSink) Assert.assertNotNull(org.junit.Assert.assertNotNull) Configuration(org.apache.flink.configuration.Configuration) SingleOutputStreamOperator(org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator) Assert.assertTrue(org.junit.Assert.assertTrue) StreamOperatorParameters(org.apache.flink.streaming.api.operators.StreamOperatorParameters) Test(org.junit.Test) AbstractStreamOperator(org.apache.flink.streaming.api.operators.AbstractStreamOperator) DataStream(org.apache.flink.streaming.api.datastream.DataStream) StreamOperator(org.apache.flink.streaming.api.operators.StreamOperator) FeatureMatcher(org.hamcrest.FeatureMatcher) StreamExchangeMode(org.apache.flink.streaming.api.transformations.StreamExchangeMode) BroadcastProcessFunction(org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction) Matcher(org.hamcrest.Matcher) Transformation(org.apache.flink.api.dag.Transformation) LatencyMarker(org.apache.flink.streaming.runtime.streamrecord.LatencyMarker) SavepointRestoreSettings(org.apache.flink.runtime.jobgraph.SavepointRestoreSettings) OutputTypeConfigurable(org.apache.flink.streaming.api.operators.OutputTypeConfigurable) StreamSource(org.apache.flink.streaming.api.operators.StreamSource) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) NoOpIntMap(org.apache.flink.streaming.util.NoOpIntMap) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) Test(org.junit.Test)
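
The settings exercised by this test map directly onto user code. The following is a hedged sketch (the values 128 and 32, the lambdas, and the DiscardingSink are illustrative) that sets a global max parallelism on the ExecutionConfig and overrides it for one keyed operator, mirroring keyedResult1 and keyedResult2 above.

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.DiscardingSink;

public class MaxParallelismSketch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Global default: the upper bound for rescaling (the number of key groups)
        // for every keyed operator that does not override it.
        env.getConfig().setMaxParallelism(128);

        DataStream<Integer> source = env.fromElements(1, 2, 3);

        // Inherits the global value of 128.
        DataStream<Integer> first = source.keyBy(v -> v).map(v -> v + 1);

        // Overrides the global value for this operator only.
        first.keyBy(v -> v)
                .map(v -> v * 2)
                .setMaxParallelism(32)
                .addSink(new DiscardingSink<>());

        env.execute("max parallelism sketch");
    }
}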

Example 73 with DataStream

Use of org.apache.flink.streaming.api.datastream.DataStream in project flink by apache.

The class StreamGraphGeneratorTest, method testConfigureSlotSharingGroupResource.

@Test
public void testConfigureSlotSharingGroupResource() {
    final SlotSharingGroup ssg1 = SlotSharingGroup.newBuilder("ssg1").setCpuCores(1).setTaskHeapMemoryMB(100).build();
    final SlotSharingGroup ssg2 = SlotSharingGroup.newBuilder("ssg2").setCpuCores(2).setTaskHeapMemoryMB(200).build();
    final SlotSharingGroup ssg3 = SlotSharingGroup.newBuilder(StreamGraphGenerator.DEFAULT_SLOT_SHARING_GROUP).setCpuCores(3).setTaskHeapMemoryMB(300).build();
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    final DataStream<Integer> source = env.fromElements(1).slotSharingGroup("ssg1");
    source.map(value -> value).slotSharingGroup(ssg2).map(value -> value * 2).map(value -> value * 3).slotSharingGroup(SlotSharingGroup.newBuilder("ssg4").build()).map(value -> value * 4).slotSharingGroup(ssg3).addSink(new DiscardingSink<>()).slotSharingGroup(ssg1);
    final StreamGraph streamGraph = env.getStreamGraph();
    assertThat(streamGraph.getSlotSharingGroupResource("ssg1").get(), is(ResourceProfile.fromResources(1, 100)));
    assertThat(streamGraph.getSlotSharingGroupResource("ssg2").get(), is(ResourceProfile.fromResources(2, 200)));
    assertThat(streamGraph.getSlotSharingGroupResource(StreamGraphGenerator.DEFAULT_SLOT_SHARING_GROUP).get(), is(ResourceProfile.fromResources(3, 300)));
}
Also used : Arrays(java.util.Arrays) Tuple2(org.apache.flink.api.java.tuple.Tuple2) BroadcastPartitioner(org.apache.flink.streaming.runtime.partitioner.BroadcastPartitioner) SlotSharingGroup(org.apache.flink.api.common.operators.SlotSharingGroup) KeyedBroadcastProcessFunction(org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction) BasicTypeInfo(org.apache.flink.api.common.typeinfo.BasicTypeInfo) ShufflePartitioner(org.apache.flink.streaming.runtime.partitioner.ShufflePartitioner) ChainingStrategy(org.apache.flink.streaming.api.operators.ChainingStrategy) ResourceSpec(org.apache.flink.api.common.operators.ResourceSpec) ManagedMemoryUseCase(org.apache.flink.core.memory.ManagedMemoryUseCase) Map(java.util.Map) TestLogger(org.apache.flink.util.TestLogger) Function(org.apache.flink.api.common.functions.Function) Assertions(org.assertj.core.api.Assertions) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) CoMapFunction(org.apache.flink.streaming.api.functions.co.CoMapFunction) PartitionTransformation(org.apache.flink.streaming.api.transformations.PartitionTransformation) SinkFunction(org.apache.flink.streaming.api.functions.sink.SinkFunction) StreamTask(org.apache.flink.streaming.runtime.tasks.StreamTask) Collection(java.util.Collection) ConnectedStreams(org.apache.flink.streaming.api.datastream.ConnectedStreams) TypeSafeMatcher(org.hamcrest.TypeSafeMatcher) ResourceProfile(org.apache.flink.runtime.clusterframework.types.ResourceProfile) GlobalPartitioner(org.apache.flink.streaming.runtime.partitioner.GlobalPartitioner) List(java.util.List) NoOpIntMap(org.apache.flink.streaming.util.NoOpIntMap) Matchers.equalTo(org.hamcrest.Matchers.equalTo) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) CheckpointConfig(org.apache.flink.streaming.api.environment.CheckpointConfig) Matchers.is(org.hamcrest.Matchers.is) OneInputStreamOperator(org.apache.flink.streaming.api.operators.OneInputStreamOperator) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) MultipleInputTransformation(org.apache.flink.streaming.api.transformations.MultipleInputTransformation) IterativeStream(org.apache.flink.streaming.api.datastream.IterativeStream) BroadcastStream(org.apache.flink.streaming.api.datastream.BroadcastStream) AbstractUdfStreamOperator(org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator) StreamOperatorFactory(org.apache.flink.streaming.api.operators.StreamOperatorFactory) Watermark(org.apache.flink.streaming.api.watermark.Watermark) SavepointConfigOptions(org.apache.flink.runtime.jobgraph.SavepointConfigOptions) HashMap(java.util.HashMap) MapStateDescriptor(org.apache.flink.api.common.state.MapStateDescriptor) ArrayList(java.util.ArrayList) StreamPartitioner(org.apache.flink.streaming.runtime.partitioner.StreamPartitioner) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) Collector(org.apache.flink.util.Collector) Matchers.iterableWithSize(org.hamcrest.Matchers.iterableWithSize) Output(org.apache.flink.streaming.api.operators.Output) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) TestExpandingSink(org.apache.flink.streaming.util.TestExpandingSink) RebalancePartitioner(org.apache.flink.streaming.runtime.partitioner.RebalancePartitioner) Description(org.hamcrest.Description) TwoInputStreamOperator(org.apache.flink.streaming.api.operators.TwoInputStreamOperator) 
DiscardingSink(org.apache.flink.streaming.api.functions.sink.DiscardingSink) Assert.assertNotNull(org.junit.Assert.assertNotNull) Configuration(org.apache.flink.configuration.Configuration) SingleOutputStreamOperator(org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator) Assert.assertTrue(org.junit.Assert.assertTrue) StreamOperatorParameters(org.apache.flink.streaming.api.operators.StreamOperatorParameters) Test(org.junit.Test) AbstractStreamOperator(org.apache.flink.streaming.api.operators.AbstractStreamOperator) DataStream(org.apache.flink.streaming.api.datastream.DataStream) StreamOperator(org.apache.flink.streaming.api.operators.StreamOperator) FeatureMatcher(org.hamcrest.FeatureMatcher) StreamExchangeMode(org.apache.flink.streaming.api.transformations.StreamExchangeMode) BroadcastProcessFunction(org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction) Matcher(org.hamcrest.Matcher) Transformation(org.apache.flink.api.dag.Transformation) LatencyMarker(org.apache.flink.streaming.runtime.streamrecord.LatencyMarker) SavepointRestoreSettings(org.apache.flink.runtime.jobgraph.SavepointRestoreSettings) OutputTypeConfigurable(org.apache.flink.streaming.api.operators.OutputTypeConfigurable) StreamSource(org.apache.flink.streaming.api.operators.StreamSource) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) DiscardingSink(org.apache.flink.streaming.api.functions.sink.DiscardingSink) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) SlotSharingGroup(org.apache.flink.api.common.operators.SlotSharingGroup) Test(org.junit.Test)
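
The same builder API is how fine-grained slot sharing groups are declared in a user program. In the sketch below the group name "analytics", the resource figures, and the class name are made up for illustration; the calls themselves are the ones the test asserts on.

import org.apache.flink.api.common.operators.SlotSharingGroup;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.DiscardingSink;

public class SlotSharingGroupSketch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // A named group with an explicit resource profile (fine-grained resource management).
        SlotSharingGroup analytics = SlotSharingGroup.newBuilder("analytics")
                .setCpuCores(2)
                .setTaskHeapMemoryMB(256)
                .build();

        env.fromElements(1, 2, 3)
                // Operators can be assigned by group name ...
                .map(v -> v * 10).slotSharingGroup("analytics")
                // ... or by the SlotSharingGroup object, which also registers its
                // resources with the StreamGraph, as the assertions above verify.
                .map(v -> v + 1).slotSharingGroup(analytics)
                .addSink(new DiscardingSink<>());

        env.execute("slot sharing group sketch");
    }
}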

Example 74 with DataStream

Use of org.apache.flink.streaming.api.datastream.DataStream in project flink by apache.

The class StreamingJobGraphGeneratorWithGlobalStreamExchangeModeTest, method testGlobalExchangeModeDoesNotOverrideSpecifiedExchangeMode.

@Test
public void testGlobalExchangeModeDoesNotOverrideSpecifiedExchangeMode() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    final DataStream<Integer> source = env.fromElements(1, 2, 3).setParallelism(1);
    final DataStream<Integer> forward = new DataStream<>(env, new PartitionTransformation<>(source.getTransformation(), new ForwardPartitioner<>(), StreamExchangeMode.PIPELINED));
    forward.map(i -> i).startNewChain().setParallelism(1);
    final StreamGraph streamGraph = env.getStreamGraph();
    streamGraph.setGlobalStreamExchangeMode(GlobalStreamExchangeMode.ALL_EDGES_BLOCKING);
    final JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(streamGraph);
    final List<JobVertex> verticesSorted = jobGraph.getVerticesSortedTopologicallyFromSources();
    final JobVertex sourceVertex = verticesSorted.get(0);
    assertEquals(ResultPartitionType.PIPELINED_BOUNDED, sourceVertex.getProducedDataSets().get(0).getResultType());
}
Also used : JobGraph(org.apache.flink.runtime.jobgraph.JobGraph) JobVertex(org.apache.flink.runtime.jobgraph.JobVertex) DataStream(org.apache.flink.streaming.api.datastream.DataStream) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) ForwardPartitioner(org.apache.flink.streaming.runtime.partitioner.ForwardPartitioner) Test(org.junit.Test)
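
A hedged sketch of the same machinery from the driver side: it builds a small pipeline, switches the StreamGraph to ALL_EDGES_BLOCKING, and prints the ResultPartitionType chosen for every produced data set instead of asserting on a single vertex. Note that StreamGraph#setGlobalStreamExchangeMode and StreamingJobGraphGenerator are internal APIs; they appear here only because the test above relies on them, and the class name is illustrative.

import java.util.List;

import org.apache.flink.runtime.jobgraph.IntermediateDataSet;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.GlobalStreamExchangeMode;
import org.apache.flink.streaming.api.graph.StreamGraph;
import org.apache.flink.streaming.api.graph.StreamingJobGraphGenerator;

public class ExchangeModeInspectionSketch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.fromElements(1, 2, 3).rebalance().map(i -> i).print();

        // Every edge without an explicitly specified exchange mode is materialized
        // as a blocking shuffle; explicitly PIPELINED edges keep their mode.
        StreamGraph streamGraph = env.getStreamGraph();
        streamGraph.setGlobalStreamExchangeMode(GlobalStreamExchangeMode.ALL_EDGES_BLOCKING);

        // Translate to a JobGraph and print the result type of each produced data set.
        JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(streamGraph);
        List<JobVertex> vertices = jobGraph.getVerticesSortedTopologicallyFromSources();
        for (JobVertex vertex : vertices) {
            for (IntermediateDataSet dataSet : vertex.getProducedDataSets()) {
                System.out.println(vertex.getName() + " -> " + dataSet.getResultType());
            }
        }
    }
}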

Example 75 with DataStream

Use of org.apache.flink.streaming.api.datastream.DataStream in project flink by apache.

The class StreamingJobGraphGeneratorWithGlobalStreamExchangeModeTest, method createStreamGraph.

/**
 * Topology: source(parallelism=1) --(forward)--> map1(parallelism=1) --(rescale)-->
 * map2(parallelism=2) --(rebalance)--> sink(parallelism=2).
 */
private static StreamGraph createStreamGraph(GlobalStreamExchangeMode globalStreamExchangeMode) {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    if (globalStreamExchangeMode != GlobalStreamExchangeMode.ALL_EDGES_PIPELINED) {
        env.setBufferTimeout(-1);
    }
    final DataStream<Integer> source = env.fromElements(1, 2, 3).setParallelism(1);
    final DataStream<Integer> forward = new DataStream<>(env, new PartitionTransformation<>(source.getTransformation(), new ForwardPartitioner<>(), StreamExchangeMode.UNDEFINED));
    final DataStream<Integer> map1 = forward.map(i -> i).startNewChain().setParallelism(1);
    final DataStream<Integer> rescale = new DataStream<>(env, new PartitionTransformation<>(map1.getTransformation(), new RescalePartitioner<>(), StreamExchangeMode.UNDEFINED));
    final DataStream<Integer> map2 = rescale.map(i -> i).setParallelism(2);
    map2.rebalance().print().setParallelism(2);
    final StreamGraph streamGraph = env.getStreamGraph();
    streamGraph.setGlobalStreamExchangeMode(globalStreamExchangeMode);
    return streamGraph;
}
Also used : CoreMatchers.is(org.hamcrest.CoreMatchers.is) PartitionTransformation(org.apache.flink.streaming.api.transformations.PartitionTransformation) ForwardPartitioner(org.apache.flink.streaming.runtime.partitioner.ForwardPartitioner) JobVertex(org.apache.flink.runtime.jobgraph.JobVertex) JobGraph(org.apache.flink.runtime.jobgraph.JobGraph) ResultPartitionType(org.apache.flink.runtime.io.network.partition.ResultPartitionType) Test(org.junit.Test) DataStream(org.apache.flink.streaming.api.datastream.DataStream) RescalePartitioner(org.apache.flink.streaming.runtime.partitioner.RescalePartitioner) Assert.assertThat(org.junit.Assert.assertThat) StreamExchangeMode(org.apache.flink.streaming.api.transformations.StreamExchangeMode) List(java.util.List) TestLogger(org.apache.flink.util.TestLogger) Assert.assertEquals(org.junit.Assert.assertEquals) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) DataStream(org.apache.flink.streaming.api.datastream.DataStream) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) ForwardPartitioner(org.apache.flink.streaming.runtime.partitioner.ForwardPartitioner) RescalePartitioner(org.apache.flink.streaming.runtime.partitioner.RescalePartitioner)
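
The helper above wires the partitioners by hand through PartitionTransformation. The sketch below (class name illustrative) builds the same four-node topology with the public shorthand forward()/rescale()/rebalance() and prints the resulting execution plan; every edge keeps StreamExchangeMode.UNDEFINED, which is what lets a global mode such as ALL_EDGES_BLOCKING decide it later.

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class PartitioningTopologySketch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // source(parallelism=1) --(forward)--> map1(parallelism=1)
        //   --(rescale)--> map2(parallelism=2) --(rebalance)--> sink(parallelism=2)
        DataStream<Integer> source = env.fromElements(1, 2, 3).setParallelism(1);
        DataStream<Integer> map1 = source.forward().map(i -> i).setParallelism(1);
        DataStream<Integer> map2 = map1.rescale().map(i -> i * 2).setParallelism(2);
        map2.rebalance().print().setParallelism(2);

        // The JSON plan shows the FORWARD, RESCALE and REBALANCE edges; the stream
        // exchange mode of each edge is still UNDEFINED at this point.
        System.out.println(env.getExecutionPlan());
    }
}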

Aggregations

DataStream (org.apache.flink.streaming.api.datastream.DataStream) 87
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) 78
Test (org.junit.Test) 70
List (java.util.List) 62
Collector (org.apache.flink.util.Collector) 60
Tuple2 (org.apache.flink.api.java.tuple.Tuple2) 50
SingleOutputStreamOperator (org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator) 48
Arrays (java.util.Arrays) 46
ArrayList (java.util.ArrayList) 40
TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation) 40
Assert.assertEquals (org.junit.Assert.assertEquals) 38
WatermarkStrategy (org.apache.flink.api.common.eventtime.WatermarkStrategy) 36
Configuration (org.apache.flink.configuration.Configuration) 36
Assert.assertTrue (org.junit.Assert.assertTrue) 33
BasicTypeInfo (org.apache.flink.api.common.typeinfo.BasicTypeInfo) 32
StreamOperator (org.apache.flink.streaming.api.operators.StreamOperator) 32
Types (org.apache.flink.api.common.typeinfo.Types) 31
Assert (org.junit.Assert) 31
ReduceFunction (org.apache.flink.api.common.functions.ReduceFunction) 29
JobGraph (org.apache.flink.runtime.jobgraph.JobGraph) 29