
Example 76 with DataStream

Use of org.apache.flink.streaming.api.datastream.DataStream in the Apache Flink project.

From class StreamGraphGeneratorTest, method testResetBatchExchangeModeInStreamingExecution.

@Test
public void testResetBatchExchangeModeInStreamingExecution() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<Integer> sourceDataStream = env.fromElements(1, 2, 3);
    // explicitly request a BATCH exchange on the rebalance edge
    PartitionTransformation<Integer> transformation =
            new PartitionTransformation<>(
                    sourceDataStream.getTransformation(),
                    new RebalancePartitioner<>(),
                    StreamExchangeMode.BATCH);
    DataStream<Integer> partitionStream = new DataStream<>(env, transformation);
    partitionStream.map(value -> value).print();
    final StreamGraph streamGraph = env.getStreamGraph();
    // in STREAMING execution the explicitly requested BATCH mode must be reset to UNDEFINED
    Assertions.assertThat(streamGraph.getStreamEdges(1, 3))
            .hasSize(1)
            .satisfies(e ->
                    Assertions.assertThat(e.get(0).getExchangeMode())
                            .isEqualTo(StreamExchangeMode.UNDEFINED));
}
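What this verifies: the StreamGraphGenerator resets an explicitly requested StreamExchangeMode.BATCH back to UNDEFINED when the job runs under streaming execution. The following is a minimal sketch (not taken from the Flink test suite; class and variable names are illustrative) of the same topology built with the public DataStream API, printing the exchange mode of every generated edge:

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.StreamGraph;
import org.apache.flink.streaming.api.graph.StreamNode;

public class ExchangeModeSketch {

    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // same topology as the test, but using rebalance() instead of wiring a
        // PartitionTransformation by hand
        env.fromElements(1, 2, 3)
                .rebalance()
                .map(value -> value)
                .print();

        StreamGraph streamGraph = env.getStreamGraph();

        // under STREAMING execution the edges are expected to report UNDEFINED,
        // which behaves as a pipelined exchange at runtime
        for (StreamNode node : streamGraph.getStreamNodes()) {
            node.getOutEdges()
                    .forEach(edge -> System.out.println(edge + " -> " + edge.getExchangeMode()));
        }
    }
}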

Example 77 with DataStream

Use of org.apache.flink.streaming.api.datastream.DataStream in the Apache Flink project.

From class StateDescriptorPassingTest, method testReduceWindowAllState.

@Test
public void testReduceWindowAllState() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);
    // simulate ingestion time
    DataStream<File> src = env.fromElements(new File("/"))
            .assignTimestampsAndWatermarks(
                    WatermarkStrategy.<File>forMonotonousTimestamps()
                            .withTimestampAssigner((file, ts) -> System.currentTimeMillis()));
    SingleOutputStreamOperator<?> result = src
            .windowAll(TumblingEventTimeWindows.of(Time.milliseconds(1000)))
            .reduce(new ReduceFunction<File>() {

        @Override
        public File reduce(File value1, File value2) {
            return null;
        }
    });
    validateStateDescriptorConfigured(result);
}
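validateStateDescriptorConfigured is a private helper of StateDescriptorPassingTest and is not shown on this page. A plausible reconstruction of what it checks, hedged because the exact body is not visible here, is sketched below: it walks from the resulting operator down to the window operator's state descriptor and asserts that the File type fell back to a KryoSerializer carrying the JavaSerializer registration made on the environment.

private void validateStateDescriptorConfigured(SingleOutputStreamOperator<?> result) {
    OneInputTransformation<?, ?> transform =
            (OneInputTransformation<?, ?>) result.getTransformation();
    WindowOperator<?, ?, ?, ?, ?> op = (WindowOperator<?, ?, ?, ?, ?>) transform.getOperator();

    // the descriptor must already carry an initialized serializer at this point
    StateDescriptor<?, ?> descriptor = op.getStateDescriptor();
    TypeSerializer<?> serializer = descriptor.getSerializer();

    // File has no Flink-native serializer, so the window state falls back to Kryo ...
    assertTrue(serializer instanceof KryoSerializer);

    // ... and the Kryo instance must have picked up the registration made via
    // env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class)
    Kryo kryo = ((KryoSerializer<?>) serializer).getKryo();
    assertTrue(kryo.getSerializer(File.class) instanceof JavaSerializer);
}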

Example 78 with DataStream

Use of org.apache.flink.streaming.api.datastream.DataStream in the Apache Flink project.

From class StateDescriptorPassingTest, method testProcessAllWindowState.

@Test
public void testProcessAllWindowState() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);
    // simulate ingestion time
    DataStream<File> src = env.fromElements(new File("/"))
            .assignTimestampsAndWatermarks(
                    WatermarkStrategy.<File>forMonotonousTimestamps()
                            .withTimestampAssigner((file, ts) -> System.currentTimeMillis()));
    SingleOutputStreamOperator<?> result = src
            .windowAll(TumblingEventTimeWindows.of(Time.milliseconds(1000)))
            .process(new ProcessAllWindowFunction<File, String, TimeWindow>() {

        @Override
        public void process(Context ctx, Iterable<File> input, Collector<String> out) {
        }
    });
    validateListStateDescriptorConfigured(result);
}
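Both window tests depend on registerTypeWithKryoSerializer being carried from the environment into the serializers that back the window state. Below is a short hedged sketch of the registration side (the method name is illustrative); the printout merely shows that the pair is recorded in the ExecutionConfig, from which every KryoSerializer created for a generic type replays it onto its Kryo instance.

@Test
public void kryoRegistrationSketch() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // record the File -> JavaSerializer pair on the job's ExecutionConfig
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);

    ExecutionConfig config = env.getConfig();
    // prints a map containing the File.class -> JavaSerializer.class entry registered above
    System.out.println(config.getRegisteredTypesWithKryoSerializerClasses());
}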

Example 79 with DataStream

Use of org.apache.flink.streaming.api.datastream.DataStream in the Apache Flink project.

From class AbstractStreamTableEnvironmentImpl, method toStreamInternal.

protected <T> DataStream<T> toStreamInternal(
        Table table,
        SchemaTranslator.ProducingResult schemaTranslationResult,
        @Nullable ChangelogMode changelogMode) {
    final CatalogManager catalogManager = getCatalogManager();
    final OperationTreeBuilder operationTreeBuilder = getOperationTreeBuilder();
    final QueryOperation projectOperation = schemaTranslationResult.getProjections()
            .map(projections -> operationTreeBuilder.project(
                    projections.stream()
                            .map(ApiExpressionUtils::unresolvedRef)
                            .collect(Collectors.toList()),
                    table.getQueryOperation()))
            .orElseGet(table::getQueryOperation);
    final ResolvedCatalogTable resolvedCatalogTable = catalogManager.resolveCatalogTable(
            new ExternalCatalogTable(schemaTranslationResult.getSchema()));
    final ExternalModifyOperation modifyOperation = new ExternalModifyOperation(
            ContextResolvedTable.anonymous("datastream_sink", resolvedCatalogTable),
            projectOperation,
            changelogMode,
            schemaTranslationResult.getPhysicalDataType()
                    .orElseGet(() -> resolvedCatalogTable.getResolvedSchema().toPhysicalRowDataType()));
    return toStreamInternal(table, modifyOperation);
}
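toStreamInternal is the internal plumbing behind the public Table-to-DataStream conversions. A minimal hedged usage sketch of the public entry point that ends up here (the pipeline and method name are illustrative; StreamTableEnvironment is the bridge class from org.apache.flink.table.api.bridge.java and Row is org.apache.flink.types.Row):

void toDataStreamSketch() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

    // register a bounded stream as a table, then convert it back to a DataStream;
    // the conversion is what funnels through toStreamInternal
    DataStream<Long> input = env.fromSequence(1, 10);
    Table table = tableEnv.fromDataStream(input);

    // insert-only conversion; toChangelogStream(table) would be used for updating results
    DataStream<Row> result = tableEnv.toDataStream(table);
    result.print();
    env.execute("to-datastream-sketch");
}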

Example 80 with DataStream

Use of org.apache.flink.streaming.api.datastream.DataStream in the Apache Flink project.

From class CommonExecSink, method applySinkProvider.

private Transformation<?> applySinkProvider(
        Transformation<RowData> inputTransform,
        StreamExecutionEnvironment env,
        SinkRuntimeProvider runtimeProvider,
        int rowtimeFieldIndex,
        int sinkParallelism,
        ReadableConfig config) {
    TransformationMetadata sinkMeta = createTransformationMeta(SINK_TRANSFORMATION, config);
    if (runtimeProvider instanceof DataStreamSinkProvider) {
        Transformation<RowData> sinkTransformation = applyRowtimeTransformation(inputTransform, rowtimeFieldIndex, sinkParallelism, config);
        final DataStream<RowData> dataStream = new DataStream<>(env, sinkTransformation);
        final DataStreamSinkProvider provider = (DataStreamSinkProvider) runtimeProvider;
        return provider.consumeDataStream(createProviderContext(), dataStream).getTransformation();
    } else if (runtimeProvider instanceof TransformationSinkProvider) {
        final TransformationSinkProvider provider = (TransformationSinkProvider) runtimeProvider;
        return provider.createTransformation(new TransformationSinkProvider.Context() {

            @Override
            public Transformation<RowData> getInputTransformation() {
                return inputTransform;
            }

            @Override
            public int getRowtimeIndex() {
                return rowtimeFieldIndex;
            }

            @Override
            public Optional<String> generateUid(String name) {
                return createProviderContext().generateUid(name);
            }
        });
    } else if (runtimeProvider instanceof SinkFunctionProvider) {
        final SinkFunction<RowData> sinkFunction = ((SinkFunctionProvider) runtimeProvider).createSinkFunction();
        return createSinkFunctionTransformation(sinkFunction, env, inputTransform, rowtimeFieldIndex, sinkMeta, sinkParallelism);
    } else if (runtimeProvider instanceof OutputFormatProvider) {
        OutputFormat<RowData> outputFormat = ((OutputFormatProvider) runtimeProvider).createOutputFormat();
        final SinkFunction<RowData> sinkFunction = new OutputFormatSinkFunction<>(outputFormat);
        return createSinkFunctionTransformation(sinkFunction, env, inputTransform, rowtimeFieldIndex, sinkMeta, sinkParallelism);
    } else if (runtimeProvider instanceof SinkProvider) {
        Transformation<RowData> sinkTransformation = applyRowtimeTransformation(inputTransform, rowtimeFieldIndex, sinkParallelism, config);
        final DataStream<RowData> dataStream = new DataStream<>(env, sinkTransformation);
        final Transformation<?> transformation =
                DataStreamSink.forSinkV1(dataStream, ((SinkProvider) runtimeProvider).createSink())
                        .getTransformation();
        transformation.setParallelism(sinkParallelism);
        sinkMeta.fill(transformation);
        return transformation;
    } else if (runtimeProvider instanceof SinkV2Provider) {
        Transformation<RowData> sinkTransformation = applyRowtimeTransformation(inputTransform, rowtimeFieldIndex, sinkParallelism, config);
        final DataStream<RowData> dataStream = new DataStream<>(env, sinkTransformation);
        final Transformation<?> transformation =
                DataStreamSink.forSink(dataStream, ((SinkV2Provider) runtimeProvider).createSink())
                        .getTransformation();
        transformation.setParallelism(sinkParallelism);
        sinkMeta.fill(transformation);
        return transformation;
    } else {
        throw new TableException("Unsupported sink runtime provider.");
    }
}
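Each branch above corresponds to one flavor of DynamicTableSink.SinkRuntimeProvider that a connector may return from DynamicTableSink#getSinkRuntimeProvider. A minimal hedged sketch of the simplest one, SinkFunctionProvider, with PrintSinkFunction standing in for a real sink (purely illustrative):

// a connector-side provider that would be routed through the SinkFunctionProvider
// branch of applySinkProvider above
SinkFunction<RowData> sinkFunction = new PrintSinkFunction<>();
DynamicTableSink.SinkRuntimeProvider provider = SinkFunctionProvider.of(sinkFunction);

// the overload with an explicit sink parallelism, which the planner picks up when
// it builds the sink transformation
DynamicTableSink.SinkRuntimeProvider parallelProvider = SinkFunctionProvider.of(sinkFunction, 4);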
