Use of org.apache.flink.api.common.functions.RuntimeContext in project flink by apache.
In class FlinkKafkaProducerBaseTest, the method testPartitionerOpenedWithDeterminatePartitionList:
/**
 * Tests that the partition list is deterministic and is correctly provided to the
 * custom partitioner.
 */
@Test
public void testPartitionerOpenedWithDeterminatePartitionList() throws Exception {
    KafkaPartitioner mockPartitioner = mock(KafkaPartitioner.class);
    RuntimeContext mockRuntimeContext = mock(RuntimeContext.class);
    when(mockRuntimeContext.getIndexOfThisSubtask()).thenReturn(0);
    when(mockRuntimeContext.getNumberOfParallelSubtasks()).thenReturn(1);

    // out-of-order list of 4 partitions
    List<PartitionInfo> mockPartitionsList = new ArrayList<>(4);
    mockPartitionsList.add(new PartitionInfo(DummyFlinkKafkaProducer.DUMMY_TOPIC, 3, null, null, null));
    mockPartitionsList.add(new PartitionInfo(DummyFlinkKafkaProducer.DUMMY_TOPIC, 1, null, null, null));
    mockPartitionsList.add(new PartitionInfo(DummyFlinkKafkaProducer.DUMMY_TOPIC, 0, null, null, null));
    mockPartitionsList.add(new PartitionInfo(DummyFlinkKafkaProducer.DUMMY_TOPIC, 2, null, null, null));

    final DummyFlinkKafkaProducer producer = new DummyFlinkKafkaProducer(
            FakeStandardProducerConfig.get(), mockPartitioner);
    producer.setRuntimeContext(mockRuntimeContext);

    final KafkaProducer mockProducer = producer.getMockKafkaProducer();
    when(mockProducer.partitionsFor(anyString())).thenReturn(mockPartitionsList);
    when(mockProducer.metrics()).thenReturn(null);

    producer.open(new Configuration());

    // the out-of-order partition list should be sorted before it is handed to the
    // custom partitioner's open() method
    int[] correctPartitionList = { 0, 1, 2, 3 };
    verify(mockPartitioner).open(0, 1, correctPartitionList);
}
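For context, the contract verified above is KafkaPartitioner.open(parallelInstanceId, parallelInstances, partitions) receiving a sorted, deterministic partition array. A minimal sketch of a custom partitioner that relies on this guarantee; the class name SubtaskBoundPartitioner is illustrative and not part of the test or of Flink:

public class SubtaskBoundPartitioner<T> extends KafkaPartitioner<T> {

    private int targetPartition = -1;

    @Override
    public void open(int parallelInstanceId, int parallelInstances, int[] partitions) {
        // Because the producer sorts the partition list before calling open(),
        // this assignment is stable across runs for the same topic layout.
        targetPartition = partitions[parallelInstanceId % partitions.length];
    }

    @Override
    public int partition(T next, byte[] serializedKey, byte[] serializedValue, int numPartitions) {
        return targetPartition;
    }
}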
Use of org.apache.flink.api.common.functions.RuntimeContext in project flink by apache.
In class NiFiSinkTopologyExample, the method main:
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    SiteToSiteClientConfig clientConfig = new SiteToSiteClient.Builder()
            .url("http://localhost:8080/nifi")
            .portName("Data from Flink")
            .buildConfig();

    DataStreamSink<String> dataStream = env
            .fromElements("one", "two", "three", "four", "five", "q")
            .addSink(new NiFiSink<>(clientConfig, new NiFiDataPacketBuilder<String>() {

                @Override
                public NiFiDataPacket createNiFiDataPacket(String s, RuntimeContext ctx) {
                    return new StandardNiFiDataPacket(
                            s.getBytes(ConfigConstants.DEFAULT_CHARSET),
                            new HashMap<String, String>());
                }
            }));

    env.execute();
}
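The example above ignores the RuntimeContext argument. A hedged variant of the same builder, showing why the callback receives it at all: per-subtask information can be attached to each packet. The attribute name "flink.subtask" is illustrative, not a NiFi convention:

NiFiDataPacketBuilder<String> builder = new NiFiDataPacketBuilder<String>() {

    @Override
    public NiFiDataPacket createNiFiDataPacket(String s, RuntimeContext ctx) {
        // Tag each packet with the index of the producing subtask.
        HashMap<String, String> attributes = new HashMap<>();
        attributes.put("flink.subtask", String.valueOf(ctx.getIndexOfThisSubtask()));
        return new StandardNiFiDataPacket(s.getBytes(ConfigConstants.DEFAULT_CHARSET), attributes);
    }
};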
Use of org.apache.flink.api.common.functions.RuntimeContext in project flink by apache.
In class MapOperatorTest, the method testMapWithRuntimeContext:
@Test
public void testMapWithRuntimeContext() {
    try {
        final String taskName = "Test Task";
        final AtomicBoolean opened = new AtomicBoolean();
        final AtomicBoolean closed = new AtomicBoolean();

        final MapFunction<String, Integer> parser = new RichMapFunction<String, Integer>() {

            @Override
            public void open(Configuration parameters) throws Exception {
                opened.set(true);
                RuntimeContext ctx = getRuntimeContext();
                assertEquals(0, ctx.getIndexOfThisSubtask());
                assertEquals(1, ctx.getNumberOfParallelSubtasks());
                assertEquals(taskName, ctx.getTaskName());
            }

            @Override
            public Integer map(String value) {
                return Integer.parseInt(value);
            }

            @Override
            public void close() throws Exception {
                closed.set(true);
            }
        };

        MapOperatorBase<String, Integer, MapFunction<String, Integer>> op =
                new MapOperatorBase<String, Integer, MapFunction<String, Integer>>(
                        parser,
                        new UnaryOperatorInformation<String, Integer>(
                                BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO),
                        taskName);

        List<String> input = new ArrayList<String>(asList("1", "2", "3", "4", "5", "6"));
        final HashMap<String, Accumulator<?, ?>> accumulatorMap = new HashMap<String, Accumulator<?, ?>>();
        final HashMap<String, Future<Path>> cpTasks = new HashMap<>();
        // (taskName, maxParallelism, subtaskIndex, parallelism, attemptNumber)
        final TaskInfo taskInfo = new TaskInfo(taskName, 1, 0, 1, 0);

        ExecutionConfig executionConfig = new ExecutionConfig();

        executionConfig.disableObjectReuse();
        List<Integer> resultMutableSafe = op.executeOnCollections(
                input,
                new RuntimeUDFContext(taskInfo, null, executionConfig, cpTasks, accumulatorMap,
                        new UnregisteredMetricsGroup()),
                executionConfig);

        executionConfig.enableObjectReuse();
        List<Integer> resultRegular = op.executeOnCollections(
                input,
                new RuntimeUDFContext(taskInfo, null, executionConfig, cpTasks, accumulatorMap,
                        new UnregisteredMetricsGroup()),
                executionConfig);

        assertEquals(asList(1, 2, 3, 4, 5, 6), resultMutableSafe);
        assertEquals(asList(1, 2, 3, 4, 5, 6), resultRegular);
        assertTrue(opened.get());
        assertTrue(closed.get());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
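The hand-built MapOperatorBase above is what the DataSet API constructs internally for a map() call. A rough sketch of the equivalent user-facing pipeline, assuming a standard local ExecutionEnvironment (not part of the original test):

ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
env.fromElements("1", "2", "3", "4", "5", "6")
        .map(parser)   // the RichMapFunction defined above
        .print();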
Use of org.apache.flink.api.common.functions.RuntimeContext in project flink by apache.
In class CoGroupOperatorCollectionTest, the method testExecuteOnCollection:
@Test
public void testExecuteOnCollection() {
    try {
        List<Tuple2<String, Integer>> input1 = Arrays.asList(
                new Tuple2Builder<String, Integer>()
                        .add("foo", 1).add("foobar", 1).add("foo", 1)
                        .add("bar", 1).add("foo", 1).add("foo", 1)
                        .build());

        List<Tuple2<String, Integer>> input2 = Arrays.asList(
                new Tuple2Builder<String, Integer>()
                        .add("foo", 1).add("foo", 1).add("bar", 1)
                        .add("foo", 1).add("barfoo", 1).add("foo", 1)
                        .build());

        ExecutionConfig executionConfig = new ExecutionConfig();
        final HashMap<String, Accumulator<?, ?>> accumulators = new HashMap<String, Accumulator<?, ?>>();
        final HashMap<String, Future<Path>> cpTasks = new HashMap<>();
        final TaskInfo taskInfo = new TaskInfo("Test UDF", 4, 0, 4, 0);
        final RuntimeContext ctx = new RuntimeUDFContext(
                taskInfo, null, executionConfig, cpTasks, accumulators, new UnregisteredMetricsGroup());

        {
            SumCoGroup udf1 = new SumCoGroup();
            SumCoGroup udf2 = new SumCoGroup();

            executionConfig.disableObjectReuse();
            List<Tuple2<String, Integer>> resultSafe =
                    getCoGroupOperator(udf1).executeOnCollections(input1, input2, ctx, executionConfig);
            executionConfig.enableObjectReuse();
            List<Tuple2<String, Integer>> resultRegular =
                    getCoGroupOperator(udf2).executeOnCollections(input1, input2, ctx, executionConfig);

            Assert.assertTrue(udf1.isClosed);
            Assert.assertTrue(udf2.isClosed);

            Set<Tuple2<String, Integer>> expected = new HashSet<Tuple2<String, Integer>>(
                    Arrays.asList(new Tuple2Builder<String, Integer>()
                            .add("foo", 8).add("bar", 2)
                            .add("foobar", 1).add("barfoo", 1)
                            .build()));

            Assert.assertEquals(expected, new HashSet<Tuple2<String, Integer>>(resultSafe));
            Assert.assertEquals(expected, new HashSet<Tuple2<String, Integer>>(resultRegular));
        }

        {
            executionConfig.disableObjectReuse();
            List<Tuple2<String, Integer>> resultSafe = getCoGroupOperator(new SumCoGroup())
                    .executeOnCollections(
                            Collections.<Tuple2<String, Integer>>emptyList(),
                            Collections.<Tuple2<String, Integer>>emptyList(),
                            ctx, executionConfig);
            executionConfig.enableObjectReuse();
            List<Tuple2<String, Integer>> resultRegular = getCoGroupOperator(new SumCoGroup())
                    .executeOnCollections(
                            Collections.<Tuple2<String, Integer>>emptyList(),
                            Collections.<Tuple2<String, Integer>>emptyList(),
                            ctx, executionConfig);

            Assert.assertEquals(0, resultSafe.size());
            Assert.assertEquals(0, resultRegular.size());
        }
    } catch (Throwable t) {
        t.printStackTrace();
        Assert.fail(t.getMessage());
    }
}
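SumCoGroup and getCoGroupOperator(...) are defined elsewhere in the test class and are not part of this excerpt. A plausible sketch of SumCoGroup, consistent with the results asserted above (per-key sums across both inputs, with isClosed set in close()); treat it as a reconstruction, not the original code:

private static class SumCoGroup extends RichCoGroupFunction<Tuple2<String, Integer>,
        Tuple2<String, Integer>, Tuple2<String, Integer>> {

    boolean isClosed = false;

    @Override
    public void coGroup(Iterable<Tuple2<String, Integer>> first,
            Iterable<Tuple2<String, Integer>> second,
            Collector<Tuple2<String, Integer>> out) throws Exception {
        // Both inputs share the same key within one coGroup call; sum all values.
        String key = null;
        int sum = 0;
        for (Tuple2<String, Integer> t : first) {
            key = t.f0;
            sum += t.f1;
        }
        for (Tuple2<String, Integer> t : second) {
            key = t.f0;
            sum += t.f1;
        }
        out.collect(new Tuple2<String, Integer>(key, sum));
    }

    @Override
    public void close() throws Exception {
        isClosed = true;
    }
}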
Use of org.apache.flink.api.common.functions.RuntimeContext in project flink by apache.
In class GroupReduceOperatorTest, the method testGroupReduceCollectionWithRuntimeContext:
@Test
public void testGroupReduceCollectionWithRuntimeContext() {
    try {
        final String taskName = "Test Task";
        final AtomicBoolean opened = new AtomicBoolean();
        final AtomicBoolean closed = new AtomicBoolean();

        final RichGroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>> reducer =
                new RichGroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {

            @Override
            public void reduce(Iterable<Tuple2<String, Integer>> values,
                    Collector<Tuple2<String, Integer>> out) throws Exception {
                Iterator<Tuple2<String, Integer>> input = values.iterator();
                Tuple2<String, Integer> result = input.next();
                int sum = result.f1;
                while (input.hasNext()) {
                    Tuple2<String, Integer> next = input.next();
                    sum += next.f1;
                }
                result.f1 = sum;
                out.collect(result);
            }

            @Override
            public void open(Configuration parameters) throws Exception {
                opened.set(true);
                RuntimeContext ctx = getRuntimeContext();
                assertEquals(0, ctx.getIndexOfThisSubtask());
                assertEquals(1, ctx.getNumberOfParallelSubtasks());
                assertEquals(taskName, ctx.getTaskName());
            }

            @Override
            public void close() throws Exception {
                closed.set(true);
            }
        };

        GroupReduceOperatorBase<Tuple2<String, Integer>, Tuple2<String, Integer>,
                GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>> op =
                new GroupReduceOperatorBase<Tuple2<String, Integer>, Tuple2<String, Integer>,
                        GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>>(
                        reducer,
                        new UnaryOperatorInformation<Tuple2<String, Integer>, Tuple2<String, Integer>>(
                                TypeInfoParser.<Tuple2<String, Integer>>parse("Tuple2<String, Integer>"),
                                TypeInfoParser.<Tuple2<String, Integer>>parse("Tuple2<String, Integer>")),
                        // group on the first tuple field (the String key)
                        new int[] { 0 },
                        "TestReducer");

        List<Tuple2<String, Integer>> input = new ArrayList<Tuple2<String, Integer>>(asList(
                new Tuple2<String, Integer>("foo", 1),
                new Tuple2<String, Integer>("foo", 3),
                new Tuple2<String, Integer>("bar", 2),
                new Tuple2<String, Integer>("bar", 4)));

        final TaskInfo taskInfo = new TaskInfo(taskName, 1, 0, 1, 0);
        ExecutionConfig executionConfig = new ExecutionConfig();

        executionConfig.disableObjectReuse();
        List<Tuple2<String, Integer>> resultMutableSafe = op.executeOnCollections(
                input,
                new RuntimeUDFContext(taskInfo, null, executionConfig,
                        new HashMap<String, Future<Path>>(),
                        new HashMap<String, Accumulator<?, ?>>(),
                        new UnregisteredMetricsGroup()),
                executionConfig);

        executionConfig.enableObjectReuse();
        List<Tuple2<String, Integer>> resultRegular = op.executeOnCollections(
                input,
                new RuntimeUDFContext(taskInfo, null, executionConfig,
                        new HashMap<String, Future<Path>>(),
                        new HashMap<String, Accumulator<?, ?>>(),
                        new UnregisteredMetricsGroup()),
                executionConfig);

        Set<Tuple2<String, Integer>> resultSetMutableSafe = new HashSet<Tuple2<String, Integer>>(resultMutableSafe);
        Set<Tuple2<String, Integer>> resultSetRegular = new HashSet<Tuple2<String, Integer>>(resultRegular);
        Set<Tuple2<String, Integer>> expectedResult = new HashSet<Tuple2<String, Integer>>(asList(
                new Tuple2<String, Integer>("foo", 4),
                new Tuple2<String, Integer>("bar", 6)));

        assertEquals(expectedResult, resultSetMutableSafe);
        assertEquals(expectedResult, resultSetRegular);
        assertTrue(opened.get());
        assertTrue(closed.get());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
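As with the map test, the hand-built GroupReduceOperatorBase corresponds roughly to this DataSet API pipeline; a sketch assuming a standard ExecutionEnvironment, not part of the original test:

ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
env.fromElements(
        new Tuple2<String, Integer>("foo", 1), new Tuple2<String, Integer>("foo", 3),
        new Tuple2<String, Integer>("bar", 2), new Tuple2<String, Integer>("bar", 4))
        .groupBy(0)            // same key field as new int[] { 0 } above
        .reduceGroup(reducer)  // the RichGroupReduceFunction defined above
        .print();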