Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.
From class TypeExtractorTest, method testTupleOfValues:
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testTupleOfValues() {
    // use getMapReturnTypes()
    RichMapFunction<?, ?> function =
            new RichMapFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>() {

                private static final long serialVersionUID = 1L;

                @Override
                public Tuple2<StringValue, IntValue> map(Tuple2<StringValue, IntValue> value)
                        throws Exception {
                    return null;
                }
            };

    TypeInformation<?> ti =
            TypeExtractor.getMapReturnTypes(
                    function,
                    (TypeInformation)
                            TypeInformation.of(new TypeHint<Tuple2<StringValue, IntValue>>() {}));

    Assert.assertFalse(ti.isBasicType());
    Assert.assertTrue(ti.isTupleType());
    Assert.assertEquals(StringValue.class, ((TupleTypeInfo<?>) ti).getTypeAt(0).getTypeClass());
    Assert.assertEquals(IntValue.class, ((TupleTypeInfo<?>) ti).getTypeAt(1).getTypeClass());

    // use getForObject()
    Tuple2<StringValue, IntValue> t = new Tuple2<>(new StringValue("x"), new IntValue(1));
    TypeInformation<?> ti2 = TypeExtractor.getForObject(t);

    Assert.assertFalse(ti2.isBasicType());
    Assert.assertTrue(ti2.isTupleType());
    Assert.assertEquals(StringValue.class, ((TupleTypeInfo<?>) ti2).getTypeAt(0).getTypeClass());
    Assert.assertEquals(IntValue.class, ((TupleTypeInfo<?>) ti2).getTypeAt(1).getTypeClass());
}
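For context, the two extraction routes exercised by this test can be compared directly outside a test harness. A minimal, self-contained sketch of the same idea (the class name TypeExtractionSketch is illustrative, not part of Flink):

import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.types.IntValue;
import org.apache.flink.types.StringValue;

public class TypeExtractionSketch {

    public static void main(String[] args) {
        // Route 1: extract type information from a concrete instance at runtime.
        Tuple2<StringValue, IntValue> t = new Tuple2<>(new StringValue("x"), new IntValue(1));
        TypeInformation<?> fromObject = TypeExtractor.getForObject(t);

        // Route 2: capture the type statically via an anonymous TypeHint subclass,
        // which preserves the generic parameters despite erasure.
        TypeInformation<Tuple2<StringValue, IntValue>> fromHint =
                TypeInformation.of(new TypeHint<Tuple2<StringValue, IntValue>>() {});

        // Both routes should describe the same tuple type.
        System.out.println(fromObject);
        System.out.println(fromObject.equals(fromHint));
    }
}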
Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.
From class TypeExtractorTest, method testFunctionWithMissingGenerics:
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testFunctionWithMissingGenerics() {
    RichMapFunction function = new RichMapFunction() {

        private static final long serialVersionUID = 1L;

        @Override
        public String map(Object value) throws Exception {
            return null;
        }
    };

    // Lenient extraction (fourth argument true): the failure is wrapped in a
    // MissingTypeInfo placeholder instead of being thrown.
    TypeInformation<?> ti = TypeExtractor.getMapReturnTypes(function, Types.STRING, "name", true);
    Assert.assertTrue(ti instanceof MissingTypeInfo);

    // Strict extraction: the same failure surfaces immediately as an exception.
    try {
        TypeExtractor.getMapReturnTypes(function, Types.STRING);
        Assert.fail("Expected an InvalidTypesException");
    } catch (InvalidTypesException e) {
        // expected
    }
}
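The lenient mode tested here is what lets the DataStream API defer the error until the user has had a chance to supply the type explicitly via returns(...). A minimal hedged sketch of that recovery path in user code (job and class names are illustrative):

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class ReturnsHintSketch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // The lambda's return type may not survive erasure, so extraction can
        // yield a MissingTypeInfo placeholder here ...
        DataStream<String> result = env.fromElements(1, 2, 3)
                .map(i -> String.valueOf(i))
                // ... and returns(...) supplies the missing TypeInformation
                // explicitly, replacing the placeholder.
                .returns(Types.STRING);

        result.print();
        env.execute("returns() hint sketch");
    }
}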
Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.
From class KafkaConsumerTestBase, method validateSequence:
private boolean validateSequence(
        final String topic,
        final int parallelism,
        KafkaDeserializationSchema<Tuple2<Integer, Integer>> deserSchema,
        final int totalNumElements) throws Exception {

    final StreamExecutionEnvironment readEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    readEnv.getConfig().setRestartStrategy(RestartStrategies.noRestart());
    readEnv.setParallelism(parallelism);

    Properties readProps = (Properties) standardProps.clone();
    readProps.setProperty("group.id", "flink-tests-validator");
    readProps.putAll(secureProps);

    DataStreamSource<Tuple2<Integer, Integer>> dataStreamSource;
    if (useNewSource) {
        KafkaSource<Tuple2<Integer, Integer>> source =
                kafkaServer
                        .getSourceBuilder(topic, deserSchema, readProps)
                        .setStartingOffsets(OffsetsInitializer.earliest())
                        .build();
        dataStreamSource = readEnv.fromSource(source, WatermarkStrategy.noWatermarks(), "KafkaSource");
    } else {
        FlinkKafkaConsumerBase<Tuple2<Integer, Integer>> consumer =
                kafkaServer.getConsumer(topic, deserSchema, readProps);
        consumer.setStartFromEarliest();
        dataStreamSource = readEnv.addSource(consumer);
    }

    dataStreamSource
            .map(new RichMapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>>() {

                private final int totalCount = parallelism * totalNumElements;

                private int count = 0;

                @Override
                public Tuple2<Integer, Integer> map(Tuple2<Integer, Integer> value) throws Exception {
                    // Throwing SuccessException aborts the (otherwise unbounded) job
                    // once every expected record has been seen.
                    if (++count == totalCount) {
                        throw new SuccessException();
                    } else {
                        return value;
                    }
                }
            })
            .setParallelism(1)
            .addSink(new DiscardingSink<>())
            .setParallelism(1);

    final AtomicReference<Throwable> errorRef = new AtomicReference<>();

    JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(readEnv.getStreamGraph());
    final JobID jobId = jobGraph.getJobID();

    Thread runner = new Thread(() -> {
        try {
            submitJobAndWaitForResult(client, jobGraph, getClass().getClassLoader());
            tryExecute(readEnv, "sequence validation");
        } catch (Throwable t) {
            // A SuccessException means a clean early exit; anything else is a real error.
            if (!ExceptionUtils.findThrowable(t, SuccessException.class).isPresent()) {
                errorRef.set(t);
            }
        }
    });
    runner.start();

    // Wait at most 10 seconds for the validation job to reach the exit point.
    final long deadline = System.nanoTime() + 10_000_000_000L;
    long delay;
    while (runner.isAlive() && (delay = deadline - System.nanoTime()) > 0) {
        runner.join(delay / 1_000_000L);
    }

    boolean success;
    if (runner.isAlive()) {
        // did not finish in time, maybe the producer dropped one or more records and
        // the validation did not reach the exit point
        success = false;
        client.cancel(jobId).get();
    } else {
        Throwable error = errorRef.get();
        if (error != null) {
            success = false;
            LOG.info("Sequence validation job failed with exception", error);
        } else {
            success = true;
        }
    }

    waitUntilNoJobIsRunning(client);
    return success;
}
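The early-exit trick in the map function above is a common pattern in Flink integration tests: since the consumer never terminates on its own, a marker exception fails the job deliberately once all expected records have been seen, and the harness treats that particular failure as success. A standalone sketch of the pattern (SuccessException here is a local stand-in for the test utility class used above):

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.DiscardingSink;

public class EarlyExitSketch {

    // Marker exception: reaching it means validation saw everything it expected.
    private static class SuccessException extends RuntimeException {}

    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        env.fromSequence(1, 1_000_000)
                .map(new RichMapFunction<Long, Long>() {
                    private int count = 0;

                    @Override
                    public Long map(Long value) {
                        // Deliberately fail the job once the expected count is reached.
                        if (++count == 100) {
                            throw new SuccessException();
                        }
                        return value;
                    }
                })
                .addSink(new DiscardingSink<>());

        try {
            env.execute("early-exit sketch");
        } catch (Exception e) {
            // The job "failed", but only because the marker was thrown.
            System.out.println("validated: " + hasCause(e, SuccessException.class));
        }
    }

    private static boolean hasCause(Throwable t, Class<?> type) {
        for (Throwable cur = t; cur != null; cur = cur.getCause()) {
            if (type.isInstance(cur)) {
                return true;
            }
        }
        return false;
    }
}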
Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.
From class MapOperatorTest, method testMapWithRuntimeContext:
@Test
public void testMapWithRuntimeContext() {
    try {
        final String taskName = "Test Task";
        final AtomicBoolean opened = new AtomicBoolean();
        final AtomicBoolean closed = new AtomicBoolean();

        final MapFunction<String, Integer> parser = new RichMapFunction<String, Integer>() {

            @Override
            public void open(Configuration parameters) throws Exception {
                opened.set(true);
                RuntimeContext ctx = getRuntimeContext();
                assertEquals(0, ctx.getIndexOfThisSubtask());
                assertEquals(1, ctx.getNumberOfParallelSubtasks());
                assertEquals(taskName, ctx.getTaskName());
            }

            @Override
            public Integer map(String value) {
                return Integer.parseInt(value);
            }

            @Override
            public void close() throws Exception {
                closed.set(true);
            }
        };

        MapOperatorBase<String, Integer, MapFunction<String, Integer>> op =
                new MapOperatorBase<>(
                        parser,
                        new UnaryOperatorInformation<>(
                                BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO),
                        taskName);

        List<String> input = new ArrayList<>(asList("1", "2", "3", "4", "5", "6"));

        final HashMap<String, Accumulator<?, ?>> accumulatorMap = new HashMap<>();
        final HashMap<String, Future<Path>> cpTasks = new HashMap<>();
        final TaskInfo taskInfo = new TaskInfo(taskName, 1, 0, 1, 0);

        ExecutionConfig executionConfig = new ExecutionConfig();

        executionConfig.disableObjectReuse();
        List<Integer> resultMutableSafe =
                op.executeOnCollections(
                        input,
                        new RuntimeUDFContext(
                                taskInfo,
                                null,
                                executionConfig,
                                cpTasks,
                                accumulatorMap,
                                UnregisteredMetricsGroup.createOperatorMetricGroup()),
                        executionConfig);

        executionConfig.enableObjectReuse();
        List<Integer> resultRegular =
                op.executeOnCollections(
                        input,
                        new RuntimeUDFContext(
                                taskInfo,
                                null,
                                executionConfig,
                                cpTasks,
                                accumulatorMap,
                                UnregisteredMetricsGroup.createOperatorMetricGroup()),
                        executionConfig);

        assertEquals(asList(1, 2, 3, 4, 5, 6), resultMutableSafe);
        assertEquals(asList(1, 2, 3, 4, 5, 6), resultRegular);
        assertTrue(opened.get());
        assertTrue(closed.get());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
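The open/map/close sequence asserted above is the lifecycle contract every RichMapFunction gets at runtime. A minimal sketch of how user code typically exploits it (class and job names are illustrative):

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class LifecycleSketch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        env.fromElements("1", "2", "3")
                .map(new RichMapFunction<String, Integer>() {
                    private transient int subtask;

                    @Override
                    public void open(Configuration parameters) {
                        // Runs once per parallel instance, before the first map() call;
                        // the RuntimeContext is only available from here on.
                        subtask = getRuntimeContext().getIndexOfThisSubtask();
                    }

                    @Override
                    public Integer map(String value) {
                        return Integer.parseInt(value);
                    }

                    @Override
                    public void close() {
                        // Runs once per instance on shutdown; release resources here.
                        System.out.println("subtask " + subtask + " closing");
                    }
                })
                .print();

        env.execute("lifecycle sketch");
    }
}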
Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.
From class WorksetIterationsJavaApiCompilerTest, method getJavaTestPlan:
private Plan getJavaTestPlan(boolean joinPreservesSolutionSet, boolean mapBeforeSolutionDelta) {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(DEFAULT_PARALLELISM);

    @SuppressWarnings("unchecked")
    DataSet<Tuple3<Long, Long, Long>> solutionSetInput =
            env.fromElements(new Tuple3<Long, Long, Long>(1L, 2L, 3L)).name("Solution Set");
    @SuppressWarnings("unchecked")
    DataSet<Tuple3<Long, Long, Long>> worksetInput =
            env.fromElements(new Tuple3<Long, Long, Long>(1L, 2L, 3L)).name("Workset");
    @SuppressWarnings("unchecked")
    DataSet<Tuple3<Long, Long, Long>> invariantInput =
            env.fromElements(new Tuple3<Long, Long, Long>(1L, 2L, 3L)).name("Invariant Input");

    DeltaIteration<Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>> iter =
            solutionSetInput.iterateDelta(worksetInput, 100, 1, 2);

    DataSet<Tuple3<Long, Long, Long>> joinedWithSolutionSet = iter.getWorkset()
            .join(invariantInput).where(1, 2).equalTo(1, 2)
            .with(new RichJoinFunction<Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>>() {
                public Tuple3<Long, Long, Long> join(Tuple3<Long, Long, Long> first, Tuple3<Long, Long, Long> second) {
                    return first;
                }
            }).name(JOIN_WITH_INVARIANT_NAME)
            .join(iter.getSolutionSet()).where(1, 0).equalTo(1, 2)
            .with(new RichJoinFunction<Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>>() {
                public Tuple3<Long, Long, Long> join(Tuple3<Long, Long, Long> first, Tuple3<Long, Long, Long> second) {
                    return second;
                }
            }).name(JOIN_WITH_SOLUTION_SET)
            .withForwardedFieldsSecond(
                    joinPreservesSolutionSet ? new String[] { "0->0", "1->1", "2->2" } : null);

    DataSet<Tuple3<Long, Long, Long>> nextWorkset = joinedWithSolutionSet.groupBy(1, 2)
            .reduceGroup(new RichGroupReduceFunction<Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>>() {
                public void reduce(Iterable<Tuple3<Long, Long, Long>> values, Collector<Tuple3<Long, Long, Long>> out) {}
            }).name(NEXT_WORKSET_REDUCER_NAME)
            .withForwardedFields("1->1", "2->2", "0->0");

    DataSet<Tuple3<Long, Long, Long>> nextSolutionSet = mapBeforeSolutionDelta
            ? joinedWithSolutionSet
                    .map(new RichMapFunction<Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>>() {
                        public Tuple3<Long, Long, Long> map(Tuple3<Long, Long, Long> value) {
                            return value;
                        }
                    }).name(SOLUTION_DELTA_MAPPER_NAME)
                    .withForwardedFields("0->0", "1->1", "2->2")
            : joinedWithSolutionSet;

    iter.closeWith(nextSolutionSet, nextWorkset)
            .output(new DiscardingOutputFormat<Tuple3<Long, Long, Long>>());

    return env.createProgramPlan();
}
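For reference, the delta-iteration plan built above reduces to the following minimal shape: a solution set keyed on some fields, a workset, one step that joins the workset against the solution set, and a closeWith(delta, nextWorkset) that feeds both back. A hedged sketch under those assumptions, with the join/reduce logic stripped to one step (class name and logic are illustrative):

import org.apache.flink.api.common.functions.JoinFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DeltaIteration;
import org.apache.flink.api.java.tuple.Tuple2;

public class DeltaIterationSketch {

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<Tuple2<Long, Long>> initial =
                env.fromElements(Tuple2.of(1L, 2L), Tuple2.of(3L, 4L));

        // Solution set and initial workset start out identical here;
        // field 0 is the solution-set key, 10 the maximum iteration count.
        DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iteration =
                initial.iterateDelta(initial, 10, 0);

        // One step: join the workset with the solution set on the solution-set
        // key and emit updated records.
        DataSet<Tuple2<Long, Long>> delta = iteration.getWorkset()
                .join(iteration.getSolutionSet())
                .where(0)
                .equalTo(0)
                .with(new JoinFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>>() {
                    @Override
                    public Tuple2<Long, Long> join(Tuple2<Long, Long> workset, Tuple2<Long, Long> solution) {
                        return Tuple2.of(workset.f0, workset.f1 + solution.f1);
                    }
                });

        // First argument: updates merged into the solution set by key.
        // Second argument: the workset for the next iteration (an empty workset
        // terminates the iteration before the maximum count is reached).
        DataSet<Tuple2<Long, Long>> result = iteration.closeWith(delta, delta);

        result.print();
    }
}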