Example 6 with RichMapFunction

Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.

The class TypeExtractorTest, method testTupleOfValues.

@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testTupleOfValues() {
    // use getMapReturnTypes()
    RichMapFunction<?, ?> function = new RichMapFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>() {

        private static final long serialVersionUID = 1L;

        @Override
        public Tuple2<StringValue, IntValue> map(Tuple2<StringValue, IntValue> value) throws Exception {
            return null;
        }
    };
    TypeInformation<?> ti = TypeExtractor.getMapReturnTypes(function, (TypeInformation) TypeInformation.of(new TypeHint<Tuple2<StringValue, IntValue>>() {
    }));
    Assert.assertFalse(ti.isBasicType());
    Assert.assertTrue(ti.isTupleType());
    Assert.assertEquals(StringValue.class, ((TupleTypeInfo<?>) ti).getTypeAt(0).getTypeClass());
    Assert.assertEquals(IntValue.class, ((TupleTypeInfo<?>) ti).getTypeAt(1).getTypeClass());
    // use getForObject()
    Tuple2<StringValue, IntValue> t = new Tuple2<StringValue, IntValue>(new StringValue("x"), new IntValue(1));
    TypeInformation<?> ti2 = TypeExtractor.getForObject(t);
    Assert.assertFalse(ti2.isBasicType());
    Assert.assertTrue(ti2.isTupleType());
    Assert.assertEquals(((TupleTypeInfo<?>) ti2).getTypeAt(0).getTypeClass(), StringValue.class);
    Assert.assertEquals(((TupleTypeInfo<?>) ti2).getTypeAt(1).getTypeClass(), IntValue.class);
}
Also used: RichMapFunction(org.apache.flink.api.common.functions.RichMapFunction) Tuple2(org.apache.flink.api.java.tuple.Tuple2) StringValue(org.apache.flink.types.StringValue) IntValue(org.apache.flink.types.IntValue) Test(org.junit.Test)
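
A minimal standalone sketch of the same extraction calls, outside the test harness (the class name ValueTupleTypeDemo is illustrative, not part of Flink):

import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.types.IntValue;
import org.apache.flink.types.StringValue;

public class ValueTupleTypeDemo {
    public static void main(String[] args) {
        // Describe the tuple type via a TypeHint, as the test does for
        // the input side of getMapReturnTypes().
        TypeInformation<Tuple2<StringValue, IntValue>> hinted =
                TypeInformation.of(new TypeHint<Tuple2<StringValue, IntValue>>() {});
        System.out.println(hinted.isTupleType()); // true

        // Or extract it from a concrete instance, as with getForObject().
        Tuple2<StringValue, IntValue> t =
                new Tuple2<>(new StringValue("x"), new IntValue(1));
        TypeInformation<?> ti = TypeExtractor.getForObject(t);
        System.out.println(ti.isBasicType()); // false: tuples are composite types
    }
}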

Example 7 with RichMapFunction

Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.

The class TypeExtractorTest, method testFunctionWithMissingGenerics.

@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testFunctionWithMissingGenerics() {
    RichMapFunction function = new RichMapFunction() {

        private static final long serialVersionUID = 1L;

        @Override
        public String map(Object value) throws Exception {
            return null;
        }
    };
    TypeInformation<?> ti = TypeExtractor.getMapReturnTypes(function, Types.STRING, "name", true);
    Assert.assertTrue(ti instanceof MissingTypeInfo);
    try {
        TypeExtractor.getMapReturnTypes(function, Types.STRING);
        Assert.fail("Expected an exception");
    } catch (InvalidTypesException e) {
        // expected
    }
}
Also used: RichMapFunction(org.apache.flink.api.common.functions.RichMapFunction) InvalidTypesException(org.apache.flink.api.common.functions.InvalidTypesException) Test(org.junit.Test)
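
When generics really are missing in user code (raw types or lambdas), the usual remedy is to declare the result type explicitly instead of letting extraction fail. A minimal sketch, assuming the DataStream API; the job itself is illustrative:

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class ExplicitReturnsDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment();
        env.fromElements("1", "2", "3")
                // The lambda's return type is erased at runtime, so the
                // extractor may produce a MissingTypeInfo; returns(...)
                // supplies the type explicitly.
                .map(value -> Integer.parseInt(value))
                .returns(Types.INT)
                .print();
        env.execute("explicit returns demo");
    }
}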

Example 8 with RichMapFunction

Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.

The class KafkaConsumerTestBase, method validateSequence.

private boolean validateSequence(final String topic, final int parallelism, KafkaDeserializationSchema<Tuple2<Integer, Integer>> deserSchema, final int totalNumElements) throws Exception {
    final StreamExecutionEnvironment readEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    readEnv.getConfig().setRestartStrategy(RestartStrategies.noRestart());
    readEnv.setParallelism(parallelism);
    Properties readProps = (Properties) standardProps.clone();
    readProps.setProperty("group.id", "flink-tests-validator");
    readProps.putAll(secureProps);
    DataStreamSource<Tuple2<Integer, Integer>> dataStreamSource;
    if (useNewSource) {
        KafkaSource<Tuple2<Integer, Integer>> source = kafkaServer.getSourceBuilder(topic, deserSchema, readProps).setStartingOffsets(OffsetsInitializer.earliest()).build();
        dataStreamSource = readEnv.fromSource(source, WatermarkStrategy.noWatermarks(), "KafkaSource");
    } else {
        FlinkKafkaConsumerBase<Tuple2<Integer, Integer>> consumer = kafkaServer.getConsumer(topic, deserSchema, readProps);
        consumer.setStartFromEarliest();
        dataStreamSource = readEnv.addSource(consumer);
    }
    dataStreamSource.map(new RichMapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>>() {

        private final int totalCount = parallelism * totalNumElements;

        private int count = 0;

        @Override
        public Tuple2<Integer, Integer> map(Tuple2<Integer, Integer> value) throws Exception {
            if (++count == totalCount) {
                throw new SuccessException();
            } else {
                return value;
            }
        }
    }).setParallelism(1).addSink(new DiscardingSink<>()).setParallelism(1);
    final AtomicReference<Throwable> errorRef = new AtomicReference<>();
    JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(readEnv.getStreamGraph());
    final JobID jobId = jobGraph.getJobID();
    Thread runner = new Thread(() -> {
        try {
            submitJobAndWaitForResult(client, jobGraph, getClass().getClassLoader());
            tryExecute(readEnv, "sequence validation");
        } catch (Throwable t) {
            if (!ExceptionUtils.findThrowable(t, SuccessException.class).isPresent()) {
                errorRef.set(t);
            }
        }
    });
    runner.start();
    final long deadline = System.nanoTime() + 10_000_000_000L;
    long delay;
    while (runner.isAlive() && (delay = deadline - System.nanoTime()) > 0) {
        runner.join(delay / 1_000_000L);
    }
    boolean success;
    if (runner.isAlive()) {
        // did not finish in time, maybe the producer dropped one or more records and
        // the validation did not reach the exit point
        success = false;
        client.cancel(jobId).get();
    } else {
        Throwable error = errorRef.get();
        if (error != null) {
            success = false;
            LOG.info("Sequence validation job failed with exception", error);
        } else {
            success = true;
        }
    }
    waitUntilNoJobIsRunning(client);
    return success;
}
Also used: DiscardingSink(org.apache.flink.streaming.api.functions.sink.DiscardingSink) AtomicReference(java.util.concurrent.atomic.AtomicReference) Properties(java.util.Properties) JobGraph(org.apache.flink.runtime.jobgraph.JobGraph) Tuple2(org.apache.flink.api.java.tuple.Tuple2) RichMapFunction(org.apache.flink.api.common.functions.RichMapFunction) SuccessException(org.apache.flink.test.util.SuccessException) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) JobID(org.apache.flink.api.common.JobID)
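
The heart of the validator is the count-then-throw pattern: a RichMapFunction throws a marker exception once every expected record has passed, turning an otherwise unbounded read into a terminating test. A stripped-down sketch of just that pattern, with a bounded source standing in for Kafka (the class names here, including the local SuccessException, are illustrative stand-ins):

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class CountAndStopDemo {

    /** Illustrative stand-in for org.apache.flink.test.util.SuccessException. */
    static class SuccessException extends RuntimeException {}

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment();
        env.fromElements(1, 2, 3)
                .map(new RichMapFunction<Integer, Integer>() {
                    private int count = 0;

                    @Override
                    public Integer map(Integer value) {
                        // Deliberately fail the job once all expected
                        // elements have been seen; the harness treats this
                        // particular exception as success.
                        if (++count == 3) {
                            throw new SuccessException();
                        }
                        return value;
                    }
                })
                // Parallelism 1 so a single counter sees every element,
                // as in validateSequence() above.
                .setParallelism(1)
                .print();
        try {
            env.execute("count and stop demo");
        } catch (Exception e) {
            // The real test unwraps the cause chain with
            // ExceptionUtils.findThrowable(t, SuccessException.class).
            System.out.println("job ended (expected via SuccessException)");
        }
    }
}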

Example 9 with RichMapFunction

Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.

The class MapOperatorTest, method testMapWithRuntimeContext.

@Test
public void testMapWithRuntimeContext() {
    try {
        final String taskName = "Test Task";
        final AtomicBoolean opened = new AtomicBoolean();
        final AtomicBoolean closed = new AtomicBoolean();
        final MapFunction<String, Integer> parser = new RichMapFunction<String, Integer>() {

            @Override
            public void open(Configuration parameters) throws Exception {
                opened.set(true);
                RuntimeContext ctx = getRuntimeContext();
                assertEquals(0, ctx.getIndexOfThisSubtask());
                assertEquals(1, ctx.getNumberOfParallelSubtasks());
                assertEquals(taskName, ctx.getTaskName());
            }

            @Override
            public Integer map(String value) {
                return Integer.parseInt(value);
            }

            @Override
            public void close() throws Exception {
                closed.set(true);
            }
        };
        MapOperatorBase<String, Integer, MapFunction<String, Integer>> op = new MapOperatorBase<String, Integer, MapFunction<String, Integer>>(parser, new UnaryOperatorInformation<String, Integer>(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO), taskName);
        List<String> input = new ArrayList<String>(asList("1", "2", "3", "4", "5", "6"));
        final HashMap<String, Accumulator<?, ?>> accumulatorMap = new HashMap<String, Accumulator<?, ?>>();
        final HashMap<String, Future<Path>> cpTasks = new HashMap<>();
        final TaskInfo taskInfo = new TaskInfo(taskName, 1, 0, 1, 0);
        ExecutionConfig executionConfig = new ExecutionConfig();
        executionConfig.disableObjectReuse();
        List<Integer> resultMutableSafe = op.executeOnCollections(input, new RuntimeUDFContext(taskInfo, null, executionConfig, cpTasks, accumulatorMap, UnregisteredMetricsGroup.createOperatorMetricGroup()), executionConfig);
        executionConfig.enableObjectReuse();
        List<Integer> resultRegular = op.executeOnCollections(input, new RuntimeUDFContext(taskInfo, null, executionConfig, cpTasks, accumulatorMap, UnregisteredMetricsGroup.createOperatorMetricGroup()), executionConfig);
        assertEquals(asList(1, 2, 3, 4, 5, 6), resultMutableSafe);
        assertEquals(asList(1, 2, 3, 4, 5, 6), resultRegular);
        assertTrue(opened.get());
        assertTrue(closed.get());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used: Accumulator(org.apache.flink.api.common.accumulators.Accumulator) Configuration(org.apache.flink.configuration.Configuration) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) MapFunction(org.apache.flink.api.common.functions.MapFunction) RichMapFunction(org.apache.flink.api.common.functions.RichMapFunction) TaskInfo(org.apache.flink.api.common.TaskInfo) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) RuntimeUDFContext(org.apache.flink.api.common.functions.util.RuntimeUDFContext) Future(java.util.concurrent.Future) RuntimeContext(org.apache.flink.api.common.functions.RuntimeContext) Test(org.junit.Test)
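
Outside the collection-execution harness, the same lifecycle hooks behave identically in a regular job. A minimal sketch (class name illustrative) of a RichMapFunction using open(), close(), and the RuntimeContext:

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class LifecycleDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment();
        env.fromElements("1", "2", "3")
                .map(new RichMapFunction<String, Integer>() {
                    @Override
                    public void open(Configuration parameters) {
                        // Called once per parallel instance, before any map().
                        System.out.printf("subtask %d of %d starting%n",
                                getRuntimeContext().getIndexOfThisSubtask(),
                                getRuntimeContext().getNumberOfParallelSubtasks());
                    }

                    @Override
                    public Integer map(String value) {
                        return Integer.parseInt(value);
                    }

                    @Override
                    public void close() {
                        // Called once after the last record has been processed.
                    }
                })
                .print();
        env.execute("lifecycle demo");
    }
}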

Example 10 with RichMapFunction

Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.

The class WorksetIterationsJavaApiCompilerTest, method getJavaTestPlan.

private Plan getJavaTestPlan(boolean joinPreservesSolutionSet, boolean mapBeforeSolutionDelta) {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(DEFAULT_PARALLELISM);
    @SuppressWarnings("unchecked") DataSet<Tuple3<Long, Long, Long>> solutionSetInput = env.fromElements(new Tuple3<Long, Long, Long>(1L, 2L, 3L)).name("Solution Set");
    @SuppressWarnings("unchecked") DataSet<Tuple3<Long, Long, Long>> worksetInput = env.fromElements(new Tuple3<Long, Long, Long>(1L, 2L, 3L)).name("Workset");
    @SuppressWarnings("unchecked") DataSet<Tuple3<Long, Long, Long>> invariantInput = env.fromElements(new Tuple3<Long, Long, Long>(1L, 2L, 3L)).name("Invariant Input");
    DeltaIteration<Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>> iter = solutionSetInput.iterateDelta(worksetInput, 100, 1, 2);
    DataSet<Tuple3<Long, Long, Long>> joinedWithSolutionSet = iter.getWorkset().join(invariantInput).where(1, 2).equalTo(1, 2).with(new RichJoinFunction<Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>>() {

        public Tuple3<Long, Long, Long> join(Tuple3<Long, Long, Long> first, Tuple3<Long, Long, Long> second) {
            return first;
        }
    }).name(JOIN_WITH_INVARIANT_NAME).join(iter.getSolutionSet()).where(1, 0).equalTo(1, 2).with(new RichJoinFunction<Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>>() {

        public Tuple3<Long, Long, Long> join(Tuple3<Long, Long, Long> first, Tuple3<Long, Long, Long> second) {
            return second;
        }
    }).name(JOIN_WITH_SOLUTION_SET).withForwardedFieldsSecond(joinPreservesSolutionSet ? new String[] { "0->0", "1->1", "2->2" } : null);
    DataSet<Tuple3<Long, Long, Long>> nextWorkset = joinedWithSolutionSet.groupBy(1, 2).reduceGroup(new RichGroupReduceFunction<Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>>() {

        public void reduce(Iterable<Tuple3<Long, Long, Long>> values, Collector<Tuple3<Long, Long, Long>> out) {
        }
    }).name(NEXT_WORKSET_REDUCER_NAME).withForwardedFields("1->1", "2->2", "0->0");
    DataSet<Tuple3<Long, Long, Long>> nextSolutionSet = mapBeforeSolutionDelta ? joinedWithSolutionSet.map(new RichMapFunction<Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>>() {

        public Tuple3<Long, Long, Long> map(Tuple3<Long, Long, Long> value) {
            return value;
        }
    }).name(SOLUTION_DELTA_MAPPER_NAME).withForwardedFields("0->0", "1->1", "2->2") : joinedWithSolutionSet;
    iter.closeWith(nextSolutionSet, nextWorkset).output(new DiscardingOutputFormat<Tuple3<Long, Long, Long>>());
    return env.createProgramPlan();
}
Also used: ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) RichJoinFunction(org.apache.flink.api.common.functions.RichJoinFunction) RichMapFunction(org.apache.flink.api.common.functions.RichMapFunction) Tuple3(org.apache.flink.api.java.tuple.Tuple3)
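
For orientation, a compressed sketch of the same delta-iteration skeleton without the plan-compiler assertions (constants and class name are illustrative; a real step function would join the workset against the solution set as above):

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DeltaIteration;
import org.apache.flink.api.java.tuple.Tuple2;

public class DeltaIterationDemo {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<Tuple2<Long, Long>> initial =
                env.fromElements(Tuple2.of(1L, 0L), Tuple2.of(2L, 0L));

        // At most 10 supersteps; the solution set is keyed on field 0.
        DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iter =
                initial.iterateDelta(initial, 10, 0);

        // Trivial step function: bump field 1, keep the key.
        DataSet<Tuple2<Long, Long>> delta = iter.getWorkset()
                .map(new RichMapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>>() {
                    @Override
                    public Tuple2<Long, Long> map(Tuple2<Long, Long> value) {
                        return Tuple2.of(value.f0, value.f1 + 1);
                    }
                });

        // The first argument updates the solution set; the second becomes
        // the next workset. An empty workset would end the loop early.
        iter.closeWith(delta, delta).print();
    }
}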

Aggregations

RichMapFunction (org.apache.flink.api.common.functions.RichMapFunction): 15
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 6
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 6
Test (org.junit.Test): 6
Configuration (org.apache.flink.configuration.Configuration): 5
ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment): 4
JobGraph (org.apache.flink.runtime.jobgraph.JobGraph): 3
File (java.io.File): 2
ArrayList (java.util.ArrayList): 2
Properties (java.util.Properties): 2
TimeoutException (java.util.concurrent.TimeoutException): 2
InvalidTypesException (org.apache.flink.api.common.functions.InvalidTypesException): 2
MapFunction (org.apache.flink.api.common.functions.MapFunction): 2
SuccessException (org.apache.flink.test.util.SuccessException): 2
BigInteger (java.math.BigInteger): 1
HashMap (java.util.HashMap): 1
ExecutionException (java.util.concurrent.ExecutionException): 1
Future (java.util.concurrent.Future): 1
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 1
AtomicReference (java.util.concurrent.atomic.AtomicReference): 1