
Example 11 with RichMapFunction

Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.

From class TypeExtractionTest, method testFunctionWithMissingGenericsAndReturns.

@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testFunctionWithMissingGenericsAndReturns() {
    RichMapFunction function = new RichMapFunction() {

        private static final long serialVersionUID = 1L;

        @Override
        public Object map(Object value) throws Exception {
            return null;
        }
    };
    TypeInformation<?> info = ExecutionEnvironment.getExecutionEnvironment().fromElements("arbitrary", "data").map(function).returns(Types.STRING).getResultType();
    assertEquals(Types.STRING, info);
}
Also used: RichMapFunction (org.apache.flink.api.common.functions.RichMapFunction), Test (org.junit.Test)
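
Example 11 works because returns() hands the extractor the output type that the raw function signature no longer carries. As a minimal sketch of the same idea in an ordinary DataSet job, assuming a lambda whose return type is erased (the class and variable names here are illustrative, not from the Flink sources):

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;

public class ReturnsHintSketch {

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // the lambda erases its generic return type, so returns() supplies the TypeInformation
        DataSet<String> upper = env.fromElements("flink", "rich", "map")
                .map(s -> s.toUpperCase())
                .returns(Types.STRING);

        upper.print();
    }
}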

Example 12 with RichMapFunction

Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.

From class TypeExtractorTest, method testTupleWithBasicTypes.

@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testTupleWithBasicTypes() throws Exception {
    // use getMapReturnTypes()
    RichMapFunction<?, ?> function = new RichMapFunction<Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte>, Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte>>() {

        private static final long serialVersionUID = 1L;

        @Override
        public Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte> map(Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte> value) throws Exception {
            return null;
        }
    };
    TypeInformation<?> ti = TypeExtractor.getMapReturnTypes(function, (TypeInformation) TypeInformation.of(new TypeHint<Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte>>() {
    }));
    Assert.assertTrue(ti.isTupleType());
    Assert.assertEquals(9, ti.getArity());
    Assert.assertTrue(ti instanceof TupleTypeInfo);
    List<FlatFieldDescriptor> ffd = new ArrayList<FlatFieldDescriptor>();
    ((TupleTypeInfo) ti).getFlatFields("f3", 0, ffd);
    Assert.assertTrue(ffd.size() == 1);
    Assert.assertEquals(3, ffd.get(0).getPosition());
    TupleTypeInfo<?> tti = (TupleTypeInfo<?>) ti;
    Assert.assertEquals(Tuple9.class, tti.getTypeClass());
    for (int i = 0; i < 9; i++) {
        Assert.assertTrue(tti.getTypeAt(i) instanceof BasicTypeInfo);
    }
    Assert.assertEquals(BasicTypeInfo.INT_TYPE_INFO, tti.getTypeAt(0));
    Assert.assertEquals(BasicTypeInfo.LONG_TYPE_INFO, tti.getTypeAt(1));
    Assert.assertEquals(BasicTypeInfo.DOUBLE_TYPE_INFO, tti.getTypeAt(2));
    Assert.assertEquals(BasicTypeInfo.FLOAT_TYPE_INFO, tti.getTypeAt(3));
    Assert.assertEquals(BasicTypeInfo.BOOLEAN_TYPE_INFO, tti.getTypeAt(4));
    Assert.assertEquals(BasicTypeInfo.STRING_TYPE_INFO, tti.getTypeAt(5));
    Assert.assertEquals(BasicTypeInfo.CHAR_TYPE_INFO, tti.getTypeAt(6));
    Assert.assertEquals(BasicTypeInfo.SHORT_TYPE_INFO, tti.getTypeAt(7));
    Assert.assertEquals(BasicTypeInfo.BYTE_TYPE_INFO, tti.getTypeAt(8));
    // use getForObject()
    Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte> t = new Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte>(1, 1L, 1.0, 1.0F, false, "Hello World", 'w', (short) 1, (byte) 1);
    Assert.assertTrue(TypeExtractor.getForObject(t) instanceof TupleTypeInfo);
    TupleTypeInfo<?> tti2 = (TupleTypeInfo<?>) TypeExtractor.getForObject(t);
    Assert.assertEquals(BasicTypeInfo.INT_TYPE_INFO, tti2.getTypeAt(0));
    Assert.assertEquals(BasicTypeInfo.LONG_TYPE_INFO, tti2.getTypeAt(1));
    Assert.assertEquals(BasicTypeInfo.DOUBLE_TYPE_INFO, tti2.getTypeAt(2));
    Assert.assertEquals(BasicTypeInfo.FLOAT_TYPE_INFO, tti2.getTypeAt(3));
    Assert.assertEquals(BasicTypeInfo.BOOLEAN_TYPE_INFO, tti2.getTypeAt(4));
    Assert.assertEquals(BasicTypeInfo.STRING_TYPE_INFO, tti2.getTypeAt(5));
    Assert.assertEquals(BasicTypeInfo.CHAR_TYPE_INFO, tti2.getTypeAt(6));
    Assert.assertEquals(BasicTypeInfo.SHORT_TYPE_INFO, tti2.getTypeAt(7));
    Assert.assertEquals(BasicTypeInfo.BYTE_TYPE_INFO, tti2.getTypeAt(8));
    // test that getForClass does not work
    try {
        TypeExtractor.getForClass(Tuple9.class);
        Assert.fail("Exception expected here");
    } catch (InvalidTypesException e) {
    // that is correct
    }
}
Also used: ArrayList (java.util.ArrayList), Tuple9 (org.apache.flink.api.java.tuple.Tuple9), FlatFieldDescriptor (org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor), TypeHint (org.apache.flink.api.common.typeinfo.TypeHint), BigInteger (java.math.BigInteger), RichMapFunction (org.apache.flink.api.common.functions.RichMapFunction), BasicTypeInfo (org.apache.flink.api.common.typeinfo.BasicTypeInfo), InvalidTypesException (org.apache.flink.api.common.functions.InvalidTypesException), Test (org.junit.Test)
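
Outside the test harness, the TypeHint passed to getMapReturnTypes() above is also the usual way to hand a complete generic type to the extractor. A small sketch, assuming a Tuple2<String, Integer> payload chosen only for illustration:

import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

public class TypeHintSketch {

    public static void main(String[] args) {
        // an anonymous TypeHint subclass captures the full generic type, which a plain class literal cannot do
        TypeInformation<Tuple2<String, Integer>> info =
                TypeInformation.of(new TypeHint<Tuple2<String, Integer>>() {});

        // the extracted info is a TupleTypeInfo with one entry per tuple field
        TupleTypeInfo<?> tupleInfo = (TupleTypeInfo<?>) info;
        System.out.println(info.isTupleType());      // true
        System.out.println(info.getArity());         // 2
        System.out.println(tupleInfo.getTypeAt(0));  // String
        System.out.println(tupleInfo.getTypeAt(1));  // Integer
    }
}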

Example 13 with RichMapFunction

Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.

From class AsyncWaitOperatorTest, method createChainedVertex.

private JobVertex createChainedVertex(AsyncFunction<Integer, Integer> firstFunction, AsyncFunction<Integer, Integer> secondFunction) {
    StreamExecutionEnvironment chainEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    // set parallelism to 2 to avoid chaining with the source when only one processor is available.
    chainEnv.setParallelism(2);
    // the input is only used to construct a chained operator; it is not used in the real tests.
    DataStream<Integer> input = chainEnv.fromElements(1, 2, 3);
    input = addAsyncOperatorLegacyChained(input, firstFunction, TIMEOUT, 6, AsyncDataStream.OutputMode.ORDERED);
    // the map function is designed to chain after the async function. It holds an Integer field
    // that is initialized in the open() method and is used to verify that operators in the chain
    // are opened from tail to head, so that the result from the AsyncWaitOperator is passed down
    // successfully and correctly. If not, the test cannot pass.
    input = input.map(new RichMapFunction<Integer, Integer>() {

        private static final long serialVersionUID = 1L;

        private Integer initialValue = null;

        @Override
        public void open(Configuration parameters) throws Exception {
            initialValue = 1;
        }

        @Override
        public Integer map(Integer value) throws Exception {
            return initialValue + value;
        }
    });
    input = addAsyncOperatorLegacyChained(input, secondFunction, TIMEOUT, 3, AsyncDataStream.OutputMode.UNORDERED);
    input.map(new MapFunction<Integer, Integer>() {

        private static final long serialVersionUID = 5162085254238405527L;

        @Override
        public Integer map(Integer value) throws Exception {
            return value;
        }
    }).startNewChain().addSink(new DiscardingSink<Integer>());
    // here we build our own OperatorChain
    final JobGraph jobGraph = chainEnv.getStreamGraph().getJobGraph();
    Assert.assertEquals(3, jobGraph.getVerticesSortedTopologicallyFromSources().size());
    return jobGraph.getVerticesSortedTopologicallyFromSources().get(1);
}
Also used: JobGraph (org.apache.flink.runtime.jobgraph.JobGraph), Configuration (org.apache.flink.configuration.Configuration), RichMapFunction (org.apache.flink.api.common.functions.RichMapFunction), StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment), TimeoutException (java.util.concurrent.TimeoutException)
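
The chained mapper in Example 13 depends on the RichMapFunction lifecycle: open() runs once per parallel instance before the first map() call, so a field initialized there is safe to read in map(). A stand-alone sketch of that pattern, with an illustrative class name:

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;

public class OffsetMapper extends RichMapFunction<Integer, Integer> {

    private static final long serialVersionUID = 1L;

    // initialized in open(), not at construction time, so it is set on every parallel instance
    private transient Integer offset;

    @Override
    public void open(Configuration parameters) throws Exception {
        offset = 1;
    }

    @Override
    public Integer map(Integer value) throws Exception {
        // open() is guaranteed to have run before the first record arrives
        return offset + value;
    }
}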

Example 14 with RichMapFunction

Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.

From class TaskManagerProcessFailureBatchRecoveryITCase, method testTaskManagerFailure.

// --------------------------------------------------------------------------------------------
// Test the program
// --------------------------------------------------------------------------------------------
@Override
public void testTaskManagerFailure(Configuration configuration, final File coordinateDir) throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.createRemoteEnvironment("localhost", 1337, configuration);
    env.setParallelism(PARALLELISM);
    env.setRestartStrategy(RestartStrategies.fixedDelayRestart(1, 5000L));
    env.getConfig().setExecutionMode(executionMode);
    final long numElements = 100000L;
    final DataSet<Long> result = env.generateSequence(1, numElements).rebalance().map(new RichMapFunction<Long, Long>() {

        private final File proceedFile = new File(coordinateDir, PROCEED_MARKER_FILE);

        private boolean markerCreated = false;

        private boolean checkForProceedFile = true;

        @Override
        public Long map(Long value) throws Exception {
            if (!markerCreated) {
                int taskIndex = getRuntimeContext().getIndexOfThisSubtask();
                touchFile(new File(coordinateDir, READY_MARKER_FILE_PREFIX + taskIndex));
                markerCreated = true;
            }
            // check if the proceed file exists
            if (checkForProceedFile) {
                if (proceedFile.exists()) {
                    checkForProceedFile = false;
                } else {
                    // otherwise wait so that we make slow progress
                    Thread.sleep(100);
                }
            }
            return value;
        }
    }).reduce(new ReduceFunction<Long>() {

        @Override
        public Long reduce(Long value1, Long value2) {
            return value1 + value2;
        }
    });
    long sum = result.collect().get(0);
    assertEquals(numElements * (numElements + 1L) / 2L, sum);
}
Also used: ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment), RichMapFunction (org.apache.flink.api.common.functions.RichMapFunction), File (java.io.File)
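
Example 14 uses getRuntimeContext() to learn the subtask index and coordinate recovery through marker files. A smaller sketch of the same RuntimeContext access, simply tagging each record with the subtask that processed it (the class name is illustrative):

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;

public class SubtaskTagger extends RichMapFunction<Long, Tuple2<Integer, Long>> {

    private static final long serialVersionUID = 1L;

    @Override
    public Tuple2<Integer, Long> map(Long value) throws Exception {
        // the runtime context is only available in rich functions, not in a plain MapFunction
        int subtask = getRuntimeContext().getIndexOfThisSubtask();
        return Tuple2.of(subtask, value);
    }
}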

Example 15 with RichMapFunction

Use of org.apache.flink.api.common.functions.RichMapFunction in project flink by apache.

From class StateBackendITCase, method testStateBackendWithoutCheckpointing.

/**
 * Verify that the user-specified state backend is used even if checkpointing is disabled.
 */
@Test
public void testStateBackendWithoutCheckpointing() throws Exception {
    StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
    see.setParallelism(1);
    see.getConfig().setRestartStrategy(RestartStrategies.noRestart());
    see.setStateBackend(new FailingStateBackend());
    see.fromElements(new Tuple2<>("Hello", 1)).keyBy(0).map(new RichMapFunction<Tuple2<String, Integer>, String>() {

        private static final long serialVersionUID = 1L;

        @Override
        public void open(Configuration parameters) throws Exception {
            super.open(parameters);
            getRuntimeContext().getState(new ValueStateDescriptor<>("Test", Integer.class));
        }

        @Override
        public String map(Tuple2<String, Integer> value) throws Exception {
            return value.f0;
        }
    }).print();
    try {
        see.execute();
        fail();
    } catch (JobExecutionException e) {
        assertTrue(ExceptionUtils.findThrowable(e, SuccessException.class).isPresent());
    }
}
Also used: JobExecutionException (org.apache.flink.runtime.client.JobExecutionException), Configuration (org.apache.flink.configuration.Configuration), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), RichMapFunction (org.apache.flink.api.common.functions.RichMapFunction), StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment), Test (org.junit.Test)
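
The open() method in Example 15 registers keyed state only to force the state backend to be exercised. In a real job the same getState() call is how a RichMapFunction keeps a per-key value; a minimal sketch, assuming the function runs after keyBy() on a keyed stream (the class and descriptor names are illustrative):

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;

public class PerKeyCounter extends RichMapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>> {

    private static final long serialVersionUID = 1L;

    private transient ValueState<Integer> count;

    @Override
    public void open(Configuration parameters) throws Exception {
        // keyed state is scoped to the current key; this only works downstream of keyBy(...)
        count = getRuntimeContext().getState(new ValueStateDescriptor<>("count", Integer.class));
    }

    @Override
    public Tuple2<String, Integer> map(Tuple2<String, Integer> value) throws Exception {
        Integer current = count.value();
        int updated = (current == null ? 0 : current) + 1;
        count.update(updated);
        return Tuple2.of(value.f0, updated);
    }
}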

Aggregations

RichMapFunction (org.apache.flink.api.common.functions.RichMapFunction): 15 usages
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 6 usages
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 6 usages
Test (org.junit.Test): 6 usages
Configuration (org.apache.flink.configuration.Configuration): 5 usages
ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment): 4 usages
JobGraph (org.apache.flink.runtime.jobgraph.JobGraph): 3 usages
File (java.io.File): 2 usages
ArrayList (java.util.ArrayList): 2 usages
Properties (java.util.Properties): 2 usages
TimeoutException (java.util.concurrent.TimeoutException): 2 usages
InvalidTypesException (org.apache.flink.api.common.functions.InvalidTypesException): 2 usages
MapFunction (org.apache.flink.api.common.functions.MapFunction): 2 usages
SuccessException (org.apache.flink.test.util.SuccessException): 2 usages
BigInteger (java.math.BigInteger): 1 usage
HashMap (java.util.HashMap): 1 usage
ExecutionException (java.util.concurrent.ExecutionException): 1 usage
Future (java.util.concurrent.Future): 1 usage
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 1 usage
AtomicReference (java.util.concurrent.atomic.AtomicReference): 1 usage