Example 66 with ExecutionConfig

Use of org.apache.flink.api.common.ExecutionConfig in project flink by apache.

From the class EvictingWindowOperatorTest, method testDeltaEvictorEvictBefore.

/**
 * Tests DeltaEvictor, evictBefore behavior.
 *
 * @throws Exception
 */
@Test
public void testDeltaEvictorEvictBefore() throws Exception {
    AtomicInteger closeCalled = new AtomicInteger(0);
    final int TRIGGER_COUNT = 2;
    final boolean EVICT_AFTER = false;
    final int THRESHOLD = 2;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    @SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(inputType.createSerializer(new ExecutionConfig()));
    ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
    EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(
        GlobalWindows.create(),
        new GlobalWindow.Serializer(),
        new TupleKeySelector(),
        BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
        stateDesc,
        new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)),
        CountTrigger.of(TRIGGER_COUNT),
        DeltaEvictor.of(THRESHOLD, new DeltaFunction<Tuple2<String, Integer>>() {

        @Override
        public double getDelta(Tuple2<String, Integer> oldDataPoint, Tuple2<String, Integer> newDataPoint) {
            return newDataPoint.f1 - oldDataPoint.f1;
        }
    }, EVICT_AFTER), 0, null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
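    // Elements arrive out of order; CountTrigger.of(2) fires the window for a key
    // on every second element seen for that key.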
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), initialTime + 3999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 5), initialTime + 999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), initialTime + 1998));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), initialTime + 1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
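    // EVICT_AFTER is false, so the DeltaEvictor runs before the window function:
    // elements whose delta to the most recent element reaches THRESHOLD are dropped,
    // and only the survivors are summed.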
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 11), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), initialTime + 10999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), initialTime + 1000));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 8), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 10), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.close();
    Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
Also used: ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness), TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer), StreamElementSerializer (org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer), StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), DeltaFunction (org.apache.flink.streaming.api.functions.windowing.delta.DeltaFunction), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue), GlobalWindow (org.apache.flink.streaming.api.windowing.windows.GlobalWindow), Test (org.junit.Test)
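A note on the recurring pattern: in every example here, ExecutionConfig parameterizes serializer creation, i.e. a TypeInformation plus an ExecutionConfig yields a TypeSerializer. A minimal standalone sketch of just that step (class name hypothetical, not part of the Flink sources):

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeutils.TypeSerializer;

public class SerializerCreationSketch {

    public static void main(String[] args) {
        // The ExecutionConfig carries settings (registered Kryo types, object-reuse
        // mode, and so on) that influence which serializer gets created for a type.
        ExecutionConfig config = new ExecutionConfig();
        TypeSerializer<String> serializer = BasicTypeInfo.STRING_TYPE_INFO.createSerializer(config);
        System.out.println(serializer.getClass().getName());
    }
}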

Example 67 with ExecutionConfig

Use of org.apache.flink.api.common.ExecutionConfig in project flink by apache.

From the class SlidingEventTimeWindowsTest, method testProperties.

@Test
public void testProperties() {
    SlidingEventTimeWindows assigner = SlidingEventTimeWindows.of(Time.seconds(5), Time.milliseconds(100));
    assertTrue(assigner.isEventTime());
    assertEquals(new TimeWindow.Serializer(), assigner.getWindowSerializer(new ExecutionConfig()));
    assertThat(assigner.getDefaultTrigger(mock(StreamExecutionEnvironment.class)), instanceOf(EventTimeTrigger.class));
}
Also used: SlidingEventTimeWindows (org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow), EventTimeTrigger (org.apache.flink.streaming.api.windowing.triggers.EventTimeTrigger), Test (org.junit.Test)
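Note that with a window size of five seconds and a slide of 100 milliseconds, each element would belong to size / slide = 50 overlapping windows; the checks above concern only the assigner's properties and are independent of that ratio.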

Example 68 with ExecutionConfig

Use of org.apache.flink.api.common.ExecutionConfig in project flink by apache.

From the class ProcessingTimeSessionWindowsTest, method testProperties.

@Test
public void testProperties() {
    ProcessingTimeSessionWindows assigner = ProcessingTimeSessionWindows.withGap(Time.seconds(5));
    assertFalse(assigner.isEventTime());
    assertEquals(new TimeWindow.Serializer(), assigner.getWindowSerializer(new ExecutionConfig()));
    assertThat(assigner.getDefaultTrigger(mock(StreamExecutionEnvironment.class)), instanceOf(ProcessingTimeTrigger.class));
}
Also used: ProcessingTimeSessionWindows (org.apache.flink.streaming.api.windowing.assigners.ProcessingTimeSessionWindows), ProcessingTimeTrigger (org.apache.flink.streaming.api.windowing.triggers.ProcessingTimeTrigger), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow), Test (org.junit.Test)
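Both property tests above follow the same template: construct the assigner, then assert isEventTime(), the window serializer obtained from an ExecutionConfig, and the default trigger class. As an illustration only, the same template applied to TumblingEventTimeWindows would look like the following (a hypothetical companion test, not taken from the Flink sources):

@Test
public void testTumblingProperties() {
    TumblingEventTimeWindows assigner = TumblingEventTimeWindows.of(Time.seconds(5));
    assertTrue(assigner.isEventTime());
    assertEquals(new TimeWindow.Serializer(), assigner.getWindowSerializer(new ExecutionConfig()));
    assertThat(assigner.getDefaultTrigger(mock(StreamExecutionEnvironment.class)), instanceOf(EventTimeTrigger.class));
}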

Example 69 with ExecutionConfig

Use of org.apache.flink.api.common.ExecutionConfig in project flink by apache.

From the class MassiveStringSorting, method testStringTuplesSorting.

@SuppressWarnings("unchecked")
public void testStringTuplesSorting() {
    final int NUM_STRINGS = 300000;
    File input = null;
    File sorted = null;
    try {
        // the source file
        input = generateFileWithStringTuples(NUM_STRINGS, "http://some-uri.com/that/is/a/common/prefix/to/all");
        // the sorted file
        sorted = File.createTempFile("sorted_strings", "txt");
        String[] command = { "/bin/bash", "-c", "export LC_ALL=\"C\" && cat \"" + input.getAbsolutePath() + "\" | sort > \"" + sorted.getAbsolutePath() + "\"" };
        Process p = null;
        try {
            p = Runtime.getRuntime().exec(command);
            int retCode = p.waitFor();
            if (retCode != 0) {
                throw new Exception("Command failed with return code " + retCode);
            }
            p = null;
        } finally {
            if (p != null) {
                p.destroy();
            }
        }
        // sort the data
        UnilateralSortMerger<Tuple2<String, String[]>> sorter = null;
        BufferedReader reader = null;
        BufferedReader verifyReader = null;
        try {
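            // a small managed-memory budget (1 MB) backs the sorter, so large inputs
            // go through the external (spilling) path via the async I/O manager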
            MemoryManager mm = new MemoryManager(1024 * 1024, 1);
            IOManager ioMan = new IOManagerAsync();
            TupleTypeInfo<Tuple2<String, String[]>> typeInfo = (TupleTypeInfo<Tuple2<String, String[]>>) TypeInfoParser.<Tuple2<String, String[]>>parse("Tuple2<String, String[]>");
            TypeSerializer<Tuple2<String, String[]>> serializer = typeInfo.createSerializer(new ExecutionConfig());
            TypeComparator<Tuple2<String, String[]>> comparator = typeInfo.createComparator(new int[] { 0 }, new boolean[] { true }, 0, new ExecutionConfig());
            reader = new BufferedReader(new FileReader(input));
            MutableObjectIterator<Tuple2<String, String[]>> inputIterator = new StringTupleReaderMutableObjectIterator(reader);
            sorter = new UnilateralSortMerger<Tuple2<String, String[]>>(mm, ioMan, inputIterator, new DummyInvokable(), new RuntimeSerializerFactory<Tuple2<String, String[]>>(serializer, (Class<Tuple2<String, String[]>>) (Class<?>) Tuple2.class), comparator, 1.0, 4, 0.8f, true, /* use large record handler */
            false);
            // use this part to verify that all is good when sorting in memory
            //				List<MemorySegment> memory = mm.allocatePages(new DummyInvokable(), mm.computeNumberOfPages(1024*1024*1024));
            //				NormalizedKeySorter<Tuple2<String, String[]>> nks = new NormalizedKeySorter<Tuple2<String,String[]>>(serializer, comparator, memory);
            //
            //				{
            //					Tuple2<String, String[]> wi = new Tuple2<String, String[]>("", new String[0]);
            //					while ((wi = inputIterator.next(wi)) != null) {
            //						Assert.assertTrue(nks.write(wi));
            //					}
            //					
            //					new QuickSort().sort(nks);
            //				}
            //				
            //				MutableObjectIterator<Tuple2<String, String[]>> sortedData = nks.getIterator();
            MutableObjectIterator<Tuple2<String, String[]>> sortedData = sorter.getIterator();
            reader.close();
            // verify
            verifyReader = new BufferedReader(new FileReader(sorted));
            MutableObjectIterator<Tuple2<String, String[]>> verifyIterator = new StringTupleReaderMutableObjectIterator(verifyReader);
            Tuple2<String, String[]> next = new Tuple2<String, String[]>("", new String[0]);
            Tuple2<String, String[]> nextFromStratoSort = new Tuple2<String, String[]>("", new String[0]);
            int num = 0;
            while ((next = verifyIterator.next(next)) != null) {
                num++;
                nextFromStratoSort = sortedData.next(nextFromStratoSort);
                Assert.assertNotNull(nextFromStratoSort);
                Assert.assertEquals(next.f0, nextFromStratoSort.f0);
                Assert.assertArrayEquals(next.f1, nextFromStratoSort.f1);
            }
            Assert.assertNull(sortedData.next(nextFromStratoSort));
            Assert.assertEquals(NUM_STRINGS, num);
        } finally {
            if (reader != null) {
                reader.close();
            }
            if (verifyReader != null) {
                verifyReader.close();
            }
            if (sorter != null) {
                sorter.close();
            }
        }
    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    } finally {
        if (input != null) {
            input.delete();
        }
        if (sorted != null) {
            sorted.delete();
        }
    }
}
Also used: IOManager (org.apache.flink.runtime.io.disk.iomanager.IOManager), RuntimeSerializerFactory (org.apache.flink.api.java.typeutils.runtime.RuntimeSerializerFactory), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), MemoryManager (org.apache.flink.runtime.memory.MemoryManager), IOException (java.io.IOException), TupleTypeInfo (org.apache.flink.api.java.typeutils.TupleTypeInfo), IOManagerAsync (org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), BufferedReader (java.io.BufferedReader), FileReader (java.io.FileReader), DummyInvokable (org.apache.flink.runtime.operators.testutils.DummyInvokable), File (java.io.File)
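Besides serializers, this test shows the second recurring ExecutionConfig usage: typeInfo.createComparator(keyPositions, sortOrders, offset, config). A small standalone sketch of that call on a simpler tuple type (class name hypothetical, not from the Flink sources):

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

public class ComparatorCreationSketch {

    public static void main(String[] args) {
        TupleTypeInfo<Tuple2<String, Integer>> typeInfo =
                new TupleTypeInfo<>(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);
        // ascending sort on field 0; as with serializers, the ExecutionConfig
        // parameterizes how the comparator is created
        TypeComparator<Tuple2<String, Integer>> comparator =
                typeInfo.createComparator(new int[] { 0 }, new boolean[] { true }, 0, new ExecutionConfig());
        int cmp = comparator.compare(Tuple2.of("a", 1), Tuple2.of("b", 2));
        System.out.println(cmp < 0); // true: "a" orders before "b"
    }
}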

Example 70 with ExecutionConfig

Use of org.apache.flink.api.common.ExecutionConfig in project flink by apache.

From the class MassiveStringValueSorting, method testStringValueTuplesSorting.

@SuppressWarnings("unchecked")
public void testStringValueTuplesSorting() {
    final int NUM_STRINGS = 300000;
    File input = null;
    File sorted = null;
    try {
        // the source file
        input = generateFileWithStringTuples(NUM_STRINGS, "http://some-uri.com/that/is/a/common/prefix/to/all");
        // the sorted file
        sorted = File.createTempFile("sorted_strings", "txt");
        String[] command = { "/bin/bash", "-c", "export LC_ALL=\"C\" && cat \"" + input.getAbsolutePath() + "\" | sort > \"" + sorted.getAbsolutePath() + "\"" };
        Process p = null;
        try {
            p = Runtime.getRuntime().exec(command);
            int retCode = p.waitFor();
            if (retCode != 0) {
                throw new Exception("Command failed with return code " + retCode);
            }
            p = null;
        } finally {
            if (p != null) {
                p.destroy();
            }
        }
        // sort the data
        UnilateralSortMerger<Tuple2<StringValue, StringValue[]>> sorter = null;
        BufferedReader reader = null;
        BufferedReader verifyReader = null;
        try {
            MemoryManager mm = new MemoryManager(1024 * 1024, 1);
            IOManager ioMan = new IOManagerAsync();
            TupleTypeInfo<Tuple2<StringValue, StringValue[]>> typeInfo = (TupleTypeInfo<Tuple2<StringValue, StringValue[]>>) TypeInfoParser.<Tuple2<StringValue, StringValue[]>>parse("Tuple2<org.apache.flink.types.StringValue, org.apache.flink.types.StringValue[]>");
            TypeSerializer<Tuple2<StringValue, StringValue[]>> serializer = typeInfo.createSerializer(new ExecutionConfig());
            TypeComparator<Tuple2<StringValue, StringValue[]>> comparator = typeInfo.createComparator(new int[] { 0 }, new boolean[] { true }, 0, new ExecutionConfig());
            reader = new BufferedReader(new FileReader(input));
            MutableObjectIterator<Tuple2<StringValue, StringValue[]>> inputIterator = new StringValueTupleReaderMutableObjectIterator(reader);
            sorter = new UnilateralSortMerger<Tuple2<StringValue, StringValue[]>>(mm, ioMan, inputIterator, new DummyInvokable(), new RuntimeSerializerFactory<Tuple2<StringValue, StringValue[]>>(serializer, (Class<Tuple2<StringValue, StringValue[]>>) (Class<?>) Tuple2.class), comparator, 1.0, 4, 0.8f, true, /* use large record handler */
            false);
            // use this part to verify that all is good when sorting in memory
            //				List<MemorySegment> memory = mm.allocatePages(new DummyInvokable(), mm.computeNumberOfPages(1024*1024*1024));
            //				NormalizedKeySorter<Tuple2<String, String[]>> nks = new NormalizedKeySorter<Tuple2<String,String[]>>(serializer, comparator, memory);
            //
            //				{
            //					Tuple2<String, String[]> wi = new Tuple2<String, String[]>("", new String[0]);
            //					while ((wi = inputIterator.next(wi)) != null) {
            //						Assert.assertTrue(nks.write(wi));
            //					}
            //					
            //					new QuickSort().sort(nks);
            //				}
            //				
            //				MutableObjectIterator<Tuple2<String, String[]>> sortedData = nks.getIterator();
            MutableObjectIterator<Tuple2<StringValue, StringValue[]>> sortedData = sorter.getIterator();
            reader.close();
            // verify
            verifyReader = new BufferedReader(new FileReader(sorted));
            MutableObjectIterator<Tuple2<StringValue, StringValue[]>> verifyIterator = new StringValueTupleReaderMutableObjectIterator(verifyReader);
            Tuple2<StringValue, StringValue[]> nextVerify = new Tuple2<StringValue, StringValue[]>(new StringValue(), new StringValue[0]);
            Tuple2<StringValue, StringValue[]> nextFromFlinkSort = new Tuple2<StringValue, StringValue[]>(new StringValue(), new StringValue[0]);
            int num = 0;
            while ((nextVerify = verifyIterator.next(nextVerify)) != null) {
                num++;
                nextFromFlinkSort = sortedData.next(nextFromFlinkSort);
                Assert.assertNotNull(nextFromFlinkSort);
                Assert.assertEquals(nextVerify.f0, nextFromFlinkSort.f0);
                Assert.assertArrayEquals(nextVerify.f1, nextFromFlinkSort.f1);
            }
            Assert.assertNull(sortedData.next(nextFromFlinkSort));
            Assert.assertEquals(NUM_STRINGS, num);
        } finally {
            if (reader != null) {
                reader.close();
            }
            if (verifyReader != null) {
                verifyReader.close();
            }
            if (sorter != null) {
                sorter.close();
            }
        }
    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    } finally {
        if (input != null) {
            //noinspection ResultOfMethodCallIgnored
            input.delete();
        }
        if (sorted != null) {
            //noinspection ResultOfMethodCallIgnored
            sorted.delete();
        }
    }
}
Also used: RuntimeSerializerFactory (org.apache.flink.api.java.typeutils.runtime.RuntimeSerializerFactory), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), IOManagerAsync (org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync), FileReader (java.io.FileReader), DummyInvokable (org.apache.flink.runtime.operators.testutils.DummyInvokable), StringValue (org.apache.flink.types.StringValue), IOManager (org.apache.flink.runtime.io.disk.iomanager.IOManager), MemoryManager (org.apache.flink.runtime.memory.MemoryManager), IOException (java.io.IOException), TupleTypeInfo (org.apache.flink.api.java.typeutils.TupleTypeInfo), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), BufferedReader (java.io.BufferedReader), File (java.io.File)
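Example 70 is the same test as Example 69 with StringValue in place of String; StringValue is Flink's mutable string type, which lets the mutable-object iterators and the sorter reuse record instances instead of allocating new ones.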

Aggregations

ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 306
Test (org.junit.Test): 229
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 72
Configuration (org.apache.flink.configuration.Configuration): 67
JobID (org.apache.flink.api.common.JobID): 49
TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow): 49
ArrayList (java.util.ArrayList): 41
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness): 41
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 40
IOException (java.io.IOException): 35
ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor): 35
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 31
JobVertex (org.apache.flink.runtime.jobgraph.JobVertex): 31
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 31
Scheduler (org.apache.flink.runtime.jobmanager.scheduler.Scheduler): 29
JobVertexID (org.apache.flink.runtime.jobgraph.JobVertexID): 28
ReducingStateDescriptor (org.apache.flink.api.common.state.ReducingStateDescriptor): 26
Tuple3 (org.apache.flink.api.java.tuple.Tuple3): 26
NoRestartStrategy (org.apache.flink.runtime.executiongraph.restart.NoRestartStrategy): 25
TaskInfo (org.apache.flink.api.common.TaskInfo): 24