Example 1 with MutableObjectIterator

Use of org.apache.flink.util.MutableObjectIterator in project flink by apache, from the class SpillingThread, method getMergingIterator.

// ------------------------------------------------------------------------
// Result Merging
// ------------------------------------------------------------------------
/**
 * Returns an iterator that iterates over the merged result from all given channels.
 *
 * @param channelIDs The channels that are to be merged and returned.
 * @param inputSegments The buffers to be used for reading. The list contains for each channel
 *     one list of input segments. The size of the <code>inputSegments</code> list must be equal
 *     to that of the <code>channelIDs</code> list.
 * @param readerList The list to which the readers opened for the channels are added, so that
 *     the caller can later close them.
 * @param largeRecords An iterator over the large records to be included in the merge, or
 *     <code>null</code> if there are none.
 * @return An iterator over the merged records of the input channels.
 * @throws IOException Thrown, if the readers encounter an I/O problem.
 */
private MergeIterator<E> getMergingIterator(final List<ChannelWithBlockCount> channelIDs, final List<List<MemorySegment>> inputSegments, List<FileIOChannel> readerList, MutableObjectIterator<E> largeRecords) throws IOException {
    // create one iterator per channel id
    LOG.debug("Performing merge of {} sorted streams.", channelIDs.size());
    final List<MutableObjectIterator<E>> iterators = new ArrayList<>(channelIDs.size() + 1);
    for (int i = 0; i < channelIDs.size(); i++) {
        final ChannelWithBlockCount channel = channelIDs.get(i);
        final List<MemorySegment> segsForChannel = inputSegments.get(i);
        // create a reader. if there are multiple segments for the reader, issue multiple
        // together per I/O request
        final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel.getChannel());
        readerList.add(reader);
        spillChannelManager.registerOpenChannelToBeRemovedAtShutdown(reader);
        spillChannelManager.unregisterChannelToBeRemovedAtShutdown(channel.getChannel());
        // wrap channel reader as a view, to get block spanning record deserialization
        final ChannelReaderInputView inView = new ChannelReaderInputView(reader, segsForChannel, channel.getBlockCount(), false);
        iterators.add(new ChannelReaderInputViewIterator<>(inView, null, this.serializer));
    }
    if (largeRecords != null) {
        iterators.add(largeRecords);
    }
    return new MergeIterator<>(iterators, this.comparator);
}
Also used: ChannelReaderInputView (org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView), EmptyMutableObjectIterator (org.apache.flink.runtime.util.EmptyMutableObjectIterator), MutableObjectIterator (org.apache.flink.util.MutableObjectIterator), ArrayList (java.util.ArrayList), MemorySegment (org.apache.flink.core.memory.MemorySegment)
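The merge simply collects one MutableObjectIterator per spilled channel (plus the optional large-record iterator) and hands the whole list to MergeIterator, which interleaves the already sorted streams using the comparator. For readers unfamiliar with the interface itself, here is a minimal sketch of the MutableObjectIterator contract; the class name StringArrayIterator and its backing array are illustrative assumptions, not Flink code.

import org.apache.flink.util.MutableObjectIterator;

// Minimal sketch of the MutableObjectIterator contract that the merge consumes.
// The class name and the backing array are illustrative assumptions, not Flink code.
public class StringArrayIterator implements MutableObjectIterator<String> {

    private final String[] values;
    private int pos;

    public StringArrayIterator(String... values) {
        this.values = values;
    }

    @Override
    public String next(String reuse) {
        // Strings are immutable, so the reuse instance cannot be refilled;
        // falling back to a fresh instance is permitted by the contract.
        return next();
    }

    @Override
    public String next() {
        // return null once the source is exhausted, as the contract requires
        return pos < values.length ? values[pos++] : null;
    }
}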

Example 2 with MutableObjectIterator

Use of org.apache.flink.util.MutableObjectIterator in project flink by apache, from the class NonReusingHashJoinIteratorITCase, method testBuildSecondWithHighNumberOfCommonKeys.

@Test
public void testBuildSecondWithHighNumberOfCommonKeys() {
    // the size of the left and right inputs
    final int INPUT_1_SIZE = 200;
    final int INPUT_2_SIZE = 100;
    final int INPUT_1_DUPLICATES = 10;
    final int INPUT_2_DUPLICATES = 2000;
    final int DUPLICATE_KEY = 13;
    try {
        TupleGenerator generator1 = new TupleGenerator(SEED1, 500, 4096, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        TupleGenerator generator2 = new TupleGenerator(SEED2, 500, 2048, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        final TestData.TupleGeneratorIterator gen1Iter = new TestData.TupleGeneratorIterator(generator1, INPUT_1_SIZE);
        final TestData.TupleGeneratorIterator gen2Iter = new TestData.TupleGeneratorIterator(generator2, INPUT_2_SIZE);
        final TestData.TupleConstantValueIterator const1Iter = new TestData.TupleConstantValueIterator(DUPLICATE_KEY, "LEFT String for Duplicate Keys", INPUT_1_DUPLICATES);
        final TestData.TupleConstantValueIterator const2Iter = new TestData.TupleConstantValueIterator(DUPLICATE_KEY, "RIGHT String for Duplicate Keys", INPUT_2_DUPLICATES);
        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList1 = new ArrayList<>();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);
        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList2 = new ArrayList<>();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);
        MutableObjectIterator<Tuple2<Integer, String>> input1 = new UnionIterator<>(inList1);
        MutableObjectIterator<Tuple2<Integer, String>> input2 = new UnionIterator<>(inList2);
        // collect expected data
        final Map<Integer, Collection<TupleMatch>> expectedMatchesMap = joinTuples(collectTupleData(input1), collectTupleData(input2));
        // re-create the whole thing for actual processing
        // reset the generators and iterators
        generator1.reset();
        generator2.reset();
        const1Iter.reset();
        const2Iter.reset();
        gen1Iter.reset();
        gen2Iter.reset();
        inList1.clear();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);
        inList2.clear();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);
        input1 = new UnionIterator<>(inList1);
        input2 = new UnionIterator<>(inList2);
        final TupleMatchRemovingJoin matcher = new TupleMatchRemovingJoin(expectedMatchesMap);
        final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();
        NonReusingBuildSecondHashJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator = new NonReusingBuildSecondHashJoinIterator<>(input1, input2, this.recordSerializer, this.record1Comparator, this.recordSerializer, this.record2Comparator, this.recordPairComparator, this.memoryManager, ioManager, this.parentTask, 1.0, false, false, true);
        iterator.open();
        while (iterator.callWithNextKey(matcher, collector)) ;
        iterator.close();
        // assert that each expected match was seen
        for (Entry<Integer, Collection<TupleMatch>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
Also used: TestData (org.apache.flink.runtime.operators.testutils.TestData), MutableObjectIterator (org.apache.flink.util.MutableObjectIterator), UnionIterator (org.apache.flink.runtime.operators.testutils.UnionIterator), ArrayList (java.util.ArrayList), TupleGenerator (org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator), NullKeyFieldException (org.apache.flink.types.NullKeyFieldException), DiscardingOutputCollector (org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Collection (java.util.Collection), Test (org.junit.Test)
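The test drains its two union inputs twice: once through collectTupleData(...) to build the expected-match map, and once through the hash join itself. A hedged sketch of what such a draining step might look like is given below; the helper class IteratorDraining and its method drain are assumptions made for illustration, not the actual test utilities.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.MutableObjectIterator;

// Hypothetical helper that exhausts a MutableObjectIterator into a list,
// roughly what collectTupleData(...) has to do before the join runs.
public final class IteratorDraining {

    private IteratorDraining() {}

    public static List<Tuple2<Integer, String>> drain(
            MutableObjectIterator<Tuple2<Integer, String>> source) throws IOException {
        List<Tuple2<Integer, String>> out = new ArrayList<>();
        Tuple2<Integer, String> next;
        while ((next = source.next()) != null) {
            // next() without a reuse object hands out a fresh tuple each time,
            // so the collected records stay valid after the loop
            out.add(next);
        }
        return out;
    }
}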

Example 3 with MutableObjectIterator

Use of org.apache.flink.util.MutableObjectIterator in project flink by apache, from the class ReusingHashJoinIteratorITCase, method testBuildFirstWithHighNumberOfCommonKeys.

@Test
public void testBuildFirstWithHighNumberOfCommonKeys() {
    // the size of the left and right inputs
    final int INPUT_1_SIZE = 200;
    final int INPUT_2_SIZE = 100;
    final int INPUT_1_DUPLICATES = 10;
    final int INPUT_2_DUPLICATES = 2000;
    final int DUPLICATE_KEY = 13;
    try {
        TestData.TupleGenerator generator1 = new TestData.TupleGenerator(SEED1, 500, 4096, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        TestData.TupleGenerator generator2 = new TestData.TupleGenerator(SEED2, 500, 2048, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        final TestData.TupleGeneratorIterator gen1Iter = new TestData.TupleGeneratorIterator(generator1, INPUT_1_SIZE);
        final TestData.TupleGeneratorIterator gen2Iter = new TestData.TupleGeneratorIterator(generator2, INPUT_2_SIZE);
        final TestData.TupleConstantValueIterator const1Iter = new TestData.TupleConstantValueIterator(DUPLICATE_KEY, "LEFT String for Duplicate Keys", INPUT_1_DUPLICATES);
        final TestData.TupleConstantValueIterator const2Iter = new TestData.TupleConstantValueIterator(DUPLICATE_KEY, "RIGHT String for Duplicate Keys", INPUT_2_DUPLICATES);
        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList1 = new ArrayList<>();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);
        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList2 = new ArrayList<>();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);
        MutableObjectIterator<Tuple2<Integer, String>> input1 = new UnionIterator<>(inList1);
        MutableObjectIterator<Tuple2<Integer, String>> input2 = new UnionIterator<>(inList2);
        // collect expected data
        final Map<Integer, Collection<TupleMatch>> expectedMatchesMap = joinTuples(collectTupleData(input1), collectTupleData(input2));
        // re-create the whole thing for actual processing
        // reset the generators and iterators
        generator1.reset();
        generator2.reset();
        const1Iter.reset();
        const2Iter.reset();
        gen1Iter.reset();
        gen2Iter.reset();
        inList1.clear();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);
        inList2.clear();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);
        input1 = new UnionIterator<>(inList1);
        input2 = new UnionIterator<>(inList2);
        final FlatJoinFunction matcher = new TupleMatchRemovingJoin(expectedMatchesMap);
        final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();
        ReusingBuildFirstHashJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator = new ReusingBuildFirstHashJoinIterator<>(input1, input2, this.recordSerializer, this.record1Comparator, this.recordSerializer, this.record2Comparator, this.recordPairComparator, this.memoryManager, ioManager, this.parentTask, 1.0, false, false, true);
        iterator.open();
        while (iterator.callWithNextKey(matcher, collector)) ;
        iterator.close();
        // assert that each expected match was seen
        for (Entry<Integer, Collection<TupleMatch>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
Also used: TestData (org.apache.flink.runtime.operators.testutils.TestData), MutableObjectIterator (org.apache.flink.util.MutableObjectIterator), UnionIterator (org.apache.flink.runtime.operators.testutils.UnionIterator), ArrayList (java.util.ArrayList), FlatJoinFunction (org.apache.flink.api.common.functions.FlatJoinFunction), NullKeyFieldException (org.apache.flink.types.NullKeyFieldException), DiscardingOutputCollector (org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Collection (java.util.Collection), Test (org.junit.Test)
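The "Reusing" iterator variants exercise the other half of the MutableObjectIterator contract: a single record instance is passed back into next(reuse) and refilled on every call, so no per-record allocation is needed. The following is a minimal sketch of that consumption pattern, assuming a hypothetical helper method countRecords.

import java.io.IOException;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.MutableObjectIterator;

// Sketch of the object-reuse consumption pattern: one Tuple2 instance is
// created up front and refilled by next(reuse) until the iterator is exhausted.
public final class ReusePatternExample {

    private ReusePatternExample() {}

    public static long countRecords(
            MutableObjectIterator<Tuple2<Integer, String>> input) throws IOException {
        long count = 0;
        Tuple2<Integer, String> reuse = new Tuple2<>();
        // reassign the result because an iterator may hand back a different instance
        while ((reuse = input.next(reuse)) != null) {
            // 'reuse' only holds the current record; it must be copied
            // if it needs to outlive this iteration
            count++;
        }
        return count;
    }
}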

Example 4 with MutableObjectIterator

Use of org.apache.flink.util.MutableObjectIterator in project flink by apache, from the class ReusingHashJoinIteratorITCase, method testBuildSecondWithHighNumberOfCommonKeys.

@Test
public void testBuildSecondWithHighNumberOfCommonKeys() {
    // the size of the left and right inputs
    final int INPUT_1_SIZE = 200;
    final int INPUT_2_SIZE = 100;
    final int INPUT_1_DUPLICATES = 10;
    final int INPUT_2_DUPLICATES = 2000;
    final int DUPLICATE_KEY = 13;
    try {
        TestData.TupleGenerator generator1 = new TestData.TupleGenerator(SEED1, 500, 4096, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        TestData.TupleGenerator generator2 = new TestData.TupleGenerator(SEED2, 500, 2048, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
        final TestData.TupleGeneratorIterator gen1Iter = new TestData.TupleGeneratorIterator(generator1, INPUT_1_SIZE);
        final TestData.TupleGeneratorIterator gen2Iter = new TestData.TupleGeneratorIterator(generator2, INPUT_2_SIZE);
        final TestData.TupleConstantValueIterator const1Iter = new TestData.TupleConstantValueIterator(DUPLICATE_KEY, "LEFT String for Duplicate Keys", INPUT_1_DUPLICATES);
        final TestData.TupleConstantValueIterator const2Iter = new TestData.TupleConstantValueIterator(DUPLICATE_KEY, "RIGHT String for Duplicate Keys", INPUT_2_DUPLICATES);
        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList1 = new ArrayList<>();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);
        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList2 = new ArrayList<>();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);
        MutableObjectIterator<Tuple2<Integer, String>> input1 = new UnionIterator<>(inList1);
        MutableObjectIterator<Tuple2<Integer, String>> input2 = new UnionIterator<>(inList2);
        // collect expected data
        final Map<Integer, Collection<TupleMatch>> expectedMatchesMap = joinTuples(collectTupleData(input1), collectTupleData(input2));
        // re-create the whole thing for actual processing
        // reset the generators and iterators
        generator1.reset();
        generator2.reset();
        const1Iter.reset();
        const2Iter.reset();
        gen1Iter.reset();
        gen2Iter.reset();
        inList1.clear();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);
        inList2.clear();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);
        input1 = new UnionIterator<>(inList1);
        input2 = new UnionIterator<>(inList2);
        final FlatJoinFunction matcher = new TupleMatchRemovingJoin(expectedMatchesMap);
        final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();
        ReusingBuildSecondHashJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator = new ReusingBuildSecondHashJoinIterator<>(input1, input2, this.recordSerializer, this.record1Comparator, this.recordSerializer, this.record2Comparator, this.recordPairComparator, this.memoryManager, ioManager, this.parentTask, 1.0, false, false, true);
        iterator.open();
        while (iterator.callWithNextKey(matcher, collector)) ;
        iterator.close();
        // assert that each expected match was seen
        for (Entry<Integer, Collection<TupleMatch>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
Also used: TestData (org.apache.flink.runtime.operators.testutils.TestData), MutableObjectIterator (org.apache.flink.util.MutableObjectIterator), UnionIterator (org.apache.flink.runtime.operators.testutils.UnionIterator), ArrayList (java.util.ArrayList), FlatJoinFunction (org.apache.flink.api.common.functions.FlatJoinFunction), NullKeyFieldException (org.apache.flink.types.NullKeyFieldException), DiscardingOutputCollector (org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Collection (java.util.Collection), Test (org.junit.Test)
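In all of these tests, the matcher (TupleMatchRemovingJoin) verifies the join result by removing every emitted pair from the expected-match map, so the emptiness check at the end proves that every expected match was produced. Below is a simplified, hypothetical stand-in for such a match-removing join; the map layout and the way a match is encoded as a string are assumptions, not the actual TestData utilities.

import java.util.Collection;
import java.util.Map;

import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

// Simplified, hypothetical stand-in for the match-removing join used above:
// every pair the hash join emits is removed from the map of expected values,
// so an empty map at the end proves all expected matches were produced.
public class RemovingJoin
        implements FlatJoinFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> {

    private final Map<Integer, Collection<String>> expected;

    public RemovingJoin(Map<Integer, Collection<String>> expected) {
        this.expected = expected;
    }

    @Override
    public void join(Tuple2<Integer, String> first, Tuple2<Integer, String> second,
            Collector<Tuple2<Integer, String>> out) {
        Collection<String> values = expected.get(first.f0);
        // the concatenated values are assumed to identify one expected match
        if (values == null || !values.remove(first.f1 + "/" + second.f1)) {
            throw new IllegalStateException("Unexpected match for key " + first.f0);
        }
        out.collect(first);
    }
}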

Example 5 with MutableObjectIterator

Use of org.apache.flink.util.MutableObjectIterator in project flink by apache, from the class ExternalSortLargeRecordsITCase, method testSortWithMediumRecordsOnly.

@Test
public void testSortWithMediumRecordsOnly() {
    try {
        final int NUM_RECORDS = 70;
        final TypeInformation<?>[] types = new TypeInformation<?>[] { BasicTypeInfo.LONG_TYPE_INFO, new ValueTypeInfo<SmallOrMediumOrLargeValue>(SmallOrMediumOrLargeValue.class) };
        final TupleTypeInfo<Tuple2<Long, SmallOrMediumOrLargeValue>> typeInfo = new TupleTypeInfo<Tuple2<Long, SmallOrMediumOrLargeValue>>(types);
        final TypeSerializer<Tuple2<Long, SmallOrMediumOrLargeValue>> serializer = typeInfo.createSerializer(new ExecutionConfig());
        final TypeComparator<Tuple2<Long, SmallOrMediumOrLargeValue>> comparator = typeInfo.createComparator(new int[] { 0 }, new boolean[] { false }, 0, new ExecutionConfig());
        MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>> source = new MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>>() {

            private final Random rnd = new Random(62360187263087678L);

            private int num = -1;

            @Override
            public Tuple2<Long, SmallOrMediumOrLargeValue> next(Tuple2<Long, SmallOrMediumOrLargeValue> reuse) {
                return next();
            }

            @Override
            public Tuple2<Long, SmallOrMediumOrLargeValue> next() {
                if (++num < NUM_RECORDS) {
                    long val = rnd.nextLong();
                    return new Tuple2<Long, SmallOrMediumOrLargeValue>(val, new SmallOrMediumOrLargeValue((int) val, SmallOrMediumOrLargeValue.MEDIUM_SIZE));
                } else {
                    return null;
                }
            }
        };
        Sorter<Tuple2<Long, SmallOrMediumOrLargeValue>> sorter = ExternalSorter.newBuilder(this.memoryManager, this.parentTask, serializer, comparator).maxNumFileHandles(128).sortBuffers(1).enableSpilling(ioManager, 0.7f).memoryFraction(1.0).objectReuse(true).largeRecords(true).build(source);
        // check order
        MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>> iterator = sorter.getIterator();
        Tuple2<Long, SmallOrMediumOrLargeValue> val = serializer.createInstance();
        long prevKey = Long.MAX_VALUE;
        for (int i = 0; i < NUM_RECORDS; i++) {
            val = iterator.next(val);
            // the comparator sorts in descending order (ascending flag is false)
            assertTrue(val.f0 <= prevKey);
            assertTrue(val.f0.intValue() == val.f1.val());
            // track the previous key, otherwise the order check only compares against Long.MAX_VALUE
            prevKey = val.f0;
        }
        assertNull(iterator.next(val));
        sorter.close();
        testSuccess = true;
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used: MutableObjectIterator (org.apache.flink.util.MutableObjectIterator), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation), TupleTypeInfo (org.apache.flink.api.java.typeutils.TupleTypeInfo), IOException (java.io.IOException), Random (java.util.Random), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), ValueTypeInfo (org.apache.flink.api.java.typeutils.ValueTypeInfo), Test (org.junit.Test)
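The anonymous source in this test ignores the reuse object and always allocates a new tuple, which the contract permits. For contrast, the sketch below shows a source that actually refills the reuse instance; the class name, seed, and payload format are illustrative assumptions.

import java.util.Random;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.MutableObjectIterator;

// Sketch of a bounded source that refills the reuse instance instead of
// allocating a new tuple per record; all names and values are illustrative.
public class RandomTupleSource implements MutableObjectIterator<Tuple2<Long, String>> {

    private final Random rnd = new Random(42L);
    private final int numRecords;
    private int emitted;

    public RandomTupleSource(int numRecords) {
        this.numRecords = numRecords;
    }

    @Override
    public Tuple2<Long, String> next(Tuple2<Long, String> reuse) {
        if (emitted >= numRecords) {
            return null;
        }
        emitted++;
        // refill the reuse object instead of allocating a new tuple
        reuse.f0 = rnd.nextLong();
        reuse.f1 = Long.toString(reuse.f0);
        return reuse;
    }

    @Override
    public Tuple2<Long, String> next() {
        // the non-reusing variant delegates with a freshly allocated tuple
        return next(new Tuple2<>());
    }
}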

Aggregations

MutableObjectIterator (org.apache.flink.util.MutableObjectIterator): 42
Test (org.junit.Test): 32
ArrayList (java.util.ArrayList): 26
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 19
UnionIterator (org.apache.flink.runtime.operators.testutils.UnionIterator): 15
BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData): 15
MemorySegment (org.apache.flink.core.memory.MemorySegment): 12
Collection (java.util.Collection): 10
HashMap (java.util.HashMap): 9
Map (java.util.Map): 9
IOException (java.io.IOException): 7
MemoryAllocationException (org.apache.flink.runtime.memory.MemoryAllocationException): 7
DiscardingOutputCollector (org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector): 7
TestData (org.apache.flink.runtime.operators.testutils.TestData): 7
TupleGenerator (org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator): 7
UniformBinaryRowGenerator (org.apache.flink.table.runtime.util.UniformBinaryRowGenerator): 7
MemoryManager (org.apache.flink.runtime.memory.MemoryManager): 6
NullKeyFieldException (org.apache.flink.types.NullKeyFieldException): 6
BitSet (java.util.BitSet): 4
Random (java.util.Random): 4