use of org.apache.flink.api.common.typeutils.GenericPairComparator in project flink by apache.
the class NonReusingHashJoinIteratorITCase, method beforeTest.
@SuppressWarnings("unchecked")
@Before
public void beforeTest() {
    this.recordSerializer = TestData.getIntStringTupleSerializer();
    this.record1Comparator = TestData.getIntStringTupleComparator();
    this.record2Comparator = TestData.getIntStringTupleComparator();
    this.recordPairComparator = new GenericPairComparator(record1Comparator, record2Comparator);
    this.pairSerializer = new IntPairSerializer();
    this.pairComparator = new TestData.IntPairComparator();
    this.pairRecordPairComparator = new IntPairTuplePairComparator();
    this.recordPairPairComparator = new TupleIntPairPairComparator();
    this.memoryManager = MemoryManagerBuilder.newBuilder().setMemorySize(MEMORY_SIZE).build();
    this.ioManager = new IOManagerAsync();
}
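For orientation, here is a minimal standalone sketch of what the recordPairComparator built above is for. It is not taken from the Flink sources: the tuple comparators are assembled the same way as in the outer-join test further down, and the concrete key values (13, 42) are made up for illustration. A GenericPairComparator wraps one TypeComparator per input; a join iterator sets a record from one side as the reference and probes records from the other side against it on the key fields.

// Hedged sketch, not part of the Flink test code.
import org.apache.flink.api.common.typeutils.GenericPairComparator;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.TypePairComparator;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.IntComparator;
import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.runtime.TupleComparator;

public class GenericPairComparatorSketch {

    public static void main(String[] args) {
        // both inputs are keyed on field 0 (the Integer key)
        TypeComparator<Tuple2<Integer, String>> comparator1 = new TupleComparator<>(
                new int[] { 0 },
                new TypeComparator<?>[] { new IntComparator(true) },
                new TypeSerializer<?>[] { IntSerializer.INSTANCE });
        TypeComparator<Tuple2<Integer, String>> comparator2 = new TupleComparator<>(
                new int[] { 0 },
                new TypeComparator<?>[] { new IntComparator(true) },
                new TypeSerializer<?>[] { IntSerializer.INSTANCE });

        TypePairComparator<Tuple2<Integer, String>, Tuple2<Integer, String>> pairComparator =
                new GenericPairComparator<>(comparator1, comparator2);

        // set a record from the first input as the reference ...
        pairComparator.setReference(Tuple2.of(13, "left"));
        // ... and probe records from the second input against it on the key fields
        System.out.println(pairComparator.equalToReference(Tuple2.of(13, "right"))); // true: same key
        System.out.println(pairComparator.equalToReference(Tuple2.of(42, "right"))); // false: different key
    }
}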
use of org.apache.flink.api.common.typeutils.GenericPairComparator in project flink by apache.
the class ReOpenableHashTableITCase, method beforeTest.
@SuppressWarnings({ "unchecked", "rawtypes" })
@Before
public void beforeTest() {
    this.recordBuildSideAccesssor = TestData.getIntIntTupleSerializer();
    this.recordProbeSideAccesssor = TestData.getIntIntTupleSerializer();
    this.recordBuildSideComparator = TestData.getIntIntTupleComparator();
    this.recordProbeSideComparator = TestData.getIntIntTupleComparator();
    this.pactRecordComparator = new GenericPairComparator(this.recordBuildSideComparator, this.recordProbeSideComparator);
    this.memoryManager = MemoryManagerBuilder.newBuilder().setMemorySize(MEMORY_SIZE).setPageSize(PAGE_SIZE).build();
    this.ioManager = new IOManagerAsync();
}
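The raw GenericPairComparator constructor call above is what forces the rawtypes/unchecked suppression. Purely as an illustration, and assuming TestData.getIntIntTupleComparator() returns a comparator over Tuple2<Integer, Integer> (as its use here suggests), the same build/probe pair comparator can be written with explicit generics:

// Hedged fragment only, reusing the TestData helper and imports from the test above.
TypeComparator<Tuple2<Integer, Integer>> buildComparator = TestData.getIntIntTupleComparator();
TypeComparator<Tuple2<Integer, Integer>> probeComparator = TestData.getIntIntTupleComparator();
TypePairComparator<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> pactRecordComparator =
        new GenericPairComparator<>(buildComparator, probeComparator);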
use of org.apache.flink.api.common.typeutils.GenericPairComparator in project flink by apache.
the class ReusingHashJoinIteratorITCase, method beforeTest.
@SuppressWarnings("unchecked")
@Before
public void beforeTest() {
    this.recordSerializer = TestData.getIntStringTupleSerializer();
    this.record1Comparator = TestData.getIntStringTupleComparator();
    this.record2Comparator = TestData.getIntStringTupleComparator();
    this.recordPairComparator = new GenericPairComparator(this.record1Comparator, this.record2Comparator);
    this.pairSerializer = new IntPairSerializer();
    this.pairComparator = new TestData.IntPairComparator();
    this.pairRecordPairComparator = new IntPairTuplePairComparator();
    this.recordPairPairComparator = new TupleIntPairPairComparator();
    this.memoryManager = MemoryManagerBuilder.newBuilder().setMemorySize(MEMORY_SIZE).build();
    this.ioManager = new IOManagerAsync();
}
use of org.apache.flink.api.common.typeutils.GenericPairComparator in project flink by apache.
the class AbstractSortMergeOuterJoinIteratorITCase, method testOuterJoinWithHighNumberOfCommonKeys.
@SuppressWarnings("unchecked, rawtypes")
protected void testOuterJoinWithHighNumberOfCommonKeys(
        OuterJoinType outerJoinType,
        int input1Size, int input1Duplicates, int input1ValueLength, float input1KeyDensity,
        int input2Size, int input2Duplicates, int input2ValueLength, float input2KeyDensity) {
    TypeSerializer<Tuple2<Integer, String>> serializer1 = new TupleSerializer<>(
            (Class<Tuple2<Integer, String>>) (Class<?>) Tuple2.class,
            new TypeSerializer<?>[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE });
    TypeSerializer<Tuple2<Integer, String>> serializer2 = new TupleSerializer<>(
            (Class<Tuple2<Integer, String>>) (Class<?>) Tuple2.class,
            new TypeSerializer<?>[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE });
    TypeComparator<Tuple2<Integer, String>> comparator1 = new TupleComparator<>(
            new int[] { 0 },
            new TypeComparator<?>[] { new IntComparator(true) },
            new TypeSerializer<?>[] { IntSerializer.INSTANCE });
    TypeComparator<Tuple2<Integer, String>> comparator2 = new TupleComparator<>(
            new int[] { 0 },
            new TypeComparator<?>[] { new IntComparator(true) },
            new TypeSerializer<?>[] { IntSerializer.INSTANCE });
    TypePairComparator<Tuple2<Integer, String>, Tuple2<Integer, String>> pairComparator =
            new GenericPairComparator<>(comparator1, comparator2);
    final int DUPLICATE_KEY = 13;
    try {
        final TupleGenerator generator1 = new TupleGenerator(SEED1, 500, input1KeyDensity, input1ValueLength, KeyMode.SORTED_SPARSE, ValueMode.RANDOM_LENGTH, null);
        final TupleGenerator generator2 = new TupleGenerator(SEED2, 500, input2KeyDensity, input2ValueLength, KeyMode.SORTED_SPARSE, ValueMode.RANDOM_LENGTH, null);
        final TupleGeneratorIterator gen1Iter = new TupleGeneratorIterator(generator1, input1Size);
        final TupleGeneratorIterator gen2Iter = new TupleGeneratorIterator(generator2, input2Size);
        final TupleConstantValueIterator const1Iter = new TupleConstantValueIterator(DUPLICATE_KEY, "LEFT String for Duplicate Keys", input1Duplicates);
        final TupleConstantValueIterator const2Iter = new TupleConstantValueIterator(DUPLICATE_KEY, "RIGHT String for Duplicate Keys", input2Duplicates);
        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList1 = new ArrayList<>();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);
        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList2 = new ArrayList<>();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);
        MutableObjectIterator<Tuple2<Integer, String>> input1 = new MergeIterator<>(inList1, comparator1.duplicate());
        MutableObjectIterator<Tuple2<Integer, String>> input2 = new MergeIterator<>(inList2, comparator2.duplicate());
        // collect expected data
        final Map<Integer, Collection<Match>> expectedMatchesMap =
                joinValues(collectData(input1), collectData(input2), outerJoinType);
        // re-create the whole thing for actual processing
        // reset the generators and iterators
        generator1.reset();
        generator2.reset();
        const1Iter.reset();
        const2Iter.reset();
        gen1Iter.reset();
        gen2Iter.reset();
        inList1.clear();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);
        inList2.clear();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);
        input1 = new MergeIterator<>(inList1, comparator1.duplicate());
        input2 = new MergeIterator<>(inList2, comparator2.duplicate());
        final FlatJoinFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> joinFunction =
                new MatchRemovingJoiner(expectedMatchesMap);
        final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();
        // we create this sort-merge iterator with little memory for the block-nested-loops
        // fall-back to make sure it needs to spill for the duplicate keys
        AbstractMergeOuterJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator =
                createOuterJoinIterator(outerJoinType, input1, input2, serializer1, comparator1, serializer2, comparator2,
                        pairComparator, this.memoryManager, this.ioManager, PAGES_FOR_BNLJN, this.parentTask);
        iterator.open();
        while (iterator.callWithNextKey(joinFunction, collector)) ;
        iterator.close();
        // assert that each expected match was seen
        for (Entry<Integer, Collection<Match>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
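The assertion loop at the end only works because the join function removes every match it produces from expectedMatchesMap. The actual MatchRemovingJoiner lives in Flink's test utilities and is not shown on this page; the following is only a hedged sketch of that idea. The null handling for outer joins, the package of the Match helper, and Match's equals over the two string values are assumptions, not the real implementation.

// Hedged sketch only -- not the Flink test class. Every emitted pair is looked
// up in the map of expected matches and removed; anything that cannot be found
// (or any entries left over afterwards) indicates a wrong join result.
import java.util.Collection;
import java.util.Map;

import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;
import org.junit.Assert;
// assumed location of the Match test helper:
import org.apache.flink.runtime.operators.testutils.Match;

public class MatchRemovingJoinerSketch
        implements FlatJoinFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> {

    private final Map<Integer, Collection<Match>> toRemoveFrom;

    public MatchRemovingJoinerSketch(Map<Integer, Collection<Match>> expectedMatchesMap) {
        this.toRemoveFrom = expectedMatchesMap;
    }

    @Override
    public void join(Tuple2<Integer, String> first, Tuple2<Integer, String> second,
            Collector<Tuple2<Integer, String>> out) {
        // outer joins may hand in null on the non-matching side
        Integer key = first != null ? first.f0 : second.f0;
        String value1 = first != null ? first.f1 : null;
        String value2 = second != null ? second.f1 : null;

        Collection<Match> matches = this.toRemoveFrom.get(key);
        if (matches == null || !matches.remove(new Match(value1, value2))) {
            Assert.fail("Unexpected match for key " + key + ": (" + value1 + ", " + value2 + ")");
        }
    }
}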
use of org.apache.flink.api.common.typeutils.GenericPairComparator in project flink by apache.
the class HashVsSortMiniBenchmark, method beforeTest.
@SuppressWarnings("unchecked")
@Before
public void beforeTest() {
    this.serializer1 = TestData.getIntStringTupleSerializerFactory();
    this.serializer2 = TestData.getIntStringTupleSerializerFactory();
    this.comparator1 = TestData.getIntStringTupleComparator();
    this.comparator2 = TestData.getIntStringTupleComparator();
    this.pairComparator11 = new GenericPairComparator(this.comparator1, this.comparator2);
    this.memoryManager = MemoryManagerBuilder.newBuilder().setMemorySize(MEMORY_SIZE).setPageSize(PAGE_SIZE).build();
    this.ioManager = new IOManagerAsync();
}