Use of org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator in project flink by apache.
From the class ReusingSortMergeCoGroupIteratorITCase, method testMerge.
@Test
public void testMerge() {
    try {
        generator1 = new TupleGenerator(SEED1, 500, 4096, KeyMode.SORTED, ValueMode.RANDOM_LENGTH);
        generator2 = new TupleGenerator(SEED2, 500, 2048, KeyMode.SORTED, ValueMode.RANDOM_LENGTH);
        reader1 = new TestData.TupleGeneratorIterator(generator1, INPUT_1_SIZE);
        reader2 = new TestData.TupleGeneratorIterator(generator2, INPUT_2_SIZE);

        // collect expected data
        Map<Integer, Collection<String>> expectedStringsMap1 = collectData(generator1, INPUT_1_SIZE);
        Map<Integer, Collection<String>> expectedStringsMap2 = collectData(generator2, INPUT_2_SIZE);
        Map<Integer, List<Collection<String>>> expectedCoGroupsMap = coGroupValues(expectedStringsMap1, expectedStringsMap2);

        // reset the generators
        generator1.reset();
        generator2.reset();

        // compare with iterator values
        ReusingSortMergeCoGroupIterator<Tuple2<Integer, String>, Tuple2<Integer, String>> iterator =
                new ReusingSortMergeCoGroupIterator<>(
                        this.reader1, this.reader2,
                        this.serializer1, this.comparator1,
                        this.serializer2, this.comparator2,
                        this.pairComparator);
        iterator.open();

        int key = 0;
        while (iterator.next()) {
            Iterator<Tuple2<Integer, String>> iter1 = iterator.getValues1().iterator();
            Iterator<Tuple2<Integer, String>> iter2 = iterator.getValues2().iterator();

            String v1 = null;
            String v2 = null;

            if (iter1.hasNext()) {
                Tuple2<Integer, String> rec = iter1.next();
                key = rec.f0;
                v1 = rec.f1;
            } else if (iter2.hasNext()) {
                Tuple2<Integer, String> rec = iter2.next();
                key = rec.f0;
                v2 = rec.f1;
            } else {
                Assert.fail("No input on both sides.");
            }

            // assert that matches for this key exist
            Assert.assertTrue("No matches for key " + key, expectedCoGroupsMap.containsKey(key));

            Collection<String> expValues1 = expectedCoGroupsMap.get(key).get(0);
            Collection<String> expValues2 = expectedCoGroupsMap.get(key).get(1);
            if (v1 != null) {
                expValues1.remove(v1);
            } else {
                expValues2.remove(v2);
            }

            while (iter1.hasNext()) {
                Tuple2<Integer, String> rec = iter1.next();
                Assert.assertTrue("String not in expected set of first input", expValues1.remove(rec.f1));
            }
            Assert.assertTrue("Expected set of first input not empty", expValues1.isEmpty());

            while (iter2.hasNext()) {
                Tuple2<Integer, String> rec = iter2.next();
                Assert.assertTrue("String not in expected set of second input", expValues2.remove(rec.f1));
            }
            Assert.assertTrue("Expected set of second input not empty", expValues2.isEmpty());

            expectedCoGroupsMap.remove(key);
        }
        iterator.close();

        Assert.assertTrue("Expected key set not empty", expectedCoGroupsMap.isEmpty());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
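The helpers collectData and coGroupValues used above are not shown on this page. As a rough, hypothetical sketch of the idea rather than the test's actual implementation, the expected values can be gathered by draining a TestData.TupleGeneratorIterator (which implements Flink's MutableObjectIterator) and grouping the generated strings by key; the helper name collectExpected and the reuse-record pattern are assumptions, and the real collectData takes the generator and a size directly.

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.MutableObjectIterator;

// Hypothetical sketch: group generated strings by key, similar in spirit to collectData.
static Map<Integer, Collection<String>> collectExpected(
        MutableObjectIterator<Tuple2<Integer, String>> input) throws Exception {

    Map<Integer, Collection<String>> expected = new HashMap<>();
    Tuple2<Integer, String> reuse = new Tuple2<>();

    // MutableObjectIterator returns null once the input is exhausted.
    while ((reuse = input.next(reuse)) != null) {
        expected.computeIfAbsent(reuse.f0, k -> new ArrayList<>()).add(reuse.f1);
    }
    return expected;
}

A coGroupValues-style step would then, per key, simply pair up the two collections produced by this kind of helper.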
Use of org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator in project flink by apache.
From the class RandomSortMergeInnerJoinTest, method testMergeWithHighNumberOfCommonKeys.
@Test
public void testMergeWithHighNumberOfCommonKeys() {
    // the size of the left and right inputs
    final int input1Size = 200;
    final int input2Size = 100;

    final int input1Duplicates = 10;
    final int input2Duplicates = 4000;
    final int duplicateKey = 13;

    try {
        final TupleGenerator generator1 = new TupleGenerator(SEED1, 500, 4096, KeyMode.SORTED, ValueMode.RANDOM_LENGTH);
        final TupleGenerator generator2 = new TupleGenerator(SEED2, 500, 2048, KeyMode.SORTED, ValueMode.RANDOM_LENGTH);

        final TestData.TupleGeneratorIterator gen1Iter = new TestData.TupleGeneratorIterator(generator1, input1Size);
        final TestData.TupleGeneratorIterator gen2Iter = new TestData.TupleGeneratorIterator(generator2, input2Size);

        final TestData.TupleConstantValueIterator const1Iter = new TestData.TupleConstantValueIterator(duplicateKey, "LEFT String for Duplicate Keys", input1Duplicates);
        final TestData.TupleConstantValueIterator const2Iter = new TestData.TupleConstantValueIterator(duplicateKey, "RIGHT String for Duplicate Keys", input2Duplicates);

        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList1 = new ArrayList<>();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);

        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList2 = new ArrayList<>();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);

        MutableObjectIterator<Tuple2<Integer, String>> input1 = new MergeIterator<>(inList1, comparator1.duplicate());
        MutableObjectIterator<Tuple2<Integer, String>> input2 = new MergeIterator<>(inList2, comparator2.duplicate());

        // collect expected data
        final Map<Integer, Collection<Match>> expectedMatchesMap = matchValues(collectData(input1), collectData(input2));

        // re-create the whole thing for actual processing
        // reset the generators and iterators
        generator1.reset();
        generator2.reset();
        const1Iter.reset();
        const2Iter.reset();
        gen1Iter.reset();
        gen2Iter.reset();

        inList1.clear();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);

        inList2.clear();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);

        input1 = new MergeIterator<>(inList1, comparator1.duplicate());
        input2 = new MergeIterator<>(inList2, comparator2.duplicate());

        StreamOperator operator = getOperator();

        match(expectedMatchesMap, transformToBinary(join(operator, input1, input2)));

        // assert that each expected match was seen
        for (Map.Entry<Integer, Collection<Match>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
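The Match type and the matchValues helper are likewise not shown here. A minimal sketch of the idea, assuming a simple left/right value pair called ValuePair and a hypothetical helper crossMatches: for an inner join, every left value under a key is expected to pair with every right value under the same key, so the expected result is the per-key cross product.

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-in for the Match type used by the test utilities.
final class ValuePair {
    final String left;
    final String right;

    ValuePair(String left, String right) {
        this.left = left;
        this.right = right;
    }
}

// Hypothetical sketch of matchValues: per-key cross product of left and right values.
static Map<Integer, Collection<ValuePair>> crossMatches(
        Map<Integer, Collection<String>> leftMap,
        Map<Integer, Collection<String>> rightMap) {

    Map<Integer, Collection<ValuePair>> expected = new HashMap<>();
    for (Map.Entry<Integer, Collection<String>> e : leftMap.entrySet()) {
        Collection<String> rightValues = rightMap.get(e.getKey());
        if (rightValues == null) {
            continue; // inner join: keys present on only one side produce no matches
        }
        Collection<ValuePair> matches = new ArrayList<>();
        for (String l : e.getValue()) {
            for (String r : rightValues) {
                matches.add(new ValuePair(l, r));
            }
        }
        expected.put(e.getKey(), matches);
    }
    return expected;
}

The match(...) call in the test then removes each pair the operator actually produces from this map, which is why the final loop asserts that every remaining collection is empty.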
Use of org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator in project flink by apache.
From the class RandomSortMergeOuterJoinTest, method testOuterJoinWithHighNumberOfCommonKeys.
@SuppressWarnings("unchecked, rawtypes")
protected void testOuterJoinWithHighNumberOfCommonKeys(
        FlinkJoinType outerJoinType,
        int input1Size, int input1Duplicates, int input1ValueLength, float input1KeyDensity,
        int input2Size, int input2Duplicates, int input2ValueLength, float input2KeyDensity) {

    TypeComparator<Tuple2<Integer, String>> comparator1 = new TupleComparator<>(
            new int[] { 0 },
            new TypeComparator<?>[] { new IntComparator(true) },
            new TypeSerializer<?>[] { IntSerializer.INSTANCE });
    TypeComparator<Tuple2<Integer, String>> comparator2 = new TupleComparator<>(
            new int[] { 0 },
            new TypeComparator<?>[] { new IntComparator(true) },
            new TypeSerializer<?>[] { IntSerializer.INSTANCE });

    final int duplicateKey = 13;

    try {
        final TupleGenerator generator1 = new TupleGenerator(SEED1, 500, input1KeyDensity, input1ValueLength, KeyMode.SORTED_SPARSE, ValueMode.RANDOM_LENGTH, null);
        final TupleGenerator generator2 = new TupleGenerator(SEED2, 500, input2KeyDensity, input2ValueLength, KeyMode.SORTED_SPARSE, ValueMode.RANDOM_LENGTH, null);

        final TupleGeneratorIterator gen1Iter = new TupleGeneratorIterator(generator1, input1Size);
        final TupleGeneratorIterator gen2Iter = new TupleGeneratorIterator(generator2, input2Size);

        final TupleConstantValueIterator const1Iter = new TupleConstantValueIterator(duplicateKey, "LEFT String for Duplicate Keys", input1Duplicates);
        final TupleConstantValueIterator const2Iter = new TupleConstantValueIterator(duplicateKey, "RIGHT String for Duplicate Keys", input2Duplicates);

        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList1 = new ArrayList<>();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);

        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList2 = new ArrayList<>();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);

        MutableObjectIterator<Tuple2<Integer, String>> input1 = new MergeIterator<>(inList1, comparator1.duplicate());
        MutableObjectIterator<Tuple2<Integer, String>> input2 = new MergeIterator<>(inList2, comparator2.duplicate());

        // collect expected data
        final Map<Integer, Collection<Match>> expectedMatchesMap = joinValues(
                RandomSortMergeInnerJoinTest.collectData(input1),
                RandomSortMergeInnerJoinTest.collectData(input2),
                outerJoinType);

        // re-create the whole thing for actual processing
        // reset the generators and iterators
        generator1.reset();
        generator2.reset();
        const1Iter.reset();
        const2Iter.reset();
        gen1Iter.reset();
        gen2Iter.reset();

        inList1.clear();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);

        inList2.clear();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);

        input1 = new MergeIterator<>(inList1, comparator1.duplicate());
        input2 = new MergeIterator<>(inList2, comparator2.duplicate());

        StreamOperator operator = getOperator(outerJoinType);

        RandomSortMergeInnerJoinTest.match(expectedMatchesMap, RandomSortMergeInnerJoinTest.transformToBinary(myJoin(operator, input1, input2)));

        // assert that each expected match was seen
        for (Entry<Integer, Collection<Match>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
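The outer-join variant differs from the inner join above mainly in how joinValues (not shown) is expected to pad keys that exist on only one side. A hedged sketch for the FULL outer case, reusing the hypothetical ValuePair type from the earlier sketch; a LEFT or RIGHT outer join would pad only the corresponding side.

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Hypothetical sketch of joinValues for a FULL outer join:
// like the inner-join cross product, but unmatched values pair with null.
static Map<Integer, Collection<ValuePair>> fullOuterMatches(
        Map<Integer, Collection<String>> leftMap,
        Map<Integer, Collection<String>> rightMap) {

    Map<Integer, Collection<ValuePair>> expected = new HashMap<>();

    Set<Integer> keys = new HashSet<>(leftMap.keySet());
    keys.addAll(rightMap.keySet());

    for (Integer key : keys) {
        Collection<String> left = leftMap.getOrDefault(key, Collections.emptyList());
        Collection<String> right = rightMap.getOrDefault(key, Collections.emptyList());
        Collection<ValuePair> matches = new ArrayList<>();

        if (left.isEmpty()) {
            for (String r : right) {
                matches.add(new ValuePair(null, r)); // right-only key: pad the left side
            }
        } else if (right.isEmpty()) {
            for (String l : left) {
                matches.add(new ValuePair(l, null)); // left-only key: pad the right side
            }
        } else {
            for (String l : left) {
                for (String r : right) {
                    matches.add(new ValuePair(l, r));
                }
            }
        }
        expected.put(key, matches);
    }
    return expected;
}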
Use of org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator in project flink by apache.
From the class ReOpenableHashTableTestBase, method testDoubleProbeSpilling.
/**
* Verify proper operation if the build side is spilled to disk.
*/
@Test
public void testDoubleProbeSpilling() {
    int buildSize = 1000;
    int probeSize = 1000;
    try {
        TupleGenerator bgen = new TupleGenerator(SEED1, 0, 1024, KeyMode.SORTED, ValueMode.FIX_LENGTH);
        TupleGenerator pgen = new TupleGenerator(SEED2, 0, 1024, KeyMode.SORTED, ValueMode.FIX_LENGTH);

        final TupleGeneratorIterator buildInput = new TupleGeneratorIterator(bgen, buildSize);
        final TupleGeneratorIterator probeInput = new TupleGeneratorIterator(pgen, probeSize);

        doTest(buildInput, probeInput, bgen, pgen);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
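doTest is defined elsewhere in ReOpenableHashTableTestBase and is not shown on this page. What this snippet relies on is that the generators and their iterators are deterministic and resettable, so the exact same records can be replayed when building the expected result and again for each probe pass. The following is purely illustrative and not part of the test base; it assumes it sits in the same test class (same SEED1 constant and imports) and uses only calls that appear in the snippets above plus MutableObjectIterator.next(reuse).

// Illustrative only: after reset(), a TupleGeneratorIterator replays exactly the same
// records, which is what lets doTest() build expectations once and probe the data twice.
@Test
public void generatorInputIsReplayable() throws Exception {
    TupleGenerator gen = new TupleGenerator(SEED1, 0, 1024, KeyMode.SORTED, ValueMode.FIX_LENGTH);
    TupleGeneratorIterator it = new TupleGeneratorIterator(gen, 1000);

    Map<Integer, Integer> firstPass = countKeys(it);

    gen.reset();
    it.reset();
    Map<Integer, Integer> secondPass = countKeys(it);

    Assert.assertEquals(firstPass, secondPass);
}

// Hypothetical helper: count how often each key occurs in the input.
private static Map<Integer, Integer> countKeys(
        MutableObjectIterator<Tuple2<Integer, String>> input) throws Exception {
    Map<Integer, Integer> counts = new HashMap<>();
    Tuple2<Integer, String> reuse = new Tuple2<>();
    while ((reuse = input.next(reuse)) != null) {
        counts.merge(reuse.f0, 1, Integer::sum);
    }
    return counts;
}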
Use of org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator in project flink by apache.
From the class AbstractSortMergeOuterJoinIteratorITCase, method testOuterJoinWithHighNumberOfCommonKeys.
@SuppressWarnings("unchecked, rawtypes")
protected void testOuterJoinWithHighNumberOfCommonKeys(
        OuterJoinType outerJoinType,
        int input1Size, int input1Duplicates, int input1ValueLength, float input1KeyDensity,
        int input2Size, int input2Duplicates, int input2ValueLength, float input2KeyDensity) {

    TypeSerializer<Tuple2<Integer, String>> serializer1 = new TupleSerializer<>(
            (Class<Tuple2<Integer, String>>) (Class<?>) Tuple2.class,
            new TypeSerializer<?>[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE });
    TypeSerializer<Tuple2<Integer, String>> serializer2 = new TupleSerializer<>(
            (Class<Tuple2<Integer, String>>) (Class<?>) Tuple2.class,
            new TypeSerializer<?>[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE });
    TypeComparator<Tuple2<Integer, String>> comparator1 = new TupleComparator<>(
            new int[] { 0 },
            new TypeComparator<?>[] { new IntComparator(true) },
            new TypeSerializer<?>[] { IntSerializer.INSTANCE });
    TypeComparator<Tuple2<Integer, String>> comparator2 = new TupleComparator<>(
            new int[] { 0 },
            new TypeComparator<?>[] { new IntComparator(true) },
            new TypeSerializer<?>[] { IntSerializer.INSTANCE });

    TypePairComparator<Tuple2<Integer, String>, Tuple2<Integer, String>> pairComparator = new GenericPairComparator<>(comparator1, comparator2);

    final int DUPLICATE_KEY = 13;

    try {
        final TupleGenerator generator1 = new TupleGenerator(SEED1, 500, input1KeyDensity, input1ValueLength, KeyMode.SORTED_SPARSE, ValueMode.RANDOM_LENGTH, null);
        final TupleGenerator generator2 = new TupleGenerator(SEED2, 500, input2KeyDensity, input2ValueLength, KeyMode.SORTED_SPARSE, ValueMode.RANDOM_LENGTH, null);

        final TupleGeneratorIterator gen1Iter = new TupleGeneratorIterator(generator1, input1Size);
        final TupleGeneratorIterator gen2Iter = new TupleGeneratorIterator(generator2, input2Size);

        final TupleConstantValueIterator const1Iter = new TupleConstantValueIterator(DUPLICATE_KEY, "LEFT String for Duplicate Keys", input1Duplicates);
        final TupleConstantValueIterator const2Iter = new TupleConstantValueIterator(DUPLICATE_KEY, "RIGHT String for Duplicate Keys", input2Duplicates);

        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList1 = new ArrayList<>();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);

        final List<MutableObjectIterator<Tuple2<Integer, String>>> inList2 = new ArrayList<>();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);

        MutableObjectIterator<Tuple2<Integer, String>> input1 = new MergeIterator<>(inList1, comparator1.duplicate());
        MutableObjectIterator<Tuple2<Integer, String>> input2 = new MergeIterator<>(inList2, comparator2.duplicate());

        // collect expected data
        final Map<Integer, Collection<Match>> expectedMatchesMap = joinValues(collectData(input1), collectData(input2), outerJoinType);

        // re-create the whole thing for actual processing
        // reset the generators and iterators
        generator1.reset();
        generator2.reset();
        const1Iter.reset();
        const2Iter.reset();
        gen1Iter.reset();
        gen2Iter.reset();

        inList1.clear();
        inList1.add(gen1Iter);
        inList1.add(const1Iter);

        inList2.clear();
        inList2.add(gen2Iter);
        inList2.add(const2Iter);

        input1 = new MergeIterator<>(inList1, comparator1.duplicate());
        input2 = new MergeIterator<>(inList2, comparator2.duplicate());

        final FlatJoinFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> joinFunction = new MatchRemovingJoiner(expectedMatchesMap);
        final Collector<Tuple2<Integer, String>> collector = new DiscardingOutputCollector<>();

        // we create this sort-merge iterator with little memory for the block-nested-loops
        // fall-back to make sure it needs to spill for the duplicate keys
        AbstractMergeOuterJoinIterator<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> iterator =
                createOuterJoinIterator(
                        outerJoinType, input1, input2,
                        serializer1, comparator1,
                        serializer2, comparator2,
                        pairComparator,
                        this.memoryManager, this.ioManager,
                        PAGES_FOR_BNLJN, this.parentTask);

        iterator.open();

        while (iterator.callWithNextKey(joinFunction, collector)) ;

        iterator.close();

        // assert that each expected match was seen
        for (Entry<Integer, Collection<Match>> entry : expectedMatchesMap.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                Assert.fail("Collection for key " + entry.getKey() + " is not empty");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An exception occurred during the test: " + e.getMessage());
    }
}
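MatchRemovingJoiner belongs to Flink's test utilities and its source is not shown on this page; its job is to tick each pair the iterator produces off the expected map, so that the final loop can detect leftovers. A rough, hedged sketch of that pattern, using the hypothetical ValuePair type introduced earlier, with only minimal handling of the null padding an outer join produces; the class name RemovingJoiner and the removePair helper are assumptions.

import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;

import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;
import org.junit.Assert;

// Hedged sketch of a match-removing join function: every produced pair must have
// been predicted, and is removed so that leftovers can be detected afterwards.
class RemovingJoiner implements FlatJoinFunction<Tuple2<Integer, String>, Tuple2<Integer, String>, Tuple2<Integer, String>> {

    private final Map<Integer, Collection<ValuePair>> expected;

    RemovingJoiner(Map<Integer, Collection<ValuePair>> expected) {
        this.expected = expected;
    }

    @Override
    public void join(Tuple2<Integer, String> first, Tuple2<Integer, String> second, Collector<Tuple2<Integer, String>> out) {
        // Outer joins may call this with a null first or second record.
        Integer key = first != null ? first.f0 : second.f0;
        Collection<ValuePair> candidates = expected.get(key);
        Assert.assertNotNull("Unexpected key " + key, candidates);

        String left = first != null ? first.f1 : null;
        String right = second != null ? second.f1 : null;
        Assert.assertTrue("Unexpected match " + left + "/" + right, removePair(candidates, left, right));

        out.collect(first != null ? first : second);
    }

    private static boolean removePair(Collection<ValuePair> candidates, String left, String right) {
        for (Iterator<ValuePair> it = candidates.iterator(); it.hasNext(); ) {
            ValuePair p = it.next();
            if (Objects.equals(p.left, left) && Objects.equals(p.right, right)) {
                it.remove();
                return true;
            }
        }
        return false;
    }
}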