Example usage of org.apache.flink.runtime.operators.testutils.types.IntPair in the Apache Flink project, taken from the class CompactingHashTableTest, method testTripleResize:
/**
 * Inserts {@code NUM_PAIRS} random int pairs into a {@link CompactingHashTable}, then grows
 * the table three times in a row (via the private {@code resizeHashTable} method, invoked
 * reflectively through Whitebox), verifying after every resize that all pairs are still
 * retrievable and that no memory segments are lost when the table is closed.
 *
 * <p>Only CompactingHashTable is exercised here, since it is the implementation that
 * supports in-place resizing.
 *
 * @throws Exception if insertion, probing, or the reflective resize call fails; letting the
 *     exception propagate (instead of catch/printStackTrace/fail) preserves the full stack
 *     trace in the test report.
 */
@Test
public void testTripleResize() throws Exception {
    final int NUM_MEM_PAGES = 30 * NUM_PAIRS / PAGE_SIZE;
    final Random rnd = new Random(RANDOM_SEED);
    final IntPair[] pairs = getRandomizedIntPairs(NUM_PAIRS, rnd);

    List<MemorySegment> memory = getMemory(NUM_MEM_PAGES);
    CompactingHashTable<IntPair> table =
            new CompactingHashTable<IntPair>(intPairSerializer, intPairComparator, memory);
    table.open();

    for (int i = 0; i < NUM_PAIRS; i++) {
        table.insert(pairs[i]);
    }

    AbstractHashTableProber<IntPair, IntPair> prober =
            table.getProber(intPairComparator, new SameTypePairComparator<>(intPairComparator));
    IntPair target = new IntPair();

    // sanity check before any resize
    assertAllPairsFound(prober, pairs, target);

    // first resize: make sure there is enough memory for the resize, then grow and re-verify
    memory.addAll(getMemory(ADDITIONAL_MEM));
    assertTrue(Whitebox.<Boolean>invokeMethod(table, "resizeHashTable"));
    assertAllPairsFound(prober, pairs, target);

    // second resize
    memory.addAll(getMemory(ADDITIONAL_MEM));
    assertTrue(Whitebox.<Boolean>invokeMethod(table, "resizeHashTable"));
    assertAllPairsFound(prober, pairs, target);

    // third resize needs twice the extra memory
    memory.addAll(getMemory(2 * ADDITIONAL_MEM));
    assertTrue(Whitebox.<Boolean>invokeMethod(table, "resizeHashTable"));
    assertAllPairsFound(prober, pairs, target);

    table.close();
    // (1 + 1 + 2) * ADDITIONAL_MEM segments were added on top of the initial NUM_MEM_PAGES
    assertEquals("Memory lost", NUM_MEM_PAGES + 4 * ADDITIONAL_MEM, table.getFreeMemory().size());
}

/**
 * Asserts that every pair in {@code pairs} is found by {@code prober} and that the match
 * carries the expected value. On a missing match, the failure message reports the pair's
 * key and value.
 */
private static void assertAllPairsFound(
        AbstractHashTableProber<IntPair, IntPair> prober, IntPair[] pairs, IntPair target) {
    for (IntPair pair : pairs) {
        assertNotNull(pair.getKey() + " " + pair.getValue(), prober.getMatchFor(pair, target));
        assertEquals(pair.getValue(), target.getValue());
    }
}
Example usage of org.apache.flink.runtime.operators.testutils.types.IntPair in the Apache Flink project, taken from the class HashTableITCase, method testInMemoryMutableHashTableIntPair:
// ============================================================================================
// Integer Pairs based Tests
// ============================================================================================
/**
 * Runs a fully in-memory hash join over uniformly generated {@link IntPair} inputs and
 * checks that the join result contains exactly
 * {@code NUM_KEYS * BUILD_VALS_PER_KEY * PROBE_VALS_PER_KEY} records.
 *
 * @throws IOException if the hash table's I/O layer fails
 */
@Test
public void testInMemoryMutableHashTableIntPair() throws IOException {
    final int NUM_KEYS = 100000;
    final int BUILD_VALS_PER_KEY = 3;
    final int PROBE_VALS_PER_KEY = 10;

    // create a build input that gives 300 000 pairs with 3 values sharing the same key
    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_KEYS, BUILD_VALS_PER_KEY, false);
    // create a probe input that gives 1 000 000 pairs with 10 values sharing a key
    MutableObjectIterator<IntPair> probeInput = new UniformIntPairGenerator(NUM_KEYS, PROBE_VALS_PER_KEY, true);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        memSegments = this.memManager.allocatePages(MEM_OWNER, 896);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    // create the I/O access for spilling; must be shut down afterwards or its worker
    // threads leak across tests
    final IOManager ioManager = new IOManagerAsync();
    try {
        // ----------------------------------------------------------------------------------------

        final MutableHashTable<IntPair, IntPair> join =
                new MutableHashTable<IntPair, IntPair>(
                        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
                        this.pairBuildSideComparator, this.pairProbeSideComparator,
                        this.pairComparator, memSegments, ioManager);
        join.open(buildInput, probeInput);

        final IntPair recordReuse = new IntPair();
        int numRecordsInJoinResult = 0;

        while (join.nextRecord()) {
            MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
            while (buildSide.next(recordReuse) != null) {
                numRecordsInJoinResult++;
            }
        }
        Assert.assertEquals("Wrong number of records in join result.", NUM_KEYS * BUILD_VALS_PER_KEY * PROBE_VALS_PER_KEY, numRecordsInJoinResult);

        join.close();

        // ----------------------------------------------------------------------------------------

        this.memManager.release(join.getFreedMemory());
    } finally {
        // NOTE(review): assumes this Flink version's IOManager exposes shutdown();
        // newer versions replaced it with close() — confirm against the project version.
        ioManager.shutdown();
    }
}
Example usage of org.apache.flink.runtime.operators.testutils.types.IntPair in the Apache Flink project, taken from the class HashTableITCase, method testBucketsNotFulfillSegment:
/**
 * Regression test for the case where hash buckets do not completely fill a memory segment:
 * before handing the pages to the join, a bucket header is written into the 128-byte tail
 * region of every segment, and the join must still produce the complete result.
 *
 * @throws Exception if the join fails
 */
@Test
public void testBucketsNotFulfillSegment() throws Exception {
    final int NUM_KEYS = 10000;
    final int BUILD_VALS_PER_KEY = 3;
    final int PROBE_VALS_PER_KEY = 10;

    // build side: 10 000 keys x 3 values = 30 000 pairs sharing the same key
    MutableObjectIterator<IntPair> buildSource =
            new UniformIntPairGenerator(NUM_KEYS, BUILD_VALS_PER_KEY, false);
    // probe side: 10 000 keys x 10 values = 100 000 pairs sharing a key
    MutableObjectIterator<IntPair> probeSource =
            new UniformIntPairGenerator(NUM_KEYS, PROBE_VALS_PER_KEY, true);

    // allocate the memory for the HashTable;
    // 33 is the minimum number of pages required to perform the hash join on these inputs
    List<MemorySegment> pages;
    try {
        pages = this.memManager.allocatePages(MEM_OWNER, 33);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    // write a bucket header into the last 128 bytes of each segment
    // (header field layout per MutableHashTable internals — presumably marking the bucket
    // as empty with no overflow; verify against the bucket layout constants)
    for (MemorySegment page : pages) {
        final int tailBucketOffset = page.size() - 128;
        page.put(tailBucketOffset + 0, (byte) 0);
        page.put(tailBucketOffset + 1, (byte) 0);
        page.putShort(tailBucketOffset + 2, (short) -1);
        page.putLong(tailBucketOffset + 4, ~0x0L);
    }

    // ----------------------------------------------------------------------------------------

    final MutableHashTable<IntPair, IntPair> join =
            new MutableHashTable<>(
                    this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
                    this.pairBuildSideComparator, this.pairProbeSideComparator,
                    this.pairComparator, pages, ioManager);
    join.open(buildSource, probeSource);

    final IntPair reuse = new IntPair();
    int resultCount = 0;
    while (join.nextRecord()) {
        final MutableObjectIterator<IntPair> matches = join.getBuildSideIterator();
        while (matches.next(reuse) != null) {
            resultCount++;
        }
    }
    Assert.assertEquals("Wrong number of records in join result.", NUM_KEYS * BUILD_VALS_PER_KEY * PROBE_VALS_PER_KEY, resultCount);

    join.close();
    this.memManager.release(join.getFreedMemory());
}
Example usage of org.apache.flink.runtime.operators.testutils.types.IntPair in the Apache Flink project, taken from the class HashTablePerformanceComparison, method testMutableHashMapPerformance:
/**
 * Micro-benchmark for {@link MutableHashTable}: fills the table with {@code NUM_PAIRS}
 * unique-key pairs, probes all of them, updates every value in place via
 * {@code writeBack}, probes again to verify the updates, and finally checks that closing
 * the table returns every memory page. Timing for each phase is printed to stdout.
 *
 * @throws Exception if building, probing, or updating fails; propagating the exception
 *     (instead of catch/printStackTrace/fail) preserves the full stack trace.
 */
@Test
public void testMutableHashMapPerformance() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_PAIRS / PAGE_SIZE;

    // build input holds every pair once; the probe input for open() is empty —
    // probing is driven manually through getMatchesFor below
    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
    MutableObjectIterator<IntPair> probeInput = new UniformIntPairGenerator(0, 1, false);
    MutableObjectIterator<IntPair> probeTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
    MutableObjectIterator<IntPair> updater = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
    MutableObjectIterator<IntPair> updateTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);

    long start;
    long end;
    long first = System.currentTimeMillis();

    System.out.println("Creating and filling MutableHashMap...");
    start = System.currentTimeMillis();
    MutableHashTable<IntPair, IntPair> table =
            new MutableHashTable<IntPair, IntPair>(
                    serializer, serializer, comparator, comparator, pairComparator,
                    getMemory(NUM_MEM_PAGES, PAGE_SIZE), ioManager);
    table.open(buildInput, probeInput);
    end = System.currentTimeMillis();
    System.out.println("HashMap ready. Time: " + (end - start) + " ms");

    System.out.println("Starting first probing run...");
    start = System.currentTimeMillis();
    IntPair compare = new IntPair();
    HashBucketIterator<IntPair, IntPair> iter;
    IntPair target = new IntPair();
    while (probeTester.next(compare) != null) {
        iter = table.getMatchesFor(compare);
        iter.next(target);
        assertEquals(target.getKey(), compare.getKey());
        assertEquals(target.getValue(), compare.getValue());
        // keys are unique, so there must be exactly one match
        assertTrue(iter.next(target) == null);
    }
    end = System.currentTimeMillis();
    System.out.println("Probing done. Time: " + (end - start) + " ms");

    System.out.println("Starting update...");
    start = System.currentTimeMillis();
    while (updater.next(compare) != null) {
        // bump each value by one and write it back in place
        compare.setValue(compare.getValue() + 1);
        iter = table.getMatchesFor(compare);
        iter.next(target);
        iter.writeBack(compare);
    }
    end = System.currentTimeMillis();
    System.out.println("Update done. Time: " + (end - start) + " ms");

    System.out.println("Starting second probing run...");
    start = System.currentTimeMillis();
    while (updateTester.next(compare) != null) {
        // expect the incremented value written by the update phase
        compare.setValue(compare.getValue() + 1);
        iter = table.getMatchesFor(compare);
        iter.next(target);
        assertEquals(target.getKey(), compare.getKey());
        assertEquals(target.getValue(), compare.getValue());
        assertTrue(iter.next(target) == null);
    }
    end = System.currentTimeMillis();
    System.out.println("Probing done. Time: " + (end - start) + " ms");

    table.close();
    end = System.currentTimeMillis();
    System.out.println("Overall time: " + (end - first) + " ms");

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreedMemory().size());
}
Example usage of org.apache.flink.runtime.operators.testutils.types.IntPair in the Apache Flink project, taken from the class HashTablePerformanceComparison, method testInPlaceMutableHashTablePerformance:
/**
 * Micro-benchmark for {@link InPlaceMutableHashTable}, mirroring
 * {@code testMutableHashMapPerformance}: inserts {@code NUM_PAIRS} unique-key pairs,
 * probes all of them, replaces every record with an incremented value via
 * {@code insertOrReplaceRecord}, probes again to verify, and checks that closing the
 * table returns every memory page. Timing for each phase is printed to stdout.
 *
 * @throws Exception if building, probing, or updating fails; propagating the exception
 *     (instead of catch/printStackTrace/fail) preserves the full stack trace.
 */
@Test
public void testInPlaceMutableHashTablePerformance() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_PAIRS / PAGE_SIZE;

    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
    MutableObjectIterator<IntPair> probeTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
    MutableObjectIterator<IntPair> updater = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
    MutableObjectIterator<IntPair> updateTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);

    long start;
    long end;
    long first = System.currentTimeMillis();

    System.out.println("Creating and filling InPlaceMutableHashTable...");
    start = System.currentTimeMillis();
    InPlaceMutableHashTable<IntPair> table =
            new InPlaceMutableHashTable<>(serializer, comparator, getMemory(NUM_MEM_PAGES, PAGE_SIZE));
    table.open();
    IntPair target = new IntPair();
    while (buildInput.next(target) != null) {
        table.insert(target);
    }
    end = System.currentTimeMillis();
    System.out.println("HashMap ready. Time: " + (end - start) + " ms");

    System.out.println("Starting first probing run...");
    start = System.currentTimeMillis();
    AbstractHashTableProber<IntPair, IntPair> prober = table.getProber(comparator, pairComparator);
    IntPair temp = new IntPair();
    while (probeTester.next(target) != null) {
        assertNotNull(prober.getMatchFor(target, temp));
        assertEquals(temp.getValue(), target.getValue());
    }
    end = System.currentTimeMillis();
    System.out.println("Probing done. Time: " + (end - start) + " ms");

    System.out.println("Starting update...");
    start = System.currentTimeMillis();
    while (updater.next(target) != null) {
        // replace each record with an incremented value (same key)
        target.setValue(target.getValue() + 1);
        table.insertOrReplaceRecord(target);
    }
    end = System.currentTimeMillis();
    System.out.println("Update done. Time: " + (end - start) + " ms");

    System.out.println("Starting second probing run...");
    start = System.currentTimeMillis();
    while (updateTester.next(target) != null) {
        // the stored value must be the generator's value plus one
        assertNotNull(prober.getMatchFor(target, temp));
        assertEquals(target.getValue() + 1, temp.getValue());
    }
    end = System.currentTimeMillis();
    System.out.println("Probing done. Time: " + (end - start) + " ms");

    table.close();
    end = System.currentTimeMillis();
    System.out.println("Overall time: " + (end - first) + " ms");

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Aggregations