Usage example of org.apache.flink.runtime.operators.testutils.types.IntList from the Apache Flink project:
class MutableHashTableTestBase, method testMultipleProbers.
/**
 * Verifies that two probers with different probe-side types (IntPair and IntList)
 * can be used against the same table concurrently: every inserted record must be
 * found by both probers, and the matched record must equal the one inserted.
 */
@Test
public void testMultipleProbers() throws Exception {
	final int numMemPages = SIZE * NUM_LISTS / PAGE_SIZE;
	AbstractMutableHashTable<IntList> hashTable =
			getHashTable(serializerV, comparatorV, getMemory(numMemPages));

	// The same seed drives both randomized data sets, so keep the generation order fixed.
	final Random random = new Random(RANDOM_SEED);
	final IntList[] buildRecords = getRandomizedIntLists(NUM_LISTS, random);
	final IntPair[] probePairs = getRandomizedIntPairs(NUM_LISTS, random);

	hashTable.open();
	for (IntList record : buildRecords) {
		hashTable.insert(record);
	}

	AbstractHashTableProber<IntList, IntList> listProber =
			hashTable.getProber(comparatorV, pairComparatorV);
	AbstractHashTableProber<IntPair, IntList> pairProber =
			hashTable.getProber(intPairComparator, pairComparatorPL);

	IntList match = new IntList();
	for (int idx = 0; idx < NUM_LISTS; idx++) {
		// Both probe-side types must resolve to the same build-side record.
		assertNotNull(pairProber.getMatchFor(probePairs[idx], match));
		assertNotNull(listProber.getMatchFor(buildRecords[idx], match));
		assertArrayEquals(buildRecords[idx].getValue(), match.getValue());
	}

	hashTable.close();
	// Closing must return every memory page to the free pool.
	assertEquals("Memory lost", numMemPages, hashTable.getFreeMemory().size());
}
Usage example of org.apache.flink.runtime.operators.testutils.types.IntList from the Apache Flink project:
class CompactingHashTableTest, method testResizeWithCompaction.
/**
 * Exercises {@link CompactingHashTable} resizing in combination with partition
 * compaction: inserts records, resizes, overwrites every record, compacts each
 * partition, resizes again, and finally checks that the overwritten records are
 * retrievable and that no memory pages were lost.
 *
 * <p>Declares {@code throws Exception} instead of wrapping the body in a
 * try/catch with {@code fail(e.getMessage())}: letting the exception propagate
 * preserves the full stack trace in the test report and matches the style of
 * the sibling tests in this file.
 */
@Test
public void testResizeWithCompaction() throws Exception {
	// Only CompactingHashTable
	final int NUM_MEM_PAGES = (SIZE * NUM_LISTS / PAGE_SIZE);
	final Random rnd = new Random(RANDOM_SEED);
	final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);
	List<MemorySegment> memory = getMemory(NUM_MEM_PAGES);
	CompactingHashTable<IntList> table = new CompactingHashTable<IntList>(serializerV, comparatorV, memory);
	table.open();
	for (int i = 0; i < NUM_LISTS; i++) {
		table.insert(lists[i]);
	}
	AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
	IntList target = new IntList();
	for (int i = 0; i < NUM_LISTS; i++) {
		assertNotNull(prober.getMatchFor(lists[i], target));
		assertArrayEquals(lists[i].getValue(), target.getValue());
	}
	// make sure there is enough memory for resize
	memory.addAll(getMemory(ADDITIONAL_MEM));
	// resizeHashTable is private; invoked reflectively via Whitebox.
	// Use the primitive boolean — no need for the boxed type here.
	boolean resized = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
	assertTrue(resized);
	// All original records must survive the resize.
	for (int i = 0; i < NUM_LISTS; i++) {
		assertNotNull(prober.getMatchFor(lists[i], target));
		assertArrayEquals(lists[i].getValue(), target.getValue());
	}
	final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);
	// test replacing
	for (int i = 0; i < NUM_LISTS; i++) {
		table.insertOrReplaceRecord(overwriteLists[i]);
	}
	// Compact every partition; the partition list is private, so read it reflectively.
	Field list = Whitebox.getField(CompactingHashTable.class, "partitions");
	@SuppressWarnings("unchecked")
	ArrayList<InMemoryPartition<IntList>> partitions = (ArrayList<InMemoryPartition<IntList>>) list.get(table);
	int numPartitions = partitions.size();
	for (int i = 0; i < numPartitions; i++) {
		Whitebox.invokeMethod(table, "compactPartition", i);
	}
	// make sure there is enough memory for resize
	memory.addAll(getMemory(2 * ADDITIONAL_MEM));
	resized = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
	assertTrue(resized);
	// The replacement records must be the ones found after compaction + resize.
	for (int i = 0; i < NUM_LISTS; i++) {
		assertNotNull("" + i, prober.getMatchFor(overwriteLists[i], target));
		assertArrayEquals(overwriteLists[i].getValue(), target.getValue());
	}
	table.close();
	// All pages handed to the table (initial + both top-ups) must be returned on close.
	assertEquals("Memory lost", NUM_MEM_PAGES + 3 * ADDITIONAL_MEM, table.getFreeMemory().size());
}
Usage example of org.apache.flink.runtime.operators.testutils.types.IntList from the Apache Flink project:
class MutableHashTableTestBase, method testProberUpdate.
/**
 * Verifies in-place record updates through a prober: after each inserted record
 * is matched and overwritten via {@code updateMatch}, a second probe pass must
 * return the replacement values, and closing the table must free all pages.
 */
@Test
public void testProberUpdate() throws Exception {
	final int numMemPages = SIZE * NUM_LISTS / PAGE_SIZE;
	AbstractMutableHashTable<IntList> hashTable =
			getHashTable(serializerV, comparatorV, getMemory(numMemPages));

	final Random random = new Random(RANDOM_SEED);
	final IntList[] buildRecords = getRandomizedIntLists(NUM_LISTS, random);

	hashTable.open();
	for (IntList record : buildRecords) {
		hashTable.insert(record);
	}

	// Second randomized batch (same generator) supplies the replacement records.
	final IntList[] replacements = getRandomizedIntLists(NUM_LISTS, random);
	AbstractHashTableProber<IntList, IntList> prober =
			hashTable.getProber(comparatorV, pairComparatorV);

	IntList match = new IntList();
	for (int idx = 0; idx < NUM_LISTS; idx++) {
		// Locate the original record, then overwrite it in place.
		assertNotNull("" + idx, prober.getMatchFor(buildRecords[idx], match));
		assertArrayEquals(buildRecords[idx].getValue(), match.getValue());
		prober.updateMatch(replacements[idx]);
	}

	// After the update pass, only the replacement values must be found.
	for (int idx = 0; idx < NUM_LISTS; idx++) {
		assertNotNull("" + idx, prober.getMatchFor(replacements[idx], match));
		assertArrayEquals(replacements[idx].getValue(), match.getValue());
	}

	hashTable.close();
	assertEquals("Memory lost", numMemPages, hashTable.getFreeMemory().size());
}
Usage example of org.apache.flink.runtime.operators.testutils.types.IntList from the Apache Flink project:
class MutableHashTableTestBase, method testRepeatedBuildAndRetrieve.
/**
 * Builds the table once, verifies retrieval, then repeatedly (NUM_REWRITES
 * rounds) overwrites every record via {@code insertOrReplaceRecord} and checks
 * that each round's replacements are the ones retrieved. Finally verifies that
 * closing the table returns all memory pages.
 *
 * <p>The original wrapped {@code table.insert} in a
 * {@code try { ... } catch (Exception e) { throw e; }} block — a no-op
 * catch-and-rethrow that only added noise; the exception now propagates
 * directly.
 */
@Test
public void testRepeatedBuildAndRetrieve() throws Exception {
	final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
	AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));
	final Random rnd = new Random(RANDOM_SEED);
	final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);
	table.open();
	for (int i = 0; i < NUM_LISTS; i++) {
		table.insert(lists[i]);
	}
	AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
	IntList target = new IntList();
	for (int i = 0; i < NUM_LISTS; i++) {
		assertNotNull(prober.getMatchFor(lists[i], target));
		assertArrayEquals(lists[i].getValue(), target.getValue());
	}
	IntList[] overwriteLists;
	for (int k = 0; k < NUM_REWRITES; k++) {
		overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);
		// test replacing
		for (int i = 0; i < NUM_LISTS; i++) {
			table.insertOrReplaceRecord(overwriteLists[i]);
		}
		// Each rewrite round must fully supersede the previous contents.
		for (int i = 0; i < NUM_LISTS; i++) {
			assertNotNull("" + i, prober.getMatchFor(overwriteLists[i], target));
			assertArrayEquals(overwriteLists[i].getValue(), target.getValue());
		}
	}
	table.close();
	// Closing must return every memory page to the free pool.
	assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
End of aggregated usage examples.