Use of java.util.Random in project flink by apache.
Class MutableHashTableTestBase, method testEntryIterator.
@Test
public void testEntryIterator() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));
    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);
    table.open();
    // insert all records and remember the expected sum of their keys
    int result = 0;
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
        result += lists[i].getKey();
    }
    // scan all entries through the iterator, reusing the same target record, and sum the keys again
    MutableObjectIterator<IntList> iter = table.getEntryIterator();
    IntList target = new IntList();
    int sum = 0;
    while ((target = iter.next(target)) != null) {
        sum += target.getKey();
    }
    table.close();
    assertTrue(sum == result);
    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
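The loop `while ((target = iter.next(target)) != null)` is Flink's object-reuse idiom: the caller hands the iterator a record instance that it may fill and return, so a full scan allocates no new objects. The standalone sketch below mirrors the shape of that contract with a hypothetical ReuseIterator and IntHolder (illustrative names, not part of the Flink API; the real interface is org.apache.flink.util.MutableObjectIterator) purely to show how such an iterator is consumed.

import java.util.Arrays;

// Minimal stand-in for the reuse-style iterator contract used in testEntryIterator
// (illustrative only; not Flink's actual MutableObjectIterator).
interface ReuseIterator<E> {
    /** Fills {@code reuse} with the next record and returns it, or returns null when exhausted. */
    E next(E reuse);
}

final class IntHolder {
    int key;
}

public class ReusePatternSketch {

    // Iterator over a fixed int array that always writes into the caller-supplied holder.
    static ReuseIterator<IntHolder> over(int[] keys) {
        return new ReuseIterator<IntHolder>() {
            private int pos = 0;

            @Override
            public IntHolder next(IntHolder reuse) {
                if (pos >= keys.length) {
                    return null;
                }
                reuse.key = keys[pos++];
                return reuse;
            }
        };
    }

    public static void main(String[] args) {
        int[] keys = {3, 1, 4, 1, 5};
        ReuseIterator<IntHolder> iter = over(keys);

        // Same loop shape as in the test: one holder instance is reused for every record.
        IntHolder target = new IntHolder();
        int sum = 0;
        while ((target = iter.next(target)) != null) {
            sum += target.key;
        }
        System.out.println("sum = " + sum + " (expected " + Arrays.stream(keys).sum() + ")");
    }
}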
Use of java.util.Random in project flink by apache.
Class MutableHashTableTestBase, method testVariableLengthBuildAndRetrieveMajorityUpdated.
@Test
public void testVariableLengthBuildAndRetrieveMajorityUpdated() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));
    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);
    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
    }
    // verify that every inserted record can be found again
    AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }
    final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);
    // test replacing: overwrite all but every 100th record
    for (int i = 0; i < NUM_LISTS; i++) {
        if (i % 100 != 0) {
            table.insertOrReplaceRecord(overwriteLists[i]);
            lists[i] = overwriteLists[i];
        }
    }
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull("" + i, prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }
    table.close();
    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
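These tests lean on two helpers from the test base that are not shown here: IntList, a record with an int key and a variable-length int[] payload, and getRandomizedIntLists, which builds the requested number of such records with random payload lengths. The real implementations live in Flink's runtime test utilities; the sketch below is only a plausible reconstruction of their shape, so the field names, the payload-length bound, and the choice of the element index as the key (the overwrite tests only make sense if overwriteLists[i] shares lists[i]'s key) are assumptions.

import java.util.Random;

// Assumed shape of the variable-length test record used throughout these tests.
class IntList {
    private int key;
    private int[] value;

    public IntList() {}

    public IntList(int key, int[] value) {
        this.key = key;
        this.value = value;
    }

    public int getKey() { return key; }
    public void setKey(int key) { this.key = key; }
    public int[] getValue() { return value; }
    public void setValue(int[] value) { this.value = value; }
}

class RandomizedListsSketch {

    // Hypothetical reconstruction of getRandomizedIntLists: the loop index as key
    // and a payload of random length, so records have genuinely variable size.
    static IntList[] getRandomizedIntLists(int num, Random rnd) {
        IntList[] lists = new IntList[num];
        for (int i = 0; i < num; i++) {
            int[] value = new int[rnd.nextInt(20) + 1]; // length bound is an assumption
            for (int j = 0; j < value.length; j++) {
                value[j] = rnd.nextInt();
            }
            lists[i] = new IntList(i, value);
        }
        return lists;
    }
}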
Use of java.util.Random in project flink by apache.
Class MutableHashTableTestBase, method testVariableLengthBuildAndRetrieve.
@Test
public void testVariableLengthBuildAndRetrieve() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));
    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);
    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
    }
    // verify that every inserted record can be found again
    AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }
    final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);
    // test replacing: overwrite every record
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insertOrReplaceRecord(overwriteLists[i]);
    }
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull("" + i, prober.getMatchFor(overwriteLists[i], target));
        assertArrayEquals(overwriteLists[i].getValue(), target.getValue());
    }
    table.close();
    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Use of java.util.Random in project flink by apache.
Class MutableHashTableTestBase, method testVariableLengthBuildAndRetrieveMinorityUpdated.
@Test
public void testVariableLengthBuildAndRetrieveMinorityUpdated() throws Exception {
    final int NUM_LISTS = 20000;
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));
    final int STEP_SIZE = 100;
    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);
    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
    }
    // verify that every inserted record can be found again
    AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }
    final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS / STEP_SIZE, rnd);
    // test replacing: overwrite only every STEP_SIZE-th record
    for (int i = 0; i < NUM_LISTS; i += STEP_SIZE) {
        overwriteLists[i / STEP_SIZE].setKey(overwriteLists[i / STEP_SIZE].getKey() * STEP_SIZE);
        table.insertOrReplaceRecord(overwriteLists[i / STEP_SIZE]);
        lists[i] = overwriteLists[i / STEP_SIZE];
    }
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }
    table.close();
    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Use of java.util.Random in project flink by apache.
Class MutableHashTableTestBase, method testMultipleProbers.
@Test
public void testMultipleProbers() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));
    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);
    final IntPair[] pairs = getRandomizedIntPairs(NUM_LISTS, rnd);
    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
    }
    // two probers on the same table, probing with two different key types
    AbstractHashTableProber<IntList, IntList> listProber = table.getProber(comparatorV, pairComparatorV);
    AbstractHashTableProber<IntPair, IntList> pairProber = table.getProber(intPairComparator, pairComparatorPL);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(pairProber.getMatchFor(pairs[i], target));
        assertNotNull(listProber.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }
    table.close();
    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
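Every test ends with the same invariant: after table.close(), the table must return exactly the NUM_MEM_PAGES segments it was given, otherwise the "Memory lost" assertion fails. The getMemory helper belongs to the test base and is not shown above; a plausible version is sketched below. The MemorySegmentFactory.allocateUnpooledSegment call is the standard Flink API for unpooled segments, but the helper's exact body and the PAGE_SIZE value are assumptions.

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;

class MemoryProvisioningSketch {

    static final int PAGE_SIZE = 32 * 1024; // the test base's page size; the exact value is an assumption

    // Plausible shape of getMemory(numPages): allocate numPages unpooled segments of PAGE_SIZE bytes each.
    static List<MemorySegment> getMemory(int numPages) {
        List<MemorySegment> memory = new ArrayList<>(numPages);
        for (int i = 0; i < numPages; i++) {
            memory.add(MemorySegmentFactory.allocateUnpooledSegment(PAGE_SIZE));
        }
        return memory;
    }
}

The closing assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size()) then checks that close() released every one of those segments back to the table's free list.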