Example usage of org.apache.flink.table.data.binary.BinaryRowData in the Apache Flink project:
class BytesHashMapTestBase, method verifyInsert.
/**
 * Inserts every key with the shared default value and verifies the insert path:
 * each lookup must miss before the append, each appended entry must equal the
 * default value, and the final element count must match {@code NUM_ENTRIES}.
 *
 * @param keys the keys to insert; must contain at least {@code NUM_ENTRIES} elements
 * @param inserted collects a copy of every appended row for later verification
 * @param table the hash map under test
 * @throws IOException if the table fails to append an entry
 */
private void verifyInsert(K[] keys, List<BinaryRowData> inserted, AbstractBytesHashMap<K> table) throws IOException {
    for (int i = 0; i < NUM_ENTRIES; i++) {
        K groupKey = keys[i];
        // look up and insert; the key must not be present yet
        BytesMap.LookupInfo<K, BinaryRowData> lookupInfo = table.lookup(groupKey);
        Assert.assertFalse(lookupInfo.isFound());
        BinaryRowData entry = table.append(lookupInfo, defaultValue);
        Assert.assertNotNull(entry);
        // JUnit convention: expected value first, actual value second
        Assert.assertEquals(defaultValue, entry);
        // copy: the returned row is a reusable view backed by the table's memory
        inserted.add(entry.copy());
    }
    Assert.assertEquals(NUM_ENTRIES, table.getNumElements());
}
Example usage of org.apache.flink.table.data.binary.BinaryRowData in the Apache Flink project:
class BytesHashMapTestBase, method verifyKeyInsert.
/**
 * Inserts every key and verifies that each appended value row is the zero-arity
 * "present" marker row (an 8-byte row with no fields), i.e. the map stores only
 * key presence. The final element count must match {@code NUM_ENTRIES}.
 *
 * @param keys the keys to insert; must contain at least {@code NUM_ENTRIES} elements
 * @param table the hash map under test
 * @throws IOException if the table fails to append an entry
 */
private void verifyKeyInsert(K[] keys, AbstractBytesHashMap<K> table) throws IOException {
    // zero-field row backed by an 8-byte segment: the expected stored "value"
    BinaryRowData present = new BinaryRowData(0);
    present.pointTo(MemorySegmentFactory.wrap(new byte[8]), 0, 8);
    for (int i = 0; i < NUM_ENTRIES; i++) {
        K groupKey = keys[i];
        // look up and insert; the key must not be present yet
        BytesMap.LookupInfo<K, BinaryRowData> lookupInfo = table.lookup(groupKey);
        Assert.assertFalse(lookupInfo.isFound());
        BinaryRowData entry = table.append(lookupInfo, defaultValue);
        Assert.assertNotNull(entry);
        // JUnit convention: expected value first, actual value second
        Assert.assertEquals(present, entry);
    }
    Assert.assertEquals(NUM_ENTRIES, table.getNumElements());
}
Example usage of org.apache.flink.table.data.binary.BinaryRowData in the Apache Flink project:
class BytesHashMapTestBase, method testRest.
/**
 * Verifies that a map can be reset and reused: fill it and check the contents,
 * reset it (which must empty the map and shrink the record area back to a
 * single segment), then fill and check it again before freeing the memory.
 *
 * <p>NOTE(review): the name looks like a typo for {@code testReset}; kept
 * as-is since JUnit discovers tests by method name.
 */
@Test
public void testRest() throws Exception {
    final int segmentCount = needNumMemSegments(NUM_ENTRIES, rowLength(RowType.of(VALUE_TYPES)), rowLength(RowType.of(KEY_TYPES)), PAGE_SIZE);
    final int totalMemory = segmentCount * PAGE_SIZE;
    final MemoryManager manager = MemoryManagerBuilder.newBuilder().setMemorySize(totalMemory).build();
    final AbstractBytesHashMap<K> map = createBytesHashMap(manager, totalMemory, KEY_TYPES, VALUE_TYPES);
    final K[] randomKeys = generateRandomKeys(NUM_ENTRIES);
    final List<BinaryRowData> written = new ArrayList<>(NUM_ENTRIES);

    // first round: populate the map and read everything back
    verifyInsertAndUpdate(randomKeys, written, map);
    verifyRetrieve(map, randomKeys, written);

    // reset must empty the map and release all but one record-area segment
    map.reset();
    Assert.assertEquals(0, map.getNumElements());
    Assert.assertEquals(1, map.getRecordAreaMemorySegments().size());

    // second round: the map must be fully reusable after the reset
    written.clear();
    verifyInsertAndUpdate(randomKeys, written, map);
    verifyRetrieve(map, randomKeys, written);
    map.free();
}
Example usage of org.apache.flink.table.data.binary.BinaryRowData in the Apache Flink project:
class BytesHashMapTestBase, method testResetAndOutput.
/**
 * Exercises the out-of-memory / reset path: entries are appended until the map
 * exhausts its managed memory; everything written so far is then read straight
 * out of the record-area segments, the map is reset, and the failed insertion
 * is retried. At the end, the iterator output plus the drained rows must cover
 * all {@code NUM_ENTRIES} entries and match what was written.
 */
@Test
public void testResetAndOutput() throws Exception {
    final Random random = new Random(RANDOM_SEED);
    final int reservedSegments = 64;
    final int memorySize = reservedSegments * PAGE_SIZE;
    final MemoryManager manager = MemoryManagerBuilder.newBuilder().setMemorySize(memorySize).build();
    final AbstractBytesHashMap<K> map = createBytesHashMap(manager, memorySize, KEY_TYPES, VALUE_TYPES);
    final K[] groupKeys = generateRandomKeys(NUM_ENTRIES);
    final List<BinaryRowData> written = new ArrayList<>(NUM_ENTRIES);
    final List<BinaryRowData> readValues = new ArrayList<>(NUM_ENTRIES);
    final List<K> readKeys = new ArrayList<>(NUM_ENTRIES);
    for (int i = 0; i < NUM_ENTRIES; i++) {
        final K groupKey = groupKeys[i];
        // look up and insert; the key must not be present yet
        BytesMap.LookupInfo<K, BinaryRowData> info = map.lookup(groupKey);
        Assert.assertFalse(info.isFound());
        try {
            final BinaryRowData row = map.append(info, defaultValue);
            Assert.assertNotNull(row);
            // mock multiple updates of the in-place value row
            for (int j = 0; j < NUM_REWRITES; j++) {
                updateOutputBuffer(row, random);
            }
            written.add(row.copy());
        } catch (Exception e) {
            // presumably an EOF from exhausted managed memory — drain what was
            // written so far directly from the record-area pages
            final ArrayList<MemorySegment> pages = map.getRecordAreaMemorySegments();
            final RandomAccessInputView input = new RandomAccessInputView(pages, pages.get(0).size());
            K keyReuse = keySerializer.createInstance();
            BinaryRowData valueReuse = valueSerializer.createInstance();
            for (int index = 0; index < map.getNumElements(); index++) {
                keyReuse = keySerializer.mapFromPages(keyReuse, input);
                valueReuse = valueSerializer.mapFromPages(valueReuse, input);
                readKeys.add(keySerializer.copy(keyReuse));
                readValues.add(valueReuse.copy());
            }
            map.reset();
            // retry the failed insertion against the now-empty map
            info = map.lookup(groupKey);
            final BinaryRowData row = map.append(info, defaultValue);
            Assert.assertNotNull(row);
            // mock multiple updates of the in-place value row
            for (int j = 0; j < NUM_REWRITES; j++) {
                updateOutputBuffer(row, random);
            }
            written.add(row.copy());
        }
    }
    // collect whatever remained in the map after the last reset
    final KeyValueIterator<K, BinaryRowData> iterator = map.getEntryIterator(false);
    while (iterator.advanceNext()) {
        readKeys.add(keySerializer.copy(iterator.getKey()));
        readValues.add(iterator.getValue().copy());
    }
    Assert.assertEquals(NUM_ENTRIES, written.size());
    Assert.assertEquals(NUM_ENTRIES, readKeys.size());
    Assert.assertEquals(NUM_ENTRIES, readValues.size());
    Assert.assertEquals(written, readValues);
    map.free();
}
Example usage of org.apache.flink.table.data.binary.BinaryRowData in the Apache Flink project:
class WindowBytesHashMapTest, method generateRandomKeys.
/**
 * Produces {@code num} random window keys by pairing deterministically
 * randomized input rows with window timestamps drawn from the same seeded
 * generator, so repeated runs yield identical keys.
 *
 * @param num the number of keys to generate
 * @return an array of {@code num} freshly built window keys
 */
@Override
public WindowKey[] generateRandomKeys(int num) {
    final Random random = new Random(RANDOM_SEED);
    final BinaryRowData[] rowKeys = getRandomizedInputs(num, random, true);
    final WindowKey[] result = new WindowKey[num];
    for (int index = 0; index < result.length; index++) {
        result[index] = new WindowKey(random.nextLong(), rowKeys[index]);
    }
    return result;
}
Aggregations