Use of herddb.core.RandomPageReplacementPolicy in project herddb by diennea.
From the class BLinkTest, method testScanMiddleNotExistent.
@Test
public void testScanMiddleNotExistent() throws Exception {
    BLinkIndexDataStorage<Sized<Long>, Long> storage = new DummyBLinkIndexDataStorage<>();
    try (BLink<Sized<Long>, Long> blink = new BLink<>(2048L, new LongSizeEvaluator(), new RandomPageReplacementPolicy(10), storage)) {
        final long headInserts = 100;
        final long nonExistents = 100;
        final long tailInserts = 100;
        /* Insert a head run and a tail run of keys, leaving a gap of missing keys in the middle */
        for (long l = 0; l < headInserts; l++) {
            blink.insert(Sized.valueOf(l), l);
        }
        for (long l = headInserts + nonExistents; l < headInserts + nonExistents + tailInserts; l++) {
            blink.insert(Sized.valueOf(l), l);
        }
        BLinkMetadata<Sized<Long>> metadata = blink.checkpoint();
        /* Require at least two nodes! */
        assertNotEquals(1, metadata.nodes.size());
        long offset = 10;
        /* Scan ranges that fall entirely inside the missing-key gap must return no entries */
        for (long l = headInserts; l < headInserts + nonExistents - offset; l++) {
            Stream<Entry<Sized<Long>, Long>> stream = blink.scan(Sized.valueOf(l), Sized.valueOf(l + offset));
            Holder<Long> h = new Holder<>(l);
            Holder<Long> count = new Holder<>(0L);
            StringBuilder builder = new StringBuilder();
            /* Check each value */
            stream.forEach(entry -> {
                assertEquals(h.value, entry.getValue());
                h.value++;
                count.value++;
                builder.append(entry.getValue()).append(", ");
            });
            assertEquals(0, (long) count.value);
        }
    }
}
Use of herddb.core.RandomPageReplacementPolicy in project herddb by diennea.
From the class BLinkTest, method testScan.
@Test
public void testScan() throws Exception {
    BLinkIndexDataStorage<Sized<Long>, Long> storage = new DummyBLinkIndexDataStorage<>();
    try (BLink<Sized<Long>, Long> blink = new BLink<>(2048L, new LongSizeEvaluator(), new RandomPageReplacementPolicy(10), storage)) {
        final long inserts = 100;
        for (long l = 0; l < inserts; l++) {
            blink.insert(Sized.valueOf(l), l);
        }
        BLinkMetadata<Sized<Long>> metadata = blink.checkpoint();
        /* Require at least two nodes! */
        assertNotEquals(1, metadata.nodes.size());
        long offset = 10;
        /* Each scan over existing keys must return exactly "offset" consecutive entries starting at l */
        for (long l = 0; l < inserts - offset; l++) {
            Stream<Entry<Sized<Long>, Long>> stream = blink.scan(Sized.valueOf(l), Sized.valueOf(l + offset));
            Holder<Long> h = new Holder<>(l);
            Holder<Long> count = new Holder<>(0L);
            StringBuilder builder = new StringBuilder();
            /* Check each value */
            stream.forEach(entry -> {
                assertEquals(h.value, entry.getValue());
                h.value++;
                count.value++;
                builder.append(entry.getValue()).append(", ");
            });
            builder.setLength(builder.length() - 2);
            System.out.println("start " + l + " end " + (l + offset) + " -> " + builder);
            assertEquals(offset, (long) count.value);
        }
    }
}
Use of herddb.core.RandomPageReplacementPolicy in project herddb by diennea.
From the class BLinkTest, method testCheckpointAndRestore.
@Test
public void testCheckpointAndRestore() throws Exception {
    String[] data = new String[] { "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z" };
    BLinkIndexDataStorage<Sized<String>, Long> storage = new DummyBLinkIndexDataStorage<>();
    BLinkMetadata<Sized<String>> metadata;
    try (BLink<Sized<String>, Long> blink = new BLink<>(2048L, new StringSizeEvaluator(), new RandomPageReplacementPolicy(3), storage)) {
        for (int i = 0; i < data.length; ++i) {
            blink.insert(Sized.valueOf(data[i]), i + 1L);
        }
        assertEquals(data.length, blink.size());
        metadata = blink.checkpoint();
    }
    /* Rebuild the index from the checkpointed metadata and the same storage */
    try (BLink<Sized<String>, Long> blinkFromMeta = new BLink<>(2048L, new StringSizeEvaluator(), new RandomPageReplacementPolicy(3), storage, metadata)) {
        /* Require at least two nodes! */
        assertNotEquals(1, metadata.nodes.size());
        for (int i = 0; i < data.length; ++i) {
            assertEquals(i + 1L, (long) blinkFromMeta.search(Sized.valueOf(data[i])));
        }
        assertEquals(data.length, blinkFromMeta.size());
    }
}
Use of herddb.core.RandomPageReplacementPolicy in project herddb by diennea.
From the class BLinkTest, method testUnload.
@Test
public void testUnload() throws Exception {
    final int pages = 5;
    final int inserts = 100;
    BLinkIndexDataStorage<Sized<Long>, Long> storage = new DummyBLinkIndexDataStorage<>();
    PageReplacementPolicy policy = new RandomPageReplacementPolicy(pages);
    try (BLink<Sized<Long>, Long> blink = new BLink<>(2048L, new LongSizeEvaluator(), policy, storage)) {
        for (long l = 0; l < inserts; l++) {
            blink.insert(Sized.valueOf(l), l);
        }
        /* Must fill the policy */
        assertEquals(pages, policy.size());
        assertEquals(inserts, blink.size());
        blink.close();
        /* No pages should remain in memory after unload!! */
        assertEquals(0, policy.size());
        assertEquals(0, blink.size());
    }
}
Use of herddb.core.RandomPageReplacementPolicy in project herddb by diennea.
From the class BlockRangeIndexBench, method testHugeMultivalued.
/**
 * Test with multivalued keys
 *
 * @author diego.salvi
 */
@Test
public void testHugeMultivalued() {
    final int testSize = 1_000_000;
    final int valuesPerKey = 10;
    final int tenPerc = testSize / 10;
    long _start = System.currentTimeMillis();
    BlockRangeIndex<Sized<Integer>, Sized<String>> index = new BlockRangeIndex<>(10000, new RandomPageReplacementPolicy(10000));
    index.boot(BlockRangeIndexMetadata.empty());
    /* Write phase: each key receives "valuesPerKey" distinct values */
    for (int i = 0; i < testSize; i++) {
        Sized<Integer> si = Sized.valueOf(i);
        for (int j = 0; j < valuesPerKey; j++) {
            Sized<String> sij = Sized.valueOf("test_" + i + "_" + j);
            index.put(si, sij);
        }
        if (i % tenPerc == 0) {
            System.out.println("insert : " + (1 + i / tenPerc) * 10 + "%: " + i);
        }
    }
    long _stop = System.currentTimeMillis();
    System.out.println("time w: " + (_stop - _start));
    System.out.println("num segments: " + index.getNumBlocks());
    /* Read phase: every key must return all of its values, in insertion order */
    for (int i = 0; i < testSize; i++) {
        List<Sized<String>> s = index.search(Sized.valueOf(i));
        Assert.assertEquals(valuesPerKey, s.size());
        for (int j = 0; j < valuesPerKey; ++j) {
            Assert.assertEquals("test_" + i + "_" + j, s.get(j).dummy);
        }
        if (i % tenPerc == 0) {
            System.out.println("search : " + (1 + i / tenPerc) * 10 + "%: " + i);
        }
    }
    _start = _stop;
    _stop = System.currentTimeMillis();
    System.out.println("time r: " + (_stop - _start));
    index.clear();
}
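The snippets above all follow the same construction pattern: a RandomPageReplacementPolicy created with a fixed capacity is handed to the index together with a size evaluator and a BLinkIndexDataStorage, and testUnload suggests that the capacity is the number of index pages the policy keeps in memory at once. The sketch below condenses that pattern; every type and call is taken from the tests above, while the concrete values (page size 2048L, capacity 10) and the method name are purely illustrative.

@Test
public void minimalUsageSketch() throws Exception {
    // Test-only storage backend and size evaluator, as used by the BLinkTest snippets above.
    BLinkIndexDataStorage<Sized<Long>, Long> storage = new DummyBLinkIndexDataStorage<>();
    // Assumed capacity for illustration: at most 10 index pages kept in memory (compare testUnload).
    PageReplacementPolicy policy = new RandomPageReplacementPolicy(10);
    try (BLink<Sized<Long>, Long> blink = new BLink<>(2048L, new LongSizeEvaluator(), policy, storage)) {
        blink.insert(Sized.valueOf(1L), 1L);                      // key -> value
        assertEquals(1L, (long) blink.search(Sized.valueOf(1L))); // point lookup
        BLinkMetadata<Sized<Long>> metadata = blink.checkpoint(); // persist the index structure
        // A new BLink can later be rebuilt from the same storage plus this metadata,
        // via the five-argument constructor shown in testCheckpointAndRestore.
        assertNotEquals(0, metadata.nodes.size());
    }
}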