Usage example of org.h2.store.Page from the h2database project — class TestMVStoreTool, method testCompact():
/**
 * Verify that MVStoreTool.compact produces an equivalent but smaller store,
 * both without and with compression, and that in-place compaction yields
 * the same file sizes as the compact-to-new-file variants.
 */
private void testCompact() {
    String fileName = getBaseDir() + "/testCompact.h3";
    FileUtils.createDirectories(getBaseDir());
    FileUtils.delete(fileName);
    // use a very small page split size so the store contains many leaf pages
    MVStore.Builder builder = new MVStore.Builder();
    builder.pageSplitSize(1000).fileName(fileName).autoCommitDisabled();
    MVStore mvStore = builder.open();
    MVMap<Integer, String> dataMap = mvStore.openMap("data");
    for (int key = 0; key < 10; key++) {
        dataMap.put(key, "Hello World " + key * 10);
        if (key % 3 == 0) {
            mvStore.commit();
        }
    }
    // a series of maps of growing size, committed once per map
    for (int mapIndex = 0; mapIndex < 20; mapIndex++) {
        dataMap = mvStore.openMap("data" + mapIndex);
        for (int key = 0; key < mapIndex * mapIndex; key++) {
            dataMap.put(key, "Hello World " + key * 10);
        }
        mvStore.commit();
    }
    // an R-tree map with randomly placed rectangles (fixed seed, so the
    // sequence of nextFloat() calls must stay in this exact order)
    MVRTreeMap<String> rTreeMap = mvStore.openMap("rtree", new MVRTreeMap.Builder<String>());
    Random rnd = new Random(1);
    for (int id = 0; id < 10; id++) {
        float x = rnd.nextFloat();
        float y = rnd.nextFloat();
        float width = rnd.nextFloat() / 10;
        float height = rnd.nextFloat() / 10;
        SpatialKey key = new SpatialKey(id, x, x + width, y, y + height);
        rTreeMap.put(key, "Hello World " + id * 10);
        if (id % 3 == 0) {
            mvStore.commit();
        }
    }
    mvStore.close();
    // compact into two new files, once without and once with compression
    MVStoreTool.compact(fileName, fileName + ".new", false);
    MVStoreTool.compact(fileName, fileName + ".new.compress", true);
    // all three stores must hold equal content
    MVStore s1 = new MVStore.Builder().fileName(fileName).readOnly().open();
    MVStore s2 = new MVStore.Builder().fileName(fileName + ".new").readOnly().open();
    MVStore s3 = new MVStore.Builder().fileName(fileName + ".new.compress").readOnly().open();
    assertEquals(s1, s2);
    assertEquals(s1, s3);
    s1.close();
    s2.close();
    s3.close();
    long size1 = FileUtils.size(fileName);
    long size2 = FileUtils.size(fileName + ".new");
    long size3 = FileUtils.size(fileName + ".new.compress");
    // compaction must shrink the file; compression must shrink it further
    assertTrue("size1: " + size1 + " size2: " + size2 + " size3: " + size3,
            size2 < size1 && size3 < size2);
    // in-place compaction must reach the same sizes as the two-file variants
    MVStoreTool.compact(fileName, false);
    assertEquals(size2, FileUtils.size(fileName));
    MVStoreTool.compact(fileName, true);
    assertEquals(size3, FileUtils.size(fileName));
}
Usage example of org.h2.store.Page from the h2database project — class Recover, method dumpPageStore(PrintWriter, long):
/**
 * Dump all regular pages of the page store to the given writer.
 * Pages 0 to 2 are the file header and the two "head" pages (handled by
 * the file-level dump), so the scan starts at page 3.
 *
 * @param writer the output writer
 * @param pageCount the total number of pages in the file
 */
private void dumpPageStore(PrintWriter writer, long pageCount) {
    for (long page = 3; page < pageCount; page++) {
        // fresh buffer per page; the original code also allocated one
        // before the loop, but that value was a dead store, overwritten
        // on every iteration before ever being read
        Data s = Data.create(this, pageSize);
        seek(page);
        store.readFully(s.getBytes(), 0, pageSize);
        dumpPage(writer, s, page, pageCount);
    }
}
Usage example of org.h2.store.Page from the h2database project — class Recover, method dumpPageStore(String):
/**
 * Dump a PageStore database file to a SQL recovery script.
 * Emits CREATE ALIAS statements for the LOB reader helpers, parses the
 * file header and the two "head" pages, dumps every page twice (a first
 * silent pass collects statistics and schema objects, the second pass
 * writes for real), follows the transaction log, and finally prints
 * per-page-type statistics.
 *
 * @param fileName the name of the database file
 */
private void dumpPageStore(String fileName) {
setDatabaseName(fileName.substring(0, fileName.length() - Constants.SUFFIX_PAGE_FILE.length()));
PrintWriter writer = null;
stat = new Stats();
try {
writer = getWriter(fileName, ".sql");
// aliases let the generated script re-read LOB data via this class
writer.println("CREATE ALIAS IF NOT EXISTS READ_BLOB FOR \"" + this.getClass().getName() + ".readBlob\";");
writer.println("CREATE ALIAS IF NOT EXISTS READ_CLOB FOR \"" + this.getClass().getName() + ".readClob\";");
writer.println("CREATE ALIAS IF NOT EXISTS READ_BLOB_DB FOR \"" + this.getClass().getName() + ".readBlobDb\";");
writer.println("CREATE ALIAS IF NOT EXISTS READ_CLOB_DB FOR \"" + this.getClass().getName() + ".readClobDb\";");
resetSchema();
// open read-write only when 'remove' is set (so data can be deleted)
store = FileStore.open(null, fileName, remove ? "rw" : "r");
long length = store.length();
try {
store.init();
} catch (Exception e) {
// a corrupt header must not stop the dump; log and continue
writeError(writer, e);
}
// the first 128 bytes hold the file header; the page size is stored
// as an int at offset 48, followed by write/read format versions
Data s = Data.create(this, 128);
seek(0);
store.readFully(s.getBytes(), 0, 128);
s.setPos(48);
pageSize = s.readInt();
int writeVersion = s.readByte();
int readVersion = s.readByte();
writer.println("-- pageSize: " + pageSize + " writeVersion: " + writeVersion + " readVersion: " + readVersion);
if (pageSize < PageStore.PAGE_SIZE_MIN || pageSize > PageStore.PAGE_SIZE_MAX) {
// header is damaged: fall back to the default page size
pageSize = Constants.DEFAULT_PAGE_SIZE;
writer.println("-- ERROR: page size; using " + pageSize);
}
long pageCount = length / pageSize;
parents = new int[(int) pageCount];
s = Data.create(this, pageSize);
// first scan: record the parent page id of every regular page
// (pages 0-2 are the file header and the two head pages)
for (long i = 3; i < pageCount; i++) {
s.reset();
seek(i);
// only the first 32 bytes of the page header are needed here
store.readFully(s.getBytes(), 0, 32);
// skip one byte and a short int (presumably page type and checksum
// - TODO confirm against PageStore format), then the parent page id
s.readByte();
s.readShortInt();
parents[(int) i] = s.readInt();
}
int logKey = 0, logFirstTrunkPage = 0, logFirstDataPage = 0;
s = Data.create(this, pageSize);
// pages 1 and 2 are the redundant "head" pages; each stores a write
// counter plus the log position, protected by a CRC32 over bytes
// 4..pageSize (the stored CRC is the first int of the page)
for (long i = 1; ; i++) {
if (i == 3) {
break;
}
s.reset();
seek(i);
store.readFully(s.getBytes(), 0, pageSize);
CRC32 crc = new CRC32();
crc.update(s.getBytes(), 4, pageSize - 4);
int expected = (int) crc.getValue();
int got = s.readInt();
long writeCounter = s.readLong();
int key = s.readInt();
int firstTrunkPage = s.readInt();
int firstDataPage = s.readInt();
if (expected == got) {
// only trust the log position from a head page with a valid CRC;
// the later head page (if valid) wins
logKey = key;
logFirstTrunkPage = firstTrunkPage;
logFirstDataPage = firstDataPage;
}
writer.println("-- head " + i + ": writeCounter: " + writeCounter + " log " + key + ":" + firstTrunkPage + "/" + firstDataPage + " crc " + got + " (" + (expected == got ? "ok" : ("expected: " + expected)) + ")");
}
writer.println("-- log " + logKey + ":" + logFirstTrunkPage + "/" + logFirstDataPage);
// silent first pass: dump to a null writer so that statistics and the
// schema map get populated before the real output is produced
PrintWriter devNull = new PrintWriter(new OutputStream() {
@Override
public void write(int b) {
// ignore
}
});
dumpPageStore(devNull, pageCount);
// reset the collectors, then dump again for real
stat = new Stats();
schema.clear();
objectIdSet = new HashSet<>();
dumpPageStore(writer, pageCount);
writeSchema(writer);
try {
dumpPageLogStream(writer, logKey, logFirstTrunkPage, logFirstDataPage, pageCount);
} catch (IOException e) {
// ignore
}
writer.println("---- Statistics ----");
writer.println("-- page count: " + pageCount + ", free: " + stat.free);
// Math.max(1, ...) avoids a division by zero for an empty file
long total = Math.max(1, stat.pageDataRows + stat.pageDataEmpty + stat.pageDataHead);
writer.println("-- page data bytes: head " + stat.pageDataHead + ", empty " + stat.pageDataEmpty + ", rows " + stat.pageDataRows + " (" + (100 - 100L * stat.pageDataEmpty / total) + "% full)");
for (int i = 0; i < stat.pageTypeCount.length; i++) {
int count = stat.pageTypeCount[i];
if (count > 0) {
writer.println("-- " + getPageType(i) + " " + (100 * count / pageCount) + "%, " + count + " page(s)");
}
}
writer.close();
} catch (Throwable e) {
writeError(writer, e);
} finally {
IOUtils.closeSilently(writer);
closeSilently(store);
}
}
Usage example of org.h2.store.Page from the h2database project — class BnfSyntax, method getLink:
/**
* Get the HTML link to the given token.
*
* @param bnf the BNF
* @param token the token
* @return the HTML link
*/
/**
 * Get the HTML link to the documentation of the given token.
 * The target page depends on the section of the matched rule (grammar,
 * data types, or functions); fixed rules are rendered via the visitor.
 *
 * @param bnf the BNF
 * @param token the token
 * @return the HTML link, or the token itself if no rule matches
 */
String getLink(Bnf bnf, String token) {
    RuleHead found = null;
    String key = Bnf.getRuleMapKey(token);
    // try progressively shorter suffixes of the key until a rule matches
    for (int i = 0; i < token.length(); i++) {
        // NOTE(review): assumes key.length() >= token.length(), otherwise
        // substring(i) could throw - TODO confirm Bnf.getRuleMapKey contract
        String test = StringUtils.toLowerEnglish(key.substring(i));
        RuleHead r = bnf.getRuleHead(test);
        if (r != null) {
            found = r;
            break;
        }
    }
    if (found == null) {
        // no documentation target: return the token as plain text
        return token;
    }
    String page = "grammar.html";
    if (found.getSection().startsWith("Data Types")) {
        page = "datatypes.html";
    } else if (found.getSection().startsWith("Functions")) {
        page = "functions.html";
    } else if (token.equals("@func@")) {
        return "<a href=\"functions.html\">Function</a>";
    } else if (found.getRule() instanceof RuleFixed) {
        found.getRule().accept(this);
        return html;
    }
    // use the locale-independent lower-casing already used above; plain
    // String.toLowerCase() is locale-sensitive (e.g. Turkish dotless i)
    // and could produce a broken anchor under such default locales
    String link = StringUtils.toLowerEnglish(found.getTopic()).replace(' ', '_');
    link = page + "#" + StringUtils.urlEncode(link);
    return "<a href=\"" + link + "\">" + token + "</a>";
}
Usage example of org.h2.store.Page from the h2database project — class MVStore, method readPageChunkReferences:
/**
 * Get the child-page references of a non-leaf page, using the reference
 * cache and the page cache where possible, falling back to reading the
 * page data from the file.
 *
 * @param mapId the id of the map the page belongs to
 * @param pos the page position
 * @param parentChunk the chunk id of the parent page
 * @return the references, or null for a leaf page or a childless page
 *         living in the same chunk as its parent
 */
private PageChildren readPageChunkReferences(int mapId, long pos, int parentChunk) {
    // leaf pages have no children to collect
    if (DataUtils.getPageType(pos) == DataUtils.PAGE_TYPE_LEAF) {
        return null;
    }
    PageChildren refs = cacheChunkRef == null ? null : cacheChunkRef.get(pos);
    if (refs == null) {
        // not in the reference cache: if possible, build the references
        // from a cached page instead of touching the file
        Page cached = cache == null ? null : cache.get(pos);
        if (cached != null) {
            refs = new PageChildren(cached);
        } else {
            // page was not cached: read the data from the file
            Chunk c = getChunk(pos);
            long filePos = c.block * BLOCK_SIZE;
            filePos += DataUtils.getPageOffset(pos);
            if (filePos < 0) {
                throw DataUtils.newIllegalStateException(
                        DataUtils.ERROR_FILE_CORRUPT,
                        "Negative position {0}; p={1}, c={2}", filePos, pos, c.toString());
            }
            long maxPos = (c.block + c.len) * BLOCK_SIZE;
            refs = PageChildren.read(fileStore, pos, mapId, filePos, maxPos);
        }
        refs.removeDuplicateChunkReferences();
        if (cacheChunkRef != null) {
            cacheChunkRef.put(pos, refs, refs.getMemory());
        }
    }
    // a childless page in the same chunk as its parent adds no information
    if (refs.children.length == 0 && DataUtils.getPageChunkId(pos) == parentChunk) {
        return null;
    }
    return refs;
}
Aggregations