
Example 6 with Compressor

Use of org.h2.compress.Compressor in project h2database by h2database.

The class MVStoreTool, method dump().

/**
 * Read the contents of the file and display them in a human-readable
 * format.
 *
 * @param fileName the name of the file
 * @param writer the print writer
 * @param details print the page details
 */
public static void dump(String fileName, Writer writer, boolean details) {
    PrintWriter pw = new PrintWriter(writer, true);
    if (!FilePath.get(fileName).exists()) {
        pw.println("File not found: " + fileName);
        return;
    }
    long size = FileUtils.size(fileName);
    pw.printf("File %s, %d bytes, %d MB\n", fileName, size, size / 1024 / 1024);
    FileChannel file = null;
    int blockSize = MVStore.BLOCK_SIZE;
    TreeMap<Integer, Long> mapSizesTotal = new TreeMap<>();
    long pageSizeTotal = 0;
    try {
        file = FilePath.get(fileName).open("r");
        long fileSize = file.size();
        int len = Long.toHexString(fileSize).length();
        ByteBuffer block = ByteBuffer.allocate(4096);
        long pageCount = 0;
        for (long pos = 0; pos < fileSize; ) {
            block.rewind();
            DataUtils.readFully(file, pos, block);
            block.rewind();
            int headerType = block.get();
            if (headerType == 'H') {
                String header = new String(block.array(), StandardCharsets.ISO_8859_1).trim();
                pw.printf("%0" + len + "x fileHeader %s%n", pos, header);
                pos += blockSize;
                continue;
            }
            if (headerType != 'c') {
                pos += blockSize;
                continue;
            }
            block.position(0);
            Chunk c = null;
            try {
                c = Chunk.readChunkHeader(block, pos);
            } catch (IllegalStateException e) {
                pos += blockSize;
                continue;
            }
            if (c.len <= 0) {
                // not a chunk
                pos += blockSize;
                continue;
            }
            int length = c.len * MVStore.BLOCK_SIZE;
            pw.printf("%n%0" + len + "x chunkHeader %s%n", pos, c.toString());
            ByteBuffer chunk = ByteBuffer.allocate(length);
            DataUtils.readFully(file, pos, chunk);
            int p = block.position();
            pos += length;
            int remaining = c.pageCount;
            pageCount += c.pageCount;
            TreeMap<Integer, Integer> mapSizes = new TreeMap<>();
            int pageSizeSum = 0;
            while (remaining > 0) {
                int start = p;
                try {
                    chunk.position(p);
                } catch (IllegalArgumentException e) {
                    // too far
                    pw.printf("ERROR illegal position %d%n", p);
                    break;
                }
                int pageSize = chunk.getInt();
                // check value (ignored)
                chunk.getShort();
                int mapId = DataUtils.readVarInt(chunk);
                int entries = DataUtils.readVarInt(chunk);
                int type = chunk.get();
                boolean compressed = (type & DataUtils.PAGE_COMPRESSED) != 0;
                boolean node = (type & 1) != 0;
                if (details) {
                    pw.printf("+%0" + len + "x %s, map %x, %d entries, %d bytes, maxLen %x%n",
                            p,
                            (node ? "node" : "leaf") + (compressed ? " compressed" : ""),
                            mapId,
                            node ? entries + 1 : entries,
                            pageSize,
                            DataUtils.getPageMaxLength(DataUtils.getPagePos(0, 0, pageSize, 0)));
                }
                p += pageSize;
                Integer mapSize = mapSizes.get(mapId);
                if (mapSize == null) {
                    mapSize = 0;
                }
                mapSizes.put(mapId, mapSize + pageSize);
                Long total = mapSizesTotal.get(mapId);
                if (total == null) {
                    total = 0L;
                }
                mapSizesTotal.put(mapId, total + pageSize);
                pageSizeSum += pageSize;
                pageSizeTotal += pageSize;
                remaining--;
                long[] children = null;
                long[] counts = null;
                if (node) {
                    children = new long[entries + 1];
                    for (int i = 0; i <= entries; i++) {
                        children[i] = chunk.getLong();
                    }
                    counts = new long[entries + 1];
                    for (int i = 0; i <= entries; i++) {
                        long s = DataUtils.readVarLong(chunk);
                        counts[i] = s;
                    }
                }
                String[] keys = new String[entries];
                if (mapId == 0 && details) {
                    ByteBuffer data;
                    if (compressed) {
                        boolean fast = (type & DataUtils.PAGE_COMPRESSED_HIGH) != DataUtils.PAGE_COMPRESSED_HIGH;
                        Compressor compressor = getCompressor(fast);
                        int lenAdd = DataUtils.readVarInt(chunk);
                        int compLen = pageSize + start - chunk.position();
                        byte[] comp = Utils.newBytes(compLen);
                        chunk.get(comp);
                        int l = compLen + lenAdd;
                        data = ByteBuffer.allocate(l);
                        compressor.expand(comp, 0, compLen, data.array(), 0, l);
                    } else {
                        data = chunk;
                    }
                    for (int i = 0; i < entries; i++) {
                        String k = StringDataType.INSTANCE.read(data);
                        keys[i] = k;
                    }
                    if (node) {
                        // meta map node
                        for (int i = 0; i < entries; i++) {
                            long cp = children[i];
                            pw.printf("    %d children < %s @ chunk %x +%0" + len + "x%n",
                                    counts[i], keys[i],
                                    DataUtils.getPageChunkId(cp),
                                    DataUtils.getPageOffset(cp));
                        }
                        long cp = children[entries];
                        pw.printf("    %d children >= %s @ chunk %x +%0" + len + "x%n",
                                counts[entries],
                                keys.length >= entries ? null : keys[entries],
                                DataUtils.getPageChunkId(cp),
                                DataUtils.getPageOffset(cp));
                    } else {
                        // meta map leaf
                        String[] values = new String[entries];
                        for (int i = 0; i < entries; i++) {
                            String v = StringDataType.INSTANCE.read(data);
                            values[i] = v;
                        }
                        for (int i = 0; i < entries; i++) {
                            pw.println("    " + keys[i] + " = " + values[i]);
                        }
                    }
                } else {
                    if (node && details) {
                        for (int i = 0; i <= entries; i++) {
                            long cp = children[i];
                            pw.printf("    %d children @ chunk %x +%0" + len + "x%n",
                                    counts[i],
                                    DataUtils.getPageChunkId(cp),
                                    DataUtils.getPageOffset(cp));
                        }
                    }
                }
            }
            pageSizeSum = Math.max(1, pageSizeSum);
            for (Integer mapId : mapSizes.keySet()) {
                int percent = 100 * mapSizes.get(mapId) / pageSizeSum;
                pw.printf("map %x: %d bytes, %d%%%n", mapId, mapSizes.get(mapId), percent);
            }
            int footerPos = chunk.limit() - Chunk.FOOTER_LENGTH;
            try {
                chunk.position(footerPos);
                pw.printf("+%0" + len + "x chunkFooter %s%n",
                        footerPos,
                        new String(chunk.array(), chunk.position(),
                                Chunk.FOOTER_LENGTH, StandardCharsets.ISO_8859_1).trim());
            } catch (IllegalArgumentException e) {
                // too far
                pw.printf("ERROR illegal footer position %d%n", footerPos);
            }
        }
        pw.printf("%n%0" + len + "x eof%n", fileSize);
        pw.printf("\n");
        pageCount = Math.max(1, pageCount);
        pw.printf("page size total: %d bytes, page count: %d, average page size: %d bytes\n", pageSizeTotal, pageCount, pageSizeTotal / pageCount);
        pageSizeTotal = Math.max(1, pageSizeTotal);
        for (Integer mapId : mapSizesTotal.keySet()) {
            int percent = (int) (100 * mapSizesTotal.get(mapId) / pageSizeTotal);
            pw.printf("map %x: %d bytes, %d%%%n", mapId, mapSizesTotal.get(mapId), percent);
        }
    } catch (IOException e) {
        pw.println("ERROR: " + e);
        e.printStackTrace(pw);
    } finally {
        if (file != null) {
            try {
                file.close();
            } catch (IOException e) {
            // ignore
            }
        }
    }
    pw.flush();
}
Also used: FileChannel (java.nio.channels.FileChannel), Compressor (org.h2.compress.Compressor), IOException (java.io.IOException), TreeMap (java.util.TreeMap), ByteBuffer (java.nio.ByteBuffer), PrintWriter (java.io.PrintWriter)
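
The dump code above relies on a private getCompressor(fast) helper that the snippet does not show. A minimal sketch of what such a helper could look like, assuming (as the fast/high flag handling suggests) that the fast path maps to LZF and the high-compression path to Deflate, both of which ship in org.h2.compress:

// Hypothetical reconstruction of the helper called as getCompressor(fast) above;
// the actual implementation in MVStoreTool may differ.
private static Compressor getCompressor(boolean fast) {
    // fast pages (PAGE_COMPRESSED) -> LZF, high-compression pages (PAGE_COMPRESSED_HIGH) -> Deflate
    return fast ? new org.h2.compress.CompressLZF()
                : new org.h2.compress.CompressDeflate();
}

Since dump is public and static, it can be invoked directly, for example as MVStoreTool.dump("test.mv", new PrintWriter(System.out), true) to print the page details to the console.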

Example 7 with Compressor

Use of org.h2.compress.Compressor in project h2database by h2database.

The class Page, method read().

/**
 * Read the page from the buffer.
 *
 * @param buff the buffer
 * @param chunkId the chunk id
 * @param offset the offset within the chunk
 * @param maxLength the maximum length
 */
void read(ByteBuffer buff, int chunkId, int offset, int maxLength) {
    int start = buff.position();
    int pageLength = buff.getInt();
    if (pageLength > maxLength || pageLength < 4) {
        throw DataUtils.newIllegalStateException(DataUtils.ERROR_FILE_CORRUPT,
                "File corrupted in chunk {0}, expected page length 4..{1}, got {2}",
                chunkId, maxLength, pageLength);
    }
    buff.limit(start + pageLength);
    short check = buff.getShort();
    int mapId = DataUtils.readVarInt(buff);
    if (mapId != map.getId()) {
        throw DataUtils.newIllegalStateException(DataUtils.ERROR_FILE_CORRUPT,
                "File corrupted in chunk {0}, expected map id {1}, got {2}",
                chunkId, map.getId(), mapId);
    }
    int checkTest = DataUtils.getCheckValue(chunkId) ^ DataUtils.getCheckValue(offset) ^ DataUtils.getCheckValue(pageLength);
    if (check != (short) checkTest) {
        throw DataUtils.newIllegalStateException(DataUtils.ERROR_FILE_CORRUPT,
                "File corrupted in chunk {0}, expected check value {1}, got {2}",
                chunkId, checkTest, check);
    }
    int len = DataUtils.readVarInt(buff);
    keys = new Object[len];
    int type = buff.get();
    boolean node = (type & 1) == DataUtils.PAGE_TYPE_NODE;
    if (node) {
        children = new PageReference[len + 1];
        long[] p = new long[len + 1];
        for (int i = 0; i <= len; i++) {
            p[i] = buff.getLong();
        }
        long total = 0;
        for (int i = 0; i <= len; i++) {
            long s = DataUtils.readVarLong(buff);
            total += s;
            children[i] = new PageReference(null, p[i], s);
        }
        totalCount = total;
    }
    boolean compressed = (type & DataUtils.PAGE_COMPRESSED) != 0;
    if (compressed) {
        Compressor compressor;
        if ((type & DataUtils.PAGE_COMPRESSED_HIGH) == DataUtils.PAGE_COMPRESSED_HIGH) {
            compressor = map.getStore().getCompressorHigh();
        } else {
            compressor = map.getStore().getCompressorFast();
        }
        int lenAdd = DataUtils.readVarInt(buff);
        int compLen = pageLength + start - buff.position();
        byte[] comp = Utils.newBytes(compLen);
        buff.get(comp);
        int l = compLen + lenAdd;
        buff = ByteBuffer.allocate(l);
        compressor.expand(comp, 0, compLen, buff.array(), buff.arrayOffset(), l);
    }
    map.getKeyType().read(buff, keys, len, true);
    if (!node) {
        values = new Object[len];
        map.getValueType().read(buff, values, len, false);
        totalCount = len;
    }
    recalculateMemory();
}
Also used: Compressor (org.h2.compress.Compressor)
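
Page.read only takes the decompression branch when the store that wrote the page had compression enabled. A minimal sketch of how such pages are produced, assuming the public MVStore.Builder API where compress() selects the fast (LZF) codec and compressHigh() the Deflate codec (file and map names are placeholders):

import org.h2.mvstore.MVMap;
import org.h2.mvstore.MVStore;

public class CompressedStoreSketch {
    public static void main(String[] args) {
        // compress() causes written pages to carry the PAGE_COMPRESSED type bit;
        // compressHigh() would set PAGE_COMPRESSED_HIGH instead.
        MVStore store = new MVStore.Builder()
                .fileName("compressed.mv")
                .compress()
                .open();
        MVMap<Integer, String> map = store.openMap("data");
        for (int i = 0; i < 10_000; i++) {
            map.put(i, "row " + i);
        }
        store.close();
        // Reopening the store and reading the map back goes through Page.read,
        // which checks the type bits and picks getCompressorFast() or getCompressorHigh().
    }
}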

Example 8 with Compressor

Use of org.h2.compress.Compressor in project h2database by h2database.

The class TestCompress, method testDatabase().

private void testDatabase() throws Exception {
    deleteDb("memFS:compress");
    Connection conn = getConnection("memFS:compress");
    Statement stat = conn.createStatement();
    ResultSet rs;
    rs = stat.executeQuery("select table_name from information_schema.tables");
    Statement stat2 = conn.createStatement();
    while (rs.next()) {
        String table = rs.getString(1);
        if (!"COLLATIONS".equals(table)) {
            stat2.execute("create table " + table + " as select * from information_schema." + table);
        }
    }
    conn.close();
    Compressor compress = new CompressLZF();
    int pageSize = Constants.DEFAULT_PAGE_SIZE;
    byte[] buff2 = new byte[pageSize];
    byte[] test = new byte[2 * pageSize];
    compress.compress(buff2, pageSize, test, 0);
    for (int j = 0; j < 4; j++) {
        long time = System.nanoTime();
        for (int i = 0; i < 1000; i++) {
            InputStream in = FileUtils.newInputStream("memFS:compress.h2.db");
            while (true) {
                int len = in.read(buff2);
                if (len < 0) {
                    break;
                }
                compress.compress(buff2, pageSize, test, 0);
            }
            in.close();
        }
        System.out.println("compress: " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - time) + " ms");
    }
    for (int j = 0; j < 4; j++) {
        ArrayList<byte[]> comp = New.arrayList();
        InputStream in = FileUtils.newInputStream("memFS:compress.h2.db");
        while (true) {
            int len = in.read(buff2);
            if (len < 0) {
                break;
            }
            int b = compress.compress(buff2, pageSize, test, 0);
            byte[] data = Arrays.copyOf(test, b);
            comp.add(data);
        }
        in.close();
        byte[] result = new byte[pageSize];
        long time = System.nanoTime();
        for (int i = 0; i < 1000; i++) {
            for (int k = 0; k < comp.size(); k++) {
                byte[] data = comp.get(k);
                compress.expand(data, 0, data.length, result, 0, pageSize);
            }
        }
        System.out.println("expand: " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - time) + " ms");
    }
}
Also used: CompressLZF (org.h2.compress.CompressLZF), Statement (java.sql.Statement), ByteArrayInputStream (java.io.ByteArrayInputStream), InputStream (java.io.InputStream), Connection (java.sql.Connection), ResultSet (java.sql.ResultSet), Compressor (org.h2.compress.Compressor)
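
For reference, the compress/expand round trip exercised by the benchmark above can be reduced to a few lines. A self-contained sketch using the same Compressor call signatures as in the test (the class name and sample input are illustrative only):

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import org.h2.compress.CompressLZF;
import org.h2.compress.Compressor;

public class LzfRoundTripSketch {
    public static void main(String[] args) {
        Compressor compressor = new CompressLZF();
        byte[] input = "abcabcabcabcabcabcabcabcabcabc".getBytes(StandardCharsets.ISO_8859_1);
        // Size the output buffer generously; LZF output can exceed the input for incompressible data.
        byte[] out = new byte[input.length * 2 + 64];
        // compress(...) returns the end position in the output buffer;
        // with outPos = 0 that equals the compressed length.
        int compLen = compressor.compress(input, input.length, out, 0);
        byte[] compressed = Arrays.copyOf(out, compLen);
        byte[] expanded = new byte[input.length];
        compressor.expand(compressed, 0, compressed.length, expanded, 0, input.length);
        System.out.println(Arrays.equals(input, expanded)); // expected: true
    }
}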

Aggregations

Compressor (org.h2.compress.Compressor): 8
IOException (java.io.IOException): 3
DbException (org.h2.message.DbException): 2
ByteArrayInputStream (java.io.ByteArrayInputStream): 1
InputStream (java.io.InputStream): 1
PrintWriter (java.io.PrintWriter): 1
ByteBuffer (java.nio.ByteBuffer): 1
FileChannel (java.nio.channels.FileChannel): 1
Connection (java.sql.Connection): 1
ResultSet (java.sql.ResultSet): 1
Statement (java.sql.Statement): 1
TreeMap (java.util.TreeMap): 1
CompressLZF (org.h2.compress.CompressLZF): 1