Search in sources:

Example 1 with CompressLZF

Use of org.h2.compress.CompressLZF in the h2database project.

From the class TestCompress, the method testByteBuffer:

/**
 * Round-trip test for the ByteBuffer-based compress/expand API of
 * CompressLZF: several deterministic data patterns are compressed and
 * expanded, and the result must equal the original buffer.
 */
private void testByteBuffer(int len) {
    // Buffers shorter than 4 bytes are not interesting for this test.
    if (len < 4) {
        return;
    }
    // Seeding with len makes every buffer size reproducible.
    Random rnd = new Random(len);
    CompressLZF lzf = new CompressLZF();
    for (int pattern = 0; pattern < 4; pattern++) {
        byte[] input = new byte[len];
        // Pattern 0: all zeros; 1: random bytes; 2: tiny repeating values;
        // 3: slowly increasing values.
        if (pattern == 1) {
            rnd.nextBytes(input);
        } else if (pattern == 2) {
            for (int i = 0; i < len; i++) {
                input[i] = (byte) (i & 10);
            }
        } else if (pattern == 3) {
            for (int i = 0; i < len; i++) {
                input[i] = (byte) (i / 10);
            }
        }
        // With 50% probability, overwrite roughly 1 in 20 positions with a
        // random byte to break up the regular patterns.
        if (rnd.nextInt(2) < 1) {
            for (int i = 0; i < len; i++) {
                if (rnd.nextInt(20) < 1) {
                    input[i] = (byte) rnd.nextInt(255);
                }
            }
        }
        // Compress via the ByteBuffer entry point into a generously sized
        // target, then expand and verify the round-trip is lossless.
        ByteBuffer source = ByteBuffer.wrap(input);
        byte[] compressed = new byte[100 + input.length * 2];
        int compressedLen = lzf.compress(source, 0, compressed, 0);
        ByteBuffer view = ByteBuffer.wrap(compressed, 0, compressedLen);
        byte[] expanded = new byte[input.length];
        CompressLZF.expand(view, ByteBuffer.wrap(expanded));
        assertEquals(input, expanded);
    }
}
Also used : Random(java.util.Random) CompressLZF(org.h2.compress.CompressLZF) ByteBuffer(java.nio.ByteBuffer)

Example 2 with CompressLZF

Use of org.h2.compress.CompressLZF in the h2database project.

From the class Recover, the method dumpPageLogStream:

/**
 * Dump the transaction log of a page-store database in human-readable form
 * to the given writer. Each record is written as a "--" comment line; when
 * the transactionLog flag is set, executable SQL is additionally
 * reconstructed from ADD / REMOVE / TRUNCATE records where possible.
 *
 * @param writer target for the textual dump
 * @param logKey key identifying the log stream (passed to PageInputStream)
 * @param logFirstTrunkPage page id of the first trunk page of the log
 * @param logFirstDataPage page id of the first data page of the log
 * @param pageCount total page count (only used when tracing undo pages)
 * @throws IOException if the log stream cannot be read
 */
private void dumpPageLogStream(PrintWriter writer, int logKey, int logFirstTrunkPage, int logFirstDataPage, long pageCount) throws IOException {
    // Scratch buffer used when decoding rows from the log.
    Data s = Data.create(this, pageSize);
    DataReader in = new DataReader(new PageInputStream(writer, this, store, logKey, logFirstTrunkPage, logFirstDataPage, pageSize));
    writer.println("---- Transaction log ----");
    CompressLZF compress = new CompressLZF();
    // One iteration per log record; the first byte is the record type.
    while (true) {
        int x = in.readByte();
        if (x < 0) {
            // End of the log stream.
            break;
        }
        if (x == PageLog.NOOP) {
        // ignore
        } else if (x == PageLog.UNDO) {
            // UNDO record: the page image before modification.
            int pageId = in.readVarInt();
            int size = in.readVarInt();
            byte[] data = new byte[pageSize];
            if (size == 0) {
                // size 0: a full uncompressed page follows.
                in.readFully(data, pageSize);
            } else if (size == 1) {
            // empty
            } else {
                // Otherwise 'size' bytes of LZF-compressed page data follow.
                byte[] compressBuffer = new byte[size];
                in.readFully(compressBuffer, size);
                try {
                    compress.expand(compressBuffer, 0, size, data, 0, pageSize);
                } catch (ArrayIndexOutOfBoundsException e) {
                    // Corrupt compressed data: report it as an I/O problem.
                    throw DbException.convertToIOException(e);
                }
            }
            String typeName = "";
            // The first byte of the page holds its type; the FLAG_LAST bit
            // is stored alongside the type and stripped before the switch.
            int type = data[0];
            boolean last = (type & Page.FLAG_LAST) != 0;
            type &= ~Page.FLAG_LAST;
            switch(type) {
                case Page.TYPE_EMPTY:
                    typeName = "empty";
                    break;
                case Page.TYPE_DATA_LEAF:
                    typeName = "data leaf " + (last ? "(last)" : "");
                    break;
                case Page.TYPE_DATA_NODE:
                    typeName = "data node " + (last ? "(last)" : "");
                    break;
                case Page.TYPE_DATA_OVERFLOW:
                    typeName = "data overflow " + (last ? "(last)" : "");
                    break;
                case Page.TYPE_BTREE_LEAF:
                    typeName = "b-tree leaf " + (last ? "(last)" : "");
                    break;
                case Page.TYPE_BTREE_NODE:
                    typeName = "b-tree node " + (last ? "(last)" : "");
                    break;
                case Page.TYPE_FREE_LIST:
                    typeName = "free list " + (last ? "(last)" : "");
                    break;
                case Page.TYPE_STREAM_TRUNK:
                    typeName = "log trunk";
                    break;
                case Page.TYPE_STREAM_DATA:
                    typeName = "log data";
                    break;
                default:
                    typeName = "ERROR: unknown type " + type;
                    break;
            }
            writer.println("-- undo page " + pageId + " " + typeName);
            if (trace) {
                // When tracing, also dump the decoded page contents.
                Data d = Data.create(null, data);
                dumpPage(writer, d, pageId, pageCount);
            }
        } else if (x == PageLog.ADD) {
            // ADD record: a row was inserted into a storage (table).
            int sessionId = in.readVarInt();
            setStorage(in.readVarInt());
            Row row = PageLog.readRow(RowFactory.DEFAULT, in, s);
            writer.println("-- session " + sessionId + " table " + storageId + " + " + row.toString());
            if (transactionLog) {
                // Storage 0 appears to be the meta table; column 3 of its
                // rows contains the original DDL statement (TODO confirm).
                if (storageId == 0 && row.getColumnCount() >= 4) {
                    int tableId = (int) row.getKey();
                    String sql = row.getValue(3).getString();
                    String name = extractTableOrViewName(sql);
                    if (row.getValue(2).getInt() == DbObject.TABLE_OR_VIEW) {
                        // Remember id -> name so later records can refer to it.
                        tableMap.put(tableId, name);
                    }
                    writer.println(sql + ";");
                } else {
                    String tableName = tableMap.get(storageId);
                    if (tableName != null) {
                        // Reconstruct an INSERT statement for the row.
                        StatementBuilder buff = new StatementBuilder();
                        buff.append("INSERT INTO ").append(tableName).append(" VALUES(");
                        for (int i = 0; i < row.getColumnCount(); i++) {
                            buff.appendExceptFirst(", ");
                            buff.append(row.getValue(i).getSQL());
                        }
                        buff.append(");");
                        writer.println(buff.toString());
                    }
                }
            }
        } else if (x == PageLog.REMOVE) {
            // REMOVE record: a row was deleted, identified by its key.
            int sessionId = in.readVarInt();
            setStorage(in.readVarInt());
            long key = in.readVarLong();
            writer.println("-- session " + sessionId + " table " + storageId + " - " + key);
            if (transactionLog) {
                if (storageId == 0) {
                    // A delete from the meta table drops the object itself.
                    int tableId = (int) key;
                    String tableName = tableMap.get(tableId);
                    if (tableName != null) {
                        writer.println("DROP TABLE IF EXISTS " + tableName + ";");
                    }
                } else {
                    String tableName = tableMap.get(storageId);
                    if (tableName != null) {
                        String sql = "DELETE FROM " + tableName + " WHERE _ROWID_ = " + key + ";";
                        writer.println(sql);
                    }
                }
            }
        } else if (x == PageLog.TRUNCATE) {
            int sessionId = in.readVarInt();
            setStorage(in.readVarInt());
            writer.println("-- session " + sessionId + " table " + storageId + " truncate");
            if (transactionLog) {
                // NOTE(review): emits the numeric storage id, not the mapped
                // table name, and no trailing ';' - unlike the other
                // generated statements; confirm this is intended.
                writer.println("TRUNCATE TABLE " + storageId);
            }
        } else if (x == PageLog.COMMIT) {
            int sessionId = in.readVarInt();
            writer.println("-- commit " + sessionId);
        } else if (x == PageLog.ROLLBACK) {
            int sessionId = in.readVarInt();
            writer.println("-- rollback " + sessionId);
        } else if (x == PageLog.PREPARE_COMMIT) {
            int sessionId = in.readVarInt();
            String transaction = in.readString();
            writer.println("-- prepare commit " + sessionId + " " + transaction);
        } else if (x == PageLog.NOOP) {
        // nothing to do
        // NOTE(review): unreachable - NOOP is already handled at the top of
        // this if/else chain.
        } else if (x == PageLog.CHECKPOINT) {
            writer.println("-- checkpoint");
        } else if (x == PageLog.FREE_LOG) {
            // FREE_LOG record: a list of 'size' freed page ids.
            int size = in.readVarInt();
            StringBuilder buff = new StringBuilder("-- free");
            for (int i = 0; i < size; i++) {
                buff.append(' ').append(in.readVarInt());
            }
            writer.println(buff);
        } else {
            // Unknown operation: the log is corrupt past this point, stop.
            writer.println("-- ERROR: unknown operation " + x);
            break;
        }
    }
}
Also used : DataReader(org.h2.store.DataReader) CompressLZF(org.h2.compress.CompressLZF) StatementBuilder(org.h2.util.StatementBuilder) Data(org.h2.store.Data) Row(org.h2.result.Row) SimpleRow(org.h2.result.SimpleRow)

Example 3 with CompressLZF

Use of org.h2.compress.CompressLZF in the h2database project.

From the class TestCompress, the method testDatabase:

/**
 * Benchmark CompressLZF compress and expand throughput over the pages of a
 * freshly created in-memory database (copies of the information schema
 * tables provide realistic page content). Timings are printed to stdout.
 *
 * Fix: the per-pass InputStreams and the Connection previously leaked when
 * a read, compress, or DDL call threw; they are now closed reliably.
 *
 * @throws Exception if database setup or reading the database file fails
 */
private void testDatabase() throws Exception {
    deleteDb("memFS:compress");
    // Populate the database so the benchmark runs over realistic pages.
    Connection conn = getConnection("memFS:compress");
    try {
        Statement stat = conn.createStatement();
        ResultSet rs = stat.executeQuery(
                "select table_name from information_schema.tables");
        Statement stat2 = conn.createStatement();
        while (rs.next()) {
            String table = rs.getString(1);
            if (!"COLLATIONS".equals(table)) {
                stat2.execute("create table " + table
                        + " as select * from information_schema." + table);
            }
        }
    } finally {
        // Closing the connection also releases its statements/result sets.
        conn.close();
    }
    Compressor compress = new CompressLZF();
    int pageSize = Constants.DEFAULT_PAGE_SIZE;
    byte[] buff2 = new byte[pageSize];
    byte[] test = new byte[2 * pageSize];
    // Warm-up call so class loading does not skew the first timing round.
    compress.compress(buff2, pageSize, test, 0);
    // Compression throughput: 4 timed rounds of 1000 full-file passes.
    for (int j = 0; j < 4; j++) {
        long time = System.nanoTime();
        for (int i = 0; i < 1000; i++) {
            try (InputStream in = FileUtils.newInputStream("memFS:compress.h2.db")) {
                while (true) {
                    int len = in.read(buff2);
                    if (len < 0) {
                        break;
                    }
                    // Always compress a full page buffer; a short final read
                    // leaves stale bytes behind, which is fine for timing.
                    compress.compress(buff2, pageSize, test, 0);
                }
            }
        }
        System.out.println("compress: " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - time) + " ms");
    }
    // Expansion throughput: compress each page once, then expand the
    // compressed pages 1000 times per timed round.
    for (int j = 0; j < 4; j++) {
        ArrayList<byte[]> comp = New.arrayList();
        try (InputStream in = FileUtils.newInputStream("memFS:compress.h2.db")) {
            while (true) {
                int len = in.read(buff2);
                if (len < 0) {
                    break;
                }
                int b = compress.compress(buff2, pageSize, test, 0);
                comp.add(Arrays.copyOf(test, b));
            }
        }
        byte[] result = new byte[pageSize];
        long time = System.nanoTime();
        for (int i = 0; i < 1000; i++) {
            for (byte[] data : comp) {
                compress.expand(data, 0, data.length, result, 0, pageSize);
            }
        }
        System.out.println("expand: " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - time) + " ms");
    }
}
Also used : CompressLZF(org.h2.compress.CompressLZF) Statement(java.sql.Statement) ByteArrayInputStream(java.io.ByteArrayInputStream) InputStream(java.io.InputStream) Connection(java.sql.Connection) ResultSet(java.sql.ResultSet) Compressor(org.h2.compress.Compressor)

Aggregations

CompressLZF (org.h2.compress.CompressLZF)3 ByteArrayInputStream (java.io.ByteArrayInputStream)1 InputStream (java.io.InputStream)1 ByteBuffer (java.nio.ByteBuffer)1 Connection (java.sql.Connection)1 ResultSet (java.sql.ResultSet)1 Statement (java.sql.Statement)1 Random (java.util.Random)1 Compressor (org.h2.compress.Compressor)1 Row (org.h2.result.Row)1 SimpleRow (org.h2.result.SimpleRow)1 Data (org.h2.store.Data)1 DataReader (org.h2.store.DataReader)1 StatementBuilder (org.h2.util.StatementBuilder)1