Search in sources:

Example 6 with DB

use of org.mapdb.DB in project rskj by rsksmart.

From the class IndexedBlockStoreTest, method test8:

// leveldb + mapdb, multi branch, total re-branch test
// leveldb + mapdb, multi branch, total re-branch test
@Test
public void test8() throws IOException {
    // Random suffix keeps repeated runs from colliding on the same on-disk directory.
    BigInteger bi = new BigInteger(32, new Random());
    String testDir = "test_db_" + bi;
    config.setDataBaseDir(testDir);
    DB indexDB = createMapDB(testDir);
    Map<Long, List<IndexedBlockStore.BlockInfo>> indexMap = createIndexMap(indexDB);
    KeyValueDataSource blocksDB = new LevelDbDataSource(config, "blocks");
    blocksDB.init();
    try {
        IndexedBlockStore indexedBlockStore = new IndexedBlockStore(indexMap, blocksDB, indexDB);
        Block genesis = Genesis.getInstance(config);
        // Main chain: 100 random blocks on top of genesis, saved as the best line.
        List<Block> bestLine = getRandomChain(genesis.getHash().getBytes(), 1, 100);
        indexedBlockStore.saveBlock(genesis, genesis.getCumulativeDifficulty(), true);
        BlockDifficulty td = BlockDifficulty.ZERO;
        for (Block newBlock : bestLine) {
            td = td.add(newBlock.getCumulativeDifficulty());
            indexedBlockStore.saveBlock(newBlock, td, true);
        }
        // Fork: 10 blocks branching off the block at index 60 of the main chain,
        // saved as a side line (mainChain == false).
        byte[] forkParentHash = bestLine.get(60).getHash().getBytes();
        long forkParentNumber = bestLine.get(60).getNumber();
        List<Block> forkLine = getRandomChain(forkParentHash, forkParentNumber + 1, 10);
        for (Block newBlock : forkLine) {
            Block parentBlock = indexedBlockStore.getBlockByHash(newBlock.getParentHash().getBytes());
            td = indexedBlockStore.getTotalDifficultyForHash(parentBlock.getHash().getBytes());
            td = td.add(newBlock.getCumulativeDifficulty());
            indexedBlockStore.saveBlock(newBlock, td, false);
        }
        Block forkBlock = forkLine.get(forkLine.size() - 1);
        assertEquals(100, indexedBlockStore.getBestBlock().getNumber());
        indexedBlockStore.reBranch(forkBlock);
        // Fork parent has number 61 (index 60, chain starts at 1); 61 + 10 fork blocks = 71.
        assertEquals(71, indexedBlockStore.getBestBlock().getNumber());
        // Assert that all fork blocks moved to the main line
        for (Block currBlock : forkLine) {
            Block chainBlock = indexedBlockStore.getChainBlockByNumber(currBlock.getNumber());
            assertEquals(currBlock.getShortHash(), chainBlock.getShortHash());
        }
        // re-branch back to the previous best line and assert that
        // all the blocks really moved
        Block bestBlock = bestLine.get(bestLine.size() - 1);
        indexedBlockStore.reBranch(bestBlock);
        for (Block currBlock : bestLine) {
            Block chainBlock = indexedBlockStore.getChainBlockByNumber(currBlock.getNumber());
            assertEquals(currBlock.getShortHash(), chainBlock.getShortHash());
        }
    } finally {
        blocksDB.close();
        indexDB.close();
        FileUtil.recursiveDelete(testDir);
    }
}
Also used : LevelDbDataSource(org.ethereum.datasource.LevelDbDataSource) BlockDifficulty(co.rsk.core.BlockDifficulty) BigInteger(java.math.BigInteger) KeyValueDataSource(org.ethereum.datasource.KeyValueDataSource) Block(org.ethereum.core.Block) HashMapDB(org.ethereum.datasource.HashMapDB) DB(org.mapdb.DB) Test(org.junit.Test)

Example 7 with DB

use of org.mapdb.DB in project eol-globi-data by jhpoelen.

From the class StudyImporterForCoetzer, method importStudy:

/**
 * Imports the Coetzer dataset from the configured zip archive: extracts the
 * taxon, description (associations), references, and distribution files,
 * loads taxa and references into off-heap maps, then records one interaction
 * per (source taxon, interaction type, target taxon) triple found in the
 * description file.
 *
 * @throws StudyImporterException if no archive URL is configured, an expected
 *         archive entry is missing, the interaction type is unsupported, or
 *         any I/O or node-factory error occurs.
 */
@Override
public void importStudy() throws StudyImporterException {
    if (org.apache.commons.lang.StringUtils.isBlank(getResourceArchiveURI())) {
        throw new StudyImporterException("failed to import [" + getDataset().getNamespace() + "]: no [archiveURL] specified");
    }
    // Off-heap (direct memory) MapDB store; released in the finally block below.
    DB db = DBMaker.newMemoryDirectDB().compressionEnable().transactionDisable().make();
    final HTreeMap<Integer, String> taxonMap = db.createHashMap("taxonMap").make();
    final HTreeMap<Integer, String> refMap = db.createHashMap("refMap").make();
    try {
        InputStream inputStream = DatasetUtil.getNamedResourceStream(getDataset(), "archive");
        ZipInputStream zipInputStream = new ZipInputStream(inputStream);
        ZipEntry entry;
        File taxonTempFile = null;
        File assocTempFile = null;
        File referencesTempFile = null;
        File distributionTempFile = null;
        // Extract the four expected entries to temp files; drain everything else.
        while ((entry = zipInputStream.getNextEntry()) != null) {
            if (entry.getName().matches("(^|(.*/))taxon.txt$")) {
                taxonTempFile = FileUtils.saveToTmpFile(zipInputStream, entry);
            } else if (entry.getName().matches("(^|(.*/))description.txt$")) {
                assocTempFile = FileUtils.saveToTmpFile(zipInputStream, entry);
            } else if (entry.getName().matches("(^|(.*/))references.txt$")) {
                referencesTempFile = FileUtils.saveToTmpFile(zipInputStream, entry);
            } else if (entry.getName().matches("(^|(.*/))distribution.txt$")) {
                distributionTempFile = FileUtils.saveToTmpFile(zipInputStream, entry);
            } else {
                IOUtils.copy(zipInputStream, new NullOutputStream());
            }
        }
        IOUtils.closeQuietly(zipInputStream);
        if (taxonTempFile == null) {
            throw new StudyImporterException("failed to find expected [taxon.txt] resource");
        }
        if (assocTempFile == null) {
            throw new StudyImporterException("failed to find expected [description.txt] resource");
        }
        if (referencesTempFile == null) {
            throw new StudyImporterException("failed to find expected [references.txt] resource");
        }
        if (distributionTempFile == null) {
            throw new StudyImporterException("failed to find expected [distribution.txt] resource");
        }
        // Taxon id -> name.
        BufferedReader assocReader = FileUtils.getUncompressedBufferedReader(new FileInputStream(taxonTempFile), CharsetConstant.UTF8);
        LabeledCSVParser parser = CSVTSVUtil.createLabeledCSVParser(assocReader);
        parser.changeDelimiter('\t');
        String[] line;
        while ((line = parser.getLine()) != null) {
            taxonMap.put(Integer.parseInt(line[0]), nameFor(line));
        }
        // Reference id -> citation text.
        LabeledCSVParser refs = CSVTSVUtil.createLabeledCSVParser(new FileInputStream(referencesTempFile));
        refs.changeDelimiter('\t');
        String[] refsLine;
        while ((refsLine = refs.getLine()) != null) {
            refMap.put(Integer.parseInt(refsLine[0]), refsLine[1]);
        }
        LabeledCSVParser assoc = CSVTSVUtil.createLabeledCSVParser(new FileInputStream(assocTempFile));
        assoc.changeDelimiter('\t');
        // Dataset interaction labels mapped to globi interaction types.
        final Map<String, InteractType> interactTypeMap = new HashMap<String, InteractType>() {

            {
                put("Visits flowers of", InteractType.VISITS_FLOWERS_OF);
                put("Host of", InteractType.VISITS_FLOWERS_OF);
                put("Parasite of", InteractType.PARASITE_OF);
                put("Nests in", InteractType.INTERACTS_WITH);
            }
        };
        String[] assocLine;
        while ((assocLine = assoc.getLine()) != null) {
            final Integer taxonId = Integer.parseInt(assocLine[0]);
            // Column 2 looks like "<interaction>:<name1>,<name2>,..."
            final String[] parts = assocLine[2].split(":");
            if (parts.length > 1) {
                String interactionString = parts[0];
                String[] targetTaxonNames = parts[1].split(",");
                for (String targetTaxonName : targetTaxonNames) {
                    final String reference = refMap.get(taxonId);
                    final String sourceTaxonName = taxonMap.get(taxonId);
                    if (StringUtils.isNotBlank(reference) && StringUtils.isNotBlank(sourceTaxonName)) {
                        final Study study = nodeFactory.getOrCreateStudy(new StudyImpl(getSourceCitation() + reference, getSourceCitationLastAccessed(), null, reference));
                        final Specimen source = nodeFactory.createSpecimen(study, new TaxonImpl(StringUtils.trim(sourceTaxonName), null));
                        final Specimen target = nodeFactory.createSpecimen(study, new TaxonImpl(StringUtils.trim(targetTaxonName), null));
                        final InteractType relType = interactTypeMap.get(interactionString);
                        if (relType == null) {
                            throw new StudyImporterException("found unsupported interaction type [" + interactionString + "]");
                        }
                        source.interactsWith(target, relType);
                    }
                }
            }
        }
    } catch (IOException | NodeFactoryException e) {
        throw new StudyImporterException(e);
    } finally {
        // Previously closed only on the success path, leaking the off-heap
        // store whenever an exception was thrown; always release it.
        db.close();
    }
}
Also used : InteractType(org.eol.globi.domain.InteractType) Study(org.eol.globi.domain.Study) HashMap(java.util.HashMap) ZipInputStream(java.util.zip.ZipInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) ZipEntry(java.util.zip.ZipEntry) TaxonImpl(org.eol.globi.domain.TaxonImpl) StudyImpl(org.eol.globi.domain.StudyImpl) LabeledCSVParser(com.Ostermiller.util.LabeledCSVParser) IOException(java.io.IOException) FileInputStream(java.io.FileInputStream) Specimen(org.eol.globi.domain.Specimen) ZipInputStream(java.util.zip.ZipInputStream) BufferedReader(java.io.BufferedReader) File(java.io.File) DB(org.mapdb.DB) NullOutputStream(org.apache.commons.io.output.NullOutputStream)

Example 8 with DB

use of org.mapdb.DB in project mapdb by jankotek.

From the class Issue418Test, method test:

@Test
public void test() {
    // Reuse one backing file across two runs: the first run creates the
    // size-bounded hash map, the second reopens it from disk.
    final File tmp = TT.tempFile();
    for (int run = 0; run < 2; run++) {
        final DB db = DBMaker.fileDB(tmp).make();
        final DBConcurrentMap map = db.hashMap("foo").expireMaxSize(100).createOrOpen();
        for (int i = 0; i < TT.testScale() * 10000; i++) {
            map.put("foo" + i, "bar" + i);
        }
        db.commit();
        db.close();
    }
}
Also used : DBConcurrentMap(org.mapdb.DBConcurrentMap) File(java.io.File) DB(org.mapdb.DB) Test(org.junit.Test)

Example 9 with DB

use of org.mapdb.DB in project mapdb by jankotek.

From the class IssueFromDatumbox, method main:

@Test
public void main() throws IOException {
    // Pick one of the following lines to get a different error
    // fails every time - throws java.lang.NullPointerException
    String f = TT.tempFile().getPath();
    // File f = File.createTempFile("mapdb","db"); //fails every time - throws java.io.EOFException exception
    // String f = "/tmp/constantName"; //fails only in the first execution but NOT in any subsequent execution - throws java.lang.NullPointerException
    // Object under test carries both a primitive and a Class reference.
    SomeObject x = new SomeObject();
    x.someValue = 10;
    x.someClass = SomeOtherClass.class;
    // Write the object through an atomic variable, then close the store.
    DB db = DBMaker.fileDB(f).make();
    Atomic.Var<Object> atomicVar = db.atomicVar("test").createOrOpen();
    atomicVar.set(x);
    db.close();
    // Reopen the same file and verify the object round-trips intact.
    db = DBMaker.fileDB(f).make();
    atomicVar = db.atomicVar("test").createOrOpen();
    x = (SomeObject) atomicVar.get();
    assertEquals(10, x.someValue);
    assertEquals(SomeOtherClass.class, x.someClass);
    db.close();
}
Also used : Atomic(org.mapdb.Atomic) DB(org.mapdb.DB) Test(org.junit.Test)

Example 10 with DB

use of org.mapdb.DB in project watchdog by TestRoots.

From the class PersisterBase, method createDatabase:

/**
 * Opens (or creates) the MapDB file database backed by {@code file} and marks
 * this persister as open.
 *
 * @param file backing file for the database
 * @return the opened {@link DB}, configured to close on JVM shutdown
 */
private DB createDatabase(final File file) {
    // Temporarily swap the thread's class loader around the MapDB call —
    // presumably so MapDB resolves persisted classes via the plugin's
    // loader rather than the default one; TODO confirm.
    replaceClassLoader();
    DB database = DBMaker.newFileDB(file).closeOnJvmShutdown().make();
    isClosed = false;
    resetOldClassLoader();
    return database;
}
Also used : DB(org.mapdb.DB)

Aggregations

DB (org.mapdb.DB)24 Test (org.junit.Test)9 File (java.io.File)7 KeyValueDataSource (org.ethereum.datasource.KeyValueDataSource)6 LevelDbDataSource (org.ethereum.datasource.LevelDbDataSource)6 BlockDifficulty (co.rsk.core.BlockDifficulty)5 IOException (java.io.IOException)5 BigInteger (java.math.BigInteger)5 Block (org.ethereum.core.Block)5 HashMapDB (org.ethereum.datasource.HashMapDB)5 LabeledCSVParser (com.Ostermiller.util.LabeledCSVParser)3 BufferedReader (java.io.BufferedReader)2 FileInputStream (java.io.FileInputStream)2 InputStream (java.io.InputStream)2 HashMap (java.util.HashMap)2 List (java.util.List)2 Map (java.util.Map)2 ZipEntry (java.util.zip.ZipEntry)2 ZipInputStream (java.util.zip.ZipInputStream)2 NullOutputStream (org.apache.commons.io.output.NullOutputStream)2