use of com.sleepycat.je.DatabaseConfig in project intermine by intermine.
the class EntrezPublicationsRetriever method execute.
/**
 * Synchronize publications with PubMed using PMIDs.
 * @throws Exception if an error occurs
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public void execute() throws Exception {
    // Needed so that STAX can find its implementation classes
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
    Database db = null;
    Transaction txn = null;
    try {
        if (osAlias == null) {
            throw new BuildException("osAlias attribute is not set");
        }
        if (outputFile == null) {
            throw new BuildException("outputFile attribute is not set");
        }
        // environment is transactional
        EnvironmentConfig envConfig = new EnvironmentConfig();
        envConfig.setTransactional(true);
        envConfig.setAllowCreate(true);
        File tmpDir = new File(System.getProperty("java.io.tmpdir"));
        Environment env = new Environment(tmpDir, envConfig);
        DatabaseConfig dbConfig = new DatabaseConfig();
        dbConfig.setTransactional(true);
        dbConfig.setAllowCreate(true);
        dbConfig.setSortedDuplicates(true);
        db = env.openDatabase(null, "publications_db", dbConfig);
        txn = env.beginTransaction(null, null);
        LOG.info("Starting EntrezPublicationsRetriever");
        // write to cache file
        Writer writer = new FileWriter(outputFile);
        ObjectStore os = ObjectStoreFactory.getObjectStore(osAlias);
        Set<Integer> idsToFetch = new HashSet<Integer>();
        itemFactory = new ItemFactory(os.getModel(), "-1_");
        writer.write(FullRenderer.getHeader() + ENDL);
        // Get publications from the objectstore by IQL
        for (Iterator<Publication> iter = getPublications(os).iterator(); iter.hasNext(); ) {
            String pubMedId = iter.next().getPubMedId();
            Integer pubMedIdInteger;
            try {
                pubMedIdInteger = Integer.valueOf(pubMedId);
            } catch (NumberFormatException e) {
                // not a PubMed id
                continue;
            }
            if (seenPubMeds.contains(pubMedIdInteger)) {
                continue;
            }
            DatabaseEntry key = new DatabaseEntry(pubMedId.getBytes());
            DatabaseEntry data = new DatabaseEntry();
            if (db.get(txn, key, data, null).equals(OperationStatus.SUCCESS)) {
                try {
                    // This publication is already in the BerkeleyDB cache, so write its
                    // item XML straight to the cache file
                    ByteArrayInputStream mapInputStream = new ByteArrayInputStream(data.getData());
                    ObjectInputStream deserializer = new ObjectInputStream(mapInputStream);
                    Map<String, Object> pubMap = (Map) deserializer.readObject();
                    writeItems(writer, mapToItems(itemFactory, pubMap));
                    seenPubMeds.add(pubMedIdInteger);
                } catch (EOFException e) {
                    // ignore and fetch it again
                    System.err.println("found in cache, but ignored due to cache problem: " + pubMedIdInteger);
                }
            } else {
                // publications cached in BerkeleyDB are written to the cache file; the rest
                // are fetched from NCBI
                idsToFetch.add(pubMedIdInteger);
            }
        }
        Iterator<Integer> idIter = idsToFetch.iterator();
        Set<Integer> thisBatch = new HashSet<Integer>();
        while (idIter.hasNext()) {
            Integer pubMedIdInteger = idIter.next();
            thisBatch.add(pubMedIdInteger);
            if (thisBatch.size() == BATCH_SIZE || !idIter.hasNext() && thisBatch.size() > 0) {
                try {
                    // the server may return fewer publications than we ask for, so keep a Map
                    Map<String, Map<String, Object>> fromServerMap = null;
                    for (int i = 0; i < MAX_TRIES; i++) {
                        BufferedReader br = new BufferedReader(getReader(thisBatch));
                        StringBuffer buf = new StringBuffer();
                        String line;
                        while ((line = br.readLine()) != null) {
                            buf.append(line + "\n");
                        }
                        fromServerMap = new HashMap<String, Map<String, Object>>();
                        Throwable throwable = null;
                        try {
                            if (loadFullRecord) {
                                SAXParser.parse(new InputSource(new StringReader(buf.toString())),
                                        new FullRecordHandler(fromServerMap), false);
                            } else {
                                SAXParser.parse(new InputSource(new StringReader(buf.toString())),
                                        new SummaryRecordHandler(fromServerMap), false);
                            }
                        } catch (Throwable e) {
                            LOG.error("Couldn't parse PubMed XML", e);
                            // retry below, or give up after the last attempt
                            throwable = e;
                        }
                        if (throwable != null) {
                            if (i == MAX_TRIES - 1) {
                                // no retries left, so fail rather than use a bad parse
                                throw new RuntimeException("failed to parse: " + buf.toString()
                                        + " - tried " + MAX_TRIES + " times", throwable);
                            }
                            // try again
                            continue;
                        }
                        for (String id : fromServerMap.keySet()) {
                            // write fetched pubs to the cache file
                            writeItems(writer, mapToItems(itemFactory, fromServerMap.get(id)));
                        }
                        // add fetched pubs to BerkeleyDB
                        addToDb(txn, db, fromServerMap);
                        break;
                    }
                    thisBatch.clear();
                } finally {
                    txn.commit();
                    // start a new transaction in case there is an exception while parsing
                    txn = env.beginTransaction(null, null);
                }
            }
        }
        writeItems(writer, authorMap.values());
        writeItems(writer, meshTerms.values());
        writer.write(FullRenderer.getFooter() + ENDL);
        writer.flush();
        writer.close();
    } catch (Throwable e) {
        throw new RuntimeException("failed to get all publications", e);
    } finally {
        if (txn != null) {
            txn.commit();
        }
        if (db != null) {
            db.close();
        }
        Thread.currentThread().setContextClassLoader(cl);
    }
}
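The addToDb helper called above is not shown on this page. As a rough sketch only (the name and parameters come from the call site; the body is an assumption, not intermine's actual implementation), each fetched publication map could be serialized and stored under its PubMed id like this:
// Sketch: store each fetched publication map in the BerkeleyDB cache.
// Assumes imports from com.sleepycat.je, java.io and java.util; error handling is minimal.
private void addToDb(Transaction txn, Database db,
        Map<String, Map<String, Object>> fromServerMap) throws IOException, DatabaseException {
    for (Map.Entry<String, Map<String, Object>> entry : fromServerMap.entrySet()) {
        DatabaseEntry key = new DatabaseEntry(entry.getKey().getBytes());
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        ObjectOutputStream serializer = new ObjectOutputStream(bytes);
        // copy into a HashMap so the stored value is guaranteed to be serializable
        serializer.writeObject(new HashMap<String, Object>(entry.getValue()));
        serializer.close();
        db.put(txn, key, new DatabaseEntry(bytes.toByteArray()));
    }
}
This mirrors the read path earlier in execute(), which deserializes data.getData() with an ObjectInputStream.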
use of com.sleepycat.je.DatabaseConfig in project BIMserver by opensourceBIM.
the class BerkeleyKeyValueStore method createIndexTable.
public boolean createIndexTable(String tableName, DatabaseSession databaseSession, boolean transactional) throws BimserverDatabaseException {
    if (tables.containsKey(tableName)) {
        throw new BimserverDatabaseException("Table " + tableName + " already created");
    }
    DatabaseConfig databaseConfig = new DatabaseConfig();
    databaseConfig.setKeyPrefixing(keyPrefixing);
    databaseConfig.setAllowCreate(true);
    boolean finalTransactional = transactional && useTransactions;
    // if (!transactional) {
    //     databaseConfig.setCacheMode(CacheMode.EVICT_BIN);
    // }
    databaseConfig.setDeferredWrite(!finalTransactional);
    databaseConfig.setTransactional(finalTransactional);
    databaseConfig.setSortedDuplicates(true);
    Database database = environment.openDatabase(null, tableName, databaseConfig);
    if (database == null) {
        return false;
    }
    tables.put(tableName, new TableWrapper(database, finalTransactional));
    return true;
}
use of com.sleepycat.je.DatabaseConfig in project BIMserver by opensourceBIM.
the class BerkeleyKeyValueStore method openIndexTable.
public void openIndexTable(DatabaseSession databaseSession, String tableName, boolean transactional) throws BimserverDatabaseException {
    if (tables.containsKey(tableName)) {
        throw new BimserverDatabaseException("Table " + tableName + " already opened");
    }
    DatabaseConfig databaseConfig = new DatabaseConfig();
    databaseConfig.setKeyPrefixing(keyPrefixing);
    databaseConfig.setAllowCreate(false);
    boolean finalTransactional = transactional && useTransactions;
    // if (!transactional) {
    //     databaseConfig.setCacheMode(CacheMode.EVICT_BIN);
    // }
    databaseConfig.setDeferredWrite(!finalTransactional);
    databaseConfig.setTransactional(finalTransactional);
    databaseConfig.setSortedDuplicates(true);
    Database database = environment.openDatabase(null, tableName, databaseConfig);
    if (database == null) {
        throw new BimserverDatabaseException("Table " + tableName + " not found in database");
    }
    tables.put(tableName, new TableWrapper(database, finalTransactional));
}
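Both BIMserver methods above enable deferred write exactly when the table is not transactional. A minimal standalone sketch of that deferred-write side of the trade-off, assuming a throwaway environment directory (the directory, database name and class name are illustrative, not BIMserver's):
import java.io.File;
import com.sleepycat.je.Database;
import com.sleepycat.je.DatabaseConfig;
import com.sleepycat.je.DatabaseEntry;
import com.sleepycat.je.Environment;
import com.sleepycat.je.EnvironmentConfig;

public class DeferredWriteSketch {
    public static void main(String[] args) throws Exception {
        File home = new File(System.getProperty("java.io.tmpdir"), "je-sketch");
        home.mkdirs();
        EnvironmentConfig envConfig = new EnvironmentConfig();
        envConfig.setAllowCreate(true);
        Environment env = new Environment(home, envConfig);

        // Non-transactional, deferred-write table: writes are buffered in the JE cache
        // and only reach disk on sync() or close().
        DatabaseConfig dbConfig = new DatabaseConfig();
        dbConfig.setAllowCreate(true);
        dbConfig.setDeferredWrite(true);
        Database db = env.openDatabase(null, "index_sketch", dbConfig);

        db.put(null, new DatabaseEntry("key".getBytes()), new DatabaseEntry("value".getBytes()));

        // Flush buffered writes explicitly; close() would also flush them.
        db.sync();
        db.close();
        env.close();
    }
}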
use of com.sleepycat.je.DatabaseConfig in project parliament by SemWebCentral.
the class NumericIndex method initialize.
/**
 * Initialize the environment and configure the databases.
 *
 * @throws DatabaseException if an error occurs
 */
private void initialize() throws DatabaseException {
    File dirFile = new File(dirName);
    dirFile.mkdirs();
    envConfig = new EnvironmentConfig();
    envConfig.setAllowCreate(true);
    envConfig.setLocking(false);
    envConfig.setReadOnly(false);
    envConfig.setTransactional(false);
    environment = new Environment(dirFile, envConfig);
    dbConfig = new DatabaseConfig();
    dbConfig.setAllowCreate(true);
    dbConfig.setDeferredWrite(true);
    dbConfig.setReadOnly(false);
    dbConfig.setTransactional(false);
    // Create secondary database for indexing values
    numbersDbConfig = new SecondaryConfig();
    numbersDbConfig.setAllowCreate(true);
    numbersDbConfig.setSortedDuplicates(true);
    numbersDbConfig.setAllowPopulate(true);
    numbersDbConfig.setKeyCreator(new NumbersKeyCreator(recordFactory.getNumberSize()));
}
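initialize() only builds the configuration objects; the code that opens the databases is not part of this snippet. A plausible sketch of that next step, assuming fields named db and numbersDb and illustrative database names:
// Sketch only: open the primary database and the secondary (value) index with the
// configurations built in initialize(). Field and database names are assumptions.
db = environment.openDatabase(null, "numeric_statements", dbConfig);
numbersDb = environment.openSecondaryDatabase(null, "numeric_values", db, numbersDbConfig);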
use of com.sleepycat.je.DatabaseConfig in project heritrix3 by internetarchive.
the class PrefixFinderTest method testStoredSortedMap.
public void testStoredSortedMap() throws Exception {
    EnvironmentConfig config = new EnvironmentConfig();
    config.setAllowCreate(true);
    config.setCachePercent(5);
    File f = new File(getTmpDir(), "PrefixFinderText");
    FileUtils.deleteQuietly(f);
    org.archive.util.FileUtils.ensureWriteableDirectory(f);
    Environment bdbEnvironment = new Environment(f, config);
    DatabaseConfig dbConfig = new DatabaseConfig();
    dbConfig.setAllowCreate(true);
    dbConfig.setDeferredWrite(true);
    Database db = bdbEnvironment.openDatabase(null, "test", dbConfig);
    StoredSortedMap<String, String> ssm = new StoredSortedMap<String, String>(db, new StringBinding(), new StringBinding(), true);
    testUrlsNoMatch(ssm);
    db.close();
    bdbEnvironment.close();
}
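testUrlsNoMatch is not shown here. The kind of lookup a StoredSortedMap supports for prefix finding is a longest-prefix search over sorted keys; a hedged, illustrative sketch (not heritrix3's actual PrefixFinder implementation):
// Sketch: return the longest key in the sorted map that is a prefix of url, or null.
// Assumes java.util.SortedMap and com.sleepycat.collections.StoredSortedMap are imported.
private static String longestPrefixOf(StoredSortedMap<String, String> ssm, String url) {
    // headMap is exclusive, so append '\0' to include a key equal to url itself
    SortedMap<String, String> candidates = ssm.headMap(url + "\u0000");
    while (!candidates.isEmpty()) {
        String candidate = candidates.lastKey();
        if (url.startsWith(candidate)) {
            return candidate;
        }
        // shrink the view to keys strictly below the rejected candidate and keep looking
        candidates = candidates.headMap(candidate);
    }
    return null;
}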