Use of org.datanucleus.ClassLoaderResolver in the datanucleus-rdbms project (by datanucleus):
the class ProxoolConnectionPoolFactory, method createConnectionPool.
/**
 * Creates a Proxool-backed connection pool for the datastore of the supplied StoreManager.
 * Loads the JDBC driver (when configured), asserts that the Proxool and commons-logging jars
 * are present, registers a Proxool pool under a unique alias ("datanucleus{poolNumber}"),
 * and wraps the resulting DataSource.
 *
 * @param storeMgr Store manager supplying connection URL, driver name, and pool properties
 * @return the created ConnectionPool
 * @throws DatastorePoolException if Proxool fails to register the pool
 * @see org.datanucleus.store.rdbms.datasource.ConnectionPoolFactory#createConnectionPool(org.datanucleus.store.StoreManager)
 */
public ConnectionPool createConnectionPool(StoreManager storeMgr) {
    ClassLoaderResolver clr = storeMgr.getNucleusContext().getClassLoaderResolver(null);

    // Load the database driver (may legitimately be absent for JDBC 4+ auto-loading drivers)
    String dbDriver = storeMgr.getConnectionDriverName();
    if (!StringUtils.isWhitespace(dbDriver)) {
        loadDriver(dbDriver, clr);
    }

    // Fail fast with a helpful message if Proxool's required jars are missing
    ClassUtils.assertClassForJarExistsInClasspath(clr, "org.apache.commons.logging.Log", "commons-logging.jar");
    ClassUtils.assertClassForJarExistsInClasspath(clr, "org.logicalcobwebs.proxool.ProxoolDriver", "proxool.jar");

    // Create a Proxool pool with alias "datanucleus{poolNumber}"
    String dbURL = storeMgr.getConnectionURL();
    String alias = "datanucleus" + poolNumber;
    String poolURL = null;
    try {
        // Apply any user-specified driver properties, then DataNucleus pool settings
        Properties dbProps = getPropertiesForDriver(storeMgr);

        // Max connections: configured value when positive, otherwise default to 10
        if (storeMgr.hasProperty(RDBMSPropertyNames.PROPERTY_CONNECTION_POOL_MAX_CONNECTIONS)) {
            int value = storeMgr.getIntProperty(RDBMSPropertyNames.PROPERTY_CONNECTION_POOL_MAX_CONNECTIONS);
            if (value > 0) {
                dbProps.put("proxool.maximum-connection-count", "" + value);
            } else {
                dbProps.put("proxool.maximum-connection-count", "10");
            }
        } else {
            dbProps.put("proxool.maximum-connection-count", "10");
        }

        // House-keeping test SQL: configured value, otherwise "SELECT 1"
        if (storeMgr.hasProperty(RDBMSPropertyNames.PROPERTY_CONNECTION_POOL_TEST_SQL)) {
            String value = storeMgr.getStringProperty(RDBMSPropertyNames.PROPERTY_CONNECTION_POOL_TEST_SQL);
            dbProps.put("proxool.house-keeping-test-sql", value);
        } else {
            dbProps.put("proxool.house-keeping-test-sql", "SELECT 1");
        }

        poolURL = "proxool." + alias + ":" + dbDriver + ":" + dbURL;
        poolNumber++;
        org.logicalcobwebs.proxool.ProxoolFacade.registerConnectionPool(poolURL, dbProps);
    } catch (org.logicalcobwebs.proxool.ProxoolException pe) {
        // Cause is preserved in the thrown exception; no need to also dump the stack trace to stderr
        throw new DatastorePoolException("Proxool", dbDriver, dbURL, pe);
    }

    org.logicalcobwebs.proxool.ProxoolDataSource ds = new org.logicalcobwebs.proxool.ProxoolDataSource(alias);
    return new ProxoolConnectionPool(ds, poolURL);
}
Use of org.datanucleus.ClassLoaderResolver in the datanucleus-rdbms project (by datanucleus):
the class RDBMSPersistenceHandler, method updateObject.
// ------------------------------ Update ----------------------------------
/**
* Updates a persistent object in the database.
* The update can take place in several steps, one update per table that it is stored in (depending on
* which fields are updated).
* e.g When updating an object that uses "new-table" inheritance for each level of the inheritance tree
* then will get an UPDATE into each table. When updating an object that uses "complete-table"
* inheritance then will get a single UPDATE into its table.
* @param op The ObjectProvider of the object to be updated.
* @param fieldNumbers The numbers of the fields to be updated.
* @throws NucleusDataStoreException when an error occurs in the datastore communication
*/
public void updateObject(ObjectProvider op, int[] fieldNumbers) {
// Check if read-only so update not permitted
assertReadOnlyForUpdateOfObject(op);
// Check if we need to do any updates to the schema before updating this object
checkForSchemaUpdatesForFieldsOfObject(op, fieldNumbers);
AbstractMemberMetaData[] mmds = null;
if (fieldNumbers != null && fieldNumbers.length > 0) {
// Convert the field numbers for this class into their metadata for the table
ExecutionContext ec = op.getExecutionContext();
mmds = new AbstractMemberMetaData[fieldNumbers.length];
for (int i = 0; i < mmds.length; i++) {
mmds[i] = op.getClassMetaData().getMetaDataForManagedMemberAtAbsolutePosition(fieldNumbers[i]);
}
if (ec.getStatistics() != null) {
ec.getStatistics().incrementUpdateCount();
}
ClassLoaderResolver clr = ec.getClassLoaderResolver();
DatastoreClass dc = getDatastoreClass(op.getObject().getClass().getName(), clr);
updateObjectInTable(dc, op, clr, mmds);
}
}
Use of org.datanucleus.ClassLoaderResolver in the datanucleus-rdbms project (by datanucleus):
the class RDBMSPersistenceHandler, method deleteObject.
// ------------------------------ Delete ----------------------------------
/**
 * Deletes a persistent object from the database.
 * The delete can take place in several steps, one delete per table that it is stored in:
 * when deleting an object that uses "new-table" inheritance, a DELETE is issued for each
 * table in the inheritance tree; when deleting an object that uses "complete-table"
 * inheritance, a single DELETE is issued for its table.
 * @param op The ObjectProvider of the object to be deleted.
 * @throws NucleusDataStoreException when an error occurs in the datastore communication
 */
public void deleteObject(ObjectProvider op) {
    // Check if read-only so update not permitted
    assertReadOnlyForUpdateOfObject(op);

    ExecutionContext ec = op.getExecutionContext();
    if (ec.getStatistics() != null) {
        ec.getStatistics().incrementDeleteCount();
    }

    // Reuse the ExecutionContext already obtained above rather than fetching it again
    ClassLoaderResolver clr = ec.getClassLoaderResolver();
    DatastoreClass dc = getDatastoreClass(op.getClassMetaData().getFullClassName(), clr);
    deleteObjectFromTable(dc, op, clr);
}
Use of org.datanucleus.ClassLoaderResolver in the hive project (by apache):
the class ObjectStore, method clearOutPmfClassLoaderCache.
// Clears DataNucleus's internal ClassLoaderResolver caches (via reflection on private
// fields) to prevent classloader leaks when the same PMF is reused across sessions.
// All failures are caught and logged at WARN — this is best-effort cleanup only.
private static void clearOutPmfClassLoaderCache(PersistenceManagerFactory pmf) {
// Only the DataNucleus JDO implementation holds the caches we target; anything else is a no-op.
if ((pmf == null) || (!(pmf instanceof JDOPersistenceManagerFactory))) {
return;
}
// NOTE : This is hacky, and this section of code is fragile depending on DN code varnames
// so it's likely to stop working at some time in the future, especially if we upgrade DN
// versions, so we actively need to find a better way to make sure the leak doesn't happen
// instead of just clearing out the cache after every call.
JDOPersistenceManagerFactory jdoPmf = (JDOPersistenceManagerFactory) pmf;
NucleusContext nc = jdoPmf.getNucleusContext();
try {
// 1) Clear the resolver held by each cached PersistenceManager's ExecutionContext.
Field pmCache = pmf.getClass().getDeclaredField("pmCache");
pmCache.setAccessible(true);
Set<JDOPersistenceManager> pmSet = (Set<JDOPersistenceManager>) pmCache.get(pmf);
for (JDOPersistenceManager pm : pmSet) {
org.datanucleus.ExecutionContext ec = pm.getExecutionContext();
if (ec instanceof org.datanucleus.ExecutionContextThreadedImpl) {
ClassLoaderResolver clr = ((org.datanucleus.ExecutionContextThreadedImpl) ec).getClassLoaderResolver();
clearClr(clr);
}
}
// 2) Clear the resolver cached on the plugin registry (private field "registry" -> "clr").
org.datanucleus.plugin.PluginManager pluginManager = jdoPmf.getNucleusContext().getPluginManager();
Field registryField = pluginManager.getClass().getDeclaredField("registry");
registryField.setAccessible(true);
org.datanucleus.plugin.PluginRegistry registry = (org.datanucleus.plugin.PluginRegistry) registryField.get(pluginManager);
if (registry instanceof org.datanucleus.plugin.NonManagedPluginRegistry) {
org.datanucleus.plugin.NonManagedPluginRegistry nRegistry = (org.datanucleus.plugin.NonManagedPluginRegistry) registry;
Field clrField = nRegistry.getClass().getDeclaredField("clr");
clrField.setAccessible(true);
ClassLoaderResolver clr = (ClassLoaderResolver) clrField.get(nRegistry);
clearClr(clr);
}
if (nc instanceof org.datanucleus.PersistenceNucleusContextImpl) {
org.datanucleus.PersistenceNucleusContextImpl pnc = (org.datanucleus.PersistenceNucleusContextImpl) nc;
// 3) Clear the resolver cached inside the TypeManager (private field "clr").
org.datanucleus.store.types.TypeManagerImpl tm = (org.datanucleus.store.types.TypeManagerImpl) pnc.getTypeManager();
Field clrField = tm.getClass().getDeclaredField("clr");
clrField.setAccessible(true);
ClassLoaderResolver clr = (ClassLoaderResolver) clrField.get(tm);
clearClr(clr);
// 4) Clear the resolver held by every backing store in the RDBMS store manager.
Field storeMgrField = pnc.getClass().getDeclaredField("storeMgr");
storeMgrField.setAccessible(true);
org.datanucleus.store.rdbms.RDBMSStoreManager storeMgr = (org.datanucleus.store.rdbms.RDBMSStoreManager) storeMgrField.get(pnc);
Field backingStoreField = storeMgr.getClass().getDeclaredField("backingStoreByMemberName");
backingStoreField.setAccessible(true);
Map<String, Store> backingStoreByMemberName = (Map<String, Store>) backingStoreField.get(storeMgr);
for (Store store : backingStoreByMemberName.values()) {
// Assumes every value is a BaseContainerStore — TODO confirm this holds for the DN version in use
org.datanucleus.store.rdbms.scostore.BaseContainerStore baseStore = (org.datanucleus.store.rdbms.scostore.BaseContainerStore) store;
clrField = org.datanucleus.store.rdbms.scostore.BaseContainerStore.class.getDeclaredField("clr");
clrField.setAccessible(true);
clr = (ClassLoaderResolver) clrField.get(baseStore);
clearClr(clr);
}
}
// 5) Clear and replace the NucleusContext-level resolver map itself.
Field classLoaderResolverMap = AbstractNucleusContext.class.getDeclaredField("classLoaderResolverMap");
classLoaderResolverMap.setAccessible(true);
Map<String, ClassLoaderResolver> loaderMap = (Map<String, ClassLoaderResolver>) classLoaderResolverMap.get(nc);
for (ClassLoaderResolver clr : loaderMap.values()) {
clearClr(clr);
}
classLoaderResolverMap.set(nc, new HashMap<String, ClassLoaderResolver>());
LOG.debug("Removed cached classloaders from DataNucleus NucleusContext");
} catch (Exception e) {
// Best-effort cleanup: any reflection failure (e.g. after a DN upgrade renames a field)
// is logged but never propagated to the caller.
LOG.warn("Failed to remove cached classloaders from DataNucleus NucleusContext", e);
}
}
Use of org.datanucleus.ClassLoaderResolver in the datanucleus-rdbms project (by datanucleus):
the class RDBMSStoreManager, method deleteSchemaForClasses.
/**
 * Deletes the schema (tables/constraints) for the supplied classes.
 * When a "ddlFilename" property is supplied, the DDL is written to that file instead of
 * being executed directly; the file is recreated on each call.
 *
 * @param classNames Names of the classes whose schema is to be deleted (must be non-empty)
 * @param props Optional properties: "ddlFilename", "completeDdl", "autoStartTable"
 * @throws NucleusException if no class names are supplied, or if the delete transaction fails
 */
public void deleteSchemaForClasses(Set<String> classNames, Properties props) {
    if (!classNames.isEmpty()) {
        // Extract the optional control properties
        String ddlFilename = props != null ? props.getProperty("ddlFilename") : null;
        String completeDdlProp = props != null ? props.getProperty("completeDdl") : null;
        boolean completeDdl = completeDdlProp != null && completeDdlProp.equalsIgnoreCase("true");
        String autoStartProp = props != null ? props.getProperty("autoStartTable") : null;
        boolean autoStart = autoStartProp != null && autoStartProp.equalsIgnoreCase("true");

        ClassLoaderResolver clr = nucleusContext.getClassLoaderResolver(null);
        FileWriter ddlFileWriter = null;
        try {
            performingDeleteSchemaForClasses = true;
            if (ddlFilename != null) {
                // Open the DDL file for writing, recreating it from scratch
                File ddlFile = StringUtils.getFileForFilename(ddlFilename);
                if (ddlFile.exists()) {
                    // Delete existing file
                    ddlFile.delete();
                }
                if (ddlFile.getParentFile() != null && !ddlFile.getParentFile().exists()) {
                    // Make sure the directory exists
                    ddlFile.getParentFile().mkdirs();
                }
                ddlFile.createNewFile();
                ddlFileWriter = new FileWriter(ddlFile);

                // Write a header identifying the tool, timestamp, and affected classes
                SimpleDateFormat fmt = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
                ddlFileWriter.write("------------------------------------------------------------------\n");
                ddlFileWriter.write("-- DataNucleus SchemaTool " + "(ran at " + fmt.format(new java.util.Date()) + ")\n");
                ddlFileWriter.write("------------------------------------------------------------------\n");
                ddlFileWriter.write("-- Delete schema required for the following classes:-\n");
                Iterator<String> classNameIter = classNames.iterator();
                while (classNameIter.hasNext()) {
                    ddlFileWriter.write("-- " + classNameIter.next() + "\n");
                }
                ddlFileWriter.write("--\n");
            }

            try {
                if (ddlFileWriter != null) {
                    this.ddlWriter = ddlFileWriter;
                    this.completeDDL = completeDdl;
                    this.writtenDdlStatements = new HashSet();
                }

                // Generate the tables/constraints for these classes (so we know the tables to delete)
                // TODO This will add CREATE to the DDL, need to be able to omit this
                String[] classNameArray = classNames.toArray(new String[classNames.size()]);
                // Add them to mgr first
                manageClasses(clr, classNameArray);

                // Delete the tables of the required classes TODO Why use READ_COMMITTED for delete but SERIALIZABLE for add?
                int isolationLevel = hasProperty(PropertyNames.PROPERTY_SCHEMA_TXN_ISOLATION) ? TransactionUtils.getTransactionIsolationLevelForName(getStringProperty(PropertyNames.PROPERTY_SCHEMA_TXN_ISOLATION)) : Connection.TRANSACTION_READ_COMMITTED;
                DeleteTablesSchemaTransaction deleteTablesTxn = new DeleteTablesSchemaTransaction(this, isolationLevel, storeDataMgr);
                deleteTablesTxn.setWriter(ddlWriter);
                boolean success = true;
                try {
                    deleteTablesTxn.execute(clr);
                } catch (NucleusException ne) {
                    success = false;
                    throw ne;
                } finally {
                    // Only discard cached schema data when the delete transaction succeeded
                    if (success) {
                        clearSchemaData();
                    }
                }

                if (autoStart) {
                    // TODO Delete the SchemaTable auto-starter table
                }
                // TODO Delete sequences and sequenceTables
            } finally {
                // Always restore manager state and close the DDL writer, even on failure
                performingDeleteSchemaForClasses = false;
                if (ddlFileWriter != null) {
                    this.ddlWriter = null;
                    this.completeDDL = false;
                    this.writtenDdlStatements.clear();
                    this.writtenDdlStatements = null;
                    ddlFileWriter.close();
                }
            }
        } catch (IOException ioe) {
            // Problem writing (or closing) the DDL file. Log it rather than swallowing silently;
            // the schema delete itself is unaffected by a DDL-file write failure.
            NucleusLogger.DATASTORE_SCHEMA.error("Error writing DDL file " + ddlFilename, ioe);
        }
    } else {
        String msg = Localiser.msg("014039");
        NucleusLogger.DATASTORE_SCHEMA.error(msg);
        System.out.println(msg);
        throw new NucleusException(msg);
    }
}
Aggregations