Example 6 with ClassLoaderResolver

use of org.datanucleus.ClassLoaderResolver in project datanucleus-rdbms by datanucleus.

the class ProxoolConnectionPoolFactory method createConnectionPool.

/* (non-Javadoc)
 * @see org.datanucleus.store.rdbms.datasource.ConnectionPoolFactory#createConnectionPool(org.datanucleus.store.StoreManager)
 */
public ConnectionPool createConnectionPool(StoreManager storeMgr) {
    ClassLoaderResolver clr = storeMgr.getNucleusContext().getClassLoaderResolver(null);
    // Load the database driver
    String dbDriver = storeMgr.getConnectionDriverName();
    if (!StringUtils.isWhitespace(dbDriver)) {
        loadDriver(dbDriver, clr);
    }
    // Check that commons-logging and proxool are present on the classpath
    ClassUtils.assertClassForJarExistsInClasspath(clr, "org.apache.commons.logging.Log", "commons-logging.jar");
    ClassUtils.assertClassForJarExistsInClasspath(clr, "org.logicalcobwebs.proxool.ProxoolDriver", "proxool.jar");
    // Create a Proxool pool with alias "datanucleus{poolNumber}"
    String dbURL = storeMgr.getConnectionURL();
    String alias = "datanucleus" + poolNumber;
    String poolURL = null;
    try {
        // Apply any properties
        Properties dbProps = getPropertiesForDriver(storeMgr);
        if (storeMgr.hasProperty(RDBMSPropertyNames.PROPERTY_CONNECTION_POOL_MAX_CONNECTIONS)) {
            int value = storeMgr.getIntProperty(RDBMSPropertyNames.PROPERTY_CONNECTION_POOL_MAX_CONNECTIONS);
            if (value > 0) {
                dbProps.put("proxool.maximum-connection-count", "" + value);
            } else {
                dbProps.put("proxool.maximum-connection-count", "10");
            }
        } else {
            dbProps.put("proxool.maximum-connection-count", "10");
        }
        if (storeMgr.hasProperty(RDBMSPropertyNames.PROPERTY_CONNECTION_POOL_TEST_SQL)) {
            String value = storeMgr.getStringProperty(RDBMSPropertyNames.PROPERTY_CONNECTION_POOL_TEST_SQL);
            dbProps.put("proxool.house-keeping-test-sql", value);
        } else {
            dbProps.put("proxool.house-keeping-test-sql", "SELECT 1");
        }
        poolURL = "proxool." + alias + ":" + dbDriver + ":" + dbURL;
        poolNumber++;
        org.logicalcobwebs.proxool.ProxoolFacade.registerConnectionPool(poolURL, dbProps);
    } catch (org.logicalcobwebs.proxool.ProxoolException pe) {
        pe.printStackTrace();
        throw new DatastorePoolException("Proxool", dbDriver, dbURL, pe);
    }
    org.logicalcobwebs.proxool.ProxoolDataSource ds = new org.logicalcobwebs.proxool.ProxoolDataSource(alias);
    return new ProxoolConnectionPool(ds, poolURL);
}
Also used : ClassLoaderResolver(org.datanucleus.ClassLoaderResolver) Properties(java.util.Properties)
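The pool URL built above follows Proxool's "proxool.{alias}:{jdbc-driver}:{jdbc-url}" convention, and the registered alias is what callers later pass to DriverManager. A minimal stand-alone sketch of the same registration flow, assuming an H2 in-memory database on the classpath (the alias, driver class and JDBC URL are placeholders):

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

public class ProxoolPoolSketch {
    public static void main(String[] args) throws Exception {
        // Register the Proxool JDBC driver with DriverManager
        Class.forName("org.logicalcobwebs.proxool.ProxoolDriver");
        // Pool URL format: "proxool.{alias}:{jdbc-driver}:{jdbc-url}"
        String poolURL = "proxool.datanucleus0:org.h2.Driver:jdbc:h2:mem:test";
        Properties dbProps = new Properties();
        dbProps.put("proxool.maximum-connection-count", "10");
        dbProps.put("proxool.house-keeping-test-sql", "SELECT 1");
        org.logicalcobwebs.proxool.ProxoolFacade.registerConnectionPool(poolURL, dbProps);
        // Connections are then handed out under the registered alias
        try (Connection conn = DriverManager.getConnection("proxool.datanucleus0")) {
            System.out.println("Pooled connection: " + conn);
        }
    }
}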

Example 7 with ClassLoaderResolver

use of org.datanucleus.ClassLoaderResolver in project datanucleus-rdbms by datanucleus.

the class RDBMSPersistenceHandler method updateObject.

// ------------------------------ Update ----------------------------------
/**
 * Updates a persistent object in the database.
 * The update can take place in several steps, one update per table that it is stored in (depending on
 * which fields are updated).
 * e.g. When updating an object that uses "new-table" inheritance, with one table per level of the
 * inheritance tree, there will be an UPDATE into each table. When updating an object that uses
 * "complete-table" inheritance, there will be a single UPDATE into its table.
 * @param op The ObjectProvider of the object to be updated.
 * @param fieldNumbers The numbers of the fields to be updated.
 * @throws NucleusDataStoreException when an error occurs in the datastore communication
 */
public void updateObject(ObjectProvider op, int[] fieldNumbers) {
    // Check if read-only so update not permitted
    assertReadOnlyForUpdateOfObject(op);
    // Check if we need to do any updates to the schema before updating this object
    checkForSchemaUpdatesForFieldsOfObject(op, fieldNumbers);
    AbstractMemberMetaData[] mmds = null;
    if (fieldNumbers != null && fieldNumbers.length > 0) {
        // Convert the field numbers for this class into their metadata for the table
        ExecutionContext ec = op.getExecutionContext();
        mmds = new AbstractMemberMetaData[fieldNumbers.length];
        for (int i = 0; i < mmds.length; i++) {
            mmds[i] = op.getClassMetaData().getMetaDataForManagedMemberAtAbsolutePosition(fieldNumbers[i]);
        }
        if (ec.getStatistics() != null) {
            ec.getStatistics().incrementUpdateCount();
        }
        ClassLoaderResolver clr = ec.getClassLoaderResolver();
        DatastoreClass dc = getDatastoreClass(op.getObject().getClass().getName(), clr);
        updateObjectInTable(dc, op, clr, mmds);
    }
}
Also used : ExecutionContext(org.datanucleus.ExecutionContext) ClassLoaderResolver(org.datanucleus.ClassLoaderResolver) SecondaryDatastoreClass(org.datanucleus.store.rdbms.table.SecondaryDatastoreClass) DatastoreClass(org.datanucleus.store.rdbms.table.DatastoreClass) AbstractMemberMetaData(org.datanucleus.metadata.AbstractMemberMetaData)
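The field numbers passed to updateObject are absolute member positions within the class metadata, which is why the loop above can translate them back with getMetaDataForManagedMemberAtAbsolutePosition. As a sketch of the reverse direction, a caller holding field names could derive the numbers like this (the helper itself is hypothetical; getAbsolutePositionOfMember is the metadata lookup assumed here):

// Hypothetical helper: map field names to the absolute field numbers
// that updateObject(...) expects.
static int[] fieldNumbersFor(org.datanucleus.metadata.AbstractClassMetaData cmd, String... fieldNames) {
    int[] fieldNumbers = new int[fieldNames.length];
    for (int i = 0; i < fieldNames.length; i++) {
        fieldNumbers[i] = cmd.getAbsolutePositionOfMember(fieldNames[i]);
    }
    return fieldNumbers;
}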

Example 8 with ClassLoaderResolver

use of org.datanucleus.ClassLoaderResolver in project datanucleus-rdbms by datanucleus.

the class RDBMSPersistenceHandler method deleteObject.

// ------------------------------ Delete ----------------------------------
/**
 * Deletes a persistent object from the database.
 * The delete can take place in several steps, one delete per table that it is stored in.
 * e.g. When deleting an object that uses "new-table" inheritance, with one table per level of the
 * inheritance tree, there will be a DELETE from each table. When deleting an object that uses
 * "complete-table" inheritance, there will be a single DELETE from its table.
 * @param op The ObjectProvider of the object to be deleted.
 * @throws NucleusDataStoreException when an error occurs in the datastore communication
 */
public void deleteObject(ObjectProvider op) {
    // Check if read-only so delete not permitted
    assertReadOnlyForUpdateOfObject(op);
    ExecutionContext ec = op.getExecutionContext();
    if (ec.getStatistics() != null) {
        ec.getStatistics().incrementDeleteCount();
    }
    ClassLoaderResolver clr = ec.getClassLoaderResolver();
    DatastoreClass dc = getDatastoreClass(op.getClassMetaData().getFullClassName(), clr);
    deleteObjectFromTable(dc, op, clr);
}
Also used : ExecutionContext(org.datanucleus.ExecutionContext) ClassLoaderResolver(org.datanucleus.ClassLoaderResolver) SecondaryDatastoreClass(org.datanucleus.store.rdbms.table.SecondaryDatastoreClass) DatastoreClass(org.datanucleus.store.rdbms.table.DatastoreClass)
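As the javadoc notes, the number of DELETE statements depends on the inheritance strategy of the class. A small illustration with hypothetical classes (the table and column names in the comments are indicative, not verbatim output):

import javax.jdo.annotations.Inheritance;
import javax.jdo.annotations.InheritanceStrategy;
import javax.jdo.annotations.PersistenceCapable;

@PersistenceCapable
@Inheritance(strategy = InheritanceStrategy.NEW_TABLE)
class Payment {
    long amount;
}

@PersistenceCapable
@Inheritance(strategy = InheritanceStrategy.NEW_TABLE)
class CardPayment extends Payment {
    String cardNumber;
}

// Deleting a CardPayment under "new-table" inheritance touches both tables, e.g.
//     DELETE FROM CARDPAYMENT WHERE CARDPAYMENT_ID = ?
//     DELETE FROM PAYMENT WHERE PAYMENT_ID = ?
// whereas "complete-table" inheritance stores all fields in one table, so a
// single DELETE suffices.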

Example 9 with ClassLoaderResolver

use of org.datanucleus.ClassLoaderResolver in project hive by apache.

the class ObjectStore method clearOutPmfClassLoaderCache.

private static void clearOutPmfClassLoaderCache(PersistenceManagerFactory pmf) {
    if ((pmf == null) || (!(pmf instanceof JDOPersistenceManagerFactory))) {
        return;
    }
    // NOTE : This is hacky, and this section of code is fragile depending on DN code varnames
    // so it's likely to stop working at some time in the future, especially if we upgrade DN
    // versions, so we actively need to find a better way to make sure the leak doesn't happen
    // instead of just clearing out the cache after every call.
    JDOPersistenceManagerFactory jdoPmf = (JDOPersistenceManagerFactory) pmf;
    NucleusContext nc = jdoPmf.getNucleusContext();
    try {
        Field pmCache = pmf.getClass().getDeclaredField("pmCache");
        pmCache.setAccessible(true);
        Set<JDOPersistenceManager> pmSet = (Set<JDOPersistenceManager>) pmCache.get(pmf);
        for (JDOPersistenceManager pm : pmSet) {
            org.datanucleus.ExecutionContext ec = pm.getExecutionContext();
            if (ec instanceof org.datanucleus.ExecutionContextThreadedImpl) {
                ClassLoaderResolver clr = ((org.datanucleus.ExecutionContextThreadedImpl) ec).getClassLoaderResolver();
                clearClr(clr);
            }
        }
        org.datanucleus.plugin.PluginManager pluginManager = jdoPmf.getNucleusContext().getPluginManager();
        Field registryField = pluginManager.getClass().getDeclaredField("registry");
        registryField.setAccessible(true);
        org.datanucleus.plugin.PluginRegistry registry = (org.datanucleus.plugin.PluginRegistry) registryField.get(pluginManager);
        if (registry instanceof org.datanucleus.plugin.NonManagedPluginRegistry) {
            org.datanucleus.plugin.NonManagedPluginRegistry nRegistry = (org.datanucleus.plugin.NonManagedPluginRegistry) registry;
            Field clrField = nRegistry.getClass().getDeclaredField("clr");
            clrField.setAccessible(true);
            ClassLoaderResolver clr = (ClassLoaderResolver) clrField.get(nRegistry);
            clearClr(clr);
        }
        if (nc instanceof org.datanucleus.PersistenceNucleusContextImpl) {
            org.datanucleus.PersistenceNucleusContextImpl pnc = (org.datanucleus.PersistenceNucleusContextImpl) nc;
            org.datanucleus.store.types.TypeManagerImpl tm = (org.datanucleus.store.types.TypeManagerImpl) pnc.getTypeManager();
            Field clrField = tm.getClass().getDeclaredField("clr");
            clrField.setAccessible(true);
            ClassLoaderResolver clr = (ClassLoaderResolver) clrField.get(tm);
            clearClr(clr);
            Field storeMgrField = pnc.getClass().getDeclaredField("storeMgr");
            storeMgrField.setAccessible(true);
            org.datanucleus.store.rdbms.RDBMSStoreManager storeMgr = (org.datanucleus.store.rdbms.RDBMSStoreManager) storeMgrField.get(pnc);
            Field backingStoreField = storeMgr.getClass().getDeclaredField("backingStoreByMemberName");
            backingStoreField.setAccessible(true);
            Map<String, Store> backingStoreByMemberName = (Map<String, Store>) backingStoreField.get(storeMgr);
            for (Store store : backingStoreByMemberName.values()) {
                org.datanucleus.store.rdbms.scostore.BaseContainerStore baseStore = (org.datanucleus.store.rdbms.scostore.BaseContainerStore) store;
                clrField = org.datanucleus.store.rdbms.scostore.BaseContainerStore.class.getDeclaredField("clr");
                clrField.setAccessible(true);
                clr = (ClassLoaderResolver) clrField.get(baseStore);
                clearClr(clr);
            }
        }
        Field classLoaderResolverMap = AbstractNucleusContext.class.getDeclaredField("classLoaderResolverMap");
        classLoaderResolverMap.setAccessible(true);
        Map<String, ClassLoaderResolver> loaderMap = (Map<String, ClassLoaderResolver>) classLoaderResolverMap.get(nc);
        for (ClassLoaderResolver clr : loaderMap.values()) {
            clearClr(clr);
        }
        classLoaderResolverMap.set(nc, new HashMap<String, ClassLoaderResolver>());
        LOG.debug("Removed cached classloaders from DataNucleus NucleusContext");
    } catch (Exception e) {
        LOG.warn("Failed to remove cached classloaders from DataNucleus NucleusContext", e);
    }
}
Also used : HashSet(java.util.HashSet) Set(java.util.Set) PrincipalPrivilegeSet(org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet) NucleusContext(org.datanucleus.NucleusContext) AbstractNucleusContext(org.datanucleus.AbstractNucleusContext) Store(org.datanucleus.store.scostore.Store) JDOPersistenceManager(org.datanucleus.api.jdo.JDOPersistenceManager) Field(java.lang.reflect.Field) JDOPersistenceManagerFactory(org.datanucleus.api.jdo.JDOPersistenceManagerFactory) ClassLoaderResolver(org.datanucleus.ClassLoaderResolver) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) InvalidInputException(org.apache.hadoop.hive.metastore.api.InvalidInputException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) SQLIntegrityConstraintViolationException(java.sql.SQLIntegrityConstraintViolationException) IOException(java.io.IOException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) JDOCanRetryException(javax.jdo.JDOCanRetryException) InvalidPartitionException(org.apache.hadoop.hive.metastore.api.InvalidPartitionException) UnknownPartitionException(org.apache.hadoop.hive.metastore.api.UnknownPartitionException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) JDOException(javax.jdo.JDOException) MissingTableException(org.datanucleus.store.rdbms.exceptions.MissingTableException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) JDODataStoreException(javax.jdo.JDODataStoreException) JDOObjectNotFoundException(javax.jdo.JDOObjectNotFoundException) UnknownTableException(org.apache.hadoop.hive.metastore.api.UnknownTableException) Map(java.util.Map) WeakValueMap(org.datanucleus.util.WeakValueMap) MRoleMap(org.apache.hadoop.hive.metastore.model.MRoleMap) HashMap(java.util.HashMap)
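clearClr(...) is not shown in this excerpt. Based on the caches that ClassLoaderResolverImpl is known to keep in the DataNucleus versions Hive targets here, a plausible sketch is the following; the field names are assumptions and are looked up reflectively for exactly the reason the comment above calls this approach fragile:

private static void clearClr(ClassLoaderResolver clr) throws Exception {
    if (clr instanceof org.datanucleus.ClassLoaderResolverImpl) {
        // Assumed cache fields on ClassLoaderResolverImpl; the names may
        // change between DataNucleus versions.
        for (String cacheName : new String[] { "resources", "loadedClasses", "unloadedClasses" }) {
            java.lang.reflect.Field cacheField = org.datanucleus.ClassLoaderResolverImpl.class.getDeclaredField(cacheName);
            cacheField.setAccessible(true);
            Object cache = cacheField.get(clr);
            if (cache instanceof java.util.Map) {
                ((java.util.Map<?, ?>) cache).clear();
            }
        }
    }
}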

Example 10 with ClassLoaderResolver

use of org.datanucleus.ClassLoaderResolver in project datanucleus-rdbms by datanucleus.

the class RDBMSStoreManager method deleteSchemaForClasses.

public void deleteSchemaForClasses(Set<String> classNames, Properties props) {
    if (!classNames.isEmpty()) {
        // Delete the tables
        String ddlFilename = props != null ? props.getProperty("ddlFilename") : null;
        String completeDdlProp = props != null ? props.getProperty("completeDdl") : null;
        boolean completeDdl = completeDdlProp != null && completeDdlProp.equalsIgnoreCase("true");
        String autoStartProp = props != null ? props.getProperty("autoStartTable") : null;
        boolean autoStart = autoStartProp != null && autoStartProp.equalsIgnoreCase("true");
        ClassLoaderResolver clr = nucleusContext.getClassLoaderResolver(null);
        FileWriter ddlFileWriter = null;
        try {
            performingDeleteSchemaForClasses = true;
            if (ddlFilename != null) {
                // Open the DDL file for writing
                File ddlFile = StringUtils.getFileForFilename(ddlFilename);
                if (ddlFile.exists()) {
                    // Delete existing file
                    ddlFile.delete();
                }
                if (ddlFile.getParentFile() != null && !ddlFile.getParentFile().exists()) {
                    // Make sure the directory exists
                    ddlFile.getParentFile().mkdirs();
                }
                ddlFile.createNewFile();
                ddlFileWriter = new FileWriter(ddlFile);
                SimpleDateFormat fmt = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
                ddlFileWriter.write("------------------------------------------------------------------\n");
                ddlFileWriter.write("-- DataNucleus SchemaTool " + "(ran at " + fmt.format(new java.util.Date()) + ")\n");
                ddlFileWriter.write("------------------------------------------------------------------\n");
                ddlFileWriter.write("-- Delete schema required for the following classes:-\n");
                Iterator<String> classNameIter = classNames.iterator();
                while (classNameIter.hasNext()) {
                    ddlFileWriter.write("--     " + classNameIter.next() + "\n");
                }
                ddlFileWriter.write("--\n");
            }
            try {
                if (ddlFileWriter != null) {
                    this.ddlWriter = ddlFileWriter;
                    this.completeDDL = completeDdl;
                    this.writtenDdlStatements = new HashSet();
                }
                // Generate the tables/constraints for these classes (so we know the tables to delete)
                // TODO This will add CREATE to the DDL, need to be able to omit this
                String[] classNameArray = classNames.toArray(new String[classNames.size()]);
                // Add them to mgr first
                manageClasses(clr, classNameArray);
                // Delete the tables of the required classes TODO Why use READ_COMMITTED for delete but SERIALIZABLE for add?
                int isolationLevel = hasProperty(PropertyNames.PROPERTY_SCHEMA_TXN_ISOLATION) ? TransactionUtils.getTransactionIsolationLevelForName(getStringProperty(PropertyNames.PROPERTY_SCHEMA_TXN_ISOLATION)) : Connection.TRANSACTION_READ_COMMITTED;
                DeleteTablesSchemaTransaction deleteTablesTxn = new DeleteTablesSchemaTransaction(this, isolationLevel, storeDataMgr);
                deleteTablesTxn.setWriter(ddlWriter);
                boolean success = true;
                try {
                    deleteTablesTxn.execute(clr);
                } catch (NucleusException ne) {
                    success = false;
                    throw ne;
                } finally {
                    if (success) {
                        clearSchemaData();
                    }
                }
                if (autoStart) {
                // TODO Delete the SchemaTable auto-starter table
                }
            // TODO Delete sequences and sequenceTables
            } finally {
                performingDeleteSchemaForClasses = false;
                if (ddlFileWriter != null) {
                    this.ddlWriter = null;
                    this.completeDDL = false;
                    this.writtenDdlStatements.clear();
                    this.writtenDdlStatements = null;
                    ddlFileWriter.close();
                }
            }
        } catch (IOException ioe) {
        // Error in writing DDL file
        // TODO Handle this
        }
    } else {
        String msg = Localiser.msg("014039");
        NucleusLogger.DATASTORE_SCHEMA.error(msg);
        System.out.println(msg);
        throw new NucleusException(msg);
    }
}
Also used : FileWriter(java.io.FileWriter) ClassLoaderResolver(org.datanucleus.ClassLoaderResolver) MacroString(org.datanucleus.util.MacroString) IOException(java.io.IOException) Date(java.util.Date) ListIterator(java.util.ListIterator) Iterator(java.util.Iterator) NucleusException(org.datanucleus.exceptions.NucleusException) File(java.io.File) SimpleDateFormat(java.text.SimpleDateFormat) HashSet(java.util.HashSet)
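A minimal caller sketch for the method above, assuming storeMgr is an already-initialised RDBMSStoreManager and the class name is a placeholder; the property keys mirror the ones the method reads:

java.util.Set<String> classNames = new java.util.HashSet<>();
classNames.add("mydomain.model.Person"); // hypothetical persistable class

java.util.Properties props = new java.util.Properties();
// Capture the generated DDL in a file ("ddlFilename" is the key read above)
props.setProperty("ddlFilename", "target/delete-schema.ddl");
props.setProperty("completeDdl", "true");

storeMgr.deleteSchemaForClasses(classNames, props);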

Aggregations

ClassLoaderResolver (org.datanucleus.ClassLoaderResolver)242 AbstractMemberMetaData (org.datanucleus.metadata.AbstractMemberMetaData)94 MetaDataManager (org.datanucleus.metadata.MetaDataManager)72 NucleusContext (org.datanucleus.NucleusContext)68 AbstractClassMetaData (org.datanucleus.metadata.AbstractClassMetaData)65 DatastoreClass (org.datanucleus.store.rdbms.table.DatastoreClass)65 PersistenceNucleusContextImpl (org.datanucleus.PersistenceNucleusContextImpl)56 JavaTypeMapping (org.datanucleus.store.rdbms.mapping.java.JavaTypeMapping)56 ClassMetaData (org.datanucleus.metadata.ClassMetaData)54 JPAMetaDataManager (org.datanucleus.api.jpa.metadata.JPAMetaDataManager)51 RDBMSStoreManager (org.datanucleus.store.rdbms.RDBMSStoreManager)44 NucleusException (org.datanucleus.exceptions.NucleusException)42 PersistenceUnitMetaData (org.datanucleus.metadata.PersistenceUnitMetaData)40 SQLExpressionFactory (org.datanucleus.store.rdbms.sql.expression.SQLExpressionFactory)40 SQLExpression (org.datanucleus.store.rdbms.sql.expression.SQLExpression)39 NucleusUserException (org.datanucleus.exceptions.NucleusUserException)37 ArrayList (java.util.ArrayList)36 ExecutionContext (org.datanucleus.ExecutionContext)32 SelectStatement (org.datanucleus.store.rdbms.sql.SelectStatement)30 HashMap (java.util.HashMap)28