Search in sources :

Example 1 with Store

Use of org.datanucleus.store.scostore.Store in the Apache Hive project.

The following shows the method clearOutPmfClassLoaderCache from the class ObjectStore.

/**
 * Best-effort scrub of {@code ClassLoaderResolver} instances cached inside a DataNucleus
 * {@code PersistenceManagerFactory}, to prevent classloader leaks between calls.
 * Silently returns when {@code pmf} is null or not a {@code JDOPersistenceManagerFactory}.
 * Any reflection failure is logged at WARN and swallowed — this cleanup is best-effort
 * by design.
 *
 * @param pmf the factory whose cached classloader resolvers should be cleared
 */
private static void clearOutPmfClassLoaderCache(PersistenceManagerFactory pmf) {
    if ((pmf == null) || (!(pmf instanceof JDOPersistenceManagerFactory))) {
        return;
    }
    // NOTE : This is hacky, and this section of code is fragile depending on DN code varnames
    // so it's likely to stop working at some time in the future, especially if we upgrade DN
    // versions, so we actively need to find a better way to make sure the leak doesn't happen
    // instead of just clearing out the cache after every call.
    JDOPersistenceManagerFactory jdoPmf = (JDOPersistenceManagerFactory) pmf;
    NucleusContext nc = jdoPmf.getNucleusContext();
    try {
        // 1) Clear the resolver held by each cached PersistenceManager's ExecutionContext.
        Field pmCache = pmf.getClass().getDeclaredField("pmCache");
        pmCache.setAccessible(true);
        @SuppressWarnings("unchecked")
        Set<JDOPersistenceManager> pmSet = (Set<JDOPersistenceManager>) pmCache.get(pmf);
        for (JDOPersistenceManager pm : pmSet) {
            org.datanucleus.ExecutionContext ec = pm.getExecutionContext();
            if (ec instanceof org.datanucleus.ExecutionContextThreadedImpl) {
                ClassLoaderResolver clr = ((org.datanucleus.ExecutionContextThreadedImpl) ec).getClassLoaderResolver();
                clearClr(clr);
            }
        }
        // 2) Clear the resolver cached by the (non-managed) plugin registry.
        // Reuse 'nc' rather than re-fetching the context from the factory.
        org.datanucleus.plugin.PluginManager pluginManager = nc.getPluginManager();
        Field registryField = pluginManager.getClass().getDeclaredField("registry");
        registryField.setAccessible(true);
        org.datanucleus.plugin.PluginRegistry registry = (org.datanucleus.plugin.PluginRegistry) registryField.get(pluginManager);
        if (registry instanceof org.datanucleus.plugin.NonManagedPluginRegistry) {
            org.datanucleus.plugin.NonManagedPluginRegistry nRegistry = (org.datanucleus.plugin.NonManagedPluginRegistry) registry;
            Field clrField = nRegistry.getClass().getDeclaredField("clr");
            clrField.setAccessible(true);
            ClassLoaderResolver clr = (ClassLoaderResolver) clrField.get(nRegistry);
            clearClr(clr);
        }
        // 3) Clear resolvers cached by the type manager and the RDBMS backing stores.
        if (nc instanceof org.datanucleus.PersistenceNucleusContextImpl) {
            org.datanucleus.PersistenceNucleusContextImpl pnc = (org.datanucleus.PersistenceNucleusContextImpl) nc;
            org.datanucleus.store.types.TypeManagerImpl tm = (org.datanucleus.store.types.TypeManagerImpl) pnc.getTypeManager();
            Field clrField = tm.getClass().getDeclaredField("clr");
            clrField.setAccessible(true);
            ClassLoaderResolver clr = (ClassLoaderResolver) clrField.get(tm);
            clearClr(clr);
            Field storeMgrField = pnc.getClass().getDeclaredField("storeMgr");
            storeMgrField.setAccessible(true);
            Object storeMgrObj = storeMgrField.get(pnc);
            // Guard the cast: an unexpected store manager type would otherwise throw a
            // ClassCastException into the outer catch and abort the remaining cleanup
            // (including the classLoaderResolverMap purge below).
            if (storeMgrObj instanceof org.datanucleus.store.rdbms.RDBMSStoreManager) {
                org.datanucleus.store.rdbms.RDBMSStoreManager storeMgr = (org.datanucleus.store.rdbms.RDBMSStoreManager) storeMgrObj;
                Field backingStoreField = storeMgr.getClass().getDeclaredField("backingStoreByMemberName");
                backingStoreField.setAccessible(true);
                @SuppressWarnings("unchecked")
                Map<String, Store> backingStoreByMemberName = (Map<String, Store>) backingStoreField.get(storeMgr);
                // The field lookup is loop-invariant; do it once, not per store.
                clrField = org.datanucleus.store.rdbms.scostore.BaseContainerStore.class.getDeclaredField("clr");
                clrField.setAccessible(true);
                for (Store store : backingStoreByMemberName.values()) {
                    // Skip (rather than blow up on) stores that are not BaseContainerStore.
                    if (store instanceof org.datanucleus.store.rdbms.scostore.BaseContainerStore) {
                        clearClr((ClassLoaderResolver) clrField.get(store));
                    }
                }
            }
        }
        // 4) Clear and reset the context-level resolver map itself.
        Field classLoaderResolverMap = AbstractNucleusContext.class.getDeclaredField("classLoaderResolverMap");
        classLoaderResolverMap.setAccessible(true);
        @SuppressWarnings("unchecked")
        Map<String, ClassLoaderResolver> loaderMap = (Map<String, ClassLoaderResolver>) classLoaderResolverMap.get(nc);
        for (ClassLoaderResolver clr : loaderMap.values()) {
            clearClr(clr);
        }
        classLoaderResolverMap.set(nc, new HashMap<String, ClassLoaderResolver>());
        LOG.debug("Removed cached classloaders from DataNucleus NucleusContext");
    } catch (Exception e) {
        // Best-effort: never let cleanup failures propagate to the caller.
        LOG.warn("Failed to remove cached classloaders from DataNucleus NucleusContext", e);
    }
}
Also used : HashSet(java.util.HashSet) Set(java.util.Set) PrincipalPrivilegeSet(org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet) NucleusContext(org.datanucleus.NucleusContext) AbstractNucleusContext(org.datanucleus.AbstractNucleusContext) Store(org.datanucleus.store.scostore.Store) JDOPersistenceManager(org.datanucleus.api.jdo.JDOPersistenceManager) Field(java.lang.reflect.Field) JDOPersistenceManagerFactory(org.datanucleus.api.jdo.JDOPersistenceManagerFactory) ClassLoaderResolver(org.datanucleus.ClassLoaderResolver) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) InvalidInputException(org.apache.hadoop.hive.metastore.api.InvalidInputException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) SQLIntegrityConstraintViolationException(java.sql.SQLIntegrityConstraintViolationException) IOException(java.io.IOException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) JDOCanRetryException(javax.jdo.JDOCanRetryException) InvalidPartitionException(org.apache.hadoop.hive.metastore.api.InvalidPartitionException) UnknownPartitionException(org.apache.hadoop.hive.metastore.api.UnknownPartitionException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) JDOException(javax.jdo.JDOException) MissingTableException(org.datanucleus.store.rdbms.exceptions.MissingTableException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) JDODataStoreException(javax.jdo.JDODataStoreException) JDOObjectNotFoundException(javax.jdo.JDOObjectNotFoundException) UnknownTableException(org.apache.hadoop.hive.metastore.api.UnknownTableException) Map(java.util.Map) WeakValueMap(org.datanucleus.util.WeakValueMap) MRoleMap(org.apache.hadoop.hive.metastore.model.MRoleMap) 
HashMap(java.util.HashMap)

Aggregations

IOException (java.io.IOException)1 Field (java.lang.reflect.Field)1 SQLException (java.sql.SQLException)1 SQLIntegrityConstraintViolationException (java.sql.SQLIntegrityConstraintViolationException)1 HashMap (java.util.HashMap)1 HashSet (java.util.HashSet)1 Map (java.util.Map)1 Set (java.util.Set)1 JDOCanRetryException (javax.jdo.JDOCanRetryException)1 JDODataStoreException (javax.jdo.JDODataStoreException)1 JDOException (javax.jdo.JDOException)1 JDOObjectNotFoundException (javax.jdo.JDOObjectNotFoundException)1 AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException)1 InvalidInputException (org.apache.hadoop.hive.metastore.api.InvalidInputException)1 InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException)1 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)1 InvalidPartitionException (org.apache.hadoop.hive.metastore.api.InvalidPartitionException)1 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)1 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)1 PrincipalPrivilegeSet (org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet)1