Use of javax.jdo.PersistenceManagerFactory in project metacat by Netflix.
The class HivePersistenceManagerFactory, method getpersistencemanagerfactory:
private static synchronized PersistenceManagerFactory getpersistencemanagerfactory(final Map props) {
    final String name = String.valueOf(props.get(HiveConfigConstants.JAVAX_JDO_OPTION_NAME));
    PersistenceManagerFactory result = factories.get(name);
    if (result == null) {
        final DataSource dataSource = DataSourceManager.get().get(name);
        final Map<String, Object> properties = Maps.newHashMap();
        properties.put(HiveConfigConstants.DATANUCLEUS_FIXEDDATASTORE,
            props.getOrDefault(HiveConfigConstants.DATANUCLEUS_FIXEDDATASTORE, true));
        properties.put(HiveConfigConstants.DATANUCLEUS_AUTOCREATESCHEMA,
            props.getOrDefault(HiveConfigConstants.DATANUCLEUS_AUTOCREATESCHEMA, false));
        properties.put(HiveConfigConstants.DATANUCLEUS_RDBMS_CHECKEXISTTABLESORVIEWS,
            props.getOrDefault(HiveConfigConstants.DATANUCLEUS_RDBMS_CHECKEXISTTABLESORVIEWS, false));
        properties.put(HiveConfigConstants.DATANUCLEUS_RDBMS_INITIALIZECOULUMNINFO,
            props.getOrDefault(HiveConfigConstants.DATANUCLEUS_RDBMS_INITIALIZECOULUMNINFO, "None"));
        properties.put(HiveConfigConstants.DATANUCLEUS_IDENTIFIERFACTORY, HiveConfigConstants.DATANUCLEUS_DATANUCLEU1);
        properties.put(HiveConfigConstants.DATANUCLEUS_CONNECTIONFACTORY, dataSource);
        properties.put(HiveConfigConstants.DATANUCLEUS_RDBMS_USELEGACYNATIVEVALUESTRATEGY, true);
        properties.put(HiveConfigConstants.DATANUCLEUS_TRANSACTIONISOLATION, HiveConfigConstants.DATANUCLEUS_READCOMMITTED);
        properties.put(HiveConfigConstants.DATANUCLEUS_VALIDATETABLE, false);
        properties.put(HiveConfigConstants.DATANUCLEUS_VALIDATECONSTRAINTS, false);
        properties.put(HiveConfigConstants.DATANUCLEUS_VALIDATECOLUMNS, false);
        properties.put(HiveConfigConstants.DATANUCLEUS_CACHE_LEVEL2, false);
        properties.put(HiveConfigConstants.DATANUCLEUS_CACHE_LEVEL2_TYPE, "none");
        properties.put(HiveConfigConstants.DATANUCLEUS_PERSISTENCYBYREACHATCOMMIT, false);
        properties.put(HiveConfigConstants.DATANUCLEUS_AUTOSTARTMECHANISMMODE, "Checked");
        properties.put(HiveConfigConstants.DATANUCLEUS_DETACHALLONCOMMIT, true);
        properties.put(HiveConfigConstants.DATANUCLEUS_DETACHALLONROLLBACK, true);
        properties.put(HiveConfigConstants.JAVAX_JDO_DATASTORETIMEOUT, props.get(HiveConfigConstants.JAVAX_JDO_DATASTORETIMEOUT));
        properties.put(HiveConfigConstants.JAVAX_JDO_DATASTOREREADTIMEOUT, props.get(HiveConfigConstants.JAVAX_JDO_DATASTOREREADTIMEOUT));
        properties.put(HiveConfigConstants.JAVAX_JDO_DATASTOREWRITETIMEOUT, props.get(HiveConfigConstants.JAVAX_JDO_DATASTOREWRITETIMEOUT));
        result = JDOPersistenceManagerFactory.getPersistenceManagerFactory(properties);
        factories.put(name, result);
    }
    return result;
}
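The method caches one PersistenceManagerFactory per catalog name in the static factories map and hands the pooled DataSource straight to DataNucleus via DATANUCLEUS_CONNECTIONFACTORY. For comparison, the plain-JDO way to bootstrap a factory is JDOHelper.getPersistenceManagerFactory with a properties map. The sketch below is illustrative only; the connection settings and DataNucleus property names shown (which vary across DataNucleus versions) are assumptions, not Metacat's configuration.

import java.util.HashMap;
import java.util.Map;
import javax.jdo.JDOHelper;
import javax.jdo.PersistenceManagerFactory;

public final class PmfBootstrapExample {
    public static PersistenceManagerFactory createPmf() {
        // Hypothetical connection settings; a real deployment would read these from configuration.
        final Map<String, Object> props = new HashMap<>();
        props.put("javax.jdo.PersistenceManagerFactoryClass",
            "org.datanucleus.api.jdo.JDOPersistenceManagerFactory");
        props.put("javax.jdo.option.ConnectionURL", "jdbc:mysql://localhost:3306/hive");
        props.put("javax.jdo.option.ConnectionDriverName", "com.mysql.jdbc.Driver");
        props.put("javax.jdo.option.ConnectionUserName", "hive");
        props.put("javax.jdo.option.ConnectionPassword", "secret");
        // Same idea as above: no schema auto-creation, no level-2 cache
        // (property names differ between DataNucleus versions).
        props.put("datanucleus.autoCreateSchema", "false");
        props.put("datanucleus.cache.level2.type", "none");
        // JDOHelper resolves the JDO implementation from the properties map.
        return JDOHelper.getPersistenceManagerFactory(props);
    }
}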
Use of javax.jdo.PersistenceManagerFactory in project hive by apache.
The class ObjectStore, method unCacheDataNucleusClassLoaders:
/**
 * Removes cached classloaders from DataNucleus.
 * DataNucleus caches classloaders in NucleusContext.
 * In UDFs, this can result in classloaders not getting GCed, resulting in PermGen leaks.
 * This is particularly an issue when using an embedded metastore with HiveServer2,
 * since the current classloader gets modified with each new add jar,
 * becoming the classloader for downstream classes, which DataNucleus ends up using.
 * The NucleusContext cache gets freed up only on calling close on it.
 * We're not closing NucleusContext since it does a bunch of other things which we don't want.
 * We're not clearing the cache HashMap by calling HashMap#clear, to avoid concurrency issues.
 */
public static void unCacheDataNucleusClassLoaders() {
    PersistenceManagerFactory pmf = ObjectStore.getPMF();
    clearOutPmfClassLoaderCache(pmf);
}
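clearOutPmfClassLoaderCache is not shown in this excerpt. As a rough illustration of the technique the Javadoc describes, the sketch below reflectively reaches into the factory's NucleusContext and empties its classloader-resolver map. The field names nucleusContext and classLoaderResolverMap are assumptions about DataNucleus internals and can change between versions, so treat this as a sketch of the approach, not Hive's actual implementation.

import java.lang.reflect.Field;
import java.util.Map;
import javax.jdo.PersistenceManagerFactory;

final class ClassLoaderCacheCleaner {

    /** Best-effort reflective clear of DataNucleus' classloader-resolver cache. */
    static void clearClassLoaderCache(PersistenceManagerFactory pmf) {
        try {
            // "nucleusContext" is an assumed field name on JDOPersistenceManagerFactory.
            Object nucleusContext = read(pmf, "nucleusContext");
            // "classLoaderResolverMap" is an assumed field name on the NucleusContext implementation.
            Map<?, ?> cache = (Map<?, ?>) read(nucleusContext, "classLoaderResolverMap");
            if (cache != null) {
                cache.clear();
            }
        } catch (ReflectiveOperationException e) {
            // DataNucleus internals differ across versions; nothing safe to do here.
        }
    }

    /** Reads a (possibly inherited) private field by walking up the class hierarchy. */
    private static Object read(Object target, String fieldName) throws ReflectiveOperationException {
        for (Class<?> c = target.getClass(); c != null; c = c.getSuperclass()) {
            try {
                Field f = c.getDeclaredField(fieldName);
                f.setAccessible(true);
                return f.get(target);
            } catch (NoSuchFieldException ignored) {
                // Keep looking on the superclass.
            }
        }
        throw new NoSuchFieldException(fieldName);
    }
}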
Use of javax.jdo.PersistenceManagerFactory in project motech by motech.
The class MdsDiskSpaceUsageIT, method testEudeDiskSpaceUsage:
@Test
public void testEudeDiskSpaceUsage() throws IOException, IllegalAccessException, ClassNotFoundException, InstantiationException, SQLException {
    LOGGER.info("Creating entity");
    generator.generateDummyEntities(ENTITIES, FIELDS, LOOKUPS, true);
    EntityDto entityDto = entityService.getEntityByClassName(Constants.Packages.ENTITY.concat(".").concat(generator.getEntityPrefix()).concat("0"));
    LOGGER.info("Creating {} instances for entity", INSTANCES);
    generator.generateDummyInstances(entityDto.getId(), INSTANCES);
    WebApplicationContext context = ServiceRetriever.getWebAppContext(bundleContext, MDS_BUNDLE_SYMBOLIC_NAME);
    LocalPersistenceManagerFactoryBean dataPersistenceManagerFactoryBean =
        (LocalPersistenceManagerFactoryBean) context.getBean(BeanFactory.FACTORY_BEAN_PREFIX + "dataPersistenceManagerFactoryBean");
    LocalPersistenceManagerFactoryBean schemaPersistenceManagerFactoryBean =
        (LocalPersistenceManagerFactoryBean) context.getBean(BeanFactory.FACTORY_BEAN_PREFIX + "persistenceManagerFactoryBean");
    PersistenceManagerFactory dataPersistenceManagerFactory = dataPersistenceManagerFactoryBean.getObject();
    PersistenceManagerFactory schemaPersistenceManagerFactory = schemaPersistenceManagerFactoryBean.getObject();
    JDOConnection dataCon = dataPersistenceManagerFactory.getPersistenceManager().getDataStoreConnection();
    JDOConnection schemaCon = schemaPersistenceManagerFactory.getPersistenceManager().getDataStoreConnection();
    Connection dataNativeCon = (Connection) dataCon.getNativeConnection();
    Connection schemaNativeCon = (Connection) schemaCon.getNativeConnection();
    Statement dataStmt = dataNativeCon.createStatement();
    Statement schemaStmt = schemaNativeCon.createStatement();
    ResultSet dataResultSet = dataStmt.executeQuery(String.format(SQLQUERY, "motechdata"));
    dataResultSet.absolute(1);
    double spaceUsage = dataResultSet.getDouble("MB");
    ResultSet schemaResultSet = schemaStmt.executeQuery(String.format(SQLQUERY, "motechschema"));
    schemaResultSet.absolute(1);
    spaceUsage += schemaResultSet.getDouble("MB");
    LOGGER.info("Disk space usage of Motech Data Services database after creating {} instances is {} MB", INSTANCES, spaceUsage);
    logToFile(spaceUsage);
    Bundle entitiesBundle = OsgiBundleUtils.findBundleBySymbolicName(bundleContext, MDS_ENTITIES_SYMBOLIC_NAME);
    MotechDataService service = generator.getService(entitiesBundle.getBundleContext(), entityDto.getClassName());
    service.deleteAll();
}
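The SQLQUERY constant is defined elsewhere in the test class and is not part of this excerpt. Given that the test formats it with a database name and reads a single column labelled MB, a MySQL query of roughly the following shape would fit; this is a plausible reconstruction, not the actual Motech constant.

// Hypothetical reconstruction of the size query; the real SQLQUERY constant may differ.
// %s is replaced with the database name ("motechdata" or "motechschema") via String.format.
private static final String SQLQUERY =
    "SELECT SUM(data_length + index_length) / 1024 / 1024 AS MB "
        + "FROM information_schema.tables WHERE table_schema = '%s'";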
Use of javax.jdo.PersistenceManagerFactory in project tutorials by eugenp.
The class GuideToJDO, method ListProducts:
@SuppressWarnings({ "rawtypes", "unchecked" })
public void ListProducts() {
    PersistenceManagerFactory pmf = new JDOPersistenceManagerFactory(pumd, null);
    PersistenceManager pm = pmf.getPersistenceManager();
    Transaction tx = pm.currentTransaction();
    try {
        tx.begin();
        Query q = pm.newQuery("SELECT FROM " + Product.class.getName() + " WHERE price > 10");
        List<Product> products = (List<Product>) q.execute();
        Iterator<Product> iter = products.iterator();
        while (iter.hasNext()) {
            Product p = iter.next();
            LOGGER.log(Level.WARNING, "Product name: {0} - Price: {1}", new Object[] { p.name, p.price });
        }
        LOGGER.log(Level.INFO, "--------------------------------------------------------------");
        tx.commit();
    } finally {
        if (tx.isActive()) {
            tx.rollback();
        }
        pm.close();
    }
}
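The single-string JDOQL above can also be written declaratively, with the price threshold passed as a parameter instead of concatenated into the query. A minimal equivalent, assuming the same PersistenceManager and Product class; the parameter name minPrice is illustrative:

// Declarative form of the same query, run inside the same transaction as above.
Query q = pm.newQuery(Product.class);
q.setFilter("price > minPrice");
q.declareParameters("double minPrice");
List<Product> products = (List<Product>) q.execute(10.0);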
Use of javax.jdo.PersistenceManagerFactory in project tutorials by eugenp.
The class GuideToJDO, method listXMLProducts:
@SuppressWarnings({ "rawtypes", "unchecked" })
public void listXMLProducts() {
    PersistenceManagerFactory pmf = new JDOPersistenceManagerFactory(pumdXML, null);
    PersistenceManager pm = pmf.getPersistenceManager();
    Transaction tx = pm.currentTransaction();
    try {
        tx.begin();
        Query q = pm.newQuery("SELECT FROM " + ProductXML.class.getName());
        List<ProductXML> products = (List<ProductXML>) q.execute();
        Iterator<ProductXML> iter = products.iterator();
        while (iter.hasNext()) {
            ProductXML p = iter.next();
            LOGGER.log(Level.WARNING, "Product name: {0} - Price: {1}", new Object[] { p.getName(), p.getPrice() });
            pm.deletePersistent(p);
        }
        LOGGER.log(Level.INFO, "--------------------------------------------------------------");
        tx.commit();
    } finally {
        if (tx.isActive()) {
            tx.rollback();
        }
        pm.close();
    }
}
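Deleting each instance inside the iteration works, but JDO also supports bulk deletion. A shorter variant of the cleanup above could remove all matching instances in one call; Query.deletePersistentAll() returns the number of deleted instances. A minimal sketch, assuming the same PersistenceManager, logger, and open transaction as above:

// Bulk-delete variant of the loop above; removes every persistent ProductXML in one call.
Query q = pm.newQuery(ProductXML.class);
long deleted = q.deletePersistentAll();
LOGGER.log(Level.INFO, "Deleted {0} products", deleted);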