Example 6 with JDOConnection

use of javax.jdo.datastore.JDOConnection in project hive by apache.

the class DirectSqlUpdateStat method updatePartitionColumnStatistics.

/**
 * Updates the statistics for the given partitions and also adds the corresponding notification log entries.
 * @return Map of partition key to column stats if successful, null otherwise.
 */
public Map<String, Map<String, String>> updatePartitionColumnStatistics(Map<String, ColumnStatistics> partColStatsMap, Table tbl, long csId, String validWriteIds, long writeId, List<TransactionalMetaStoreEventListener> transactionalListeners) throws MetaException {
    JDOConnection jdoConn = null;
    Connection dbConn = null;
    boolean committed = false;
    try {
        lockInternal();
        jdoConn = pm.getDataStoreConnection();
        dbConn = (Connection) (jdoConn.getNativeConnection());
        setAnsiQuotes(dbConn);
        Map<PartitionInfo, ColumnStatistics> partitionInfoMap = getPartitionInfo(dbConn, tbl.getId(), partColStatsMap);
        Map<String, Map<String, String>> result = updatePartitionParamTable(dbConn, partitionInfoMap, validWriteIds, writeId, TxnUtils.isAcidTable(tbl));
        Map<PartColNameInfo, MPartitionColumnStatistics> insertMap = new HashMap<>();
        Map<PartColNameInfo, MPartitionColumnStatistics> updateMap = new HashMap<>();
        populateInsertUpdateMap(partitionInfoMap, updateMap, insertMap, dbConn);
        LOG.info("Number of stats to insert  " + insertMap.size() + " update " + updateMap.size());
        if (insertMap.size() != 0) {
            insertIntoPartColStatTable(insertMap, csId, dbConn);
        }
        if (updateMap.size() != 0) {
            updatePartColStatTable(updateMap, dbConn);
        }
        if (transactionalListeners != null) {
            UpdatePartitionColumnStatEventBatch eventBatch = new UpdatePartitionColumnStatEventBatch(null);
            for (Map.Entry<String, Map<String, String>> entry : result.entrySet()) {
                Map<String, String> parameters = entry.getValue();
                ColumnStatistics colStats = partColStatsMap.get(entry.getKey());
                List<String> partVals = getPartValsFromName(tbl, colStats.getStatsDesc().getPartName());
                UpdatePartitionColumnStatEvent event = new UpdatePartitionColumnStatEvent(colStats, partVals, parameters, tbl, writeId, null);
                eventBatch.addPartColStatEvent(event);
            }
            MetaStoreListenerNotifier.notifyEventWithDirectSql(transactionalListeners, EventMessage.EventType.UPDATE_PARTITION_COLUMN_STAT_BATCH, eventBatch, dbConn, sqlGenerator);
        }
        dbConn.commit();
        committed = true;
        return result;
    } catch (Exception e) {
        LOG.error("Unable to update Column stats for  " + tbl.getTableName(), e);
        throw new MetaException("Unable to update Column stats for  " + tbl.getTableName() + " due to: " + e.getMessage());
    } finally {
        if (!committed) {
            rollbackDBConn(dbConn);
        }
        closeDbConn(jdoConn);
        unlockInternal();
    }
}
Also used : MPartitionColumnStatistics(org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics) ColumnStatistics(org.apache.hadoop.hive.metastore.api.ColumnStatistics) HashMap(java.util.HashMap) Map(java.util.Map) Connection(java.sql.Connection) JDOConnection(javax.jdo.datastore.JDOConnection) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) SQLException(java.sql.SQLException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) UpdatePartitionColumnStatEvent(org.apache.hadoop.hive.metastore.events.UpdatePartitionColumnStatEvent) UpdatePartitionColumnStatEventBatch(org.apache.hadoop.hive.metastore.events.UpdatePartitionColumnStatEventBatch)
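
All of these examples follow the same borrow/unwrap/close pattern: obtain the JDOConnection from the PersistenceManager, cast its native connection to java.sql.Connection, do the JDBC work, and return the JDOConnection in a finally block so the PersistenceManager can use the datastore again. A minimal, self-contained sketch of that pattern (the table name and SQL are illustrative, not taken from Hive):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import javax.jdo.PersistenceManager;
import javax.jdo.datastore.JDOConnection;

public class NativeConnectionSketch {

    /**
     * Counts the rows of a table through the datastore connection borrowed from the PM.
     * Assumes an RDBMS-backed datastore, where getNativeConnection() returns a java.sql.Connection.
     */
    public long countRows(PersistenceManager pm, String tableName) throws Exception {
        JDOConnection jdoConn = null;
        try {
            // Borrow the underlying datastore connection from the PersistenceManager.
            jdoConn = pm.getDataStoreConnection();
            Connection dbConn = (Connection) jdoConn.getNativeConnection();
            try (Statement stmt = dbConn.createStatement();
                 ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM " + tableName)) {
                return rs.next() ? rs.getLong(1) : 0L;
            }
        } finally {
            // Always hand the connection back, otherwise the PM cannot use the datastore again.
            if (jdoConn != null) {
                jdoConn.close();
            }
        }
    }
}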

Example 7 with JDOConnection

use of javax.jdo.datastore.JDOConnection in project hive by apache.

the class DirectSqlUpdateStat method getNextCSIdForMPartitionColumnStatistics.

/**
 * Gets the next CS id from the MPartitionColumnStatistics sequence and increments it by numStats.
 * @return The CS id before the update.
 */
public long getNextCSIdForMPartitionColumnStatistics(long numStats) throws MetaException {
    Statement statement = null;
    ResultSet rs = null;
    long maxCsId = 0;
    boolean committed = false;
    Connection dbConn = null;
    JDOConnection jdoConn = null;
    try {
        lockInternal();
        jdoConn = pm.getDataStoreConnection();
        dbConn = (Connection) (jdoConn.getNativeConnection());
        setAnsiQuotes(dbConn);
        // This loop iterates at most twice. If there are no records, it first inserts and then selects.
        // We do not use an upsert operation because a SELECT ... FOR UPDATE followed by an UPDATE is required
        // to make sure that the caller gets a reserved range of CS ids not used by any other thread.
        boolean insertDone = false;
        while (maxCsId == 0) {
            String query = "SELECT \"NEXT_VAL\" FROM \"SEQUENCE_TABLE\" WHERE \"SEQUENCE_NAME\"= " + quoteString("org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics") + " FOR UPDATE";
            LOG.debug("Going to execute query " + query);
            statement = dbConn.createStatement();
            rs = statement.executeQuery(query);
            if (rs.next()) {
                maxCsId = rs.getLong(1);
            } else if (insertDone) {
                throw new MetaException("Invalid state of SEQUENCE_TABLE for MPartitionColumnStatistics");
            } else {
                insertDone = true;
                closeStmt(statement);
                statement = dbConn.createStatement();
                query = "INSERT INTO \"SEQUENCE_TABLE\" (\"SEQUENCE_NAME\", \"NEXT_VAL\")  VALUES ( " + quoteString("org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics") + "," + 1 + ")";
                try {
                    statement.executeUpdate(query);
                } catch (SQLException e) {
                    // If the record was already inserted by some other thread, continue to the select.
                    if (dbType.isDuplicateKeyError(e)) {
                        continue;
                    }
                    LOG.error("Unable to insert into SEQUENCE_TABLE for MPartitionColumnStatistics.", e);
                    throw e;
                } finally {
                    closeStmt(statement);
                }
            }
        }
        long nextMaxCsId = maxCsId + numStats + 1;
        closeStmt(statement);
        statement = dbConn.createStatement();
        String query = "UPDATE \"SEQUENCE_TABLE\" SET \"NEXT_VAL\" = " + nextMaxCsId + " WHERE \"SEQUENCE_NAME\" = " + quoteString("org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics");
        statement.executeUpdate(query);
        dbConn.commit();
        committed = true;
        return maxCsId;
    } catch (Exception e) {
        LOG.error("Unable to getNextCSIdForMPartitionColumnStatistics", e);
        throw new MetaException("Unable to getNextCSIdForMPartitionColumnStatistics  " + " due to: " + e.getMessage());
    } finally {
        if (!committed) {
            rollbackDBConn(dbConn);
        }
        close(rs, statement, jdoConn);
        unlockInternal();
    }
}
Also used : SQLException(java.sql.SQLException) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) ResultSet(java.sql.ResultSet) Connection(java.sql.Connection) JDOConnection(javax.jdo.datastore.JDOConnection) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException)
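
The reservation trick above (SELECT ... FOR UPDATE on the sequence row, then an UPDATE of NEXT_VAL, both inside one transaction) generalizes to any table-backed sequence. A stripped-down sketch, assuming a SEQUENCE_TABLE(SEQUENCE_NAME, NEXT_VAL) row already exists for the sequence and that auto-commit is disabled on the supplied connection:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class SequenceReservationSketch {

    /**
     * Reserves a contiguous range of ids from a table-backed sequence.
     * @return The first id of the reserved range; ids first .. first + count - 1 belong to the caller.
     */
    public long reserveRange(Connection dbConn, String seqName, long count) throws SQLException {
        long first;
        // Lock the sequence row so no other caller can read it until this transaction commits.
        try (PreparedStatement select = dbConn.prepareStatement(
                "SELECT NEXT_VAL FROM SEQUENCE_TABLE WHERE SEQUENCE_NAME = ? FOR UPDATE")) {
            select.setString(1, seqName);
            try (ResultSet rs = select.executeQuery()) {
                if (!rs.next()) {
                    throw new SQLException("No sequence row for " + seqName);
                }
                first = rs.getLong(1);
            }
        }
        // Advance the sequence past the reserved range while the row lock is still held.
        try (PreparedStatement update = dbConn.prepareStatement(
                "UPDATE SEQUENCE_TABLE SET NEXT_VAL = ? WHERE SEQUENCE_NAME = ?")) {
            update.setLong(1, first + count);
            update.setString(2, seqName);
            update.executeUpdate();
        }
        dbConn.commit();
        return first;
    }
}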

Example 8 with JDOConnection

use of javax.jdo.datastore.JDOConnection in project motech by motech.

the class MdsDiskSpaceUsageIT method testEudeDiskSpaceUsage.

@Test
public void testEudeDiskSpaceUsage() throws IOException, IllegalAccessException, ClassNotFoundException, InstantiationException, SQLException {
    LOGGER.info("Creating entity");
    generator.generateDummyEntities(ENTITIES, FIELDS, LOOKUPS, true);
    EntityDto entityDto = entityService.getEntityByClassName(Constants.Packages.ENTITY.concat(".").concat(generator.getEntityPrefix()).concat("0"));
    LOGGER.info("Creating {} instances for entity", INSTANCES);
    generator.generateDummyInstances(entityDto.getId(), INSTANCES);
    WebApplicationContext context = ServiceRetriever.getWebAppContext(bundleContext, MDS_BUNDLE_SYMBOLIC_NAME);
    LocalPersistenceManagerFactoryBean dataPersistenceManagerFactoryBean = (LocalPersistenceManagerFactoryBean) context.getBean(BeanFactory.FACTORY_BEAN_PREFIX + "dataPersistenceManagerFactoryBean");
    LocalPersistenceManagerFactoryBean schemaPersistenceManagerFactoryBean = (LocalPersistenceManagerFactoryBean) context.getBean(BeanFactory.FACTORY_BEAN_PREFIX + "persistenceManagerFactoryBean");
    PersistenceManagerFactory dataPersistenceManagerFactory = dataPersistenceManagerFactoryBean.getObject();
    PersistenceManagerFactory schemaPersistenceManagerFactory = schemaPersistenceManagerFactoryBean.getObject();
    JDOConnection dataCon = dataPersistenceManagerFactory.getPersistenceManager().getDataStoreConnection();
    JDOConnection schemaCon = schemaPersistenceManagerFactory.getPersistenceManager().getDataStoreConnection();
    Connection dataNativeCon = (Connection) dataCon.getNativeConnection();
    Connection schemaNativeCon = (Connection) schemaCon.getNativeConnection();
    Statement dataStmt = dataNativeCon.createStatement();
    Statement schemaStmt = schemaNativeCon.createStatement();
    ResultSet dataResultSet = dataStmt.executeQuery(String.format(SQLQUERY, "motechdata"));
    dataResultSet.absolute(1);
    double spaceUsage = dataResultSet.getDouble("MB");
    ResultSet schemaResultSet = schemaStmt.executeQuery(String.format(SQLQUERY, "motechschema"));
    schemaResultSet.absolute(1);
    spaceUsage += schemaResultSet.getDouble("MB");
    LOGGER.info("Disk space usage of Motech Data Services database after creating {} instances is {} MB", INSTANCES, spaceUsage);
    logToFile(spaceUsage);
    Bundle entitiesBundle = OsgiBundleUtils.findBundleBySymbolicName(bundleContext, MDS_ENTITIES_SYMBOLIC_NAME);
    MotechDataService service = generator.getService(entitiesBundle.getBundleContext(), entityDto.getClassName());
    service.deleteAll();
}
Also used : EntityDto(org.motechproject.mds.dto.EntityDto) Statement(java.sql.Statement) Bundle(org.osgi.framework.Bundle) LocalPersistenceManagerFactoryBean(org.springframework.orm.jdo.LocalPersistenceManagerFactoryBean) Connection(java.sql.Connection) JDOConnection(javax.jdo.datastore.JDOConnection) ResultSet(java.sql.ResultSet) PersistenceManagerFactory(javax.jdo.PersistenceManagerFactory) MotechDataService(org.motechproject.mds.service.MotechDataService) WebApplicationContext(org.springframework.web.context.WebApplicationContext) Test(org.junit.Test)
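
The SQLQUERY constant is not shown in the snippet. For a MySQL-backed MDS installation, a per-schema size query of roughly the following shape would produce the "MB" column that the test reads with getDouble("MB"); this is an illustrative guess, not the MOTECH constant:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

public class SchemaSizeSketch {

    // Illustrative only: the real MOTECH SQLQUERY constant is not shown above.
    // Formatted with the schema name ("motechdata" or "motechschema") via String.format.
    private static final String SQLQUERY =
        "SELECT SUM(data_length + index_length) / 1024 / 1024 AS MB "
        + "FROM information_schema.TABLES WHERE table_schema = '%s'";

    /** Returns the approximate on-disk size of a MySQL schema in megabytes. */
    public double schemaSizeMb(Connection con, String schemaName) throws Exception {
        try (Statement stmt = con.createStatement();
             ResultSet rs = stmt.executeQuery(String.format(SQLQUERY, schemaName))) {
            return rs.next() ? rs.getDouble("MB") : 0.0;
        }
    }
}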

Example 9 with JDOConnection

use of javax.jdo.datastore.JDOConnection in project tests by datanucleus.

the class PersistenceManagerTest method testJDOConnection.

/**
 * Test for access to the JDO connection using its JDO interface accessor.
 * TODO parts of this are RDBMS-specific currently
 */
public void testJDOConnection() {
    PersistenceManager pm = pmf.getPersistenceManager();
    Transaction tx = pm.currentTransaction();
    JDOConnection jdoConn = null;
    if (vendorID == null) {
        // This is not an SQL-based datastore so omit
        return;
    }
    try {
        // ---------------------------------------------------
        // test normal scenario
        // ---------------------------------------------------
        tx.begin();
        jdoConn = pm.getDataStoreConnection();
        Connection sqlConn = (Connection) jdoConn;
        try {
            sqlConn.close();
            tx.commit();
        } catch (JDOUserException e) {
            fail("not expected JDOUserException");
        } catch (SQLException e) {
            fail("not expected SQLException");
        }
        assertFalse("tx should not be active", tx.isActive());
        // ---------------------------------------------------
        // test commit with datastore txn and no close
        // ---------------------------------------------------
        tx.begin();
        jdoConn = pm.getDataStoreConnection();
        try {
            tx.commit();
            fail("expected JDOUserException");
        } catch (JDOUserException e) {
        }
        assertTrue("tx should be active", tx.isActive());
        try {
            tx.rollback();
            fail("expected JDOUserException");
        } catch (JDOUserException e) {
        }
        assertTrue("tx should be active", tx.isActive());
        sqlConn = (Connection) jdoConn;
        try {
            sqlConn.close();
        } catch (SQLException e) {
            LOG.error(">> Exception thrown in test", e);
        }
        tx.commit();
        // ---------------------------------------------------
        // test commit with optimistic txn and no close
        // ---------------------------------------------------
        tx.setOptimistic(true);
        tx.begin();
        jdoConn = pm.getDataStoreConnection();
        try {
            tx.commit();
            fail("expected JDOUserException");
        } catch (JDOUserException e) {
        // expected
        }
        assertTrue("tx should be active", pm.currentTransaction().isActive());
    } finally {
        if (tx.isActive()) {
            try {
                if (jdoConn != null) {
                    jdoConn.close();
                }
            } catch (Exception e) {
            }
            tx.rollback();
        }
        pm.close();
    }
}
Also used : Transaction(javax.jdo.Transaction) JDOPersistenceManager(org.datanucleus.api.jdo.JDOPersistenceManager) PersistenceManager(javax.jdo.PersistenceManager) SQLException(java.sql.SQLException) Connection(java.sql.Connection) JDOConnection(javax.jdo.datastore.JDOConnection) JDOUserException(javax.jdo.JDOUserException) TransactionNotActiveException(org.datanucleus.api.jdo.exceptions.TransactionNotActiveException) JDOException(javax.jdo.JDOException) TransactionNotReadableException(org.datanucleus.api.jdo.exceptions.TransactionNotReadableException) JDOUserCallbackException(javax.jdo.JDOUserCallbackException) JDOObjectNotFoundException(javax.jdo.JDOObjectNotFoundException) TransactionNotWritableException(org.datanucleus.api.jdo.exceptions.TransactionNotWritableException) JDOUnsupportedOptionException(javax.jdo.JDOUnsupportedOptionException)
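
The behaviour this test pins down is worth stating plainly: while a connection obtained via getDataStoreConnection() is on loan, commit() and rollback() throw JDOUserException, and the transaction can only complete once the connection has been handed back. A short sketch of the correct ordering:

import javax.jdo.PersistenceManager;
import javax.jdo.Transaction;
import javax.jdo.datastore.JDOConnection;

public class CloseBeforeCommitSketch {

    /** Demonstrates returning the borrowed connection before committing the transaction. */
    public void doWorkAndCommit(PersistenceManager pm) {
        Transaction tx = pm.currentTransaction();
        tx.begin();
        JDOConnection jdoConn = pm.getDataStoreConnection();
        try {
            // ... JDBC work on (java.sql.Connection) jdoConn.getNativeConnection() ...
        } finally {
            // Hand the connection back first; otherwise commit() would throw JDOUserException.
            jdoConn.close();
        }
        tx.commit();
    }
}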

Example 10 with JDOConnection

use of javax.jdo.datastore.JDOConnection in project tests by datanucleus.

the class TestRunListener method cleanupDatastore.

private void cleanupDatastore(int number) {
    String datastoreProtocol = TestHelper.getDatastorePluginProtocol(number);
    // Avoid creating the PMF when clean up may not be supported
    if (datastoreProtocol != null && !datastoreProtocol.equals("jdbc")) {
        LOG.info("Datastore clean up not supported for datastore=" + datastoreProtocol);
        return;
    }
    JDOPersistenceManagerFactory pmf = (JDOPersistenceManagerFactory) JDOPersistenceTestCase.getPMF(number, null);
    PersistenceNucleusContext ctx = pmf.getNucleusContext();
    if (ctx.getStoreManager() instanceof RDBMSStoreManager) {
        PersistenceManager pm = null;
        JDOConnection jdoConnection = null;
        Connection nativeConnection = null;
        try {
            pm = pmf.getPersistenceManager();
            jdoConnection = pm.getDataStoreConnection();
            // Obtain the connection via PM instead of using the PMF properties so we support DataSource too
            nativeConnection = (Connection) jdoConnection.getNativeConnection();
            cleanupRDBMSdatastore(nativeConnection);
        } catch (SQLException e) {
            LOG.error("Error during datastore clean up", e);
        } finally {
            if (jdoConnection != null) {
                jdoConnection.close();
            }
            if (pm != null) {
                pm.close();
            }
        }
    } else {
        // TODO Support cleanup of mongodb, Cassandra, Excel, ODF etc
        LOG.info("Datastore clean up not supported");
    }
    pmf.close();
}
Also used : PersistenceManager(javax.jdo.PersistenceManager) SQLException(java.sql.SQLException) PersistenceNucleusContext(org.datanucleus.PersistenceNucleusContext) Connection(java.sql.Connection) JDOConnection(javax.jdo.datastore.JDOConnection) JDOPersistenceManagerFactory(org.datanucleus.api.jdo.JDOPersistenceManagerFactory) RDBMSStoreManager(org.datanucleus.store.rdbms.RDBMSStoreManager)

Aggregations

JDOConnection (javax.jdo.datastore.JDOConnection)11 Connection (java.sql.Connection)8 SQLException (java.sql.SQLException)6 Statement (java.sql.Statement)6 ResultSet (java.sql.ResultSet)4 PersistenceManager (javax.jdo.PersistenceManager)4 JDOObjectNotFoundException (javax.jdo.JDOObjectNotFoundException)3 Transaction (javax.jdo.Transaction)3 JDOException (javax.jdo.JDOException)2 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)2 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)2 DB (com.mongodb.DB)1 PreparedStatement (java.sql.PreparedStatement)1 HashMap (java.util.HashMap)1 Map (java.util.Map)1 JDOOptimisticVerificationException (javax.jdo.JDOOptimisticVerificationException)1 JDOUnsupportedOptionException (javax.jdo.JDOUnsupportedOptionException)1 JDOUserCallbackException (javax.jdo.JDOUserCallbackException)1 JDOUserException (javax.jdo.JDOUserException)1 PersistenceManagerFactory (javax.jdo.PersistenceManagerFactory)1