Example 1 with AbstractReturningWork

Use of org.hibernate.jdbc.AbstractReturningWork in project hibernate-orm by hibernate.

From the class BaseCoreFunctionalTestCase, the method readCommittedIsolationMaintained:

protected boolean readCommittedIsolationMaintained(String scenario) {
    int isolation = java.sql.Connection.TRANSACTION_READ_UNCOMMITTED;
    Session testSession = null;
    try {
        testSession = openSession();
        isolation = testSession.doReturningWork(new AbstractReturningWork<Integer>() {

            @Override
            public Integer execute(Connection connection) throws SQLException {
                return connection.getTransactionIsolation();
            }
        });
    } catch (Throwable ignore) {
    } finally {
        if (testSession != null) {
            try {
                testSession.close();
            } catch (Throwable ignore) {
            }
        }
    }
    if (isolation < java.sql.Connection.TRANSACTION_READ_COMMITTED) {
        SkipLog.reportSkip("environment does not support at least read committed isolation", scenario);
        return false;
    } else {
        return true;
    }
}
Also used : AbstractReturningWork(org.hibernate.jdbc.AbstractReturningWork) Connection(java.sql.Connection) Session(org.hibernate.Session)
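
Every example on this page follows the same core pattern: subclass AbstractReturningWork<T>, implement execute(Connection), and let Hibernate supply and release the JDBC connection. A minimal self-contained sketch of that pattern (not taken from any of the projects above):

import java.sql.Connection;
import java.sql.SQLException;

import org.hibernate.Session;
import org.hibernate.jdbc.AbstractReturningWork;

public class CatalogNameWork {

    // Returns the catalog name of the JDBC connection backing the given Session.
    // Session.doReturningWork hands the connection to execute() and releases it afterwards.
    public static String currentCatalog(Session session) {
        return session.doReturningWork(new AbstractReturningWork<String>() {
            @Override
            public String execute(Connection connection) throws SQLException {
                return connection.getCatalog();
            }
        });
    }
}

A plain ReturningWork lambda would do for a case this simple; AbstractReturningWork is mainly useful when the same object also has to be passed somewhere that expects a WorkExecutorVisitable, as the generator examples below do.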

Example 2 with AbstractReturningWork

Use of org.hibernate.jdbc.AbstractReturningWork in project hibernate-orm by hibernate.

From the class TableGenerator, the method generate:

@Override
public Serializable generate(final SharedSessionContractImplementor session, final Object obj) {
    final SqlStatementLogger statementLogger = session.getFactory().getServiceRegistry().getService(JdbcServices.class).getSqlStatementLogger();
    final SessionEventListenerManager statsCollector = session.getEventListenerManager();
    return optimizer.generate(new AccessCallback() {

        @Override
        public IntegralDataTypeHolder getNextValue() {
            return session.getTransactionCoordinator().createIsolationDelegate().delegateWork(new AbstractReturningWork<IntegralDataTypeHolder>() {

                @Override
                public IntegralDataTypeHolder execute(Connection connection) throws SQLException {
                    final IntegralDataTypeHolder value = makeValue();
                    int rows;
                    do {
                        try (PreparedStatement selectPS = prepareStatement(connection, selectQuery, statementLogger, statsCollector)) {
                            selectPS.setString(1, segmentValue);
                            final ResultSet selectRS = executeQuery(selectPS, statsCollector);
                            if (!selectRS.next()) {
                                long initializationValue;
                                if (storeLastUsedValue) {
                                    initializationValue = initialValue - 1;
                                } else {
                                    initializationValue = initialValue;
                                }
                                value.initialize(initializationValue);
                                try (PreparedStatement insertPS = prepareStatement(connection, insertQuery, statementLogger, statsCollector)) {
                                    LOG.tracef("binding parameter [%s] - [%s]", 1, segmentValue);
                                    insertPS.setString(1, segmentValue);
                                    value.bind(insertPS, 2);
                                    executeUpdate(insertPS, statsCollector);
                                }
                            } else {
                                int defaultValue;
                                if (storeLastUsedValue) {
                                    defaultValue = 0;
                                } else {
                                    defaultValue = 1;
                                }
                                value.initialize(selectRS, defaultValue);
                            }
                            selectRS.close();
                        } catch (SQLException e) {
                            LOG.unableToReadOrInitHiValue(e);
                            throw e;
                        }
                        try (PreparedStatement updatePS = prepareStatement(connection, updateQuery, statementLogger, statsCollector)) {
                            final IntegralDataTypeHolder updateValue = value.copy();
                            if (optimizer.applyIncrementSizeToSourceValues()) {
                                updateValue.add(incrementSize);
                            } else {
                                updateValue.increment();
                            }
                            updateValue.bind(updatePS, 1);
                            value.bind(updatePS, 2);
                            updatePS.setString(3, segmentValue);
                            rows = executeUpdate(updatePS, statsCollector);
                        } catch (SQLException e) {
                            LOG.unableToUpdateQueryHiValue(renderedTableName, e);
                            throw e;
                        }
                    } while (rows == 0);
                    accessCount++;
                    if (storeLastUsedValue) {
                        return value.increment();
                    } else {
                        return value;
                    }
                }
            }, true);
        }

        @Override
        public String getTenantIdentifier() {
            return session.getTenantIdentifier();
        }
    });
}
Also used : AbstractReturningWork(org.hibernate.jdbc.AbstractReturningWork) SQLException(java.sql.SQLException) Connection(java.sql.Connection) JdbcServices(org.hibernate.engine.jdbc.spi.JdbcServices) PreparedStatement(java.sql.PreparedStatement) ResultSet(java.sql.ResultSet) SqlStatementLogger(org.hibernate.engine.jdbc.spi.SqlStatementLogger) SessionEventListenerManager(org.hibernate.engine.spi.SessionEventListenerManager) IntegralDataTypeHolder(org.hibernate.id.IntegralDataTypeHolder)
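
Unlike Example 1, the generator does not run its work through the Session. It goes through the transaction coordinator's isolation delegate, so the id-table read and update run in their own transaction, outside the application transaction; the boolean argument to delegateWork requests that surrounding transaction. A minimal sketch of the same call shape, assuming a SharedSessionContractImplementor and a hypothetical SEQUENCE_TABLE:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.jdbc.AbstractReturningWork;

public class IsolatedCountWork {

    // Hypothetical helper: counts rows of SEQUENCE_TABLE in an isolated transaction,
    // on a connection separate from the one the session is currently using.
    public static long countRowsIsolated(SharedSessionContractImplementor session) {
        AbstractReturningWork<Long> work = new AbstractReturningWork<Long>() {
            @Override
            public Long execute(Connection connection) throws SQLException {
                try (PreparedStatement ps = connection.prepareStatement("select count(*) from SEQUENCE_TABLE");
                     ResultSet rs = ps.executeQuery()) {
                    rs.next();
                    return rs.getLong(1);
                }
            }
        };
        return session.getTransactionCoordinator()
                .createIsolationDelegate()
                .delegateWork(work, true);
    }
}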

Example 3 with AbstractReturningWork

Use of org.hibernate.jdbc.AbstractReturningWork in project hibernate-orm by hibernate.

From the class MultipleHiLoPerTableGenerator, the method generate:

public synchronized Serializable generate(final SharedSessionContractImplementor session, Object obj) {
    DeprecationLogger.DEPRECATION_LOGGER.deprecatedTableGenerator(getClass().getName());
    final SqlStatementLogger statementLogger = session.getFactory().getServiceRegistry().getService(JdbcServices.class).getSqlStatementLogger();
    final SessionEventListenerManager statsCollector = session.getEventListenerManager();
    final WorkExecutorVisitable<IntegralDataTypeHolder> work = new AbstractReturningWork<IntegralDataTypeHolder>() {

        @Override
        public IntegralDataTypeHolder execute(Connection connection) throws SQLException {
            IntegralDataTypeHolder value = IdentifierGeneratorHelper.getIntegralDataTypeHolder(returnClass);
            int rows;
            do {
                final PreparedStatement queryPreparedStatement = prepareStatement(connection, query, statementLogger, statsCollector);
                try {
                    final ResultSet rs = executeQuery(queryPreparedStatement, statsCollector);
                    boolean isInitialized = rs.next();
                    if (!isInitialized) {
                        value.initialize(0);
                        final PreparedStatement insertPreparedStatement = prepareStatement(connection, insert, statementLogger, statsCollector);
                        try {
                            value.bind(insertPreparedStatement, 1);
                            executeUpdate(insertPreparedStatement, statsCollector);
                        } finally {
                            insertPreparedStatement.close();
                        }
                    } else {
                        value.initialize(rs, 0);
                    }
                    rs.close();
                } catch (SQLException sqle) {
                    LOG.unableToReadOrInitHiValue(sqle);
                    throw sqle;
                } finally {
                    queryPreparedStatement.close();
                }
                final PreparedStatement updatePreparedStatement = prepareStatement(connection, update, statementLogger, statsCollector);
                try {
                    value.copy().increment().bind(updatePreparedStatement, 1);
                    value.bind(updatePreparedStatement, 2);
                    rows = executeUpdate(updatePreparedStatement, statsCollector);
                } catch (SQLException sqle) {
                    LOG.error(LOG.unableToUpdateHiValue(tableName), sqle);
                    throw sqle;
                } finally {
                    updatePreparedStatement.close();
                }
            } while (rows == 0);
            return value;
        }
    };
    // maxLo < 1 indicates a hilo generator with no hilo :?
    if (maxLo < 1) {
        // keep the behavior consistent even for boundary usages
        IntegralDataTypeHolder value = null;
        while (value == null || value.lt(1)) {
            value = session.getTransactionCoordinator().createIsolationDelegate().delegateWork(work, true);
        }
        return value.makeValue();
    }
    return hiloOptimizer.generate(new AccessCallback() {

        public IntegralDataTypeHolder getNextValue() {
            return session.getTransactionCoordinator().createIsolationDelegate().delegateWork(work, true);
        }

        @Override
        public String getTenantIdentifier() {
            return session.getTenantIdentifier();
        }
    });
}
Also used : AbstractReturningWork(org.hibernate.jdbc.AbstractReturningWork) SQLException(java.sql.SQLException) Connection(java.sql.Connection) JdbcServices(org.hibernate.engine.jdbc.spi.JdbcServices) PreparedStatement(java.sql.PreparedStatement) AccessCallback(org.hibernate.id.enhanced.AccessCallback) ResultSet(java.sql.ResultSet) SqlStatementLogger(org.hibernate.engine.jdbc.spi.SqlStatementLogger) SessionEventListenerManager(org.hibernate.engine.spi.SessionEventListenerManager)

Example 4 with AbstractReturningWork

Use of org.hibernate.jdbc.AbstractReturningWork in project openremote by openremote.

From the class AbstractDatapointService, the method getDatapointPeriod:

public DatapointPeriod getDatapointPeriod(String assetId, String attributeName) {
    return persistenceService.doReturningTransaction(em -> em.unwrap(Session.class).doReturningWork(new AbstractReturningWork<DatapointPeriod>() {

        @Override
        public DatapointPeriod execute(Connection connection) throws SQLException {
            String tableName = getDatapointTableName();
            String query = "SELECT DISTINCT periods.* FROM " + "(SELECT entity_id, attribute_name, " + "MIN(timestamp) AS oldestTimestamp, MAX(timestamp) AS latestTimestamp " + "FROM " + tableName + " GROUP BY entity_id, attribute_name) AS periods " + "INNER JOIN " + tableName + " ON " + tableName + ".entity_id = periods.entity_id AND " + tableName + ".attribute_name = periods.attribute_name " + "WHERE " + tableName + ".entity_id = ? " + "AND " + tableName + ".attribute_name = ? ";
            try (PreparedStatement st = connection.prepareStatement(query)) {
                st.setString(1, assetId);
                st.setString(2, attributeName);
                try (ResultSet rs = st.executeQuery()) {
                    if (rs.next()) {
                        return new DatapointPeriod(rs.getString(1), rs.getString(2), rs.getTimestamp(3).getTime(), rs.getTimestamp(4).getTime());
                    }
                    return new DatapointPeriod(assetId, attributeName, null, null);
                }
            }
        }
    }));
}
Also used : DatapointPeriod(org.openremote.model.datapoint.DatapointPeriod) AbstractReturningWork(org.hibernate.jdbc.AbstractReturningWork) Connection(java.sql.Connection) ResultSet(java.sql.ResultSet) PreparedStatement(java.sql.PreparedStatement)
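
The OpenRemote examples reach the Hibernate API from JPA: persistenceService.doReturningTransaction (an OpenRemote-specific helper) supplies an EntityManager, and unwrapping the Session from it gives access to doReturningWork on the same underlying connection. A minimal sketch of just the unwrap step, assuming a plain javax.persistence EntityManager:

import java.sql.Connection;
import java.sql.SQLException;
import javax.persistence.EntityManager;

import org.hibernate.Session;
import org.hibernate.jdbc.AbstractReturningWork;

public class RawJdbcViaEntityManager {

    // Returns the JDBC driver name reported by the connection backing this EntityManager.
    public static String driverName(EntityManager em) {
        return em.unwrap(Session.class).doReturningWork(new AbstractReturningWork<String>() {
            @Override
            public String execute(Connection connection) throws SQLException {
                return connection.getMetaData().getDriverName();
            }
        });
    }
}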

Example 5 with AbstractReturningWork

Use of org.hibernate.jdbc.AbstractReturningWork in project openremote by openremote.

From the class AssetDatapointService, the method aggregateDatapoints:

public NumberDatapoint[] aggregateDatapoints(AssetAttribute attribute, DatapointInterval datapointInterval, long timestamp) {
    LOG.fine("Aggregating datapoints for: " + attribute);
    AttributeRef attributeRef = attribute.getReferenceOrThrow();
    return persistenceService.doReturningTransaction(entityManager -> entityManager.unwrap(Session.class).doReturningWork(new AbstractReturningWork<NumberDatapoint[]>() {

        @Override
        public NumberDatapoint[] execute(Connection connection) throws SQLException {
            String truncateX;
            String step;
            String interval;
            Function<Timestamp, String> labelFunction;
            SimpleDateFormat dayFormat = new SimpleDateFormat("dd. MMM yyyy");
            SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm");
            switch(datapointInterval) {
                case HOUR:
                    truncateX = "minute";
                    step = "1 minute";
                    interval = "1 hour";
                    labelFunction = timeFormat::format;
                    break;
                case DAY:
                    truncateX = "hour";
                    step = "1 hour";
                    interval = "1 day";
                    labelFunction = timeFormat::format;
                    break;
                case WEEK:
                    truncateX = "day";
                    step = "1 day";
                    interval = "7 day";
                    labelFunction = dayFormat::format;
                    break;
                case MONTH:
                    truncateX = "day";
                    step = "1 day";
                    interval = "1 month";
                    labelFunction = dayFormat::format;
                    break;
                case YEAR:
                    truncateX = "month";
                    step = "1 month";
                    interval = "1 year";
                    labelFunction = dayFormat::format;
                    break;
                default:
                    throw new IllegalArgumentException("Can't handle interval: " + datapointInterval);
            }
            StringBuilder query = new StringBuilder();
            query.append("select TS as X, coalesce(AVG_VALUE, null) as Y " + " from ( " + "       select date_trunc(?, GS)::timestamp TS " + "       from generate_series(to_timestamp(?) - ?, to_timestamp(?), ?) GS " + "       ) TS " + "  left join ( " + "       select " + "           date_trunc(?, to_timestamp(TIMESTAMP / 1000))::timestamp as TS, ");
            switch(attribute.getTypeOrThrow().getValueType()) {
                case NUMBER:
                    query.append(" AVG(VALUE::text::numeric) as AVG_VALUE ");
                    break;
                case BOOLEAN:
                    query.append(" AVG(case when VALUE::text::boolean is true then 1 else 0 end) as AVG_VALUE ");
                    break;
                default:
                    throw new IllegalArgumentException("Can't aggregate number datapoints for type of: " + attribute);
            }
            query.append(" from ASSET_DATAPOINT " + "         where " + "           to_timestamp(TIMESTAMP / 1000) >= to_timestamp(?) - ? " + "           and " + "           to_timestamp(TIMESTAMP / 1000) <= to_timestamp(?) " + "           and " + "           ENTITY_ID = ? and ATTRIBUTE_NAME = ? " + "         group by TS " + "  ) DP using (TS) " + " order by TS asc ");
            PreparedStatement st = connection.prepareStatement(query.toString());
            long timestampSeconds = timestamp / 1000;
            st.setString(1, truncateX);
            st.setLong(2, timestampSeconds);
            st.setObject(3, new PGInterval(interval));
            st.setLong(4, timestampSeconds);
            st.setObject(5, new PGInterval(step));
            st.setString(6, truncateX);
            st.setLong(7, timestampSeconds);
            st.setObject(8, new PGInterval(interval));
            st.setLong(9, timestampSeconds);
            st.setString(10, attributeRef.getEntityId());
            st.setString(11, attributeRef.getAttributeName());
            try (ResultSet rs = st.executeQuery()) {
                List<NumberDatapoint> result = new ArrayList<>();
                while (rs.next()) {
                    String label = labelFunction.apply(rs.getTimestamp(1));
                    Number value = rs.getObject(2) != null ? rs.getDouble(2) : null;
                    result.add(new NumberDatapoint(label, value));
                }
                return result.toArray(new NumberDatapoint[result.size()]);
            }
        }
    }));
}
Also used : AttributeRef(org.openremote.model.attribute.AttributeRef) AbstractReturningWork(org.hibernate.jdbc.AbstractReturningWork) ArrayList(java.util.ArrayList) PGInterval(org.postgresql.util.PGInterval) NumberDatapoint(org.openremote.model.datapoint.NumberDatapoint) SimpleDateFormat(java.text.SimpleDateFormat)
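
The PGInterval binding above is what lets the step and interval strings be passed as ordinary JDBC parameters to the generate_series query. A minimal sketch of the same idea outside the datapoint service, assuming a plain PostgreSQL connection and a hypothetical EVENTS table with a CREATED_ON timestamp column:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.postgresql.util.PGInterval;

public class RecentRowCount {

    // Hypothetical helper: counts EVENTS rows newer than the given interval (e.g. "1 hour").
    public static long countNewerThan(Connection connection, String interval) throws SQLException {
        String sql = "select count(*) from EVENTS where CREATED_ON >= now() - ?";
        try (PreparedStatement st = connection.prepareStatement(sql)) {
            // PGInterval maps to the PostgreSQL interval type when bound via setObject.
            st.setObject(1, new PGInterval(interval));
            try (ResultSet rs = st.executeQuery()) {
                rs.next();
                return rs.getLong(1);
            }
        }
    }
}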

Aggregations

AbstractReturningWork (org.hibernate.jdbc.AbstractReturningWork): 6
Connection (java.sql.Connection): 5
PreparedStatement (java.sql.PreparedStatement): 4
ResultSet (java.sql.ResultSet): 4
SQLException (java.sql.SQLException): 2
ArrayList (java.util.ArrayList): 2
JdbcServices (org.hibernate.engine.jdbc.spi.JdbcServices): 2
SqlStatementLogger (org.hibernate.engine.jdbc.spi.SqlStatementLogger): 2
SessionEventListenerManager (org.hibernate.engine.spi.SessionEventListenerManager): 2
AttributeRef (org.openremote.model.attribute.AttributeRef): 2
PGInterval (org.postgresql.util.PGInterval): 2
SimpleDateFormat (java.text.SimpleDateFormat): 1
Session (org.hibernate.Session): 1
IntegralDataTypeHolder (org.hibernate.id.IntegralDataTypeHolder): 1
AccessCallback (org.hibernate.id.enhanced.AccessCallback): 1
Datapoint (org.openremote.model.datapoint.Datapoint): 1
DatapointPeriod (org.openremote.model.datapoint.DatapointPeriod): 1
NumberDatapoint (org.openremote.model.datapoint.NumberDatapoint): 1
ValueDatapoint (org.openremote.model.datapoint.ValueDatapoint): 1
PGobject (org.postgresql.util.PGobject): 1