
Example 61 with ExecutionContext

use of org.datanucleus.ExecutionContext in project datanucleus-rdbms by datanucleus.

the class RDBMSPersistenceHandler method updateObject.

// ------------------------------ Update ----------------------------------
/**
 * Updates a persistent object in the database.
 * The update can take place in several steps, one update per table that it is stored in (depending on
 * which fields are updated).
 * e.g. when updating an object that uses "new-table" inheritance for each level of the inheritance tree,
 * there will be an UPDATE into each table. When updating an object that uses "complete-table"
 * inheritance, there will be a single UPDATE into its table.
 * @param op The ObjectProvider of the object to be updated.
 * @param fieldNumbers The numbers of the fields to be updated.
 * @throws NucleusDataStoreException when an error occurs in the datastore communication
 */
public void updateObject(ObjectProvider op, int[] fieldNumbers) {
    // Check if read-only so update not permitted
    assertReadOnlyForUpdateOfObject(op);
    // Check if we need to do any updates to the schema before updating this object
    checkForSchemaUpdatesForFieldsOfObject(op, fieldNumbers);
    AbstractMemberMetaData[] mmds = null;
    if (fieldNumbers != null && fieldNumbers.length > 0) {
        // Convert the field numbers for this class into their metadata for the table
        ExecutionContext ec = op.getExecutionContext();
        mmds = new AbstractMemberMetaData[fieldNumbers.length];
        for (int i = 0; i < mmds.length; i++) {
            mmds[i] = op.getClassMetaData().getMetaDataForManagedMemberAtAbsolutePosition(fieldNumbers[i]);
        }
        if (ec.getStatistics() != null) {
            ec.getStatistics().incrementUpdateCount();
        }
        ClassLoaderResolver clr = ec.getClassLoaderResolver();
        DatastoreClass dc = getDatastoreClass(op.getObject().getClass().getName(), clr);
        updateObjectInTable(dc, op, clr, mmds);
    }
}
Also used : ExecutionContext(org.datanucleus.ExecutionContext) ClassLoaderResolver(org.datanucleus.ClassLoaderResolver) SecondaryDatastoreClass(org.datanucleus.store.rdbms.table.SecondaryDatastoreClass) DatastoreClass(org.datanucleus.store.rdbms.table.DatastoreClass) AbstractMemberMetaData(org.datanucleus.metadata.AbstractMemberMetaData)
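
The "one UPDATE per table" behaviour described in the javadoc can be pictured with a minimal JDBC sketch. This is an illustration only, not DataNucleus code; the PERSON/EMPLOYEE tables and columns are hypothetical stand-ins for a base class and a subclass mapped with "new-table" inheritance.

import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class PerTableUpdateSketch {
    /** Issues one UPDATE per table in the inheritance chain for the object with the given id. */
    public static void updateAcrossTables(Connection conn, long id) throws SQLException {
        // Fields declared on the base class live in its own table
        try (PreparedStatement ps = conn.prepareStatement(
                "UPDATE PERSON SET NAME = ? WHERE PERSON_ID = ?")) {
            ps.setString(1, "New Name");
            ps.setLong(2, id);
            ps.executeUpdate();
        }
        // Fields declared on the subclass live in a second table sharing the same id
        try (PreparedStatement ps = conn.prepareStatement(
                "UPDATE EMPLOYEE SET SALARY = ? WHERE EMPLOYEE_ID = ?")) {
            ps.setBigDecimal(1, new BigDecimal("50000"));
            ps.setLong(2, id);
            ps.executeUpdate();
        }
    }
}

With "complete-table" inheritance all columns would sit in a single table, so a single UPDATE would suffice.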

Example 62 with ExecutionContext

use of org.datanucleus.ExecutionContext in project datanucleus-rdbms by datanucleus.

the class RDBMSPersistenceHandler method deleteObject.

// ------------------------------ Delete ----------------------------------
/**
 * Deletes a persistent object from the database.
 * The delete can take place in several steps, one delete per table that it is stored in.
 * e.g. when deleting an object that uses "new-table" inheritance for each level of the inheritance tree,
 * there will be a DELETE for each table. When deleting an object that uses "complete-table"
 * inheritance, there will be a single DELETE for its table.
 * @param op The ObjectProvider of the object to be deleted.
 * @throws NucleusDataStoreException when an error occurs in the datastore communication
 */
public void deleteObject(ObjectProvider op) {
    // Check if read-only so delete not permitted
    assertReadOnlyForUpdateOfObject(op);
    ExecutionContext ec = op.getExecutionContext();
    if (ec.getStatistics() != null) {
        ec.getStatistics().incrementDeleteCount();
    }
    ClassLoaderResolver clr = ec.getClassLoaderResolver();
    DatastoreClass dc = getDatastoreClass(op.getClassMetaData().getFullClassName(), clr);
    deleteObjectFromTable(dc, op, clr);
}
Also used : ExecutionContext(org.datanucleus.ExecutionContext) ClassLoaderResolver(org.datanucleus.ClassLoaderResolver) SecondaryDatastoreClass(org.datanucleus.store.rdbms.table.SecondaryDatastoreClass) DatastoreClass(org.datanucleus.store.rdbms.table.DatastoreClass)
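
The per-table DELETE follows the same shape. In this hypothetical JDBC sketch (again not DataNucleus code, table names invented) the subclass row is removed before the base row, so a foreign key from EMPLOYEE to PERSON is never violated:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class PerTableDeleteSketch {
    /** Issues one DELETE per table in the inheritance chain, child table first. */
    public static void deleteAcrossTables(Connection conn, long id) throws SQLException {
        // Subclass table first, so any FK referencing the base row is released
        try (PreparedStatement ps = conn.prepareStatement(
                "DELETE FROM EMPLOYEE WHERE EMPLOYEE_ID = ?")) {
            ps.setLong(1, id);
            ps.executeUpdate();
        }
        // Base table last
        try (PreparedStatement ps = conn.prepareStatement(
                "DELETE FROM PERSON WHERE PERSON_ID = ?")) {
            ps.setLong(1, id);
            ps.executeUpdate();
        }
    }
}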

Example 63 with ExecutionContext

use of org.datanucleus.ExecutionContext in project datanucleus-rdbms by datanucleus.

the class DynamicSchemaFieldManager method storeObjectField.

/**
 * Method to store an object field into the attached instance.
 * @param fieldNumber Number of the field to store
 * @param value the value in the detached instance
 */
public void storeObjectField(int fieldNumber, Object value) {
    if (value == null) {
        // No value so nothing to do
        return;
    }
    ExecutionContext ec = op.getExecutionContext();
    ClassLoaderResolver clr = ec.getClassLoaderResolver();
    AbstractMemberMetaData mmd = op.getClassMetaData().getMetaDataForManagedMemberAtAbsolutePosition(fieldNumber);
    if (mmd != null) {
        DatastoreClass table = rdbmsMgr.getDatastoreClass(op.getObject().getClass().getName(), clr);
        JavaTypeMapping fieldMapping = table.getMemberMapping(mmd);
        if (fieldMapping != null) {
            if (fieldMapping instanceof InterfaceMapping) {
                // 1-1 Interface field
                InterfaceMapping intfMapping = (InterfaceMapping) fieldMapping;
                if (mmd.getFieldTypes() != null || mmd.hasExtension(MetaData.EXTENSION_MEMBER_IMPLEMENTATION_CLASSES)) {
                    // Field is defined to not accept this type so just return
                    return;
                }
                processInterfaceMappingForValue(intfMapping, value, mmd, ec);
            } else if (mmd.hasCollection() || mmd.hasArray()) {
                boolean hasJoin = false;
                if (mmd.getJoinMetaData() != null) {
                    hasJoin = true;
                } else {
                    AbstractMemberMetaData[] relMmds = mmd.getRelatedMemberMetaData(clr);
                    if (relMmds != null && relMmds[0].getJoinMetaData() != null) {
                        hasJoin = true;
                    }
                }
                if (!hasJoin) {
                    // Not join table so no supported schema updates
                    return;
                }
                Table joinTbl = fieldMapping.getStoreManager().getTable(mmd);
                ElementContainerTable collTbl = (ElementContainerTable) joinTbl;
                JavaTypeMapping elemMapping = collTbl.getElementMapping();
                if (elemMapping instanceof InterfaceMapping) {
                    InterfaceMapping intfMapping = (InterfaceMapping) elemMapping;
                    if (mmd.hasCollection()) {
                        Collection coll = (Collection) value;
                        if (coll.isEmpty()) {
                            return;
                        }
                        // Update value mapping using first element. Maybe we should do the same for all elements?
                        Object elementValue = coll.iterator().next();
                        processInterfaceMappingForValue(intfMapping, elementValue, mmd, ec);
                    } else if (mmd.hasArray()) {
                        if (Array.getLength(value) == 0) {
                            return;
                        }
                        // Update value mapping using first element. Maybe we should do the same for all elements?
                        Object elementValue = Array.get(value, 0);
                        processInterfaceMappingForValue(intfMapping, elementValue, mmd, ec);
                    }
                }
            } else if (mmd.hasMap()) {
                boolean hasJoin = false;
                if (mmd.getJoinMetaData() != null) {
                    hasJoin = true;
                } else {
                    AbstractMemberMetaData[] relMmds = mmd.getRelatedMemberMetaData(clr);
                    if (relMmds != null && relMmds[0].getJoinMetaData() != null) {
                        hasJoin = true;
                    }
                }
                if (!hasJoin) {
                    // Not join table so no supported schema updates
                    return;
                }
                Map map = (Map) value;
                if (map.isEmpty()) {
                    return;
                }
                Table joinTbl = fieldMapping.getStoreManager().getTable(mmd);
                MapTable mapTbl = (MapTable) joinTbl;
                JavaTypeMapping keyMapping = mapTbl.getKeyMapping();
                if (keyMapping instanceof InterfaceMapping) {
                    // Update key mapping using first key. Maybe we should do the same for all keys?
                    InterfaceMapping intfMapping = (InterfaceMapping) keyMapping;
                    Object keyValue = map.keySet().iterator().next();
                    processInterfaceMappingForValue(intfMapping, keyValue, mmd, ec);
                }
                JavaTypeMapping valMapping = mapTbl.getValueMapping();
                if (valMapping instanceof InterfaceMapping) {
                    // Update value mapping using first value. Maybe we should do the same for all values?
                    InterfaceMapping intfMapping = (InterfaceMapping) valMapping;
                    Object valValue = map.values().iterator().next();
                    processInterfaceMappingForValue(intfMapping, valValue, mmd, ec);
                }
            }
        }
    }
}
Also used : ElementContainerTable(org.datanucleus.store.rdbms.table.ElementContainerTable) Table(org.datanucleus.store.rdbms.table.Table) MapTable(org.datanucleus.store.rdbms.table.MapTable) InterfaceMapping(org.datanucleus.store.rdbms.mapping.java.InterfaceMapping) JavaTypeMapping(org.datanucleus.store.rdbms.mapping.java.JavaTypeMapping) ClassLoaderResolver(org.datanucleus.ClassLoaderResolver) ExecutionContext(org.datanucleus.ExecutionContext) Collection(java.util.Collection) DatastoreClass(org.datanucleus.store.rdbms.table.DatastoreClass) AbstractMemberMetaData(org.datanucleus.metadata.AbstractMemberMetaData) Map(java.util.Map)
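
A recurring pattern in this method is that only the first element, key or value is sampled to decide whether the interface mapping needs a schema update (the inline comments even question whether all entries should be checked). A minimal stand-alone sketch of that sampling; the helper class and method names are invented for illustration and are not part of DataNucleus:

import java.lang.reflect.Array;
import java.util.Collection;
import java.util.Map;

public final class FirstEntrySampler {
    private FirstEntrySampler() {}

    /** Returns the concrete class of the first element of a collection or array, or null if empty. */
    public static Class<?> firstElementType(Object container) {
        if (container instanceof Collection) {
            Collection<?> coll = (Collection<?>) container;
            return coll.isEmpty() ? null : coll.iterator().next().getClass();
        }
        if (container != null && container.getClass().isArray()) {
            return Array.getLength(container) == 0 ? null : Array.get(container, 0).getClass();
        }
        return null;
    }

    /** Returns the concrete class of the first key of a map, or null if empty. */
    public static Class<?> firstKeyType(Map<?, ?> map) {
        return (map == null || map.isEmpty()) ? null : map.keySet().iterator().next().getClass();
    }
}

As the source comments note, sampling only the first entry is a trade-off: entries of other implementation classes added later are not considered at this point.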

Example 64 with ExecutionContext

use of org.datanucleus.ExecutionContext in project datanucleus-core by datanucleus.

the class ExecutionContextPool method checkOut.

public synchronized ExecutionContext checkOut(Object owner, Map<String, Object> options) {
    long now = System.currentTimeMillis();
    ExecutionContext ec;
    if (!recyclableECs.isEmpty()) {
        Iterator<Entry<ExecutionContext, Long>> recycIter = recyclableECs.entrySet().iterator();
        while (recycIter.hasNext()) {
            Entry<ExecutionContext, Long> recycEntry = recycIter.next();
            ec = recycEntry.getKey();
            if ((now - recycEntry.getValue()) > expirationTime) {
                // object has expired
                recycIter.remove();
                expire(ec);
                ec = null;
            } else {
                if (validate(ec)) {
                    recycIter.remove();
                    ec.initialise(owner, options);
                    return ec;
                }
                // object failed validation
                recycIter.remove();
                expire(ec);
                ec = null;
            }
        }
    }
    // no objects available, create a new one
    ec = create(owner, options);
    return ec;
}
Also used : Entry(java.util.Map.Entry) ExecutionContext(org.datanucleus.ExecutionContext)
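
The expire/validate/recycle flow in checkOut can be isolated into a generic pool sketch. The types and method names below are illustrative stand-ins for a plain generic pool, not the DataNucleus ExecutionContextPool API:

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.function.Predicate;
import java.util.function.Supplier;

class SimplePool<T> {
    private final Map<T, Long> recyclable = new HashMap<>();
    private final long expirationMillis;

    SimplePool(long expirationMillis) {
        this.expirationMillis = expirationMillis;
    }

    /** Returns an object to the pool, recording when it became idle. */
    synchronized void checkIn(T obj) {
        recyclable.put(obj, System.currentTimeMillis());
    }

    /** Hands out a validated, non-expired idle object, or creates a new one if none qualifies. */
    synchronized T checkOut(Predicate<T> validate, Supplier<T> create) {
        long now = System.currentTimeMillis();
        Iterator<Map.Entry<T, Long>> it = recyclable.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<T, Long> entry = it.next();
            T candidate = entry.getKey();
            it.remove(); // whatever happens next, the object leaves the idle map
            boolean expired = (now - entry.getValue()) > expirationMillis;
            if (!expired && validate.test(candidate)) {
                return candidate;
            }
            // expired or failed validation: discard and keep scanning
        }
        return create.get();
    }
}

In the real method above, discarded objects are additionally passed to expire(ec), and a successfully recycled one is re-initialised via ec.initialise(owner, options) before being returned.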

Example 65 with ExecutionContext

use of org.datanucleus.ExecutionContext in project datanucleus-core by datanucleus.

the class SCOUtils method validateObjectForWriting.

/**
 * Method to check if an object to be stored in a SCO container is already persistent, or is managed by a
 * different ExecutionContext. If not persistent, this call will persist it.
 * If not yet flushed to the datastore this call will flush it.
 * @param ec ExecutionContext
 * @param object The object
 * @param fieldValues Values for any fields when persisting (if the object needs persisting)
 * @return Whether the object was persisted during this call
 */
public static boolean validateObjectForWriting(ExecutionContext ec, Object object, FieldValues fieldValues) {
    boolean persisted = false;
    ApiAdapter api = ec.getApiAdapter();
    if (api.isPersistable(object)) {
        ExecutionContext objectEC = api.getExecutionContext(object);
        if (objectEC != null && ec != objectEC) {
            throw new NucleusUserException(Localiser.msg("023009", StringUtils.toJVMIDString(object)), api.getIdForObject(object));
        } else if (!api.isPersistent(object)) {
            // Not persistent, so either is detached, or needs persisting for first time
            boolean exists = false;
            if (api.isDetached(object)) {
                if (ec.getBooleanProperty(PropertyNames.PROPERTY_ATTACH_SAME_DATASTORE)) {
                    // Assume that it is detached from this datastore
                    exists = true;
                } else {
                    // Check if the (attached) object exists in this datastore
                    try {
                        Object obj = ec.findObject(api.getIdForObject(object), true, false, object.getClass().getName());
                        if (obj != null) {
                            // PM.getObjectById creates a dummy object to represent this object and automatically
                            // enlists it in the txn. Evict it to avoid issues with reachability
                            ObjectProvider objSM = ec.findObjectProvider(obj);
                            if (objSM != null) {
                                ec.evictFromTransaction(objSM);
                            }
                        }
                        exists = true;
                    } catch (NucleusObjectNotFoundException onfe) {
                        exists = false;
                    }
                }
            }
            if (!exists) {
                // Persist the object
                ec.persistObjectInternal(object, fieldValues, ObjectProvider.PC);
                persisted = true;
            }
        } else {
            // Persistent state, but is it flushed to the datastore?
            ObjectProvider objectSM = ec.findObjectProvider(object);
            if (objectSM.isWaitingToBeFlushedToDatastore()) {
                // Process any fieldValues
                if (fieldValues != null) {
                    objectSM.loadFieldValues(fieldValues);
                }
                // Now flush it
                objectSM.flush();
                // Mark as being persisted since is now in the datastore
                persisted = true;
            }
        }
    }
    return persisted;
}
Also used : ApiAdapter(org.datanucleus.api.ApiAdapter) ExecutionContext(org.datanucleus.ExecutionContext) NucleusUserException(org.datanucleus.exceptions.NucleusUserException) ObjectProvider(org.datanucleus.state.ObjectProvider) NucleusObjectNotFoundException(org.datanucleus.exceptions.NucleusObjectNotFoundException)

Aggregations

ExecutionContext (org.datanucleus.ExecutionContext): 178
ObjectProvider (org.datanucleus.state.ObjectProvider): 85
NucleusDataStoreException (org.datanucleus.exceptions.NucleusDataStoreException): 73
SQLException (java.sql.SQLException): 66
ManagedConnection (org.datanucleus.store.connection.ManagedConnection): 64
SQLController (org.datanucleus.store.rdbms.SQLController): 63
PreparedStatement (java.sql.PreparedStatement): 62
Iterator (java.util.Iterator): 56
MappedDatastoreException (org.datanucleus.store.rdbms.exceptions.MappedDatastoreException): 27
ResultSet (java.sql.ResultSet): 26
AbstractMemberMetaData (org.datanucleus.metadata.AbstractMemberMetaData): 26
StatementMappingIndex (org.datanucleus.store.rdbms.query.StatementMappingIndex): 25
Map (java.util.Map): 23
JavaTypeMapping (org.datanucleus.store.rdbms.mapping.java.JavaTypeMapping): 20
Collection (java.util.Collection): 18
ClassLoaderResolver (org.datanucleus.ClassLoaderResolver): 18
StatementClassMapping (org.datanucleus.store.rdbms.query.StatementClassMapping): 17
DatastoreClass (org.datanucleus.store.rdbms.table.DatastoreClass): 16
SCOCollectionIterator (org.datanucleus.store.types.SCOCollectionIterator): 16
ArrayList (java.util.ArrayList): 15