Example 6 with DbEntity

Use of org.apache.cayenne.map.DbEntity in project cayenne by apache.

The class DataDomainQueryAction, method interceptRelationshipQuery.

private boolean interceptRelationshipQuery() {
    if (query instanceof RelationshipQuery) {
        RelationshipQuery relationshipQuery = (RelationshipQuery) query;
        if (relationshipQuery.isRefreshing()) {
            return !DONE;
        }
        ObjRelationship relationship = relationshipQuery.getRelationship(domain.getEntityResolver());
        // check if we can derive target PK from FK...
        if (relationship.isSourceIndependentFromTargetChange()) {
            return !DONE;
        }
        // we can assume that there is one and only one DbRelationship as
        // we previously checked that "!isSourceIndependentFromTargetChange"
        DbRelationship dbRelationship = relationship.getDbRelationships().get(0);
        // An FK pointing to a unique column that acts as a 'fake' PK (CAY-1755)
        // is not sufficient to generate the target ObjectId.
        DbEntity targetEntity = dbRelationship.getTargetEntity();
        if (dbRelationship.getJoins().size() < targetEntity.getPrimaryKeys().size()) {
            return !DONE;
        }
        if (cache == null) {
            return !DONE;
        }
        DataRow sourceRow = cache.getCachedSnapshot(relationshipQuery.getObjectId());
        if (sourceRow == null) {
            return !DONE;
        }
        ObjectId targetId = sourceRow.createTargetObjectId(relationship.getTargetEntityName(), dbRelationship);
        // null id means that FK is null...
        if (targetId == null) {
            this.response = new GenericResponse(Collections.EMPTY_LIST);
            return DONE;
        }
        // target id resolution (unlike source) should be polymorphic
        DataRow targetRow = polymorphicRowFromCache(targetId);
        if (targetRow != null) {
            this.response = new GenericResponse(Collections.singletonList(targetRow));
            return DONE;
        }
        // create a fault
        if (context != null && relationship.isSourceDefiningTargetPrecenseAndType(domain.getEntityResolver())) {
            // prevent passing partial snapshots to ObjectResolver per
            // CAY-724.
            // Create a hollow object right here and skip object conversion
            // downstream
            this.noObjectConversion = true;
            Object object = context.findOrCreateObject(targetId);
            this.response = new GenericResponse(Collections.singletonList(object));
            return DONE;
        }
    }
    return !DONE;
}
Also used: ObjRelationship (org.apache.cayenne.map.ObjRelationship), DbEntity (org.apache.cayenne.map.DbEntity), RelationshipQuery (org.apache.cayenne.query.RelationshipQuery), ObjectId (org.apache.cayenne.ObjectId), GenericResponse (org.apache.cayenne.util.GenericResponse), DbRelationship (org.apache.cayenne.map.DbRelationship), DataRow (org.apache.cayenne.DataRow)
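For context, a query of the shape intercepted above could be issued roughly as in the sketch below. This is a minimal example, not part of the Cayenne sources: the "Painting" entity, its "PAINTING_ID" primary key column and its to-one "toArtist" relationship are hypothetical mapping names, and ObjectId.of assumes a newer Cayenne version (older versions use the ObjectId constructor instead).

import java.util.List;

import org.apache.cayenne.ObjectContext;
import org.apache.cayenne.ObjectId;
import org.apache.cayenne.query.RelationshipQuery;

public class RelationshipQuerySketch {

    // Resolves the to-one "toArtist" relationship of a hypothetical Painting
    // object. A non-refreshing query over a relationship whose FK sits on the
    // source row is a candidate for the cache-based interception shown above.
    static List<?> resolveArtist(ObjectContext context) {
        ObjectId paintingId = ObjectId.of("Painting", "PAINTING_ID", 5);
        RelationshipQuery query = new RelationshipQuery(paintingId, "toArtist", false);
        return context.performQuery(query);
    }
}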

Example 7 with DbEntity

Use of org.apache.cayenne.map.DbEntity in project cayenne by apache.

The class DataDomainUpdateBucket, method appendQueriesInternal.

@Override
void appendQueriesInternal(Collection<Query> queries) {
    DataDomainDBDiffBuilder diffBuilder = new DataDomainDBDiffBuilder();
    DataNodeSyncQualifierDescriptor qualifierBuilder = new DataNodeSyncQualifierDescriptor();
    for (DbEntity dbEntity : dbEntities) {
        Collection<DbEntityClassDescriptor> descriptors = descriptorsByDbEntity.get(dbEntity);
        Map<Object, Query> batches = new LinkedHashMap<>();
        for (DbEntityClassDescriptor descriptor : descriptors) {
            ObjEntity entity = descriptor.getEntity();
            diffBuilder.reset(descriptor);
            qualifierBuilder.reset(descriptor);
            boolean isRootDbEntity = entity.getDbEntity() == dbEntity;
            for (Persistent o : objectsByDescriptor.get(descriptor.getClassDescriptor())) {
                ObjectDiff diff = parent.objectDiff(o.getObjectId());
                Map<String, Object> snapshot = diffBuilder.buildDBDiff(diff);
                // check whether MODIFIED object has real db-level modifications
                if (snapshot == null) {
                    continue;
                }
                // after we filtered out "fake" modifications, check if an
                // attempt is made to modify a read only entity
                checkReadOnly(entity);
                Map<String, Object> qualifierSnapshot = qualifierBuilder.createQualifierSnapshot(diff);
                // organize batches by the updated columns + nulls in qualifier
                Set<String> snapshotSet = snapshot.keySet();
                Set<String> nullQualifierNames = new HashSet<>();
                for (Map.Entry<String, Object> entry : qualifierSnapshot.entrySet()) {
                    if (entry.getValue() == null) {
                        nullQualifierNames.add(entry.getKey());
                    }
                }
                List<Set<String>> batchKey = Arrays.asList(snapshotSet, nullQualifierNames);
                UpdateBatchQuery batch = (UpdateBatchQuery) batches.get(batchKey);
                if (batch == null) {
                    batch = new UpdateBatchQuery(dbEntity, qualifierBuilder.getAttributes(), updatedAttributes(dbEntity, snapshot), nullQualifierNames, 10);
                    batch.setUsingOptimisticLocking(qualifierBuilder.isUsingOptimisticLocking());
                    batches.put(batchKey, batch);
                }
                batch.add(qualifierSnapshot, snapshot, o.getObjectId());
                // update replacement id with meaningful PK changes
                if (isRootDbEntity) {
                    Map<String, Object> replacementId = o.getObjectId().getReplacementIdMap();
                    for (DbAttribute pk : dbEntity.getPrimaryKeys()) {
                        String name = pk.getName();
                        if (snapshot.containsKey(name) && !replacementId.containsKey(name)) {
                            replacementId.put(name, snapshot.get(name));
                        }
                    }
                }
            }
        }
        queries.addAll(batches.values());
    }
}
Also used: Set (java.util.Set), HashSet (java.util.HashSet), Query (org.apache.cayenne.query.Query), UpdateBatchQuery (org.apache.cayenne.query.UpdateBatchQuery), DbAttribute (org.apache.cayenne.map.DbAttribute), Persistent (org.apache.cayenne.Persistent), LinkedHashMap (java.util.LinkedHashMap), ObjEntity (org.apache.cayenne.map.ObjEntity), DbEntity (org.apache.cayenne.map.DbEntity), Map (java.util.Map)
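The grouping step above can be illustrated in isolation. The sketch below is a simplified, hypothetical stand-in, not the real implementation: plain maps play the role of Cayenne's DB and qualifier snapshots, and the resulting groups stand in for UpdateBatchQuery instances; only the composite batch key (updated columns plus null qualifier columns) is taken from the code above.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

class BatchGroupingSketch {

    // A hypothetical pair of snapshots for one object: the changed column
    // values and the qualifier (WHERE clause) values.
    static final class RowDiff {
        final Map<String, Object> updated;
        final Map<String, Object> qualifier;

        RowDiff(Map<String, Object> updated, Map<String, Object> qualifier) {
            this.updated = updated;
            this.qualifier = qualifier;
        }
    }

    // Rows that update the same columns and have the same null qualifier
    // columns land in the same group, i.e. they can share one batched UPDATE.
    static Map<List<Set<String>>, List<RowDiff>> groupIntoBatches(List<RowDiff> rows) {
        Map<List<Set<String>>, List<RowDiff>> batches = new LinkedHashMap<>();
        for (RowDiff row : rows) {
            Set<String> nullQualifierNames = new HashSet<>();
            for (Map.Entry<String, Object> entry : row.qualifier.entrySet()) {
                if (entry.getValue() == null) {
                    nullQualifierNames.add(entry.getKey());
                }
            }
            List<Set<String>> batchKey = Arrays.asList(row.updated.keySet(), nullQualifierNames);
            batches.computeIfAbsent(batchKey, k -> new ArrayList<>()).add(row);
        }
        return batches;
    }
}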

Example 8 with DbEntity

Use of org.apache.cayenne.map.DbEntity in project cayenne by apache.

The class DbGenerator, method buildStatements.

/**
 * Creates and stores internally a set of statements for database schema
 * creation, ignoring configured schema creation preferences. Statements are
 * NOT executed in this method.
 */
protected void buildStatements() {
    dropTables = new HashMap<>();
    createTables = new HashMap<>();
    createConstraints = new HashMap<>();
    DbAdapter adapter = getAdapter();
    for (final DbEntity dbe : this.dbEntitiesInInsertOrder) {
        String name = dbe.getName();
        // build "DROP TABLE"
        dropTables.put(name, adapter.dropTableStatements(dbe));
        // build "CREATE TABLE"
        createTables.put(name, adapter.createTable(dbe));
        // build constraints
        createConstraints.put(name, createConstraintsQueries(dbe));
    }
    PkGenerator pkGenerator = adapter.getPkGenerator();
    dropPK = pkGenerator.dropAutoPkStatements(dbEntitiesRequiringAutoPK);
    createPK = pkGenerator.createAutoPkStatements(dbEntitiesRequiringAutoPK);
}
Also used: DbAdapter (org.apache.cayenne.dba.DbAdapter), DbEntity (org.apache.cayenne.map.DbEntity), PkGenerator (org.apache.cayenne.dba.PkGenerator)
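As a rough illustration of what those adapter calls produce, the sketch below builds a single hand-made DbEntity and asks a DbAdapter for its DROP and CREATE statements. It is a minimal sketch, not taken from DbGenerator itself: the "ARTIST" table and its columns are hypothetical, and the adapter instance is assumed to be obtained from the Cayenne runtime.

import java.sql.Types;
import java.util.ArrayList;
import java.util.List;

import org.apache.cayenne.dba.DbAdapter;
import org.apache.cayenne.map.DbAttribute;
import org.apache.cayenne.map.DbEntity;

class DdlSketch {

    // Produces the DDL for one hand-built table, mirroring the per-entity
    // loop in buildStatements(): DROP statements first, then CREATE TABLE.
    static List<String> statementsFor(DbAdapter adapter) {
        DbEntity artist = new DbEntity("ARTIST");

        DbAttribute id = new DbAttribute("ARTIST_ID", Types.INTEGER, artist);
        id.setPrimaryKey(true);
        id.setMandatory(true);
        artist.addAttribute(id);

        DbAttribute name = new DbAttribute("NAME", Types.VARCHAR, artist);
        name.setMaxLength(255);
        artist.addAttribute(name);

        List<String> ddl = new ArrayList<>();
        ddl.addAll(adapter.dropTableStatements(artist));
        ddl.add(adapter.createTable(artist));
        return ddl;
    }
}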

Example 9 with DbEntity

Use of org.apache.cayenne.map.DbEntity in project cayenne by apache.

The class DbGenerator, method runGenerator.

/**
 * Executes a set of commands to drop/create database objects. This is the
 * main worker method of DbGenerator. Command set is built based on
 * pre-configured generator settings.
 */
public void runGenerator(DataSource ds) throws Exception {
    this.failures = null;
    try (Connection connection = ds.getConnection()) {
        // force connection to autocommit, see CAY-2354
        boolean autoCommit = connection.getAutoCommit();
        connection.setAutoCommit(true);
        try {
            // drop tables
            if (shouldDropTables) {
                ListIterator<DbEntity> it = dbEntitiesInInsertOrder.listIterator(dbEntitiesInInsertOrder.size());
                while (it.hasPrevious()) {
                    DbEntity ent = it.previous();
                    for (String statement : dropTables.get(ent.getName())) {
                        safeExecute(connection, statement);
                    }
                }
            }
            // create tables
            List<String> createdTables = new ArrayList<>();
            if (shouldCreateTables) {
                for (final DbEntity ent : dbEntitiesInInsertOrder) {
                    // only create missing tables
                    safeExecute(connection, createTables.get(ent.getName()));
                    createdTables.add(ent.getName());
                }
            }
            // create FK
            if (shouldCreateTables && shouldCreateFKConstraints) {
                for (DbEntity ent : dbEntitiesInInsertOrder) {
                    if (createdTables.contains(ent.getName())) {
                        List<String> fks = createConstraints.get(ent.getName());
                        for (String fk : fks) {
                            safeExecute(connection, fk);
                        }
                    }
                }
            }
            // drop PK
            if (shouldDropPKSupport) {
                List<String> dropAutoPKSQL = getAdapter().getPkGenerator().dropAutoPkStatements(dbEntitiesRequiringAutoPK);
                for (final String sql : dropAutoPKSQL) {
                    safeExecute(connection, sql);
                }
            }
            // create pk
            if (shouldCreatePKSupport) {
                List<String> createAutoPKSQL = getAdapter().getPkGenerator().createAutoPkStatements(dbEntitiesRequiringAutoPK);
                for (final String sql : createAutoPKSQL) {
                    safeExecute(connection, sql);
                }
            }
            new DbGeneratorPostprocessor().execute(connection, getAdapter());
        } finally {
            // restore the connection's autocommit state, in case it is recycled by an underlying pool
            connection.setAutoCommit(autoCommit);
        }
    }
}
Also used: DbEntity (org.apache.cayenne.map.DbEntity), Connection (java.sql.Connection), ArrayList (java.util.ArrayList)
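The essential ordering and connection handling in runGenerator can be shown with plain JDBC. The sketch below is an assumption-laden simplification, not Cayenne code: the DROP and CREATE statements are assumed to be pre-built strings passed in table insert order, and safeExecute's error swallowing is deliberately left out.

import java.sql.Connection;
import java.sql.Statement;
import java.util.List;
import java.util.ListIterator;

import javax.sql.DataSource;

class SchemaRunSketch {

    // Drops in reverse insert order (dependent tables first), creates in
    // forward order, and restores the connection's autocommit flag afterwards.
    static void run(DataSource ds, List<String> dropInInsertOrder, List<String> createInInsertOrder) throws Exception {
        try (Connection connection = ds.getConnection()) {
            boolean autoCommit = connection.getAutoCommit();
            connection.setAutoCommit(true);
            try (Statement statement = connection.createStatement()) {
                ListIterator<String> it = dropInInsertOrder.listIterator(dropInInsertOrder.size());
                while (it.hasPrevious()) {
                    statement.execute(it.previous());
                }
                for (String sql : createInInsertOrder) {
                    statement.execute(sql);
                }
            } finally {
                connection.setAutoCommit(autoCommit);
            }
        }
    }
}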

Example 10 with DbEntity

Use of org.apache.cayenne.map.DbEntity in project cayenne by apache.

The class ObjectDiff, method addPhantomFkDiff.

private void addPhantomFkDiff(ArcOperation arcDiff) {
    String arcId = arcDiff.getArcId().toString();
    DbEntity dbEntity = classDescriptor.getEntity().getDbEntity();
    DbRelationship dbRelationship = (DbRelationship) dbEntity.getRelationship(arcId.substring(ASTDbPath.DB_PREFIX.length()));
    if (dbRelationship.isToMany()) {
        return;
    }
    if (currentArcSnapshot == null) {
        currentArcSnapshot = new HashMap<>();
    }
    currentArcSnapshot.put(arcId, arcDiff.getTargetNodeId());
    if (phantomFks == null) {
        phantomFks = new HashMap<>();
    }
    ArcOperation oldOp = phantomFks.put(arcDiff, arcDiff);
    // "delete" cancels "create" and vice versa...
    if (oldOp != null && oldOp.isDelete() != arcDiff.isDelete()) {
        phantomFks.remove(arcDiff);
        if (otherDiffs != null) {
            otherDiffs.remove(oldOp);
        }
    }
}
Also used: DbEntity (org.apache.cayenne.map.DbEntity), DbRelationship (org.apache.cayenne.map.DbRelationship)
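The create/delete cancellation at the end of the method is easy to miss, so here is a minimal, simplified sketch of that bookkeeping. ArcOp is a hypothetical stand-in for ObjectDiff.ArcOperation, and the map is keyed by arc id rather than by the operation itself, which is a simplification of the real code.

import java.util.HashMap;
import java.util.Map;

class PhantomFkSketch {

    // Hypothetical stand-in for ObjectDiff.ArcOperation.
    static final class ArcOp {
        final String arcId;
        final Object targetNodeId;
        final boolean delete;

        ArcOp(String arcId, Object targetNodeId, boolean delete) {
            this.arcId = arcId;
            this.targetNodeId = targetNodeId;
            this.delete = delete;
        }
    }

    private final Map<String, ArcOp> phantomFks = new HashMap<>();

    void register(ArcOp op) {
        ArcOp oldOp = phantomFks.put(op.arcId, op);
        // a pending "create" for the same arc is cancelled by a later
        // "delete", and vice versa
        if (oldOp != null && oldOp.delete != op.delete) {
            phantomFks.remove(op.arcId);
        }
    }
}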

Aggregations

DbEntity (org.apache.cayenne.map.DbEntity): 273
DbAttribute (org.apache.cayenne.map.DbAttribute): 106
Test (org.junit.Test): 106
ObjEntity (org.apache.cayenne.map.ObjEntity): 64
DbRelationship (org.apache.cayenne.map.DbRelationship): 55
DataMap (org.apache.cayenne.map.DataMap): 47
ObjAttribute (org.apache.cayenne.map.ObjAttribute): 26
ArrayList (java.util.ArrayList): 25
DbJoin (org.apache.cayenne.map.DbJoin): 24
MergerToken (org.apache.cayenne.dbsync.merge.token.MergerToken): 20
ObjRelationship (org.apache.cayenne.map.ObjRelationship): 19
CayenneRuntimeException (org.apache.cayenne.CayenneRuntimeException): 16
JdbcAdapter (org.apache.cayenne.dba.JdbcAdapter): 16
Entity (org.apache.cayenne.map.Entity): 16
List (java.util.List): 15
DbAdapter (org.apache.cayenne.dba.DbAdapter): 15
EntityEvent (org.apache.cayenne.map.event.EntityEvent): 14
HashMap (java.util.HashMap): 12
SelectQuery (org.apache.cayenne.query.SelectQuery): 12
DataChannelDescriptor (org.apache.cayenne.configuration.DataChannelDescriptor): 11