Use of org.apache.cayenne.map.DbEntity in project cayenne by apache.
The class DataDomainQueryAction, method interceptRelationshipQuery.
private boolean interceptRelationshipQuery() {
    if (query instanceof RelationshipQuery) {
        RelationshipQuery relationshipQuery = (RelationshipQuery) query;
        if (relationshipQuery.isRefreshing()) {
            return !DONE;
        }
        ObjRelationship relationship = relationshipQuery.getRelationship(domain.getEntityResolver());
        // check if we can derive the target PK from the FK...
        if (relationship.isSourceIndependentFromTargetChange()) {
            return !DONE;
        }
        // we can assume that there is one and only one DbRelationship, as
        // we previously checked that "!isSourceIndependentFromTargetChange"
        DbRelationship dbRelationship = relationship.getDbRelationships().get(0);
        // an FK pointing to a unique field that is a 'fake' PK (CAY-1755)
        // is not sufficient to generate the target ObjectId
        DbEntity targetEntity = dbRelationship.getTargetEntity();
        if (dbRelationship.getJoins().size() < targetEntity.getPrimaryKeys().size()) {
            return !DONE;
        }
        if (cache == null) {
            return !DONE;
        }
        DataRow sourceRow = cache.getCachedSnapshot(relationshipQuery.getObjectId());
        if (sourceRow == null) {
            return !DONE;
        }
        ObjectId targetId = sourceRow.createTargetObjectId(relationship.getTargetEntityName(), dbRelationship);
        // a null id means that the FK is null...
        if (targetId == null) {
            this.response = new GenericResponse(Collections.EMPTY_LIST);
            return DONE;
        }
        // target id resolution (unlike source) should be polymorphic
        DataRow targetRow = polymorphicRowFromCache(targetId);
        if (targetRow != null) {
            this.response = new GenericResponse(Collections.singletonList(targetRow));
            return DONE;
        }
        // create a fault
        if (context != null && relationship.isSourceDefiningTargetPrecenseAndType(domain.getEntityResolver())) {
            // prevent passing partial snapshots to ObjectResolver per CAY-724:
            // create a hollow object right here and skip object conversion downstream
            this.noObjectConversion = true;
            Object object = context.findOrCreateObject(targetId);
            this.response = new GenericResponse(Collections.singletonList(object));
            return DONE;
        }
    }
    return !DONE;
}
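For context, a hedged sketch of the kind of query this interceptor short-circuits. The relationship name "toGallery" and the wrapper method are hypothetical stand-ins for any mapped to-one relationship; only the RelationshipQuery constructor and ObjectContext.performQuery are real Cayenne API.

import org.apache.cayenne.ObjectContext;
import org.apache.cayenne.ObjectId;
import org.apache.cayenne.query.RelationshipQuery;

public class ToOneResolutionSketch {

    // Resolves the to-one "toGallery" relationship of the object with the given id.
    // With refresh=false and a warm snapshot cache, DataDomainQueryAction can derive
    // the target ObjectId straight from the cached FK value and never hit the database.
    static Object resolveToOne(ObjectContext context, ObjectId sourceId) {
        RelationshipQuery query = new RelationshipQuery(sourceId, "toGallery", false);
        java.util.List<?> result = context.performQuery(query);
        return result.isEmpty() ? null : result.get(0); // empty list <=> NULL FK
    }
}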
Use of org.apache.cayenne.map.DbEntity in project cayenne by apache.
The class DataDomainUpdateBucket, method appendQueriesInternal.
@Override
void appendQueriesInternal(Collection<Query> queries) {
    DataDomainDBDiffBuilder diffBuilder = new DataDomainDBDiffBuilder();
    DataNodeSyncQualifierDescriptor qualifierBuilder = new DataNodeSyncQualifierDescriptor();
    for (DbEntity dbEntity : dbEntities) {
        Collection<DbEntityClassDescriptor> descriptors = descriptorsByDbEntity.get(dbEntity);
        Map<Object, Query> batches = new LinkedHashMap<>();
        for (DbEntityClassDescriptor descriptor : descriptors) {
            ObjEntity entity = descriptor.getEntity();
            diffBuilder.reset(descriptor);
            qualifierBuilder.reset(descriptor);
            boolean isRootDbEntity = entity.getDbEntity() == dbEntity;
            for (Persistent o : objectsByDescriptor.get(descriptor.getClassDescriptor())) {
                ObjectDiff diff = parent.objectDiff(o.getObjectId());
                Map<String, Object> snapshot = diffBuilder.buildDBDiff(diff);
                // check whether the MODIFIED object has real db-level modifications
                if (snapshot == null) {
                    continue;
                }
                // after filtering out "fake" modifications, check if an
                // attempt is made to modify a read-only entity
                checkReadOnly(entity);
                Map<String, Object> qualifierSnapshot = qualifierBuilder.createQualifierSnapshot(diff);
                // organize batches by the updated columns + NULLs in the qualifier
                Set<String> snapshotSet = snapshot.keySet();
                Set<String> nullQualifierNames = new HashSet<>();
                for (Map.Entry<String, Object> entry : qualifierSnapshot.entrySet()) {
                    if (entry.getValue() == null) {
                        nullQualifierNames.add(entry.getKey());
                    }
                }
                List<Set<String>> batchKey = Arrays.asList(snapshotSet, nullQualifierNames);
                UpdateBatchQuery batch = (UpdateBatchQuery) batches.get(batchKey);
                if (batch == null) {
                    batch = new UpdateBatchQuery(dbEntity, qualifierBuilder.getAttributes(),
                            updatedAttributes(dbEntity, snapshot), nullQualifierNames, 10);
                    batch.setUsingOptimisticLocking(qualifierBuilder.isUsingOptimisticLocking());
                    batches.put(batchKey, batch);
                }
                batch.add(qualifierSnapshot, snapshot, o.getObjectId());
                // update the replacement id with meaningful PK changes
                if (isRootDbEntity) {
                    Map<String, Object> replacementId = o.getObjectId().getReplacementIdMap();
                    for (DbAttribute pk : dbEntity.getPrimaryKeys()) {
                        String name = pk.getName();
                        if (snapshot.containsKey(name) && !replacementId.containsKey(name)) {
                            replacementId.put(name, snapshot.get(name));
                        }
                    }
                }
            }
        }
        queries.addAll(batches.values());
    }
}
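The batch key built above is just a two-element list of column-name sets, so batch sharing falls out of plain collection equality. A minimal, self-contained illustration (the column names are hypothetical):

import java.util.*;

public class BatchKeyDemo {
    public static void main(String[] args) {
        // key for an object that modified NAME and PRICE, with no NULLs in the qualifier
        List<Set<String>> keyA = Arrays.asList(
                new HashSet<>(Arrays.asList("NAME", "PRICE")), new HashSet<>());
        // a second object touching the same columns yields an equal key,
        // regardless of insertion order...
        List<Set<String>> keyB = Arrays.asList(
                new HashSet<>(Arrays.asList("PRICE", "NAME")), new HashSet<>());
        System.out.println(keyA.equals(keyB)); // true -> both rows join one UpdateBatchQuery
        // ...while an update that also sets ESTIMATED_PRICE gets its own batch
        List<Set<String>> keyC = Arrays.asList(
                new HashSet<>(Arrays.asList("NAME", "PRICE", "ESTIMATED_PRICE")), new HashSet<>());
        System.out.println(keyA.equals(keyC)); // false -> separate UPDATE statement
    }
}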
Use of org.apache.cayenne.map.DbEntity in project cayenne by apache.
The class DbGenerator, method buildStatements.
/**
 * Creates and stores internally a set of statements for database schema
 * creation, ignoring configured schema creation preferences. Statements are
 * NOT executed in this method.
 */
protected void buildStatements() {
    dropTables = new HashMap<>();
    createTables = new HashMap<>();
    createConstraints = new HashMap<>();
    DbAdapter adapter = getAdapter();
    for (final DbEntity dbe : this.dbEntitiesInInsertOrder) {
        String name = dbe.getName();
        // build "DROP TABLE"
        dropTables.put(name, adapter.dropTableStatements(dbe));
        // build "CREATE TABLE"
        createTables.put(name, adapter.createTable(dbe));
        // build constraints
        createConstraints.put(name, createConstraintsQueries(dbe));
    }
    PkGenerator pkGenerator = adapter.getPkGenerator();
    dropPK = pkGenerator.dropAutoPkStatements(dbEntitiesRequiringAutoPK);
    createPK = pkGenerator.createAutoPkStatements(dbEntitiesRequiringAutoPK);
}
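A hedged sketch of how these precomputed maps are typically consumed, for example to preview the schema SQL without running it. The two-argument constructor and the configuredStatements() accessor are assumptions here; exact signatures vary across Cayenne versions:

import org.apache.cayenne.access.DbGenerator;
import org.apache.cayenne.dba.DbAdapter;
import org.apache.cayenne.map.DataMap;

public class SchemaPreview {

    // Prints the statements that buildStatements() prepared, filtered by
    // the generator flags; nothing is executed against a database.
    static void printSchemaSql(DbAdapter adapter, DataMap map) {
        DbGenerator generator = new DbGenerator(adapter, map); // assumed constructor form
        generator.setShouldDropTables(false);        // keep existing tables
        generator.setShouldCreateFKConstraints(true);
        for (String sql : generator.configuredStatements()) { // assumed accessor
            System.out.println(sql + ";");
        }
    }
}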
Use of org.apache.cayenne.map.DbEntity in project cayenne by apache.
The class DbGenerator, method runGenerator.
/**
 * Executes a set of commands to drop/create database objects. This is the
 * main worker method of DbGenerator. The command set is built based on
 * pre-configured generator settings.
 */
public void runGenerator(DataSource ds) throws Exception {
    this.failures = null;
    try (Connection connection = ds.getConnection()) {
        // force the connection to autocommit, see CAY-2354
        boolean autoCommit = connection.getAutoCommit();
        connection.setAutoCommit(true);
        try {
            // drop tables
            if (shouldDropTables) {
                ListIterator<DbEntity> it = dbEntitiesInInsertOrder.listIterator(dbEntitiesInInsertOrder.size());
                while (it.hasPrevious()) {
                    DbEntity ent = it.previous();
                    for (String statement : dropTables.get(ent.getName())) {
                        safeExecute(connection, statement);
                    }
                }
            }
            // create tables
            List<String> createdTables = new ArrayList<>();
            if (shouldCreateTables) {
                for (final DbEntity ent : dbEntitiesInInsertOrder) {
                    // only create missing tables
                    safeExecute(connection, createTables.get(ent.getName()));
                    createdTables.add(ent.getName());
                }
            }
            // create FK constraints
            if (shouldCreateTables && shouldCreateFKConstraints) {
                for (DbEntity ent : dbEntitiesInInsertOrder) {
                    if (createdTables.contains(ent.getName())) {
                        List<String> fks = createConstraints.get(ent.getName());
                        for (String fk : fks) {
                            safeExecute(connection, fk);
                        }
                    }
                }
            }
            // drop PK support
            if (shouldDropPKSupport) {
                List<String> dropAutoPKSQL = getAdapter().getPkGenerator().dropAutoPkStatements(dbEntitiesRequiringAutoPK);
                for (final String sql : dropAutoPKSQL) {
                    safeExecute(connection, sql);
                }
            }
            // create PK support
            if (shouldCreatePKSupport) {
                List<String> createAutoPKSQL = getAdapter().getPkGenerator().createAutoPkStatements(dbEntitiesRequiringAutoPK);
                for (final String sql : createAutoPKSQL) {
                    safeExecute(connection, sql);
                }
            }
            new DbGeneratorPostprocessor().execute(connection, getAdapter());
        } finally {
            // restore the connection's autocommit state in case it is recycled by an underlying pool
            connection.setAutoCommit(autoCommit);
        }
    }
}
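A minimal usage sketch, assuming a DbGenerator built elsewhere and a caller-supplied DataSource; the flag combination below is illustrative only:

import javax.sql.DataSource;
import org.apache.cayenne.access.DbGenerator;

public class SchemaCreation {

    // Configures the generator, then executes the prepared statements
    // against the given DataSource.
    static void createSchema(DbGenerator generator, DataSource ds) throws Exception {
        generator.setShouldDropTables(false);     // keep whatever already exists
        generator.setShouldCreateTables(true);
        generator.setShouldCreateFKConstraints(true);
        generator.setShouldCreatePKSupport(true);
        generator.runGenerator(ds);               // runs in autocommit, per CAY-2354
    }
}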
Use of org.apache.cayenne.map.DbEntity in project cayenne by apache.
The class ObjectDiff, method addPhantomFkDiff.
private void addPhantomFkDiff(ArcOperation arcDiff) {
    String arcId = arcDiff.getArcId().toString();
    DbEntity dbEntity = classDescriptor.getEntity().getDbEntity();
    DbRelationship dbRelationship = (DbRelationship) dbEntity
            .getRelationship(arcId.substring(ASTDbPath.DB_PREFIX.length()));
    if (dbRelationship.isToMany()) {
        return;
    }
    if (currentArcSnapshot == null) {
        currentArcSnapshot = new HashMap<>();
    }
    currentArcSnapshot.put(arcId, arcDiff.getTargetNodeId());
    if (phantomFks == null) {
        phantomFks = new HashMap<>();
    }
    ArcOperation oldOp = phantomFks.put(arcDiff, arcDiff);
    // "delete" cancels "create" and vice versa...
    if (oldOp != null && oldOp.isDelete() != arcDiff.isDelete()) {
        phantomFks.remove(arcDiff);
        if (otherDiffs != null) {
            otherDiffs.remove(oldOp);
        }
    }
}
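A rough illustration of the object-level operations that can produce the cancelling pair above. Painting, Gallery, and setToGallery are hypothetical mapped classes and accessors, not Cayenne API:

public class PhantomFkDemo {

    // Painting and Gallery stand in for any pair of entities linked by a
    // to-one relationship backed by a phantom FK.
    static void toggleToOne(Painting painting, Gallery gallery) {
        painting.setToGallery(gallery); // registers a phantom-FK "create" ArcOperation
        painting.setToGallery(null);    // the opposite "delete" arrives for the same arc;
                                        // addPhantomFkDiff drops both, so the commit
                                        // generates no UPDATE for this FK column
    }
}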