Use of org.apache.cayenne.map.EntitySorter in the Apache Cayenne project — class DefaultDbRowOpSorterTest, method createSorter:
@Before
public void createSorter() {
    // Stub resolver: no real mapping is required by these tests.
    EntityResolver entityResolver = mock(EntityResolver.class);

    // Stub sorter: entities are compared by name, and only the entity
    // literally named "reflexive" reports itself as reflexive.
    entitySorter = mock(EntitySorter.class);
    when(entitySorter.getDbEntityComparator())
            .thenReturn(Comparator.comparing(DbEntity::getName));
    when(entitySorter.isReflexive(argThat(ent -> ent.getName().equals("reflexive"))))
            .thenReturn(true);

    // Domain mock that hands out the stubbed sorter and resolver above.
    DataDomain domain = mock(DataDomain.class);
    when(domain.getEntitySorter()).thenReturn(entitySorter);
    when(domain.getEntityResolver()).thenReturn(entityResolver);

    sorter = new DefaultDbRowOpSorter(() -> domain);
}
Use of org.apache.cayenne.map.EntitySorter in the Apache Cayenne project — class DefaultDbRowOpSorter, method sortReflexive:
/**
 * Re-sorts contiguous runs of rows that belong to a reflexive DbEntity (an
 * entity related to itself), so that rows within each run are ordered to
 * satisfy the self-referencing constraint. Deletes are ordered in reverse
 * (dependents first), as signaled by the boolean passed to the sorter.
 *
 * @param sortedDbRows rows already grouped/sorted by entity; modified in place
 *                     via {@link List#subList(int, int)} views
 */
protected void sortReflexive(List<DbRowOp> sortedDbRows) {
    DataDomain dataDomain = dataDomainProvider.get();
    EntitySorter sorter = dataDomain.getEntitySorter();
    EntityResolver resolver = dataDomain.getEntityResolver();

    DbEntity lastEntity = null;
    int start = 0;
    int idx = 0;
    DbRowOp lastRow = null;
    for (DbRowOp row : sortedDbRows) {
        if (row.getEntity() != lastEntity) {
            // Entity boundary: sort the chunk [start, idx) that just ended
            // BEFORE advancing `start`. (Advancing first would make the
            // sublist empty — subList(idx, idx) — and skip the sort.)
            if (lastEntity != null && sorter.isReflexive(lastEntity)) {
                ObjEntity objEntity = resolver.getObjEntity(lastRow.getObject().getObjectId().getEntityName());
                List<DbRowOp> reflexiveSublist = sortedDbRows.subList(start, idx);
                sorter.sortObjectsForEntity(objEntity, reflexiveSublist, lastRow instanceof DeleteDbRowOp);
            }
            start = idx;
            lastEntity = row.getEntity();
        }
        lastRow = row;
        idx++;
    }

    // sort the trailing chunk, which the loop never reaches a boundary for
    if (lastEntity != null && sorter.isReflexive(lastEntity)) {
        ObjEntity objEntity = resolver.getObjEntity(lastRow.getObject().getObjectId().getEntityName());
        List<DbRowOp> reflexiveSublist = sortedDbRows.subList(start, idx);
        sorter.sortObjectsForEntity(objEntity, reflexiveSublist, lastRow instanceof DeleteDbRowOp);
    }
}
Use of org.apache.cayenne.map.EntitySorter in the Apache Cayenne project — class DataDomainDeleteBucket, method appendQueriesInternal:
@Override
void appendQueriesInternal(Collection<Query> queries) {
    DataNodeSyncQualifierDescriptor qualifierBuilder = new DataNodeSyncQualifierDescriptor();

    // Order entities for deletion (dependents before masters).
    EntitySorter sorter = parent.getDomain().getEntitySorter();
    sorter.sortDbEntities(dbEntities, true);

    for (DbEntity dbEntity : dbEntities) {
        Collection<DbEntityClassDescriptor> descriptors = descriptorsByDbEntity.get(dbEntity);
        Map<Object, Query> batchesByNullKey = new LinkedHashMap<>();

        for (DbEntityClassDescriptor descriptor : descriptors) {
            qualifierBuilder.reset(descriptor);

            boolean isRootDbEntity = descriptor.isMaster();

            // remove object set for dependent entity, so that it does not show up
            // on post processing
            List<Persistent> objects = objectsByDescriptor.get(descriptor.getClassDescriptor());
            if (objects.isEmpty()) {
                continue;
            }

            checkReadOnly(descriptor.getEntity());

            // only the root DbEntity's objects need per-entity ordering
            if (isRootDbEntity) {
                sorter.sortObjectsForEntity(descriptor.getEntity(), objects, true);
            }

            for (Persistent persistent : objects) {
                ObjectDiff diff = parent.objectDiff(persistent.getObjectId());
                Map<String, Object> qualifierSnapshot = qualifierBuilder.createQualifierSnapshot(diff);

                // batches are keyed by the set of qualifier attributes that
                // happen to be null in this snapshot
                Set<String> nullQualifierNames = new HashSet<>();
                qualifierSnapshot.forEach((name, value) -> {
                    if (value == null) {
                        nullQualifierNames.add(name);
                    }
                });

                Object batchKey = Arrays.asList(nullQualifierNames);
                DeleteBatchQuery batch = (DeleteBatchQuery) batchesByNullKey.get(batchKey);
                if (batch == null) {
                    batch = new DeleteBatchQuery(dbEntity, qualifierBuilder.getAttributes(), nullQualifierNames, 27);
                    batch.setUsingOptimisticLocking(qualifierBuilder.isUsingOptimisticLocking());
                    batchesByNullKey.put(batchKey, batch);
                }
                batch.add(qualifierSnapshot);
            }
        }

        queries.addAll(batchesByNullKey.values());
    }
}
Use of org.apache.cayenne.map.EntitySorter in the Apache Cayenne project — class DbGenerator, method prepareDbEntities:
/**
 * Helper method that orders DbEntities to satisfy referential constraints
 * and returns an ordered list. It also filters out DerivedDbEntities.
 *
 * <p>Populates {@code dbEntitiesInInsertOrder} (all usable tables, sorted in
 * insert order) and {@code dbEntitiesRequiringAutoPK} (tables whose PK is not
 * fully propagated from a master entity and thus needs auto-generation).
 *
 * @param excludedEntities entities to skip; {@code null} is treated as empty
 */
private void prepareDbEntities(Collection<DbEntity> excludedEntities) {
    if (excludedEntities == null) {
        excludedEntities = Collections.emptyList();
    }

    List<DbEntity> tables = new ArrayList<>();
    List<DbEntity> tablesWithAutoPk = new ArrayList<>();

    for (DbEntity nextEntity : map.getDbEntities()) {

        // tables with no columns are not included
        if (nextEntity.getAttributes().isEmpty()) {
            logObj.info("Skipping entity with no attributes: " + nextEntity.getName());
            continue;
        }

        // check if this entity is explicitly excluded
        if (excludedEntities.contains(nextEntity)) {
            continue;
        }

        // tables with invalid DbAttributes are not included
        boolean invalidAttributes = false;
        for (final DbAttribute attr : nextEntity.getAttributes()) {
            if (attr.getType() == TypesMapping.NOT_DEFINED) {
                logObj.info("Skipping entity, attribute type is undefined: " + nextEntity.getName() + "." + attr.getName());
                invalidAttributes = true;
                break;
            }
        }
        if (invalidAttributes) {
            continue;
        }

        tables.add(nextEntity);

        // check if an automatic PK generation can be potentially supported
        // in this entity: strip from the candidate PK set every attribute
        // that is propagated from a master via a to-master-PK relationship
        Iterator<DbRelationship> relationships = nextEntity.getRelationships().iterator();

        // create a copy of the original PK list,
        // since the list will be modified locally
        List<DbAttribute> pkAttributes = new ArrayList<>(nextEntity.getPrimaryKeys());
        while (!pkAttributes.isEmpty() && relationships.hasNext()) {
            DbRelationship nextRelationship = relationships.next();
            if (!nextRelationship.isToMasterPK()) {
                continue;
            }
            for (DbJoin join : nextRelationship.getJoins()) {
                pkAttributes.remove(join.getSource());
            }
        }

        // auto-PK is needed only if some PK attribute
        // is not propagated via relationship
        if (!pkAttributes.isEmpty()) {
            tablesWithAutoPk.add(nextEntity);
        }
    }

    // sort table list into insert order
    if (tables.size() > 1) {
        EntitySorter sorter = new AshwoodEntitySorter();
        sorter.setEntityResolver(new EntityResolver(Collections.singleton(map)));
        sorter.sortDbEntities(tables, false);
    }

    this.dbEntitiesInInsertOrder = tables;
    this.dbEntitiesRequiringAutoPK = tablesWithAutoPk;
}
Use of org.apache.cayenne.map.EntitySorter in the Apache Cayenne project — class DataDomainInsertBucket, method appendQueriesInternal:
@Override
void appendQueriesInternal(Collection<Query> queries) {
    DataDomainDBDiffBuilder diffBuilder = new DataDomainDBDiffBuilder();

    // Order entities for insertion (masters before dependents).
    EntitySorter sorter = parent.getDomain().getEntitySorter();
    sorter.sortDbEntities(dbEntities, false);

    for (DbEntity dbEntity : dbEntities) {
        Collection<DbEntityClassDescriptor> descriptors = descriptorsByDbEntity.get(dbEntity);
        InsertBatchQuery batch = new InsertBatchQuery(dbEntity, 27);

        for (DbEntityClassDescriptor descriptor : descriptors) {
            diffBuilder.reset(descriptor);

            List<Persistent> objects = objectsByDescriptor.get(descriptor.getClassDescriptor());
            if (objects.isEmpty()) {
                continue;
            }

            checkReadOnly(descriptor.getEntity());
            createPermIds(descriptor, objects);
            sorter.sortObjectsForEntity(descriptor.getEntity(), objects, false);

            for (Persistent persistent : objects) {
                ObjectDiff diff = parent.objectDiff(persistent.getObjectId());
                Map<String, Object> snapshot = diffBuilder.buildDBDiff(diff);

                // a diff may yield no values; insert an empty change map instead
                if (snapshot == null) {
                    snapshot = new HashMap<>();
                }

                batch.add(snapshot, persistent.getObjectId());

                if (!descriptor.isMaster()) {
                    trackFlattenedInsert(descriptor, persistent);
                }
            }
        }

        queries.add(batch);
    }
}
Aggregations