Use of org.apache.cayenne.query.BatchQueryRow in project cayenne by apache.
From the class DefaultBatchTranslatorIT, the method testAppendDbAttribute2:
@Test
public void testAppendDbAttribute2() throws Exception {
    DbAdapter adapter = objectFactory.newInstance(DbAdapter.class, JdbcAdapter.class.getName());

    // Stub out the translator internals; only appendDbAttribute() is exercised here.
    DefaultBatchTranslator builder = new DefaultBatchTranslator(mock(BatchQuery.class), adapter, null) {
        @Override
        protected String createSql() {
            return null;
        }

        @Override
        protected DbAttributeBinding[] createBindings() {
            return new DbAttributeBinding[0];
        }

        @Override
        protected DbAttributeBinding[] doUpdateBindings(BatchQueryRow row) {
            return new DbAttributeBinding[0];
        }
    };

    StringBuilder buf = new StringBuilder();
    DbEntity entity = new DbEntity("Test");

    // a CHAR attribute appends as its plain, unquoted name
    DbAttribute attr = new DbAttribute("testAttr", Types.CHAR, null);
    attr.setEntity(entity);
    builder.appendDbAttribute(buf, attr);
    assertEquals("testAttr", buf.toString());

    // same expectation for a VARCHAR attribute
    buf = new StringBuilder();
    attr = new DbAttribute("testAttr", Types.VARCHAR, null);
    attr.setEntity(entity);
    builder.appendDbAttribute(buf, attr);
    assertEquals("testAttr", buf.toString());
}
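Both assertions hinge on identifier quoting being disabled for the stock JdbcAdapter. A minimal sketch of the behavior the test relies on, assuming DbAdapter#getQuotingStrategy() is the hook that decides whether a name gets wrapped in quote characters (this is not the actual Cayenne source, just the shape of it):

static void appendNameSketch(StringBuilder buf, DbAdapter adapter, DbAttribute attr) {
    // Assumption: with quoting disabled, quotedName() returns the raw attribute
    // name, so both the CHAR and VARCHAR attributes append as "testAttr".
    buf.append(adapter.getQuotingStrategy().quotedName(attr));
}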
Use of org.apache.cayenne.query.BatchQueryRow in project cayenne by apache.
From the class DataDomainFlattenedBucket, the method appendInserts:
/**
 * Responsible for adding the flattened insert queries. It's possible that an insert query for the
 * same DbEntity/ObjectId has already been added from the insert bucket queries, if that object
 * also has an attribute, so we want to merge the data for each insert into a single insert.
 */
void appendInserts(Collection<Query> queries) {
    for (Map.Entry<DbEntity, List<FlattenedArcKey>> entry : insertArcKeys.entrySet()) {
        DbEntity dbEntity = entry.getKey();
        List<FlattenedArcKey> flattenedArcKeys = entry.getValue();

        DataNode node = parent.getDomain().lookupDataNode(dbEntity.getDataMap());

        // TODO: O(N) lookup
        InsertBatchQuery existingQuery = findInsertBatchQuery(queries, dbEntity);
        InsertBatchQuery newQuery = new InsertBatchQuery(dbEntity, 50);

        // merge the snapshots of the FlattenedArcKeys by ObjectId for all to-one
        // relationships, in case we have multiple arcs per object
        Map<ObjectId, Map<String, Object>> toOneSnapshots = new HashMap<>();

        // gather the to-many snapshots (these will become their own insert rows)
        List<Map<String, Object>> toManySnapshots = new ArrayList<>();

        for (FlattenedArcKey flattenedArcKey : flattenedArcKeys) {
            Map<String, Object> joinSnapshot = flattenedArcKey.buildJoinSnapshotForInsert(node);

            if (flattenedArcKey.relationship.isToMany()) {
                toManySnapshots.add(joinSnapshot);
            } else {
                ObjectId objectId = flattenedArcKey.id1.getSourceId();

                Map<String, Object> snapshot = toOneSnapshots.get(objectId);
                if (snapshot == null) {
                    toOneSnapshots.put(objectId, joinSnapshot);
                } else {
                    // merge joinSnapshot data into the existing snapshot
                    snapshot.putAll(joinSnapshot);
                }
            }
        }

        // apply the merged to-one snapshots, merging with an existing BatchQueryRow if present
        for (Map.Entry<ObjectId, Map<String, Object>> flattenedSnapshot : toOneSnapshots.entrySet()) {
            ObjectId objectId = flattenedSnapshot.getKey();
            Map<String, Object> snapshot = flattenedSnapshot.getValue();

            if (existingQuery != null) {
                // TODO: O(N) lookup
                BatchQueryRow existingRow = findRowForObjectId(existingQuery.getRows(), objectId);
                if (existingRow != null) {
                    List<DbAttribute> existingQueryDbAttributes = existingQuery.getDbAttributes();
                    for (int i = 0; i < existingQueryDbAttributes.size(); i++) {
                        Object value = existingRow.getValue(i);
                        if (value != null) {
                            snapshot.put(existingQueryDbAttributes.get(i).getName(), value);
                        }
                    }
                }
            }

            newQuery.add(snapshot, objectId);
        }

        // add the to-many snapshots as new BatchQueryRows
        for (Map<String, Object> toManySnapshot : toManySnapshots) {
            newQuery.add(toManySnapshot);
        }

        if (existingQuery != null) {
            queries.remove(existingQuery);
        }
        queries.add(newQuery);
    }
}
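The two TODO-marked lookups above are linear scans over the pending queries and rows. The real implementations live elsewhere in DataDomainFlattenedBucket; the following are hypothetical sketches consistent with how they are called here (names and visibility assumed):

private InsertBatchQuery findInsertBatchQuery(Collection<Query> queries, DbEntity dbEntity) {
    // O(N) scan for an insert batch already registered for this entity
    for (Query query : queries) {
        if (query instanceof InsertBatchQuery && ((InsertBatchQuery) query).getDbEntity() == dbEntity) {
            return (InsertBatchQuery) query;
        }
    }
    return null;
}

private BatchQueryRow findRowForObjectId(List<BatchQueryRow> rows, ObjectId objectId) {
    // O(N) scan for the row the insert bucket already created for this object
    for (BatchQueryRow row : rows) {
        if (row.getObjectId().equals(objectId)) {
            return row;
        }
    }
    return null;
}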
Use of org.apache.cayenne.query.BatchQueryRow in project cayenne by apache.
From the class BatchAction, the method runAsIndividualQueries:
/**
 * Executes the batch as individual queries over the same prepared statement.
 */
protected void runAsIndividualQueries(Connection connection, BatchTranslator translator,
        OperationObserver delegate, boolean generatesKeys) throws SQLException, Exception {

    JdbcEventLogger logger = dataNode.getJdbcEventLogger();
    boolean useOptimisticLock = query.isUsingOptimisticLocking();

    String queryStr = translator.getSql();

    // log batch SQL execution
    logger.log(queryStr);

    // run batch queries one by one
    DbAdapter adapter = dataNode.getAdapter();

    try (PreparedStatement statement = prepareStatement(connection, queryStr, adapter, generatesKeys)) {
        for (BatchQueryRow row : query.getRows()) {
            DbAttributeBinding[] bindings = translator.updateBindings(row);
            logger.logQueryParameters("bind", bindings);

            bind(adapter, statement, bindings);

            int updated = statement.executeUpdate();
            if (useOptimisticLock && updated != 1) {
                throw new OptimisticLockException(row.getObjectId(), query.getDbEntity(),
                        queryStr, row.getQualifier());
            }

            delegate.nextCount(query, updated);

            if (generatesKeys) {
                processGeneratedKeys(statement, delegate, row);
            }

            logger.logUpdateCount(updated);
        }
    }
}
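The generatesKeys flag flows into both prepareStatement(...) and processGeneratedKeys(...). A plausible sketch of the prepare step, assuming the flag maps onto JDBC's java.sql.Statement.RETURN_GENERATED_KEYS (the real helper is defined elsewhere in BatchAction and may also consult the adapter for vendor-specific generated-keys support):

private PreparedStatement prepareStatement(Connection connection, String queryStr,
        DbAdapter adapter, boolean generatesKeys) throws SQLException {
    // RETURN_GENERATED_KEYS lets processGeneratedKeys(...) read the keys back
    // afterwards via statement.getGeneratedKeys()
    return generatesKeys
            ? connection.prepareStatement(queryStr, Statement.RETURN_GENERATED_KEYS)
            : connection.prepareStatement(queryStr);
}

Unlike JDBC-level batching (addBatch()/executeBatch()), this path issues one executeUpdate() per row, which is what makes the per-row update count available for the optimistic-lock check above.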