Use of org.apache.cayenne.query.InsertBatchQuery in project cayenne by apache.
The class BatchActionIT, method testHasGeneratedKeys2:
@Test
public void testHasGeneratedKeys2() throws Exception {
    EntityResolver resolver = runtime.getChannel().getEntityResolver();

    // test with an adapter that does not support generated keys...
    JdbcAdapter adapter = buildAdapter(false);

    DataNode node = new DataNode();
    node.setAdapter(adapter);
    node.setEntityResolver(resolver);
    node.setRowReaderFactory(mock(RowReaderFactory.class));

    InsertBatchQuery batch2 = new InsertBatchQuery(resolver.getObjEntity(Artist.class).getDbEntity(), 5);
    assertFalse(new BatchAction(batch2, node, false).hasGeneratedKeys());
}
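The buildAdapter(boolean) helper is not part of this excerpt. A minimal sketch of what it plausibly does, assuming access to an injected AdhocObjectFactory like the objectFactory used in the testConstructor snippet below; the project's actual helper may construct the adapter differently:

// Hypothetical sketch of the buildAdapter(boolean) helper referenced above.
// The flag drives JdbcAdapter.setSupportsGeneratedKeys(), which is what
// BatchAction.hasGeneratedKeys() consults on the adapter side.
private JdbcAdapter buildAdapter(boolean supportsGeneratedKeys) {
    JdbcAdapter adapter = objectFactory.newInstance(JdbcAdapter.class, JdbcAdapter.class.getName());
    adapter.setSupportsGeneratedKeys(supportsGeneratedKeys);
    return adapter;
}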
Use of org.apache.cayenne.query.InsertBatchQuery in project cayenne by apache.
The class InsertBatchTranslatorIT, method testConstructor:
@Test
public void testConstructor() {
    DbAdapter adapter = objectFactory.newInstance(DbAdapter.class, JdbcAdapter.class.getName());
    InsertBatchQuery query = mock(InsertBatchQuery.class);
    InsertBatchTranslator builder = new InsertBatchTranslator(query, adapter);

    assertSame(adapter, builder.context.getAdapter());
    assertSame(query, builder.context.getQuery());
}
Use of org.apache.cayenne.query.InsertBatchQuery in project cayenne by apache.
The class InsertBatchTranslatorIT, method testCreateSqlStringWithIdentifiersQuote:
@Test
public void testCreateSqlStringWithIdentifiersQuote() {
    DbEntity entity = runtime.getDataDomain().getEntityResolver()
            .getObjEntity(SimpleLockingTestEntity.class).getDbEntity();
    try {
        entity.getDataMap().setQuotingSQLIdentifiers(true);

        JdbcAdapter adapter = (JdbcAdapter) this.adapter;
        InsertBatchQuery insertQuery = new InsertBatchQuery(entity, 1);
        InsertBatchTranslator builder = new InsertBatchTranslator(insertQuery, adapter);
        String generatedSql = builder.getSql();

        String charStart = unitAdapter.getIdentifiersStartQuote();
        String charEnd = unitAdapter.getIdentifiersEndQuote();

        assertNotNull(generatedSql);
        assertEquals("INSERT INTO " + charStart + entity.getName() + charEnd
                + "( " + charStart + "DESCRIPTION" + charEnd
                + ", " + charStart + "INT_COLUMN_NOTNULL" + charEnd
                + ", " + charStart + "INT_COLUMN_NULL" + charEnd
                + ", " + charStart + "LOCKING_TEST_ID" + charEnd
                + ", " + charStart + "NAME" + charEnd
                + ") VALUES( ?, ?, ?, ?, ?)", generatedSql);
    } finally {
        entity.getDataMap().setQuotingSQLIdentifiers(false);
    }
}
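Note that the batch above is empty; that is fine for SQL generation, since rows only matter at execution time. A hedged illustration (not from the project's tests) of how rows added to an InsertBatchQuery each supply one set of bindings for the generated "?" placeholders:

// Illustration only: each snapshot added to the batch becomes one
// BatchQueryRow, i.e. one set of values bound to the translated
// "VALUES( ?, ?, ...)" placeholders when the batch executes.
InsertBatchQuery batch = new InsertBatchQuery(entity, 2);
Map<String, Object> snapshot = new HashMap<>();
snapshot.put("NAME", "first row");
snapshot.put("DESCRIPTION", "bound to the first set of placeholders");
batch.add(snapshot);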
Use of org.apache.cayenne.query.InsertBatchQuery in project cayenne by apache.
The class DefaultDataDomainFlushActionTest, method createQueries:
@Test
public void createQueries() {
    ObjectId id1 = ObjectId.of("test", "id", 1);
    ObjectId id2 = ObjectId.of("test", "id", 2);
    ObjectId id3 = ObjectId.of("test2", "id", 3);
    ObjectId id4 = ObjectId.of("test2", "id", 4);
    ObjectId id5 = ObjectId.of("test", "id", 5);
    ObjectId id6 = ObjectId.of("test2", "id", 6);
    ObjectId id7 = ObjectId.of("test", "id", 7);

    DbEntity test = mockEntity("test");
    DbEntity test2 = mockEntity("test2");

    List<DbRowOp> ops = new ArrayList<>();
    ops.add(new InsertDbRowOp(mockObject(id1), test, id1));
    ops.add(new InsertDbRowOp(mockObject(id2), test, id2));
    ops.add(new InsertDbRowOp(mockObject(id3), test2, id3));
    ops.add(new InsertDbRowOp(mockObject(id4), test2, id4));
    ops.add(new UpdateDbRowOp(mockObject(id5), test, id5));
    ops.add(new DeleteDbRowOp(mockObject(id6), test2, id6));
    ops.add(new DeleteDbRowOp(mockObject(id7), test, id7));

    DefaultDataDomainFlushAction action = mock(DefaultDataDomainFlushAction.class);
    when(action.createQueries((List<DbRowOp>) any(List.class))).thenCallRealMethod();

    List<? extends Query> queries = action.createQueries(ops);
    assertEquals(4, queries.size());

    assertThat(queries.get(0), instanceOf(InsertBatchQuery.class));
    InsertBatchQuery insert1 = (InsertBatchQuery) queries.get(0);
    assertSame(test, insert1.getDbEntity());
    assertEquals(2, insert1.getRows().size());

    assertThat(queries.get(1), instanceOf(InsertBatchQuery.class));
    InsertBatchQuery insert2 = (InsertBatchQuery) queries.get(1);
    assertSame(test2, insert2.getDbEntity());
    assertEquals(2, insert2.getRows().size());

    assertThat(queries.get(2), instanceOf(DeleteBatchQuery.class));
    DeleteBatchQuery delete1 = (DeleteBatchQuery) queries.get(2);
    assertSame(test2, delete1.getDbEntity());
    assertEquals(1, delete1.getRows().size());

    assertThat(queries.get(3), instanceOf(DeleteBatchQuery.class));
    DeleteBatchQuery delete2 = (DeleteBatchQuery) queries.get(3);
    assertSame(test, delete2.getDbEntity());
    assertEquals(1, delete2.getRows().size());
}
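The mockEntity and mockObject helpers are not shown in this excerpt. Plausible Mockito-based sketches of them, assuming they only need to produce stubs with a stable name and ObjectId (the project's actual helpers may stub more than this):

// Hypothetical reconstructions of the helpers used in createQueries().
private DbEntity mockEntity(String name) {
    DbEntity entity = mock(DbEntity.class);
    when(entity.getName()).thenReturn(name);
    return entity;
}

private Persistent mockObject(ObjectId id) {
    Persistent object = mock(Persistent.class);
    when(object.getObjectId()).thenReturn(id);
    return object;
}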
Use of org.apache.cayenne.query.InsertBatchQuery in project cayenne by apache.
The class DataDomainFlattenedBucket, method appendInserts:
/**
 * Responsible for adding the flattened insert queries. It is possible that an insert query for the same
 * DbEntity/ObjectId has already been added from the insert bucket queries, if that object also has an
 * attribute. So we want to merge the data for each insert into a single insert.
 */
void appendInserts(Collection<Query> queries) {
    for (Map.Entry<DbEntity, List<FlattenedArcKey>> entry : insertArcKeys.entrySet()) {
        DbEntity dbEntity = entry.getKey();
        List<FlattenedArcKey> flattenedArcKeys = entry.getValue();

        DataNode node = parent.getDomain().lookupDataNode(dbEntity.getDataMap());

        // TODO: O(N) lookup
        InsertBatchQuery existingQuery = findInsertBatchQuery(queries, dbEntity);
        InsertBatchQuery newQuery = new InsertBatchQuery(dbEntity, 50);

        // merge the snapshots of the FlattenedArcKeys by ObjectId for all to-one relationships,
        // in case we have multiple arcs per object
        Map<ObjectId, Map<String, Object>> toOneSnapshots = new HashMap<>();

        // gather the list of the to-many snapshots (these will become their own insert rows)
        List<Map<String, Object>> toManySnapshots = new ArrayList<>();

        for (FlattenedArcKey flattenedArcKey : flattenedArcKeys) {
            Map<String, Object> joinSnapshot = flattenedArcKey.buildJoinSnapshotForInsert(node);
            if (flattenedArcKey.relationship.isToMany()) {
                toManySnapshots.add(joinSnapshot);
            } else {
                ObjectId objectId = flattenedArcKey.id1.getSourceId();
                Map<String, Object> snapshot = toOneSnapshots.get(objectId);
                if (snapshot == null) {
                    toOneSnapshots.put(objectId, joinSnapshot);
                } else {
                    // merge joinSnapshot data with the existing snapshot
                    for (Map.Entry<String, Object> dbValue : joinSnapshot.entrySet()) {
                        snapshot.put(dbValue.getKey(), dbValue.getValue());
                    }
                }
            }
        }

        // apply the merged to-one snapshots, possibly merging each with an existing BatchQueryRow
        for (Map.Entry<ObjectId, Map<String, Object>> flattenedSnapshot : toOneSnapshots.entrySet()) {
            ObjectId objectId = flattenedSnapshot.getKey();
            Map<String, Object> snapshot = flattenedSnapshot.getValue();

            if (existingQuery != null) {
                // TODO: O(N) lookup
                BatchQueryRow existingRow = findRowForObjectId(existingQuery.getRows(), objectId);
                if (existingRow != null) {
                    List<DbAttribute> existingQueryDbAttributes = existingQuery.getDbAttributes();
                    for (int i = 0; i < existingQueryDbAttributes.size(); i++) {
                        Object value = existingRow.getValue(i);
                        if (value != null) {
                            snapshot.put(existingQueryDbAttributes.get(i).getName(), value);
                        }
                    }
                }
            }

            newQuery.add(snapshot, objectId);
        }

        // apply the to-many snapshots as new BatchQueryRows
        for (Map<String, Object> toManySnapshot : toManySnapshots) {
            newQuery.add(toManySnapshot);
        }

        if (existingQuery != null) {
            queries.remove(existingQuery);
        }
        queries.add(newQuery);
    }
}
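The two "O(N) lookup" TODOs refer to linear scans over the query and row collections. Sketches of what findInsertBatchQuery and findRowForObjectId plausibly look like, given how they are called above (the project's actual implementations may differ in detail):

// Linear scan for an existing InsertBatchQuery targeting the same DbEntity.
private InsertBatchQuery findInsertBatchQuery(Collection<Query> queries, DbEntity entity) {
    for (Query query : queries) {
        if (query instanceof InsertBatchQuery) {
            InsertBatchQuery insertQuery = (InsertBatchQuery) query;
            if (insertQuery.getDbEntity().equals(entity)) {
                return insertQuery;
            }
        }
    }
    return null;
}

// Linear scan for a row previously queued for the same ObjectId.
private BatchQueryRow findRowForObjectId(List<BatchQueryRow> rows, ObjectId objectId) {
    for (BatchQueryRow row : rows) {
        if (row.getObjectId().equals(objectId)) {
            return row;
        }
    }
    return null;
}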