Use of org.apache.cayenne.query.InsertBatchQuery in the Apache Cayenne project: class DataDomainFlushObserver, method nextGeneratedRows.
/**
 * Processes generated keys.
 *
 * @since 1.2
 */
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
public void nextGeneratedRows(Query query, ResultIterator keysIterator, ObjectId idToUpdate) {
    // Drain the iterator first so it is guaranteed to be closed,
    // even if one of the validations below throws.
    List<DataRow> generatedRows;
    try {
        generatedRows = (List<DataRow>) keysIterator.allRows();
    } finally {
        keysIterator.close();
    }

    if (!(query instanceof InsertBatchQuery)) {
        throw new CayenneRuntimeException("Generated keys only supported for InsertBatchQuery, instead got %s", query);
    }

    // No id, or id is already permanent — nothing to patch up.
    if (idToUpdate == null || !idToUpdate.isTemporary()) {
        // why would this happen?
        return;
    }

    if (generatedRows.size() != 1) {
        throw new CayenneRuntimeException("One and only one PK row is expected, instead got %d", generatedRows.size());
    }

    DataRow pkRow = generatedRows.get(0);
    // empty key?
    if (pkRow.size() == 0) {
        throw new CayenneRuntimeException("Empty key generated.");
    }
    // infer the key name and currently will only support a single column...
    if (pkRow.size() > 1) {
        throw new CayenneRuntimeException("Only a single column autogenerated PK is supported. " + "Generated key: %s", pkRow);
    }

    BatchQuery batch = (BatchQuery) query;
    for (DbAttribute generated : batch.getDbEntity().getGeneratedAttributes()) {
        // Non-PK generated attributes are DB DEFAULT values — skip them.
        if (!generated.isPrimaryKey()) {
            continue;
        }

        Object pkValue = pkRow.values().iterator().next();

        // Log the generated PK
        logger.logGeneratedKey(generated, pkValue);

        // The generated key is the latest thing that exists in the DB,
        // so it overrides any value that may already be in the map.
        idToUpdate.getReplacementIdMap().put(generated.getName(), pkValue);
        break;
    }
}
Use of org.apache.cayenne.query.InsertBatchQuery in the Apache Cayenne project: class QueryCreatorVisitor, method visitInsert.
/**
 * Adds an insert row operation to the current insert batch, starting a
 * new {@link InsertBatchQuery} whenever the row does not belong to the
 * batch that was most recently built.
 */
@Override
public Void visitInsert(InsertDbRowOp dbRow) {
    InsertBatchQuery batch;
    // Reuse the running batch if this row fits it; otherwise open a fresh one.
    if (lastRow != null && lastRow.isSameBatch(dbRow)) {
        batch = (InsertBatchQuery) lastBatch;
    } else {
        batch = new InsertBatchQuery(dbRow.getEntity(), batchSize);
        queryList.add(batch);
        lastBatch = batch;
    }
    batch.add(dbRow.getValues().getSnapshot(), dbRow.getChangeId());
    lastRow = dbRow;
    return null;
}
Use of org.apache.cayenne.query.InsertBatchQuery in the Apache Cayenne project: class InsertBatchTranslatorIT, method testCreateSqlString.
/**
 * Verifies that the insert batch translator produces the expected
 * column-ordered INSERT statement with positional parameters.
 */
@Test
public void testCreateSqlString() throws Exception {
    // Resolve the DbEntity backing the test ObjEntity.
    DbEntity dbEntity = runtime.getDataDomain().getEntityResolver().getObjEntity(SimpleLockingTestEntity.class).getDbEntity();
    DbAdapter jdbcAdapter = objectFactory.newInstance(DbAdapter.class, JdbcAdapter.class.getName());

    InsertBatchQuery query = new InsertBatchQuery(dbEntity, 1);
    InsertBatchTranslator translator = new InsertBatchTranslator(query, jdbcAdapter);

    String sql = translator.getSql();
    assertNotNull(sql);
    assertEquals("INSERT INTO " + dbEntity.getName() + " (DESCRIPTION, INT_COLUMN_NOTNULL, INT_COLUMN_NULL, LOCKING_TEST_ID, NAME) VALUES (?, ?, ?, ?, ?)", sql);
}
Use of org.apache.cayenne.query.InsertBatchQuery in the Apache Cayenne project: class BatchActionGeneratedIT, method testHasGeneratedKeys1.
/**
 * An adapter that supports generated keys should make a BatchAction over
 * an entity with a generated column report that it has generated keys.
 */
@Test
public void testHasGeneratedKeys1() throws Exception {
    EntityResolver resolver = runtime.getChannel().getEntityResolver();

    // Adapter configured WITH generated-keys support.
    JdbcAdapter keySupportingAdapter = buildAdapter(true);

    DataNode node = new DataNode();
    node.setAdapter(keySupportingAdapter);
    node.setEntityResolver(resolver);
    node.setRowReaderFactory(mock(RowReaderFactory.class));

    InsertBatchQuery generatedPkBatch = new InsertBatchQuery(resolver.getObjEntity(GeneratedColumnTestEntity.class).getDbEntity(), 5);
    assertTrue(new BatchAction(generatedPkBatch, node, false).hasGeneratedKeys());
}
Use of org.apache.cayenne.query.InsertBatchQuery in the Apache Cayenne project: class BatchActionIT, method testHasGeneratedKeys1.
/**
 * Even with an adapter that supports generated keys, a BatchAction over an
 * entity without generated columns must not report generated keys.
 */
@Test
public void testHasGeneratedKeys1() throws Exception {
    EntityResolver resolver = runtime.getChannel().getEntityResolver();

    // Adapter configured WITH generated-keys support.
    JdbcAdapter keySupportingAdapter = buildAdapter(true);

    DataNode node = new DataNode();
    node.setAdapter(keySupportingAdapter);
    node.setEntityResolver(resolver);
    node.setRowReaderFactory(mock(RowReaderFactory.class));

    InsertBatchQuery plainEntityBatch = new InsertBatchQuery(resolver.getObjEntity(Artist.class).getDbEntity(), 5);
    assertFalse(new BatchAction(plainEntityBatch, node, false).hasGeneratedKeys());
}
Aggregations