Use of org.apache.cayenne.map.DbEntity in the Apache Cayenne project: class SchemaBuilder, method filterDataMap.
/**
 * Removes binary-PK {@link DbEntity} mappings from the map when the
 * {@link DbAdapter} under test does not support binary primary keys.
 * Entities with a binary PK, or with a binary FK pointing at a binary PK,
 * are dropped from the DataMap entirely (including dependent artifacts).
 */
protected void filterDataMap(DataMap map) {
    // nothing to filter if the adapter can handle binary PKs
    if (unitDbAdapter.supportsBinaryPK()) {
        return;
    }

    // collect first, then remove, to avoid mutating the map mid-iteration
    List<DbEntity> entitiesToRemove = new ArrayList<>();
    for (DbEntity ent : map.getDbEntities()) {
        for (DbAttribute attr : ent.getAttributes()) {
            // check for a binary PK, or a binary FK to a binary PK
            if (attr.getType() == Types.BINARY || attr.getType() == Types.VARBINARY
                    || attr.getType() == Types.LONGVARBINARY) {
                if (attr.isPrimaryKey() || attr.isForeignKey()) {
                    entitiesToRemove.add(ent);
                    break;
                }
            }
        }
    }

    for (DbEntity e : entitiesToRemove) {
        // 'true' also removes dependent ObjEntities/relationships
        map.removeDbEntity(e.getName(), true);
    }
}
Use of org.apache.cayenne.map.DbEntity in the Apache Cayenne project: class SchemaBuilder, method dropSchema.
/**
 * Drops all database tables mapped by the given DataMap, using per-entity
 * DROP statements supplied by the node's adapter. Tables are dropped in
 * reverse insert order so dependents go first. Individual drop failures
 * are logged and ignored (best-effort cleanup).
 */
private void dropSchema(DataNode node, DataMap map) throws Exception {
// entities in insert order; iterated backwards below for delete order
List<DbEntity> list = dbEntitiesInInsertOrder(node, map);
try (Connection conn = dataSourceFactory.getSharedDataSource().getConnection()) {
DatabaseMetaData md = conn.getMetaData();
// names of all tables currently in the database, upper-cased
List<String> allTables = new ArrayList<String>();
try (ResultSet tables = md.getTables(null, null, "%", null)) {
while (tables.next()) {
// 'toUpperCase' is needed since most databases are case
// insensitive, and some (e.g. PostgreSQL) store names in
// lower case
String name = tables.getString("TABLE_NAME");
if (name != null)
allTables.add(name.toUpperCase());
}
}
// vendor-specific preparation hook before any DROP runs
unitDbAdapter.willDropTables(conn, map, allTables);
// drop all tables in the map
try (Statement stmt = conn.createStatement()) {
// walk the insert-ordered list backwards: reverse of insert order
// is a safe delete/drop order
ListIterator<DbEntity> it = list.listIterator(list.size());
while (it.hasPrevious()) {
DbEntity ent = it.previous();
// skip entities whose table does not actually exist
if (!allTables.contains(ent.getName().toUpperCase())) {
continue;
}
for (String dropSql : node.getAdapter().dropTableStatements(ent)) {
try {
logger.info(dropSql);
stmt.execute(dropSql);
} catch (SQLException sqe) {
// best-effort: log and keep dropping the remaining tables
logger.warn("Can't drop table " + ent.getName() + ", ignoring...", sqe);
}
}
}
}
// vendor-specific post-drop hook
unitDbAdapter.droppedTables(conn, map);
}
}
Use of org.apache.cayenne.map.DbEntity in the Apache Cayenne project: class SchemaBuilder, method dbEntitiesInDeleteOrder.
/**
 * Returns the DbEntities of the given DataMap ordered for safe deletion
 * (dependents first), after filtering out entities unsupported by the
 * current unit adapter.
 */
protected List<DbEntity> dbEntitiesInDeleteOrder(DataMap dataMap) {
    List<DbEntity> sorted =
            new ArrayList<>(domain.getDataMap(dataMap.getName()).getDbEntities());
    dbEntitiesFilter(sorted);
    // 'true' requests delete (reverse dependency) ordering
    domain.getEntitySorter().sortDbEntities(sorted, true);
    return sorted;
}
Use of org.apache.cayenne.map.DbEntity in the Apache Cayenne project: class SchemaBuilder, method dbEntitiesFilter.
/**
 * Filters the passed entity list <em>in place</em>, removing DbEntities
 * that the current unit adapter cannot handle:
 * <ul>
 * <li>entities with BLOB/CLOB attributes (or names listed in
 * EXTRA_EXCLUDED_FOR_NO_LOB) when the adapter does not support LOBs;</li>
 * <li>entities with a binary PK, or a binary FK to a binary PK, when the
 * adapter does not support binary PKs.</li>
 * </ul>
 */
private void dbEntitiesFilter(List<DbEntity> entities) {
    // filter various unsupported tests...
    boolean excludeLOB = !unitDbAdapter.supportsLobs();
    boolean excludeBinPK = !unitDbAdapter.supportsBinaryPK();

    if (!excludeLOB && !excludeBinPK) {
        return;
    }

    List<DbEntity> filtered = new ArrayList<>();
    for (DbEntity ent : entities) {
        if (excludeLOB && isExcludedForNoLob(ent)) {
            continue;
        }
        if (excludeBinPK && hasBinaryPK(ent)) {
            continue;
        }
        filtered.add(ent);
    }

    // BUG FIX: the previous version assigned 'filtered' to the parameter
    // variable ('entities = filtered'), which is invisible to the caller,
    // so the filter never took effect. Mutate the caller's list instead.
    entities.clear();
    entities.addAll(filtered);
}

/** Returns true if the entity must be excluded when LOBs are unsupported. */
private boolean isExcludedForNoLob(DbEntity ent) {
    // NOTE: EXTRA_EXCLUDED_FOR_NO_LOB must be sorted for binarySearch
    if (Arrays.binarySearch(EXTRA_EXCLUDED_FOR_NO_LOB, ent.getName()) >= 0) {
        return true;
    }
    for (DbAttribute attr : ent.getAttributes()) {
        if (attr.getType() == Types.BLOB || attr.getType() == Types.CLOB) {
            return true;
        }
    }
    return false;
}

/** Returns true if the entity has a binary PK, or a binary FK to a binary PK. */
private boolean hasBinaryPK(DbEntity ent) {
    for (DbAttribute attr : ent.getAttributes()) {
        if (attr.getType() == Types.BINARY || attr.getType() == Types.VARBINARY
                || attr.getType() == Types.LONGVARBINARY) {
            if (attr.isPrimaryKey() || attr.isForeignKey()) {
                return true;
            }
        }
    }
    return false;
}
Use of org.apache.cayenne.map.DbEntity in the Apache Cayenne project: class BatchActionLockingIT, method testRunAsIndividualQueriesOptimisticLockingFailure.
/**
 * Verifies that running a batch DELETE with optimistic locking as
 * individual queries raises OptimisticLockException when the database
 * reports zero affected rows, and that the action neither commits nor
 * rolls back the connection.
 */
@Test
public void testRunAsIndividualQueriesOptimisticLockingFailure() throws Exception {
    EntityResolver resolver = runtime.getDataDomain().getEntityResolver();

    // test with adapter that supports keys...
    JdbcAdapter adapter = buildAdapter(true);

    DbEntity entity = resolver.getObjEntity(SimpleLockingTestEntity.class).getDbEntity();
    List<DbAttribute> lockingAttributes = Arrays.asList(
            entity.getAttribute("LOCKING_TEST_ID"),
            entity.getAttribute("NAME"));

    Map<String, Object> snapshot = new HashMap<>();
    snapshot.put("LOCKING_TEST_ID", 1);

    DeleteBatchQuery query = new DeleteBatchQuery(
            entity, lockingAttributes, Collections.singleton("NAME"), 5);
    query.setUsingOptimisticLocking(true);
    query.add(snapshot);

    DeleteBatchTranslator translator = new DeleteBatchTranslator(query, adapter, null);

    // mock connection: any DELETE reports 0 updated rows -> lock failure
    MockConnection connection = new MockConnection();
    PreparedStatementResultSetHandler handler =
            connection.getPreparedStatementResultSetHandler();
    handler.setExactMatch(false);
    handler.setCaseSensitive(false);
    handler.prepareUpdateCount("DELETE", 0);

    DataNode node = new DataNode();
    node.setAdapter(adapter);
    node.setEntityResolver(resolver);
    node.setRowReaderFactory(mock(RowReaderFactory.class));

    BatchAction action = new BatchAction(query, node, false);
    try {
        action.runAsIndividualQueries(
                connection, translator, new MockOperationObserver(), false);
        fail("No OptimisticLockingFailureException thrown.");
    } catch (OptimisticLockException expected) {
        // expected: zero update count under optimistic locking
    }

    assertEquals(0, connection.getNumberCommits());
    assertEquals(0, connection.getNumberRollbacks());
}
Aggregations