Usage of org.datanucleus.ClassLoaderResolver in the datanucleus-rdbms project (by datanucleus):
class RDBMSStoreManager, method initialiseIdentifierFactory.
/**
 * Method to create the IdentifierFactory to be used by this store.
 * Relies on the datastore adapter existing before creation.
 * The factory is selected by the "datanucleus.identifier.factory" property
 * ("datanucleus2", "jpa"/"jakarta", "datanucleus1", "jpox"), with any other
 * value resolved through the plugin mechanism.
 * @param nucleusContext context
 */
protected void initialiseIdentifierFactory(NucleusContext nucleusContext) {
    if (dba == null) {
        throw new NucleusException("DatastoreAdapter not yet created so cannot create IdentifierFactory!");
    }
    String idFactoryName = getStringProperty(PropertyNames.PROPERTY_IDENTIFIER_FACTORY);
    try {
        // Create the control properties for identifier generation (typed map rather than raw types)
        Map<String, Object> props = new HashMap<>();
        if (catalogName != null) {
            props.put(IdentifierFactory.PROPERTY_DEFAULT_CATALOG, catalogName);
        }
        if (schemaName != null) {
            props.put(IdentifierFactory.PROPERTY_DEFAULT_SCHEMA, schemaName);
        }
        // Identifier case falls back to the adapter's default when not explicitly configured
        String val = getStringProperty(PropertyNames.PROPERTY_IDENTIFIER_CASE);
        props.put(IdentifierFactory.PROPERTY_REQUIRED_CASE, val != null ? val : getDefaultIdentifierCase());
        val = getStringProperty(PropertyNames.PROPERTY_IDENTIFIER_WORD_SEPARATOR);
        if (val != null) {
            props.put(IdentifierFactory.PROPERTY_WORD_SEPARATOR, val);
        }
        val = getStringProperty(PropertyNames.PROPERTY_IDENTIFIER_TABLE_PREFIX);
        if (val != null) {
            props.put(IdentifierFactory.PROPERTY_TABLE_PREFIX, val);
        }
        val = getStringProperty(PropertyNames.PROPERTY_IDENTIFIER_TABLE_SUFFIX);
        if (val != null) {
            props.put(IdentifierFactory.PROPERTY_TABLE_SUFFIX, val);
        }
        props.put(IdentifierFactory.PROPERTY_NAMING_FACTORY, getNamingFactory());

        // Create the IdentifierFactory
        ClassLoaderResolver clr = nucleusContext.getClassLoaderResolver(null);
        if ("datanucleus2".equalsIgnoreCase(idFactoryName)) {
            identifierFactory = new DN2IdentifierFactory(dba, clr, props);
        } else if ("jpa".equalsIgnoreCase(idFactoryName) || "jakarta".equalsIgnoreCase(idFactoryName)) {
            identifierFactory = new JPAIdentifierFactory(dba, clr, props);
        } else if ("datanucleus1".equalsIgnoreCase(idFactoryName)) {
            identifierFactory = new DNIdentifierFactory(dba, clr, props);
        } else if ("jpox".equalsIgnoreCase(idFactoryName)) {
            identifierFactory = new JPOXIdentifierFactory(dba, clr, props);
        } else {
            // Fallback to the plugin mechanism; reuse the resolver obtained above
            // rather than fetching a second one from the context
            Class[] argTypes = new Class[] { DatastoreAdapter.class, ClassConstants.CLASS_LOADER_RESOLVER, Map.class };
            Object[] args = new Object[] { dba, clr, props };
            identifierFactory = (IdentifierFactory) nucleusContext.getPluginManager().createExecutableExtension("org.datanucleus.store.rdbms.identifierfactory", "name", idFactoryName, "class-name", argTypes, args);
        }
    } catch (ClassNotFoundException cnfe) {
        // The configured factory name did not resolve to a loadable class
        throw new NucleusUserException(Localiser.msg("039004", idFactoryName), cnfe).setFatal();
    } catch (Exception e) {
        NucleusLogger.PERSISTENCE.error("Exception creating IdentifierFactory", e);
        throw new NucleusException(Localiser.msg("039005", idFactoryName), e).setFatal();
    }
}
Usage of org.datanucleus.ClassLoaderResolver in the datanucleus-rdbms project (by datanucleus):
class BaseDatastoreAdapter, method getCreateTableStatement.
/**
 * Returns the appropriate SQL to create the given table having the given
 * columns. No column constraints or key definitions should be included.
 * It should return something like:
 * <pre>
 * CREATE TABLE FOO ( BAR VARCHAR(30), BAZ INTEGER )
 * </pre>
 * Depending on which options this adapter supports, PRIMARY KEY, UNIQUE and
 * CHECK clauses may additionally be embedded in the statement.
 *
 * @param table The table to create.
 * @param columns The columns of the table.
 * @param props Properties for controlling the table creation
 * @param factory Factory for identifiers
 * @return The text of the SQL statement.
 */
public String getCreateTableStatement(TableImpl table, Column[] columns, Properties props, IdentifierFactory factory) {
StringBuilder createStmt = new StringBuilder();
// Only indent column/constraint lines when the adapter emits a line continuation
String indent = " ";
if (getContinuationString().length() == 0) {
indent = "";
}
// CREATE TABLE with column specifiers
createStmt.append("CREATE TABLE ").append(table.toString()).append(getContinuationString()).append("(").append(getContinuationString());
for (int i = 0; i < columns.length; ++i) {
if (i > 0) {
createStmt.append(",").append(getContinuationString());
}
createStmt.append(indent).append(columns[i].getSQLDefinition());
}
// PRIMARY KEY(col[,col])
if (supportsOption(PRIMARYKEY_IN_CREATE_STATEMENTS)) {
PrimaryKey pk = table.getPrimaryKey();
if (pk != null && pk.getNumberOfColumns() > 0) {
boolean includePk = true;
if (supportsOption(IDENTITY_PK_IN_CREATE_TABLE_COLUMN_DEF)) {
for (Column pkCol : pk.getColumns()) {
if (pkCol.isIdentity()) {
// This column is auto-increment and is specified in the column def so ignore here
includePk = false;
break;
}
}
}
if (includePk) {
createStmt.append(",").append(getContinuationString());
if (pk.getName() != null) {
// Named PK: emit "CONSTRAINT <name> <pk-def>" with the name in the adapter's case
String identifier = factory.getIdentifierInAdapterCase(pk.getName());
createStmt.append(indent).append("CONSTRAINT ").append(identifier).append(" ").append(pk.toString());
} else {
createStmt.append(indent).append(pk.toString());
}
}
}
}
// UNIQUE( col [,col] )
if (supportsOption(UNIQUE_IN_END_CREATE_STATEMENTS)) {
StringBuilder uniqueConstraintStmt = new StringBuilder();
for (int i = 0; i < columns.length; ++i) {
if (columns[i].isUnique()) {
// First unique column opens the clause; subsequent ones just add a comma
if (uniqueConstraintStmt.length() < 1) {
uniqueConstraintStmt.append(",").append(getContinuationString());
uniqueConstraintStmt.append(indent).append(" UNIQUE (");
} else {
uniqueConstraintStmt.append(",");
}
uniqueConstraintStmt.append(columns[i].getIdentifier().toString());
}
}
// Non-empty builder means at least one unique column was found: close and append
if (uniqueConstraintStmt.length() > 1) {
uniqueConstraintStmt.append(")");
createStmt.append(uniqueConstraintStmt.toString());
}
}
// FOREIGN KEY(col [,col] ) REFERENCES {TBL} (col [,col])
if (supportsOption(FK_IN_END_CREATE_STATEMENTS)) {
StringBuilder fkConstraintStmt = new StringBuilder();
ClassLoaderResolver clr = table.getStoreManager().getNucleusContext().getClassLoaderResolver(null);
List<ForeignKey> fks = table.getExpectedForeignKeys(clr);
if (fks != null && !fks.isEmpty()) {
for (ForeignKey fk : fks) {
NucleusLogger.GENERAL.debug(">> TODO Add FK in CREATE TABLE as " + fk);
// TODO Add the FK. Make sure that the other table exists
}
}
// NOTE(review): fkConstraintStmt is never populated above (see TODO), so this
// condition is currently always false and no FK clause is ever emitted here.
if (fkConstraintStmt.length() > 1) {
createStmt.append(fkConstraintStmt.toString());
}
}
// CHECK (column_identifier IN (literal[,literal]))
if (supportsOption(CHECK_IN_END_CREATE_STATEMENTS)) {
StringBuilder checkConstraintStmt = new StringBuilder();
for (int i = 0; i < columns.length; ++i) {
if (columns[i].getCheckConstraints() != null) {
checkConstraintStmt.append(",").append(getContinuationString());
checkConstraintStmt.append(indent).append(columns[i].getCheckConstraints());
}
}
if (checkConstraintStmt.length() > 1) {
createStmt.append(checkConstraintStmt.toString());
}
}
createStmt.append(getContinuationString()).append(")");
return createStmt.toString();
}
Usage of org.datanucleus.ClassLoaderResolver in the datanucleus-rdbms project (by datanucleus):
class BaseDatastoreAdapter, method initialise.
/**
 * Initialise this adapter against the datastore: loads the column mappings from the
 * plugin mechanism, initialises the datastore types, deregisters mappings for JDBC
 * types the driver does not report, and logs a mapping summary when debug is enabled.
 * @param handler Schema handler providing the store manager and types info
 * @param mconn Managed connection to the datastore
 */
public void initialise(StoreSchemaHandler handler, ManagedConnection mconn) {
    // Initialise the datastore mappings for this datastore
    RDBMSStoreManager storeMgr = (RDBMSStoreManager) handler.getStoreManager();
    ClassLoaderResolver clr = storeMgr.getNucleusContext().getClassLoaderResolver(null);
    loadColumnMappings(storeMgr.getNucleusContext().getPluginManager(), clr);

    // Initialise any types, including artificial ones added for the datastore when not provided by the JDBC driver
    initialiseTypes(handler, mconn);

    RDBMSTypesInfo types = (RDBMSTypesInfo) handler.getSchemaData(mconn.getConnection(), RDBMSSchemaHandler.TYPE_TYPES, null);
    // Both maps are pruned by the same rule, so a single helper handles each
    deregisterMissingJdbcTypes(types, supportedJdbcTypesById);
    deregisterMissingJdbcTypes(types, unsupportedJdbcTypesById);

    // Log the datastore mapping summary, for each Java type, showing the supported JDBC-types and SQL-types.
    if (NucleusLogger.DATASTORE.isDebugEnabled()) {
        // TreeSet gives a stable, sorted order for the log output
        Collection<String> javaTypes = new TreeSet<>(datastoreTypeMappingsByJavaType.keySet());
        for (String javaType : javaTypes) {
            ColumnTypeMappings datastoreTypeMappings = datastoreTypeMappingsByJavaType.get(javaType);
            NucleusLogger.DATASTORE.debug(Localiser.msg("054009", javaType, StringUtils.collectionToString(datastoreTypeMappings.columnMappingByJdbcType.keySet()), StringUtils.collectionToString(datastoreTypeMappings.columnMappingBySqlType.keySet()), datastoreTypeMappings.defaultJdbcType, datastoreTypeMappings.defaultSqlType));
        }
    }
}

/**
 * Deregister the column mappings for any JDBC type in the supplied map that is absent
 * from the types info reported for this datastore.
 * @param types Types info obtained from the schema handler
 * @param jdbcTypesById Map of JDBC type name keyed by JDBC type number
 */
private void deregisterMissingJdbcTypes(RDBMSTypesInfo types, Map<Integer, String> jdbcTypesById) {
    for (Map.Entry<Integer, String> entry : jdbcTypesById.entrySet()) {
        if (types.getChild("" + entry.getKey()) == null) {
            // JDBC type not supported by adapter so deregister the mapping. TODO Remove any entries from built-in types to avoid this
            deregisterColumnMappingsForJDBCType(entry.getValue());
        }
    }
}
Usage of org.datanucleus.ClassLoaderResolver in the datanucleus-rdbms project (by datanucleus):
class RDBMSPersistenceHandler, method locateObject.
/**
 * Locates this object in the datastore by executing the locate request
 * against the table mapped to the object's class.
 * @param sm StateManager for the object to be found
 * @throws NucleusObjectNotFoundException if the object doesnt exist
 * @throws NucleusDataStoreException when an error occurs in the datastore communication
 */
@Override
public void locateObject(DNStateManager sm) {
    String className = sm.getObject().getClass().getName();
    ClassLoaderResolver clr = sm.getExecutionContext().getClassLoaderResolver();
    DatastoreClass targetTable = getDatastoreClass(className, clr);
    getLocateRequest(targetTable, className).execute(sm);
}
Usage of org.datanucleus.ClassLoaderResolver in the datanucleus-rdbms project (by datanucleus):
class RDBMSPersistenceHandler, method insertObject.
/**
 * Inserts a persistent object into the database.
 * The insert can take place in several steps, one insert per table that it is stored in.
 * e.g When persisting an object that uses "new-table" inheritance for each level of the inheritance tree then will get an INSERT into each table.
 * When persisting an object that uses "complete-table" inheritance then will get a single INSERT into its table.
 * @param sm StateManager for the object to be inserted.
 * @throws NucleusUserException if the class uses "subclass-table" inheritance and so has no table of its own
 * @throws NucleusDataStoreException when an error occurs in the datastore communication
 */
@Override
public void insertObject(DNStateManager sm) {
    // Check if read-only so update not permitted
    assertReadOnlyForUpdateOfObject(sm);

    // Check if we need to do any updates to the schema before inserting this object
    checkForSchemaUpdatesForFieldsOfObject(sm, sm.getLoadedFieldNumbers());

    ExecutionContext ec = sm.getExecutionContext();
    // Reuse the already-fetched ExecutionContext rather than calling sm.getExecutionContext() again
    ClassLoaderResolver clr = ec.getClassLoaderResolver();
    String className = sm.getClassMetaData().getFullClassName();
    DatastoreClass dc = getDatastoreClass(className, clr);
    if (dc == null) {
        if (sm.getClassMetaData().getInheritanceMetaData().getStrategy() == InheritanceStrategy.SUBCLASS_TABLE) {
            // Instances are persisted in subclass tables, so there is no table to insert into
            throw new NucleusUserException(Localiser.msg("032013", className));
        }
        throw new NucleusException(Localiser.msg("032014", className, sm.getClassMetaData().getInheritanceMetaData().getStrategy())).setFatal();
    }

    if (ec.getStatistics() != null) {
        ec.getStatistics().incrementInsertCount();
    }

    insertObjectInTable(dc, sm, clr);
}
Aggregations