Example usage of org.hibernate.boot.model.relational.SqlStringGenerationContext in the hibernate-orm project: the dropFromMetadata method of the SchemaDropperImpl class.
/**
 * Generates and applies the DROP statements for everything modeled by the boot
 * {@code metadata}: auxiliary database objects, constraints/indexes, tables,
 * sequences and - when namespace management is enabled - schemas and catalogs.
 *
 * @param metadata the boot-time model whose objects are to be dropped
 * @param options execution options (configuration values, filters, namespace handling)
 * @param contributableInclusionFilter selects which contributed objects participate
 * @param dialect the SQL dialect used to render the drop commands
 * @param formatter formats the rendered SQL before it is sent to the targets
 * @param targets destinations (script file, database, ...) receiving the DDL
 */
private void dropFromMetadata(Metadata metadata, ExecutionOptions options, ContributableMatcher contributableInclusionFilter, Dialect dialect, Formatter formatter, GenerationTarget... targets) {
final Database database = metadata.getDatabase();
// Context carrying the default catalog/schema used when rendering qualified names.
SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap(metadata.getDatabase().getJdbcEnvironment(), database, options.getConfigurationValues());
boolean tryToDropCatalogs = false;
boolean tryToDropSchemas = false;
// Only attempt to drop namespaces when asked to manage them; the dialect's
// ability to *create* a schema/catalog is used as the proxy for drop support.
if (options.shouldManageNamespaces()) {
if (dialect.canCreateSchema()) {
tryToDropSchemas = true;
}
if (dialect.canCreateCatalog()) {
tryToDropCatalogs = true;
}
}
// Fed to checkExportIdentifier to detect duplicate export identifiers.
final Set<String> exportIdentifiers = CollectionHelper.setOfSize(50);
// First pass: drop the auxiliary objects flagged as created before tables.
for (AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects()) {
if (!auxiliaryDatabaseObject.beforeTablesOnCreation()) {
continue;
}
if (!auxiliaryDatabaseObject.appliesToDialect(dialect)) {
continue;
}
applySqlStrings(dialect.getAuxiliaryDatabaseObjectExporter().getSqlDropStrings(auxiliaryDatabaseObject, metadata, sqlStringGenerationContext), formatter, options, targets);
}
// Per-namespace pass: constraints first, then tables, then sequences.
for (Namespace namespace : database.getNamespaces()) {
if (!options.getSchemaFilter().includeNamespace(namespace)) {
continue;
}
// we need to drop all constraints/indexes prior to dropping the tables
applyConstraintDropping(namespace, metadata, formatter, options, sqlStringGenerationContext, contributableInclusionFilter, targets);
// now it's safe to drop the tables
for (Table table : namespace.getTables()) {
// Skip non-physical tables and anything excluded by the filters.
if (!table.isPhysicalTable()) {
continue;
}
if (!options.getSchemaFilter().includeTable(table)) {
continue;
}
if (!contributableInclusionFilter.matches(table)) {
continue;
}
checkExportIdentifier(table, exportIdentifiers);
applySqlStrings(dialect.getTableExporter().getSqlDropStrings(table, metadata, sqlStringGenerationContext), formatter, options, targets);
}
for (Sequence sequence : namespace.getSequences()) {
if (!options.getSchemaFilter().includeSequence(sequence)) {
continue;
}
if (!contributableInclusionFilter.matches(sequence)) {
continue;
}
checkExportIdentifier(sequence, exportIdentifiers);
applySqlStrings(dialect.getSequenceExporter().getSqlDropStrings(sequence, metadata, sqlStringGenerationContext), formatter, options, targets);
}
}
// Second pass: drop the remaining auxiliary objects (those NOT created before tables).
for (AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects()) {
if (auxiliaryDatabaseObject.beforeTablesOnCreation()) {
continue;
}
if (!auxiliaryDatabaseObject.appliesToDialect(dialect)) {
continue;
}
applySqlStrings(auxiliaryDatabaseObject.sqlDropStrings(sqlStringGenerationContext), formatter, options, targets);
}
// Finally, drop the schemas/catalogs themselves when namespace management applies.
if (tryToDropCatalogs || tryToDropSchemas) {
// Tracks catalogs already dropped (keyed by logical name) so a catalog
// shared by several namespaces is only dropped once.
Set<Identifier> exportedCatalogs = new HashSet<>();
for (Namespace namespace : database.getNamespaces()) {
if (!options.getSchemaFilter().includeNamespace(namespace)) {
continue;
}
if (tryToDropSchemas && namespace.getPhysicalName().getSchema() != null) {
applySqlStrings(dialect.getDropSchemaCommand(namespace.getPhysicalName().getSchema().render(dialect)), formatter, options, targets);
}
if (tryToDropCatalogs) {
final Identifier catalogLogicalName = namespace.getName().getCatalog();
final Identifier catalogPhysicalName = namespace.getPhysicalName().getCatalog();
if (catalogPhysicalName != null && !exportedCatalogs.contains(catalogLogicalName)) {
applySqlStrings(dialect.getDropCatalogCommand(catalogPhysicalName.render(dialect)), formatter, options, targets);
exportedCatalogs.add(catalogLogicalName);
}
}
}
}
}
Example usage of org.hibernate.boot.model.relational.SqlStringGenerationContext in the hibernate-orm project: the testExplicitSequenceGeneratorImplicitNamePreferGeneratorName method of the GeneratedValueTests class.
/**
 * When a sequence generator is explicitly declared without an explicit sequence
 * name, the generator's own name is used as the implicit sequence name (the
 * default behavior), and the explicitly configured initial value and increment
 * size are honored.
 */
@Test
public void testExplicitSequenceGeneratorImplicitNamePreferGeneratorName() {
    // this should be the default behavior
    try (final StandardServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().build()) {
        final MetadataImplementor metadata = (MetadataImplementor) new MetadataSources(serviceRegistry)
                .addAnnotatedClass(ExplicitSequenceGeneratorImplicitNameEntity.class)
                .buildMetadata();
        final PersistentClass entityBinding = metadata.getEntityBinding(ExplicitSequenceGeneratorImplicitNameEntity.class.getName());
        final IdentifierGenerator identifierGenerator = entityBinding.getIdentifier().createIdentifierGenerator(
                metadata.getMetadataBuildingOptions().getIdentifierGeneratorFactory(),
                serviceRegistry.getService(JdbcEnvironment.class).getDialect(),
                null,
                null,
                (RootClass) entityBinding);
        final Database database = metadata.getDatabase();
        final SqlStringGenerationContext generationContext = SqlStringGenerationContextImpl.forTests(database.getJdbcEnvironment());
        identifierGenerator.initialize(generationContext);
        final SequenceStyleGenerator sequenceGenerator = assertTyping(SequenceStyleGenerator.class, identifierGenerator);

        // all the JPA defaults since they were not defined
        assertThat(sequenceGenerator.getDatabaseStructure().getPhysicalName().render(), is("my_db_sequence"));
        assertThat(sequenceGenerator.getDatabaseStructure().getInitialValue(), is(100));
        assertThat(sequenceGenerator.getDatabaseStructure().getIncrementSize(), is(500));

        // The sequence must also be registered on the default namespace of the boot model.
        final Sequence sequence = database.getDefaultNamespace().locateSequence(Identifier.toIdentifier("my_db_sequence"));
        assertThat(sequence, notNullValue());
        assertThat(sequence.getName().getSequenceName().getText(), is("my_db_sequence"));
        assertThat(sequence.getInitialValue(), is(100));
        assertThat(sequence.getIncrementSize(), is(500));

        // And the rendered DDL must carry the configured name / initial value / increment.
        final String[] createCommands = new H2Dialect().getSequenceExporter().getSqlCreateStrings(sequence, metadata, generationContext);
        assertThat(createCommands.length, is(1));
        final String command = createCommands[0].toLowerCase();
        assertTrue(command.startsWith("create sequence my_db_sequence start with 100 increment by 500"));
    }
}
Example usage of org.hibernate.boot.model.relational.SqlStringGenerationContext in the hibernate-orm project: the testOracleSequenceOrder method of the MonotonicRevisionNumberTest class.
/**
 * The Envers revision-number generator on Oracle must produce a sequence
 * created with the ORDER option, so revision numbers remain monotonic in a
 * RAC environment.
 */
@Test
public void testOracleSequenceOrder() {
    final EntityPersister revisionPersister = sessionFactory().getMappingMetamodel().getEntityDescriptor(SequenceIdRevisionEntity.class.getName());
    final IdentifierGenerator identifierGenerator = revisionPersister.getIdentifierGenerator();
    Assert.assertTrue(OrderedSequenceGenerator.class.isInstance(identifierGenerator));

    final Database database = metadata().getDatabase();
    final SqlStringGenerationContext generationContext = SqlStringGenerationContextImpl.forTests(database.getJdbcEnvironment());

    // Locate the auxiliary database object modeling the revision-number sequence.
    final Optional<AuxiliaryDatabaseObject> revisionSequence = database.getAuxiliaryDatabaseObjects()
            .stream()
            .filter(object -> "REVISION_GENERATOR".equals(object.getExportIdentifier()))
            .findFirst();
    assertThat(revisionSequence).isPresent();

    final String[] createCommands = revisionSequence.get().sqlCreateStrings(generationContext);
    Assert.assertTrue("Oracle sequence needs to be ordered in RAC environment.", createCommands[0].toLowerCase().endsWith(" order"));
}
Example usage of org.hibernate.boot.model.relational.SqlStringGenerationContext in the hibernate-orm project: the buildPluralAttributeMapping method of the MappingModelCreationHelper class.
/**
 * Builds the runtime mapping-model descriptor ({@link PluralAttributeMapping}) for a
 * collection-valued attribute from its boot-model representation.
 *
 * <p>Resolves the collection's element, index (for indexed collections) and identifier
 * (for ID_BAG) parts based on the {@link CollectionSemantics} classification, then
 * registers two initialization callbacks: one finishing the mapping's own
 * initialization, one resolving its foreign-key descriptor.
 *
 * @param attrName the attribute name
 * @param stateArrayPosition position of the attribute in the owner's state array
 * @param bootProperty the boot-model property; its value must be a {@link Collection}
 * @param declaringType the runtime managed type declaring the attribute
 * @param propertyAccess strategy for reading/writing the attribute value
 * @param cascadeStyle cascade behavior for the attribute
 * @param fetchMode boot-model fetch mode, translated to runtime fetch style/timing
 * @param creationProcess the in-flight mapping-model creation process
 * @return the (not yet fully initialized) plural attribute mapping
 * @throws MappingException if the collection classification is unrecognized
 */
@SuppressWarnings("rawtypes")
public static PluralAttributeMapping buildPluralAttributeMapping(String attrName, int stateArrayPosition, Property bootProperty, ManagedMappingType declaringType, PropertyAccess propertyAccess, CascadeStyle cascadeStyle, FetchMode fetchMode, MappingModelCreationProcess creationProcess) {
    final Collection bootValueMapping = (Collection) bootProperty.getValue();
    final RuntimeModelCreationContext creationContext = creationProcess.getCreationContext();
    final SessionFactoryImplementor sessionFactory = creationContext.getSessionFactory();
    final SqlStringGenerationContext sqlStringGenerationContext = sessionFactory.getSqlStringGenerationContext();
    final Dialect dialect = sqlStringGenerationContext.getDialect();
    final MappingMetamodel domainModel = creationContext.getDomainModel();
    final CollectionPersister collectionDescriptor = domainModel.findCollectionDescriptor(bootValueMapping.getRole());
    assert collectionDescriptor != null;
    final String tableExpression = ((Joinable) collectionDescriptor).getTableName();
    final String sqlAliasStem = SqlAliasStemHelper.INSTANCE.generateStemFromAttributeName(bootProperty.getName());
    final CollectionMappingType<?> collectionMappingType;
    final JavaTypeRegistry jtdRegistry = creationContext.getJavaTypeRegistry();
    final CollectionPart elementDescriptor = interpretElement(bootValueMapping, tableExpression, collectionDescriptor, sqlAliasStem, dialect, creationProcess);
    final CollectionPart indexDescriptor;
    // Only ID_BAG collections carry an identifier descriptor; stays null otherwise.
    CollectionIdentifierDescriptor identifierDescriptor = null;
    final CollectionSemantics<?, ?> collectionSemantics = collectionDescriptor.getCollectionSemantics();
    switch (collectionSemantics.getCollectionClassification()) {
        case ARRAY:
        {
            collectionMappingType = new CollectionMappingTypeImpl(jtdRegistry.getDescriptor(Object[].class), StandardArraySemantics.INSTANCE);
            // Arrays are indexed by a basic integer position column.
            final BasicValue index = (BasicValue) ((IndexedCollection) bootValueMapping).getIndex();
            final SelectableMapping selectableMapping = SelectableMappingImpl.from(tableExpression, index.getSelectables().get(0), creationContext.getTypeConfiguration().getBasicTypeForJavaType(Integer.class), dialect, creationProcess.getSqmFunctionRegistry());
            indexDescriptor = new BasicValuedCollectionPart(collectionDescriptor, CollectionPart.Nature.INDEX, // no converter
            null, selectableMapping);
            break;
        }
        case BAG:
        {
            collectionMappingType = new CollectionMappingTypeImpl(jtdRegistry.getDescriptor(java.util.Collection.class), StandardBagSemantics.INSTANCE);
            indexDescriptor = null;
            break;
        }
        case ID_BAG:
        {
            collectionMappingType = new CollectionMappingTypeImpl(jtdRegistry.getDescriptor(java.util.Collection.class), StandardIdentifierBagSemantics.INSTANCE);
            indexDescriptor = null;
            // ID_BAG rows are identified by a dedicated surrogate identifier column.
            assert collectionDescriptor instanceof SQLLoadableCollection;
            final SQLLoadableCollection loadableCollection = (SQLLoadableCollection) collectionDescriptor;
            final String identifierColumnName = loadableCollection.getIdentifierColumnName();
            assert identifierColumnName != null;
            identifierDescriptor = new CollectionIdentifierDescriptorImpl(collectionDescriptor, tableExpression, identifierColumnName, (BasicType) loadableCollection.getIdentifierType());
            break;
        }
        case LIST:
        {
            final BasicValue index = (BasicValue) ((IndexedCollection) bootValueMapping).getIndex();
            final SelectableMapping selectableMapping = SelectableMappingImpl.from(tableExpression, index.getSelectables().get(0), creationContext.getTypeConfiguration().getBasicTypeForJavaType(Integer.class), dialect, creationProcess.getSqmFunctionRegistry());
            indexDescriptor = new BasicValuedCollectionPart(collectionDescriptor, CollectionPart.Nature.INDEX, // no converter
            null, selectableMapping);
            collectionMappingType = new CollectionMappingTypeImpl(jtdRegistry.getDescriptor(List.class), StandardListSemantics.INSTANCE);
            break;
        }
        case MAP:
        case ORDERED_MAP:
        case SORTED_MAP:
        {
            final Class<? extends java.util.Map> mapJavaType = collectionSemantics.getCollectionClassification() == CollectionClassification.SORTED_MAP ? SortedMap.class : java.util.Map.class;
            collectionMappingType = new CollectionMappingTypeImpl(jtdRegistry.getDescriptor(mapJavaType), collectionSemantics);
            // A map keyed by an entity property ("property-ref" style map key) may read
            // its key from a different table than the collection table itself.
            final String mapKeyTableExpression;
            if (bootValueMapping instanceof Map && ((Map) bootValueMapping).getMapKeyPropertyName() != null) {
                mapKeyTableExpression = getTableIdentifierExpression(((Map) bootValueMapping).getIndex().getTable(), creationProcess);
            } else {
                mapKeyTableExpression = tableExpression;
            }
            indexDescriptor = interpretMapKey(bootValueMapping, collectionDescriptor, mapKeyTableExpression, sqlAliasStem, dialect, creationProcess);
            break;
        }
        case SET:
        case ORDERED_SET:
        case SORTED_SET:
        {
            // Fixed: this previously compared against SORTED_MAP (copy/paste from the
            // map branch above), which can never match inside this case group, so a
            // SORTED_SET collection was incorrectly typed as a plain java.util.Set.
            final Class<? extends java.util.Set> setJavaType = collectionSemantics.getCollectionClassification() == CollectionClassification.SORTED_SET ? SortedSet.class : java.util.Set.class;
            collectionMappingType = new CollectionMappingTypeImpl(jtdRegistry.getDescriptor(setJavaType), collectionSemantics);
            indexDescriptor = null;
            break;
        }
        default:
        {
            throw new MappingException("Unexpected CollectionClassification : " + collectionSemantics.getCollectionClassification());
        }
    }
    // Attribute-level metadata (nullability, insert/update-ability, locking, cascade)
    // sourced from the boot property; the attribute itself is treated as immutable.
    final StateArrayContributorMetadata contributorMetadata = new StateArrayContributorMetadata() {
        @Override
        public PropertyAccess getPropertyAccess() {
            return propertyAccess;
        }
        @Override
        public MutabilityPlan getMutabilityPlan() {
            return ImmutableMutabilityPlan.instance();
        }
        @Override
        public boolean isNullable() {
            return bootProperty.isOptional();
        }
        @Override
        public boolean isInsertable() {
            return bootProperty.isInsertable();
        }
        @Override
        public boolean isUpdatable() {
            return bootProperty.isUpdateable();
        }
        @Override
        public boolean isIncludedInDirtyChecking() {
            return false;
        }
        @Override
        public boolean isIncludedInOptimisticLocking() {
            return bootProperty.isOptimisticLocked();
        }
        @Override
        public CascadeStyle getCascadeStyle() {
            return cascadeStyle;
        }
    };
    final FetchStyle style = FetchOptionsHelper.determineFetchStyleByMetadata(fetchMode, collectionDescriptor.getCollectionType(), sessionFactory);
    final FetchTiming timing = FetchOptionsHelper.determineFetchTiming(style, collectionDescriptor.getCollectionType(), collectionDescriptor.isLazy(), collectionDescriptor.getRole(), sessionFactory);
    final PluralAttributeMappingImpl pluralAttributeMapping = new PluralAttributeMappingImpl(attrName, bootValueMapping, propertyAccess, entityMappingType -> contributorMetadata, collectionMappingType, stateArrayPosition, elementDescriptor, indexDescriptor, identifierDescriptor, timing, style, cascadeStyle, declaringType, collectionDescriptor);
    // Deferred steps: the rest of the model may not exist yet, so initialization
    // completes via callbacks once the creation process resolves dependencies.
    creationProcess.registerInitializationCallback("PluralAttributeMapping(" + bootValueMapping.getRole() + ")#finishInitialization", () -> {
        pluralAttributeMapping.finishInitialization(bootProperty, bootValueMapping, creationProcess);
        return true;
    });
    creationProcess.registerInitializationCallback("PluralAttributeMapping(" + bootValueMapping.getRole() + ") - key descriptor", () -> {
        interpretPluralAttributeMappingKeyDescriptor(pluralAttributeMapping, bootValueMapping, collectionDescriptor, declaringType, dialect, creationProcess);
        return true;
    });
    return pluralAttributeMapping;
}
Example usage of org.hibernate.boot.model.relational.SqlStringGenerationContext in the hibernate-orm project: the doMigration method of the AbstractSchemaMigrator class.
/**
 * Entry point for schema migration ("update"): builds a snapshot of the existing
 * database, prepares the requested generation targets, performs the migration,
 * and releases all resources in reverse order of acquisition.
 *
 * @param metadata the boot-time model to migrate the database toward
 * @param options execution options (configuration values, filters, exception handling)
 * @param contributableInclusionFilter selects which contributed objects participate
 * @param targetDescriptor describes where the generated DDL should go; when it
 *        declares no target types, this method is a no-op
 */
@Override
public void doMigration(Metadata metadata, ExecutionOptions options, ContributableMatcher contributableInclusionFilter, TargetDescriptor targetDescriptor) {
// Context carrying the default catalog/schema used when rendering qualified names.
SqlStringGenerationContext sqlStringGenerationContext = SqlStringGenerationContextImpl.fromConfigurationMap(tool.getServiceRegistry().getService(JdbcEnvironment.class), metadata.getDatabase(), options.getConfigurationValues());
if (!targetDescriptor.getTargetTypes().isEmpty()) {
final JdbcContext jdbcContext = tool.resolveJdbcContext(options.getConfigurationValues());
// DDL runs on its own isolated connection/transaction.
final DdlTransactionIsolator ddlTransactionIsolator = tool.getDdlTransactionIsolator(jdbcContext);
try {
// Snapshot of the current database state, diffed against the mapped model.
final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation(tool.getServiceRegistry(), ddlTransactionIsolator, sqlStringGenerationContext, tool);
final GenerationTarget[] targets = tool.buildGenerationTargets(targetDescriptor, ddlTransactionIsolator, options.getConfigurationValues());
try {
for (GenerationTarget target : targets) {
target.prepare();
}
try {
performMigration(metadata, databaseInformation, options, contributableInclusionFilter, jdbcContext.getDialect(), sqlStringGenerationContext, targets);
} finally {
// Always release every target; release failures are logged, not rethrown,
// so one failing target does not prevent releasing the others.
for (GenerationTarget target : targets) {
try {
target.release();
} catch (Exception e) {
log.debugf("Problem releasing GenerationTarget [%s] : %s", target, e.getMessage());
}
}
}
} finally {
// Clean up the database snapshot even if target preparation/migration failed.
try {
databaseInformation.cleanup();
} catch (Exception e) {
log.debug("Problem releasing DatabaseInformation : " + e.getMessage());
}
}
} finally {
// Release the isolated DDL connection last.
ddlTransactionIsolator.release();
}
}
}
Aggregations