use of org.hibernate.tool.hbm2ddl.SchemaExport in project midpoint by Evolveum.
the class SpringApplicationContextTest method createSQLSchema.
/**
 * Generates a SQL DDL script for the midPoint repository entity model and writes it
 * to {@code fileName}, using the given Hibernate dialect.
 *
 * Uses the legacy (Hibernate 4.x) {@code Configuration}-based bootstrap and the
 * four-boolean {@code SchemaExport#execute} overload.
 *
 * @param fileName path of the output DDL script file
 * @param dialect  fully qualified Hibernate dialect class name to generate DDL for
 * @throws Exception if the export fails
 */
private void createSQLSchema(String fileName, String dialect) throws Exception {
org.hibernate.cfg.Configuration configuration = new Configuration();
configuration.setNamingStrategy(new MidPointNamingStrategy());
// NOTE(review): setProperties stores a *reference* to the session factory's Properties,
// so the setProperty call on the next line mutates the same object the configuration
// now holds. The ordering here is load-bearing — and the dialect change leaks into the
// shared sessionFactory properties for the rest of the test run. Consider copying the
// properties instead; verify no other test depends on the leaked dialect.
configuration.setProperties(sessionFactory.getHibernateProperties());
sessionFactory.getHibernateProperties().setProperty("hibernate.dialect", dialect);
System.out.println("Dialect: " + sessionFactory.getHibernateProperties().getProperty("hibernate.dialect"));
// Register every annotated entity package of the repository model.
addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common", configuration);
addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.container", configuration);
addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.any", configuration);
addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.embedded", configuration);
addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.enums", configuration);
addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.id", configuration);
addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.other", configuration);
addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.type", configuration);
addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.audit", configuration);
// addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.poc", configuration);
// Package-level metadata (custom user types) lives in this package.
configuration.addPackage("com.evolveum.midpoint.repo.sql.type");
SchemaExport export = new SchemaExport(configuration);
export.setOutputFile(fileName);
export.setDelimiter(";");
// Hibernate 4 signature: execute(script, export, justDrop, justCreate)
// -> print script, do NOT run against a database, create-only DDL.
export.execute(true, false, false, true);
}
use of org.hibernate.tool.hbm2ddl.SchemaExport in project zhcet-web by zhcet-amu.
the class Hibernate5DDLExporter method schemaExport.
/**
 * Exports the CREATE-schema DDL for the configured entity packages to a script file
 * under {@code targetDirectory}, using Spring Boot's physical naming strategy.
 *
 * @param fileName        name of the DDL script file to create
 * @param targetDirectory directory in which the script file is created
 * @return this exporter, for chaining
 * @throws Exception if the target file cannot be created or the export fails
 * @throws IllegalStateException if no entity packages were configured
 */
private Hibernate5DDLExporter schemaExport(String fileName, String targetDirectory) throws Exception {
    // Bug fix: the original condition used '&&', which throws an NPE when the array is
    // null (short-circuit evaluates entityPackages.length) and never fires otherwise.
    // Also: fail with an exception instead of System.exit(0), which killed the JVM
    // while reporting success (exit code 0) on an error path.
    if (entityPackages == null || entityPackages.length == 0) {
        throw new IllegalStateException("No packages selected");
    }
    File exportFile = createExportFileAndMakeDirectory(fileName, targetDirectory);
    // Build a throwaway registry carrying only the dialect and Spring Boot's naming
    // strategy; entity mappings are contributed by mapAnnotatedClasses(...).
    ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder()
            .applySetting(AvailableSettings.DIALECT, dialect)
            .applySetting(AvailableSettings.PHYSICAL_NAMING_STRATEGY,
                    "org.springframework.boot.orm.jpa.hibernate.SpringPhysicalNamingStrategy")
            .build();
    MetadataImplementor metadata = (MetadataImplementor) mapAnnotatedClasses(serviceRegistry).buildMetadata();
    SchemaExport schemaExport = new SchemaExport();
    schemaExport.setOutputFile(exportFile.getAbsolutePath());
    schemaExport.setDelimiter(";");
    schemaExport.setFormat(true);
    // SCRIPT target only: write the DDL to the file, do not touch any database.
    schemaExport.execute(EnumSet.of(TargetType.SCRIPT), SchemaExport.Action.CREATE, metadata, serviceRegistry);
    // The registry was built locally, so it must be destroyed here to release resources.
    ((StandardServiceRegistryImpl) serviceRegistry).destroy();
    System.out.println(exportFile.getAbsolutePath());
    return this;
}
use of org.hibernate.tool.hbm2ddl.SchemaExport in project cas by apereo.
the class GenerateDdlCommand method generate.
/**
* Generate.
*
* @param file the file
* @param dialect the dialect
* @param delimiter the delimiter
* @param pretty the pretty
* @param dropSchema the drop schema
* @param createSchema the create schema
* @param haltOnError the halt on error
*/
@CliCommand(value = "generate-ddl", help = "Generate database DDL scripts")
public void generate(@CliOption(key = { "file" }, help = "DDL file to contain to generated script", specifiedDefaultValue = "/etc/cas/config/cas-db-schema.sql", unspecifiedDefaultValue = "/etc/cas/config/cas-db-schema.sql", optionContext = "DDL file to contain to generated script") final String file, @CliOption(key = { "dialect" }, help = "Database dialect class", specifiedDefaultValue = "HSQL", unspecifiedDefaultValue = "HSQL", optionContext = "Database dialect class") final String dialect, @CliOption(key = { "delimiter" }, help = "Delimiter to use for separation of statements when generating SQL", specifiedDefaultValue = ";", unspecifiedDefaultValue = ";", optionContext = "Delimiter to use for separation of statements when generating SQL") final String delimiter, @CliOption(key = { "pretty" }, help = "Format DDL scripts and pretty-print the output", specifiedDefaultValue = "true", unspecifiedDefaultValue = "true", optionContext = "Format DDL scripts and pretty-print the output") final boolean pretty, @CliOption(key = { "dropSchema" }, help = "Generate DROP SQL statements in the DDL", specifiedDefaultValue = "true", unspecifiedDefaultValue = "true", optionContext = "Generate DROP statements in the DDL") final boolean dropSchema, @CliOption(key = { "createSchema" }, help = "Generate DROP SQL statements in the DDL", specifiedDefaultValue = "true", unspecifiedDefaultValue = "true", optionContext = "Generate CREATE SQL statements in the DDL") final boolean createSchema, @CliOption(key = { "haltOnError" }, help = "Halt if an error occurs during the generation process", specifiedDefaultValue = "true", unspecifiedDefaultValue = "true", optionContext = "Halt if an error occurs during the generation process") final boolean haltOnError) {
final String dialectName = DIALECTS_MAP.getOrDefault(dialect.trim().toUpperCase(), dialect);
LOGGER.info("Using database dialect class [{}]", dialectName);
if (!dialectName.contains(".")) {
LOGGER.warn("Dialect name must be a fully qualified class name. Supported dialects by default are [{}] " + "or you may specify the dialect class directly", DIALECTS_MAP.keySet());
return;
}
final StandardServiceRegistryBuilder svcRegistry = new StandardServiceRegistryBuilder();
if (StringUtils.isNotBlank(dialectName)) {
svcRegistry.applySetting(AvailableSettings.DIALECT, dialect);
}
final MetadataSources metadata = new MetadataSources(svcRegistry.build());
REFLECTIONS.getTypesAnnotatedWith(MappedSuperclass.class).forEach(metadata::addAnnotatedClass);
REFLECTIONS.getTypesAnnotatedWith(Entity.class).forEach(metadata::addAnnotatedClass);
final SchemaExport export = new SchemaExport();
export.setDelimiter(delimiter);
export.setOutputFile(file);
export.setFormat(pretty);
export.setHaltOnError(haltOnError);
export.setManageNamespaces(true);
final SchemaExport.Action action;
if (createSchema && dropSchema) {
action = SchemaExport.Action.BOTH;
} else if (createSchema) {
action = SchemaExport.Action.CREATE;
} else if (dropSchema) {
action = SchemaExport.Action.DROP;
} else {
action = SchemaExport.Action.NONE;
}
LOGGER.info("Exporting Database DDL to [{}] using dialect [{}] with export type set to [{}]", file, dialect, action);
export.execute(EnumSet.of(TargetType.SCRIPT, TargetType.STDOUT), SchemaExport.Action.BOTH, metadata.buildMetadata());
LOGGER.info("Database DDL is exported to [{}]", file);
}
use of org.hibernate.tool.hbm2ddl.SchemaExport in project hibernate-orm by hibernate.
the class MigrationTest method testIndexCreationViaSchemaUpdate.
// /**
// * 3_Version.hbm.xml contains a named unique constraint and an un-named
// * unique constraint (will receive a randomly-generated name). Create
// * the original schema with 2_Version.hbm.xml. Then, run SchemaUpdate
// * TWICE using 3_Version.hbm.xml. Neither RECREATE_QUIETLY nor SKIP should
// * generate any exceptions.
// */
// @Test
// @TestForIssue( jiraKey = "HHH-8162" )
// public void testConstraintUpdate() {
// doConstraintUpdate(UniqueConstraintSchemaUpdateStrategy.DROP_RECREATE_QUIETLY);
// doConstraintUpdate(UniqueConstraintSchemaUpdateStrategy.RECREATE_QUIETLY);
// doConstraintUpdate(UniqueConstraintSchemaUpdateStrategy.SKIP);
// }
//
// private void doConstraintUpdate(UniqueConstraintSchemaUpdateStrategy strategy) {
// // original
// String resource1 = "org/hibernate/test/schemaupdate/2_Version.hbm.xml";
// // adds unique constraint
// String resource2 = "org/hibernate/test/schemaupdate/3_Version.hbm.xml";
//
// MetadataImplementor v1metadata = (MetadataImplementor) new MetadataSources( serviceRegistry )
// .addResource( resource1 )
// .buildMetadata();
// MetadataImplementor v2metadata = (MetadataImplementor) new MetadataSources( serviceRegistry )
// .addResource( resource2 )
// .buildMetadata();
//
// new SchemaExport( v1metadata ).execute( false, true, true, false );
//
// // adds unique constraint
// Configuration v2cfg = new Configuration();
// v2cfg.getProperties().put( AvailableSettings.UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY, strategy );
// v2cfg.addResource( resource2 );
// SchemaUpdate v2schemaUpdate = new SchemaUpdate( serviceRegistry, v2cfg );
// v2schemaUpdate.execute( true, true );
// assertEquals( 0, v2schemaUpdate.getExceptions().size() );
//
// Configuration v3cfg = new Configuration();
// v3cfg.getProperties().put( AvailableSettings.UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY, strategy );
// v3cfg.addResource( resource2 );
// SchemaUpdate v3schemaUpdate = new SchemaUpdate( serviceRegistry, v3cfg );
// v3schemaUpdate.execute( true, true );
// assertEquals( 0, v3schemaUpdate.getExceptions().size() );
//
// new SchemaExport( serviceRegistry, v3cfg ).drop( false, true );
// }
@Test
@TestForIssue(jiraKey = "HHH-9713")
public void testIndexCreationViaSchemaUpdate() {
    // Build metadata for the single indexed test entity.
    final MetadataSources sources = new MetadataSources(serviceRegistry);
    sources.addAnnotatedClass(EntityWithIndex.class);
    final MetadataImplementor builtMetadata = (MetadataImplementor) sources.buildMetadata();
    final EnumSet<TargetType> toDatabase = EnumSet.of(TargetType.DATABASE);
    // Start from a clean slate: drop any leftovers, then create the schema.
    new SchemaExport().execute(toDatabase, SchemaExport.Action.BOTH, builtMetadata);
    try {
        // The update against an identical model must succeed (index already present).
        new SchemaUpdate().execute(toDatabase, builtMetadata);
    } finally {
        // Always clean up the schema, even if the update step failed.
        new SchemaExport().drop(toDatabase, builtMetadata);
    }
}
use of org.hibernate.tool.hbm2ddl.SchemaExport in project hibernate-orm by hibernate.
the class MixedFieldPropertyAnnotationTest method setUp.
@Before
public void setUp() {
    // Registry with global identifier quoting explicitly disabled for this test.
    serviceRegistry = new StandardServiceRegistryBuilder()
            .applySetting(Environment.GLOBALLY_QUOTED_IDENTIFIERS, "false")
            .build();
    // Metadata for the single entity mixing field and property access.
    metadata = (MetadataImplementor) new MetadataSources(serviceRegistry)
            .addAnnotatedClass(MyEntity.class)
            .buildMetadata();
    System.out.println("********* Starting SchemaExport for START-UP *************************");
    // Emit the DDL to stdout (for diagnostics) and apply it to the test database.
    new SchemaExport().create(EnumSet.of(TargetType.STDOUT, TargetType.DATABASE), metadata);
    System.out.println("********* Completed SchemaExport for START-UP *************************");
}
Aggregations