Use of org.hibernate.tool.hbm2ddl.SchemaExport in project hibernate-orm by hibernate.
The class ForeignKeyDropTest, method setUp.
@Before
public void setUp() throws Exception {
    output = File.createTempFile("update_script", ".sql");
    output.deleteOnExit();
    ssr = new StandardServiceRegistryBuilder()
            .applySetting(Environment.HBM2DDL_AUTO, "none")
            .applySetting(Environment.FORMAT_SQL, "false")
            .applySetting(Environment.SHOW_SQL, "true")
            .build();
    metadata = (MetadataImplementor) new MetadataSources(ssr)
            .addAnnotatedClass(ParentEntity.class)
            .addAnnotatedClass(ChildEntity.class)
            .buildMetadata();
    metadata.validate();
    schemaExport = new SchemaExport().setHaltOnError(false).setOutputFile(output.getAbsolutePath());
}
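The test methods of ForeignKeyDropTest are not part of this excerpt. A minimal sketch of how the fixture above could be used, assuming JUnit 4 and the Hibernate 5.2+ SchemaExport API seen elsewhere on this page; the assertion is illustrative only, and the real test in hibernate-orm may check different statements:

@Test
public void testForeignKeyIsDropped() throws Exception {
    // Write only the drop script to the temporary file configured in setUp().
    schemaExport.drop(EnumSet.of(TargetType.SCRIPT), metadata);
    final List<String> sqlLines = Files.readAllLines(output.toPath(), Charset.defaultCharset());
    // Illustrative check: the drop script should remove the foreign key
    // before (or instead of) dropping the referenced table.
    final boolean dropsForeignKey = sqlLines.stream()
            .map(String::toLowerCase)
            .anyMatch(s -> s.contains("drop constraint") || s.contains("drop foreign key"));
    assertTrue("Expected a statement that drops the foreign key", dropsForeignKey);
}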
Use of org.hibernate.tool.hbm2ddl.SchemaExport in project hibernate-orm by hibernate.
The class SchemaCreationTest, method testUniqueConstraintIsCorrectlyGenerated.
@Test
@TestForIssue(jiraKey = "HHH-10553")
public void testUniqueConstraintIsCorrectlyGenerated() throws Exception {
    final MetadataSources metadataSources = new MetadataSources(ssr);
    metadataSources.addAnnotatedClass(Element.class);
    metadataSources.addAnnotatedClass(Category.class);
    metadata = (MetadataImplementor) metadataSources.buildMetadata();
    metadata.validate();
    final SchemaExport schemaExport = new SchemaExport()
            .setHaltOnError(true)
            .setOutputFile(output.getAbsolutePath())
            .setFormat(false);
    schemaExport.create(EnumSet.of(TargetType.SCRIPT), metadata);
    final List<String> sqlLines = Files.readAllLines(output.toPath(), Charset.defaultCharset());
    boolean isUniqueConstraintCreated = false;
    for (String statement : sqlLines) {
        assertThat(
                "Should not try to create the unique constraint for the non existing table element",
                statement.toLowerCase().matches(dialect.getAlterTableString("element")),
                is(false));
        if (dialect instanceof DB2Dialect) {
            if (statement.toLowerCase().startsWith("create unique index")
                    && statement.toLowerCase().contains("category (code)")) {
                isUniqueConstraintCreated = true;
            }
        } else if (dialect instanceof PostgreSQL81Dialect) {
            if (statement.toLowerCase().startsWith("alter table if exists category add constraint")
                    && statement.toLowerCase().contains("unique (code)")) {
                isUniqueConstraintCreated = true;
            }
        } else {
            if (statement.toLowerCase().startsWith("alter table category add constraint")
                    && statement.toLowerCase().contains("unique (code)")) {
                isUniqueConstraintCreated = true;
            }
        }
    }
    assertThat("Unique constraint for table category is not created", isUniqueConstraintCreated, is(true));
}
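The Element and Category mappings are not part of this excerpt; the assertions only require that Category maps to a physical table named category with a unique code column, while element is not exported as its own table. A hypothetical Category mapping that would yield the expected unique constraint (the actual test entities in hibernate-orm may be defined differently):

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;

// Hypothetical mapping; the real Category entity in the hibernate-orm test suite may differ.
@Entity
@Table(name = "category")
public class Category {

    @Id
    private Long id;

    // unique = true is what makes SchemaExport emit the
    // "alter table category add constraint ... unique (code)" statement
    // (or a unique index, depending on the dialect).
    @Column(name = "code", unique = true)
    private String code;
}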
Use of org.hibernate.tool.hbm2ddl.SchemaExport in project giftcard-demo-series by AxonIQ.
The class EventStoreJpaConfig, method main.
/**
 * Simple utility to generate an initial version of the DDL.
 */
public static void main(String[] args) {
    Map<String, Object> settings = new HashMap<>();
    settings.put("hibernate.dialect", PostgreSQL94Dialect.class);
    settings.put("hibernate.implicit_naming_strategy", SpringImplicitNamingStrategy.class);
    settings.put("hibernate.physical_naming_strategy", SpringPhysicalNamingStrategy.class);
    StandardServiceRegistry standardServiceRegistry = new StandardServiceRegistryBuilder().applySettings(settings).build();
    MetadataSources metadataSources = new MetadataSources(standardServiceRegistry);
    metadataSources.addAnnotatedClass(DomainEventEntry.class);
    metadataSources.addAnnotatedClass(SnapshotEventEntry.class);
    Metadata metadata = metadataSources.buildMetadata();
    SchemaExport schemaExport = new SchemaExport((MetadataImplementor) metadata);
    schemaExport.setFormat(true);
    schemaExport.setDelimiter(";");
    schemaExport.create(Target.SCRIPT);
}
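This uses the SchemaExport API of Hibernate 5.0/5.1, where the Metadata is passed to the constructor and Target.SCRIPT prints the generated statements to the console. Hibernate 5.2 replaced that constructor and the Target enum; a sketch of the equivalent call against the newer API, reusing the metadata built above and assuming imports of java.util.EnumSet and org.hibernate.tool.schema.TargetType (shown only for comparison with the hibernate-orm examples earlier on this page, not as part of the giftcard project):

// Hibernate 5.2+ equivalent (sketch): the Metadata is supplied at execution time.
SchemaExport schemaExport = new SchemaExport();
schemaExport.setFormat(true);
schemaExport.setDelimiter(";");
// TargetType.STDOUT prints the script to the console; add TargetType.SCRIPT
// together with setOutputFile(...) to also write it to a file.
schemaExport.create(EnumSet.of(TargetType.STDOUT), metadata);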
Use of org.hibernate.tool.hbm2ddl.SchemaExport in project giftcard-demo-series by AxonIQ.
The class PrimaryJpaConfig, method main.
/**
 * Simple utility to generate an initial version of the DDL.
 */
public static void main(String[] args) {
    Map<String, Object> settings = new HashMap<>();
    settings.put("hibernate.dialect", PostgreSQL94Dialect.class);
    settings.put("hibernate.implicit_naming_strategy", SpringImplicitNamingStrategy.class);
    settings.put("hibernate.physical_naming_strategy", SpringPhysicalNamingStrategy.class);
    StandardServiceRegistry standardServiceRegistry = new StandardServiceRegistryBuilder().applySettings(settings).build();
    MetadataSources metadataSources = new MetadataSources(standardServiceRegistry);
    metadataSources.addAnnotatedClass(TokenEntry.class);
    metadataSources.addAnnotatedClass(CardSummary.class);
    Metadata metadata = metadataSources.buildMetadata();
    SchemaExport schemaExport = new SchemaExport((MetadataImplementor) metadata);
    schemaExport.setFormat(true);
    schemaExport.setDelimiter(";");
    schemaExport.create(Target.SCRIPT);
}
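PrimaryJpaConfig follows the same pattern as EventStoreJpaConfig above, only registering TokenEntry and CardSummary. Because no output file is configured, Target.SCRIPT only prints the DDL to standard output; a small variant on the same pre-5.2 API that also captures the script in a file (the file name here is an example, not something defined in the project):

SchemaExport schemaExport = new SchemaExport((MetadataImplementor) metadata);
schemaExport.setFormat(true);
schemaExport.setDelimiter(";");
schemaExport.setOutputFile("primary-schema.sql"); // example path only
schemaExport.create(Target.SCRIPT);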
Use of org.hibernate.tool.hbm2ddl.SchemaExport in project cas by apereo.
The class GenerateDdlCommand, method generate.
/**
 * Generate DDL scripts for the configured entities.
 *
 * @param file         the file to receive the generated DDL script
 * @param dialect      the database dialect (short name or fully qualified class name)
 * @param jdbcUrl      the JDBC database connection URL
 * @param delimiter    the delimiter used to separate statements in the generated SQL
 * @param pretty       whether to format and pretty-print the generated DDL
 * @param dropSchema   whether to include DROP statements in the DDL
 * @param createSchema whether to include CREATE statements in the DDL
 * @param haltOnError  whether to halt if an error occurs during generation
 * @return the file the DDL was written to
 */
@ShellMethod(key = "generate-ddl", value = "Generate database DDL scripts")
public String generate(
        @ShellOption(value = { "file", "--file" }, help = "DDL file to contain the generated script", defaultValue = "/etc/cas/config/cas-db-schema.sql") final String file,
        @ShellOption(value = { "dialect", "--dialect" }, help = "Database dialect class", defaultValue = "HSQL") final String dialect,
        @ShellOption(value = { "url", "--url" }, help = "JDBC database connection URL", defaultValue = "jdbc:hsqldb:mem:cas") final String jdbcUrl,
        @ShellOption(value = { "delimiter", "--delimiter" }, help = "Delimiter to use for separation of statements when generating SQL", defaultValue = ";") final String delimiter,
        @ShellOption(value = { "pretty", "--pretty" }, help = "Format DDL scripts and pretty-print the output", defaultValue = "false") final Boolean pretty,
        @ShellOption(value = { "dropSchema", "--dropSchema" }, help = "Generate DROP SQL statements in the DDL", defaultValue = "false") final Boolean dropSchema,
        @ShellOption(value = { "createSchema", "--createSchema" }, help = "Generate CREATE SQL statements in the DDL", defaultValue = "false") final Boolean createSchema,
        @ShellOption(value = { "haltOnError", "--haltOnError" }, help = "Halt if an error occurs during the generation process", defaultValue = "false") final Boolean haltOnError) {
LOGGER.info("Requested database dialect type [{}]", dialect);
val dialectName = DIALECTS_MAP.getOrDefault(dialect.trim(), dialect);
LOGGER.info("Using database dialect class [{}]", dialectName);
if (!dialectName.contains(".")) {
LOGGER.warn("Dialect name must be a fully qualified class name. Supported dialects by default are [{}] " + "or you may specify the dialect class directly", DIALECTS_MAP.keySet());
return null;
}
val svcRegistry = new StandardServiceRegistryBuilder();
val settings = new HashMap<String, String>();
settings.put(AvailableSettings.DIALECT, dialectName);
settings.put(AvailableSettings.URL, jdbcUrl);
settings.put(AvailableSettings.HBM2DDL_AUTO, "none");
settings.put(AvailableSettings.SHOW_SQL, "true");
svcRegistry.applySettings(settings);
LOGGER.info("Collecting entity metadata sources...");
val metadata = new MetadataSources(svcRegistry.build());
REFLECTIONS.getTypesAnnotatedWith(MappedSuperclass.class).forEach(metadata::addAnnotatedClass);
REFLECTIONS.getTypesAnnotatedWith(Entity.class).forEach(metadata::addAnnotatedClass);
val metadataSources = metadata.buildMetadata();
val export = new SchemaExport();
export.setDelimiter(delimiter);
export.setOutputFile(file);
export.setFormat(BooleanUtils.toBoolean(pretty));
export.setHaltOnError(BooleanUtils.toBoolean(haltOnError));
export.setManageNamespaces(true);
val action = getAction(BooleanUtils.toBoolean(dropSchema), BooleanUtils.toBoolean(createSchema));
LOGGER.info("Exporting Database DDL to [{}] using dialect [{}] with export type set to [{}]", file, dialect, action);
    export.execute(EnumSet.of(TargetType.SCRIPT, TargetType.STDOUT), action, metadataSources);
LOGGER.info("Database DDL is exported to [{}]", file);
return file;
}
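The getAction helper referenced above is not included in this excerpt. A plausible sketch of how the two flags could be mapped onto SchemaExport.Action, offered as an assumption rather than the actual CAS implementation:

// Hypothetical helper; the real getAction in CAS may differ.
private static SchemaExport.Action getAction(final boolean dropSchema, final boolean createSchema) {
    if (dropSchema && createSchema) {
        return SchemaExport.Action.BOTH;
    }
    if (dropSchema) {
        return SchemaExport.Action.DROP;
    }
    if (createSchema) {
        return SchemaExport.Action.CREATE;
    }
    return SchemaExport.Action.NONE;
}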