Use of org.hibernate.engine.jdbc.env.spi.JdbcEnvironment in project hibernate-orm by hibernate.
Class IncrementGenerator, method configure:
@Override
public void configure(Type type, Properties params, ServiceRegistry serviceRegistry) throws MappingException {
    returnClass = type.getReturnedClass();
    final JdbcEnvironment jdbcEnvironment = serviceRegistry.getService(JdbcEnvironment.class);
    final ObjectNameNormalizer normalizer = (ObjectNameNormalizer) params.get(PersistentIdentifierGenerator.IDENTIFIER_NORMALIZER);
    String column = params.getProperty("column");
    if (column == null) {
        column = params.getProperty(PersistentIdentifierGenerator.PK);
    }
    column = normalizer.normalizeIdentifierQuoting(column).render(jdbcEnvironment.getDialect());
    String tableList = params.getProperty("tables");
    if (tableList == null) {
        tableList = params.getProperty(PersistentIdentifierGenerator.TABLES);
    }
    String[] tables = StringHelper.split(", ", tableList);
    final String schema = normalizer.toDatabaseIdentifierText(params.getProperty(PersistentIdentifierGenerator.SCHEMA));
    final String catalog = normalizer.toDatabaseIdentifierText(params.getProperty(PersistentIdentifierGenerator.CATALOG));
    StringBuilder buf = new StringBuilder();
    for (int i = 0; i < tables.length; i++) {
        final String tableName = normalizer.toDatabaseIdentifierText(tables[i]);
        if (tables.length > 1) {
            // multiple tables: select the max from each table and union the results
            buf.append("select max(").append(column).append(") as mx from ");
        }
        buf.append(Table.qualify(catalog, schema, tableName));
        if (i < tables.length - 1) {
            buf.append(" union ");
        }
    }
    if (tables.length > 1) {
        buf.insert(0, "( ").append(" ) ids_");
        column = "ids_.mx";
    }
    sql = "select max(" + column + ") from " + buf.toString();
}
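For context, a minimal sketch of how this strategy is usually reached from a mapping: an entity whose identifier uses the built-in "increment" generator. The entity, generator name and column are hypothetical and only illustrate where configure() gets its "column" and "tables" parameters from.

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import org.hibernate.annotations.GenericGenerator;

@Entity
public class Invoice {

    @Id
    @GeneratedValue(generator = "invoiceIdGen")
    @GenericGenerator(name = "invoiceIdGen", strategy = "increment")
    private Long id;

    // with a single mapped table, the configure() call above ends up with roughly
    // sql = "select max(id) from Invoice" (hypothetical table/column names)
}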
Use of org.hibernate.engine.jdbc.env.spi.JdbcEnvironment in project hibernate-orm by hibernate.
Class MultipleHiLoPerTableGenerator, method registerExportables:
@Override
public void registerExportables(Database database) {
    final Namespace namespace = database.locateNamespace(qualifiedTableName.getCatalogName(), qualifiedTableName.getSchemaName());
    Table table = namespace.locateTable(qualifiedTableName.getObjectName());
    if (table == null) {
        table = namespace.createTable(qualifiedTableName.getObjectName(), false);
        // todo : not sure the best solution here. do we add the columns if missing? other?
        table.setPrimaryKey(new PrimaryKey(table));
        final Column pkColumn = new ExportableColumn(database, table, segmentColumnName, StringType.INSTANCE, database.getDialect().getTypeName(Types.VARCHAR, keySize, 0, 0));
        pkColumn.setNullable(false);
        table.addColumn(pkColumn);
        table.getPrimaryKey().addColumn(pkColumn);
        final Column valueColumn = new ExportableColumn(database, table, valueColumnName, LongType.INSTANCE);
        table.addColumn(valueColumn);
    }
    final JdbcEnvironment jdbcEnvironment = database.getJdbcEnvironment();
    // allow physical naming strategies a chance to kick in
    tableName = jdbcEnvironment.getQualifiedObjectNameFormatter().format(table.getQualifiedTableName(), jdbcEnvironment.getDialect());
    query = "select " + valueColumnName + " from " + jdbcEnvironment.getDialect().appendLockHint(LockMode.PESSIMISTIC_WRITE, tableName) + " where " + segmentColumnName + " = '" + segmentName + "'" + jdbcEnvironment.getDialect().getForUpdateString();
    update = "update " + tableName + " set " + valueColumnName + " = ? where " + valueColumnName + " = ? and " + segmentColumnName + " = '" + segmentName + "'";
    insert = "insert into " + tableName + "(" + segmentColumnName + ", " + valueColumnName + ") " + "values('" + segmentName + "', ?)";
}
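The JdbcEnvironment step worth noting here is getQualifiedObjectNameFormatter(), which renders a catalog/schema-qualified table name for the active Dialect before it is baked into the generator's SQL strings. A minimal stand-alone sketch of that same step, with hypothetical catalog, schema and table names:

import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;

final class QualifiedNameSketch {

    static String render(JdbcEnvironment jdbcEnvironment) {
        // hypothetical catalog, schema and table names
        QualifiedTableName name = new QualifiedTableName(
                Identifier.toIdentifier("my_catalog"),
                Identifier.toIdentifier("my_schema"),
                Identifier.toIdentifier("hibernate_sequences"));
        // e.g. "my_catalog.my_schema.hibernate_sequences", depending on the Dialect
        return jdbcEnvironment.getQualifiedObjectNameFormatter()
                .format(name, jdbcEnvironment.getDialect());
    }
}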
Use of org.hibernate.engine.jdbc.env.spi.JdbcEnvironment in project hibernate-orm by hibernate.
Class Table, method sqlAlterStrings:
public Iterator sqlAlterStrings(Dialect dialect, Metadata metadata, TableInformation tableInfo, String defaultCatalog, String defaultSchema) throws HibernateException {
    final JdbcEnvironment jdbcEnvironment = metadata.getDatabase().getJdbcEnvironment();
    StringBuilder root = new StringBuilder("alter table ").append(jdbcEnvironment.getQualifiedObjectNameFormatter().format(tableInfo.getName(), dialect)).append(' ').append(dialect.getAddColumnString());
    Iterator iter = getColumnIterator();
    List results = new ArrayList();
    while (iter.hasNext()) {
        final Column column = (Column) iter.next();
        final ColumnInformation columnInfo = tableInfo.getColumn(Identifier.toIdentifier(column.getName(), column.isQuoted()));
        if (columnInfo == null) {
            // the column doesn't exist at all
            StringBuilder alter = new StringBuilder(root.toString()).append(' ').append(column.getQuotedName(dialect)).append(' ').append(column.getSqlType(dialect, metadata));
            String defaultValue = column.getDefaultValue();
            if (defaultValue != null) {
                alter.append(" default ").append(defaultValue);
            }
            if (column.isNullable()) {
                alter.append(dialect.getNullColumnString());
            } else {
                alter.append(" not null");
            }
            if (column.isUnique()) {
                String keyName = Constraint.generateName("UK_", this, column);
                UniqueKey uk = getOrCreateUniqueKey(keyName);
                uk.addColumn(column);
                alter.append(dialect.getUniqueDelegate().getColumnDefinitionUniquenessFragment(column));
            }
            if (column.hasCheckConstraint() && dialect.supportsColumnCheck()) {
                alter.append(" check(").append(column.getCheckConstraint()).append(")");
            }
            String columnComment = column.getComment();
            if (columnComment != null) {
                alter.append(dialect.getColumnComment(columnComment));
            }
            alter.append(dialect.getAddColumnSuffixString());
            results.add(alter.toString());
        }
    }
    if (results.isEmpty()) {
        log.debugf("No alter strings for table : %s", getQuotedName());
    }
    return results.iterator();
}
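A small consumption sketch of the iterator returned above; the surrounding objects are passed in as parameters and the printed statement in the comment is only an illustration, not output taken from the snippet:

import java.util.Iterator;
import org.hibernate.boot.Metadata;
import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.TableInformation;

final class AlterStringsSketch {

    static void printMissingColumnDdl(Table table, Dialect dialect, Metadata metadata,
            TableInformation tableInfo, String defaultCatalog, String defaultSchema) {
        Iterator<?> itr = table.sqlAlterStrings(dialect, metadata, tableInfo, defaultCatalog, defaultSchema);
        while (itr.hasNext()) {
            // e.g. "alter table my_schema.customer add column middle_name varchar(255)" (hypothetical)
            System.out.println(itr.next());
        }
    }
}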
Use of org.hibernate.engine.jdbc.env.spi.JdbcEnvironment in project hibernate-orm by hibernate.
Class Index, method buildSqlCreateIndexString:
public static String buildSqlCreateIndexString(Dialect dialect, String name, Table table, Iterator<Column> columns, java.util.Map<Column, String> columnOrderMap, boolean unique, Metadata metadata) {
    final JdbcEnvironment jdbcEnvironment = metadata.getDatabase().getJdbcEnvironment();
    final String tableName = jdbcEnvironment.getQualifiedObjectNameFormatter().format(table.getQualifiedTableName(), dialect);
    return buildSqlCreateIndexString(dialect, name, tableName, columns, columnOrderMap, unique);
}
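A hedged sketch of calling this helper directly; the index name, the empty column-order map and the non-unique flag are illustrative choices rather than values taken from the snippet above:

import java.util.Collections;
import java.util.Iterator;
import org.hibernate.boot.Metadata;
import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Index;
import org.hibernate.mapping.Table;

final class CreateIndexSketch {

    static String createIndexDdl(Dialect dialect, Table table, Iterator<Column> columns, Metadata metadata) {
        // roughly "create index ix_customer_name on my_schema.customer (last_name, first_name)"
        // for hypothetical table and column names
        return Index.buildSqlCreateIndexString(
                dialect, "ix_customer_name", table, columns,
                Collections.<Column, String>emptyMap(), // no per-column asc/desc ordering
                false, // non-unique index
                metadata);
    }
}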
Use of org.hibernate.engine.jdbc.env.spi.JdbcEnvironment in project hibernate-orm by hibernate.
Class SchemaDropperImpl, method dropFromMetadata:
private void dropFromMetadata(Metadata metadata, ExecutionOptions options, Dialect dialect, Formatter formatter, GenerationTarget... targets) {
    final Database database = metadata.getDatabase();
    final JdbcEnvironment jdbcEnvironment = database.getJdbcEnvironment();
    boolean tryToDropCatalogs = false;
    boolean tryToDropSchemas = false;
    if (options.shouldManageNamespaces()) {
        if (dialect.canCreateSchema()) {
            tryToDropSchemas = true;
        }
        if (dialect.canCreateCatalog()) {
            tryToDropCatalogs = true;
        }
    }
    final Set<String> exportIdentifiers = new HashSet<String>(50);
    for (AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects()) {
        if (!auxiliaryDatabaseObject.beforeTablesOnCreation()) {
            continue;
        }
        if (!auxiliaryDatabaseObject.appliesToDialect(dialect)) {
            continue;
        }
        applySqlStrings(dialect.getAuxiliaryDatabaseObjectExporter().getSqlDropStrings(auxiliaryDatabaseObject, metadata), formatter, options, targets);
    }
    for (Namespace namespace : database.getNamespaces()) {
        if (!schemaFilter.includeNamespace(namespace)) {
            continue;
        }
        // we need to drop all constraints/indexes prior to dropping the tables
        applyConstraintDropping(namespace, metadata, formatter, options, targets);
        // now it's safe to drop the tables
        for (Table table : namespace.getTables()) {
            if (!table.isPhysicalTable()) {
                continue;
            }
            if (!schemaFilter.includeTable(table)) {
                continue;
            }
            checkExportIdentifier(table, exportIdentifiers);
            applySqlStrings(dialect.getTableExporter().getSqlDropStrings(table, metadata), formatter, options, targets);
        }
        for (Sequence sequence : namespace.getSequences()) {
            if (!schemaFilter.includeSequence(sequence)) {
                continue;
            }
            checkExportIdentifier(sequence, exportIdentifiers);
            applySqlStrings(dialect.getSequenceExporter().getSqlDropStrings(sequence, metadata), formatter, options, targets);
        }
    }
    for (AuxiliaryDatabaseObject auxiliaryDatabaseObject : database.getAuxiliaryDatabaseObjects()) {
        if (auxiliaryDatabaseObject.beforeTablesOnCreation()) {
            continue;
        }
        if (!auxiliaryDatabaseObject.appliesToDialect(dialect)) {
            continue;
        }
        applySqlStrings(auxiliaryDatabaseObject.sqlDropStrings(jdbcEnvironment.getDialect()), formatter, options, targets);
    }
    if (tryToDropCatalogs || tryToDropSchemas) {
        Set<Identifier> exportedCatalogs = new HashSet<Identifier>();
        for (Namespace namespace : database.getNamespaces()) {
            if (!schemaFilter.includeNamespace(namespace)) {
                continue;
            }
            if (tryToDropSchemas && namespace.getPhysicalName().getSchema() != null) {
                applySqlStrings(dialect.getDropSchemaCommand(namespace.getPhysicalName().getSchema().render(dialect)), formatter, options, targets);
            }
            if (tryToDropCatalogs) {
                final Identifier catalogLogicalName = namespace.getName().getCatalog();
                final Identifier catalogPhysicalName = namespace.getPhysicalName().getCatalog();
                if (catalogPhysicalName != null && !exportedCatalogs.contains(catalogLogicalName)) {
                    applySqlStrings(dialect.getDropCatalogCommand(catalogPhysicalName.render(dialect)), formatter, options, targets);
                    exportedCatalogs.add(catalogLogicalName);
                }
            }
        }
    }
}
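A minimal bootstrap sketch of how this drop path is commonly reached in practice, by enabling schema management through standard settings; the connection URL is a placeholder and the property values are only one possible choice:

import org.hibernate.SessionFactory;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;

final class DropSchemaSketch {

    static SessionFactory build() {
        StandardServiceRegistry registry = new StandardServiceRegistryBuilder()
                // "create-drop" creates the schema at startup and drops it when the SessionFactory closes
                .applySetting("hibernate.hbm2ddl.auto", "create-drop")
                .applySetting("hibernate.connection.url", "jdbc:h2:mem:demo") // placeholder connection
                .build();
        return new MetadataSources(registry)
                .buildMetadata()
                .buildSessionFactory();
    }
}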