Usage of eu.esdihumboldt.hale.io.jdbc.constraints.SQLArray in the hale project by halestudio.
Class: JDBCInstanceWriter — method: populateInsertStatementOrExecuteAutoIncStatement.
/**
 * Populate a prepared insert statement with values from the given instance.
 * Checks if the property has an auto-increment constraint; if so, the
 * original old id is mapped to the id that is auto-incremented while
 * inserting the value. This mapping is used when inserting the foreign key
 * values associated with those auto-incremented column ids whose value has
 * been changed. Thus, insertion of the foreign key won't fail. The statement
 * is either executed directly or added to the batch, depending on the
 * auto-increment flag.
 *
 * @param statement the insert statement
 * @param properties the properties to fill the statement with
 * @param instance the instance
 * @param reporter the reporter
 * @param conn Connection (used for Geometry conversion for Oracle)
 * @throws SQLException if configuring the statement fails
 */
private void populateInsertStatementOrExecuteAutoIncStatement(PreparedStatement statement,
		Set<QName> properties, Instance instance, IOReporter reporter, Connection conn)
		throws SQLException {
	TypeDefinition type = instance.getDefinition();

	int index = 1;
	// original id value of the auto-incremented column (if any)
	Object oldValue = null;
	boolean isAutoIncremented = false;
	for (QName propertyName : properties) {
		PropertyDefinition property = (PropertyDefinition) type.getChild(propertyName);
		Object[] values = instance.getProperty(propertyName);

		SQLType sqlType = property.getPropertyType().getConstraint(SQLType.class);
		if (!sqlType.isSet()) {
			reporter.error(new IOMessageImpl(
					"SQL type not set. Please only export to schemas read from a database.",
					null));
			statement.setObject(index, null);
			// FIX: advance the parameter index before skipping this property,
			// otherwise the next property overwrites this placeholder and all
			// following parameters end up at shifted positions
			index++;
			continue;
		}

		SQLArray arrayInfo = property.getPropertyType().getConstraint(SQLArray.class);

		Object value;
		if (arrayInfo.isArray() && arrayInfo.getDimension() <= 1) {
			// array as multiple occurrence property
			value = (values == null) ? (new Object[0]) : (values);
		}
		else {
			// single value
			if (values != null && values.length > 1)
				reporter.warn(new IOMessageImpl(
						"Multiple values for a property. Only exporting first.", null));
			value = (values == null || values.length == 0) ? null : values[0];
		}

		AutoGenerated auto = property.getConstraint(AutoGenerated.class);
		if (!isWriteUnordered()) {
			if (auto.isEnabled()) {
				// auto-generated column is not part of the insert statement:
				// remember the original value and skip without advancing the
				// parameter index
				isAutoIncremented = true;
				if (value != null) {
					oldValue = value;
				}
				continue;
			}
		}

		Reference ref = property.getConstraint(Reference.class);
		if (ref.getReferencedTypes() != null) {
			TypeDefinition td = (TypeDefinition) ref.getReferencedTypes().toArray()[0];
			Map<Object, Long> marshMallow = typAuto.get(td);
			if (marshMallow != null && value != null) {
				// lookup identifier for reference
				value = marshMallow.get(processLookupId(value));
			}
		}

		if (values == null || values.length == 0) {
			// XXX The default value could be a function call.
			// Better would be to leave the column out of the insert
			// statement, or set it to the SQL keyword "DEFAULT".
			DefaultValue defaultValue = property.getConstraint(DefaultValue.class);
			if (defaultValue.isSet())
				statement.setObject(index, defaultValue.getValue(), sqlType.getType());
			else if (property.getConstraint(NillableFlag.class).isEnabled())
				statement.setNull(index, sqlType.getType());
			else {
				// no default, not nillable, will not work...
				// set it to null here and let query fail (probably)
				// XXX maybe skip this insert?
				statement.setNull(index, sqlType.getType());
				reporter.warn(new IOMessageImpl(
						"Property no value, not nillable, no default value, insert will probably fail.",
						null));
			}
		}
		else if (value == null)
			statement.setNull(index, sqlType.getType());
		else
			setStatementParameter(statement, index, value, property, sqlType.getType(),
					reporter, conn);

		index++;
	}

	if (isAutoIncremented) {
		statement.execute();
		Long newValue = null;
		// FIX: close the generated-keys result set to avoid a resource leak
		try (ResultSet rs = statement.getGeneratedKeys()) {
			while (rs.next()) {
				newValue = rs.getLong(1);
			}
		}
		addIDMapping(type, oldValue, newValue);
	}
	else {
		statement.addBatch();
	}
}
Usage of eu.esdihumboldt.hale.io.jdbc.constraints.SQLArray in the hale project by halestudio.
Class: SQLSchemaReader — method: getOrCreateProperty.
/**
 * Gets or creates a property definition for the given column. The column's
 * type definition is created as well if it does not exist yet.
 *
 * @param tableType the type definition of the parent table this column
 *            belongs to
 * @param column the column to get or create a property definition for
 * @param namespace the schema namespace
 * @param typeIndex the type index
 * @param connection the database connection
 * @param reporter the reporter
 * @param catalog the database information
 * @return the property definition for the given column
 */
private DefaultPropertyDefinition getOrCreateProperty(TypeDefinition tableType,
		ResultsColumn column, String namespace, DefaultSchema typeIndex, Connection connection,
		IOReporter reporter, Catalog catalog) {
	QName propertyName = new QName(unquote(column.getName()));

	// reuse a previously created property definition if present
	ChildDefinition<?> child = tableType.getChild(propertyName);
	if (child != null) {
		return (DefaultPropertyDefinition) child;
	}

	// determine the column's type definition (creating it if needed)
	TypeDefinition columnType = JDBCSchemaReader.getOrCreateColumnType(column, namespace,
			typeIndex, connection, tableType, reporter, catalog);
	SQLArray arrayInfo = columnType.getConstraint(SQLArray.class);

	DefaultPropertyDefinition property = new DefaultPropertyDefinition(propertyName, tableType,
			columnType);

	// carry over column remarks as the property description
	String remarks = column.getRemarks();
	if (remarks != null && !remarks.isEmpty()) {
		property.setDescription(remarks);
	}

	property.setConstraint(NillableFlag.get(column.isNullable()));

	// derive cardinality from the array information
	if (!(arrayInfo.isArray() && arrayInfo.getDimension() <= 1)) {
		property.setConstraint(Cardinality.CC_EXACTLY_ONCE);
	}
	else if (arrayInfo.hasSize(0)) {
		// XXX what is appropriate as minimum?
		long lower = 0;
		long upper = arrayInfo.getSize(0);
		property.setConstraint(Cardinality.get(lower, upper));
	}
	else {
		// array dimension size is not known (0)
		property.setConstraint(Cardinality.CC_ANY_NUMBER);
	}

	return property;
}
Usage of eu.esdihumboldt.hale.io.jdbc.constraints.SQLArray in the hale project by halestudio.
Class: SQLArrayFactory — method: restore.
/**
 * Restores a {@link SQLArray} constraint from its persisted value
 * representation. Returns {@link SQLArray#NO_ARRAY} if the value does not
 * carry any array properties.
 */
@Override
public SQLArray restore(Value value, Definition<?> definition, TypeResolver typeIndex,
		ClassResolver resolver) throws Exception {
	ValueProperties arrayProps = value.as(ValueProperties.class);
	if (arrayProps == null) {
		// no stored properties -> not an array
		return SQLArray.NO_ARRAY;
	}

	// element Java class (may stay null if not stored)
	Class<?> elementType = null;
	String className = arrayProps.getSafe(NAME_ELEMENT_CLASS).as(String.class);
	if (className != null) {
		elementType = resolver.loadClass(className);
	}

	// database-specific element type name
	String elementTypeName = arrayProps.getSafe(NAME_ELEMENT_TYPE_NAME).as(String.class);

	// array dimension (defaults to 0 = unknown)
	int dimension = arrayProps.getSafe(NAME_DIMENSION).as(Integer.class, 0);

	// per-dimension sizes, if they were stored
	int[] dimensionSizes = null;
	ValueList sizeEntries = arrayProps.getSafe(NAME_SIZES).as(ValueList.class);
	if (sizeEntries != null) {
		dimensionSizes = new int[sizeEntries.size()];
		int pos = 0;
		for (Value entry : sizeEntries) {
			dimensionSizes[pos++] = entry.as(Integer.class, 0);
		}
	}

	return new SQLArray(elementType, elementTypeName, dimension, dimensionSizes);
}
Usage of eu.esdihumboldt.hale.io.jdbc.constraints.SQLArray in the hale project by halestudio.
Class: JDBCSchemaReader — method: getOrCreateColumnType.
/**
 * Get or create the type definition for the given column.
 *
 * @param column the column
 * @param overallNamespace the database namespace
 * @param types the type index
 * @param connection the database connection
 * @param tableType the type definition of the table the column is part of
 * @param reporter the reporter
 * @param catalog the catalog for access to other column types
 * @return the type definition for the column type
 */
public static TypeDefinition getOrCreateColumnType(BaseColumn<?> column,
		final String overallNamespace, DefaultSchema types, Connection connection,
		TypeDefinition tableType, IOReporter reporter, Catalog catalog) {
	// XXX what about shared types?
	// TODO the size/width info (VARCHAR(_30_)) is in column, the
	// columntype/-name is not sufficient
	// getType();
	ColumnDataType columnType = column.getColumnDataType();
	String localName = columnType.getName();
	// by default the type is shared per database namespace
	QName typeName = new QName(overallNamespace, localName);

	// check for geometry type
	GeometryTypeInfo geomType = GeometryTypeExtension.getInstance()
			.getTypeInfo(columnType.getName(), connection);
	@SuppressWarnings("rawtypes")
	GeometryAdvisor geomAdvisor = null;
	if (geomType != null) {
		geomAdvisor = geomType.getGeometryAdvisor();

		// determine if a type specifically for this column is needed
		if (!geomAdvisor.isFixedType(columnType)) {
			// must use a specific type definition for this column
			// -> use a type name based on the column
			// new namespace is the table and column name
			String ns = tableType.getName().getNamespaceURI() + '/'
					+ tableType.getName().getLocalPart() + '/' + unquote(column.getName());
			typeName = new QName(ns, localName);
		}
	}

	// check for existing type (shared types are reused across columns)
	TypeDefinition existing = types.getType(typeName);
	if (existing != null)
		return existing;

	// create new type
	DefaultTypeDefinition type = new DefaultTypeDefinition(typeName);
	type.setConstraint(HasValueFlag.ENABLED);

	// database-specific custom type mapping may override the SQL type
	CustomType cust = CustomTypeExtension.getInstance().getCustomType(localName, connection);
	/*
	 * Oracle jdbc was returning sqltype -6 for NUMBER data type, but it is
	 * bound to boolean by Types.java class. Configured the sqltype 6 for
	 * NUMBER data type.
	 */
	if (cust != null) {
		type.setConstraint(SQLType.get(cust.getSQLType()));
	}
	else {
		type.setConstraint(SQLType.get(columnType.getJavaSqlType().getJavaSqlType()));
	}

	if (geomType != null && geomAdvisor != null) {
		// configure geometry type
		@SuppressWarnings("unchecked")
		Class<? extends Geometry> geomClass = geomAdvisor.configureGeometryColumnType(connection,
				column, type, reporter);
		type.setConstraint(GeometryType.get(geomClass));
		// always a single geometry
		type.setConstraint(Binding.get(GeometryProperty.class));

		// remember advisor for type (used in instance writer)
		type.setConstraint(geomType.getConstraint());
	}
	else {
		// configure type
		Class<?> binding = null;
		if (cust != null) {
			binding = cust.getBinding();
		}
		else {
			if (column.getColumnDataType().getJavaSqlType().getJavaSqlType() == Types.ARRAY) {
				// TODO let this be handled by a possible advisor?
				/*
				 * Special handling for arrays.
				 *
				 * Challenges:
				 *
				 * - find the type of contained items
				 *
				 * - determine dimensions and size
				 */
				Class<?> elementBinding;

				// determine type/binding of contained items
				ColumnDataType cdt = column.getColumnDataType();
				ColumnDataType itemType = null;

				String dbTypeName = cdt.getDatabaseSpecificTypeName();
				// prefix and look up type
				// NOTE(review): a leading '_' appears to be the Postgres
				// naming convention for array element types — confirm for
				// other databases
				if (dbTypeName.startsWith("_")) {
					String testName = dbTypeName.substring(1);
					itemType = catalog.getSystemColumnDataType(testName);
				}

				if (itemType == null) {
					// generic binding
					elementBinding = Object.class;
					reporter.error(new IOMessageImpl(
							"Could not determine element type for array column", null));
				}
				else {
					elementBinding = itemType.getTypeMappedClass();
					// TODO support custom bindings?
					// XXX probably needed for Oracle?
				}

				// dimensions and size cannot be determined from schema
				// crawler it seems - would need database specific queries
				// (if the info is available at all)
				/*
				 * Postgres:
				 *
				 * Dimensions and size are not part of the schema, they can
				 * only be determined for a value.
				 */
				// XXX for now, stick to what we can determine
				int dimension = SQLArray.UNKNOWN_DIMENSION;
				String specificTypeName = (itemType != null)
						? (itemType.getDatabaseSpecificTypeName()) : (null);
				type.setConstraint(
						new SQLArray(elementBinding, specificTypeName, dimension, null));

				// set binding
				if (dimension <= 1) {
					// XXX for now, use this representation also if
					// dimension is not known
					// 1-dimensional -> as multiple occurrences
					binding = elementBinding;
				}
				else {
					// XXX use collection or something similar instead?
					binding = Object.class;
				}
			}
			else {
				binding = column.getColumnDataType().getTypeMappedClass();
			}
		}

		type.setConstraint(Binding.get(binding));
		type.setConstraint(HasValueFlag.ENABLED);
	}

	// carry over column type remarks as description
	if (columnType.getRemarks() != null && !columnType.getRemarks().isEmpty())
		type.setDescription(columnType.getRemarks());
	types.addType(type);
	return type;
}
Usage of eu.esdihumboldt.hale.io.jdbc.constraints.SQLArray in the hale project by halestudio.
Class: JDBCSchemaReader — method: getOrCreateProperty.
/**
 * Gets or creates a property definition for the given column. The column's
 * type definition is created as well if necessary.
 *
 * @param schema the schema the table belongs to
 * @param tableType the type definition of the parent table this column
 *            belongs to
 * @param column the column to get or create a property definition for
 * @param overallNamespace the database namespace
 * @param namespace the schema namespace
 * @param typeIndex the type index
 * @param connection the database connection
 * @param reporter the reporter
 * @param catalog the catalog for access to other column types
 * @return the property definition for the given column
 */
private DefaultPropertyDefinition getOrCreateProperty(schemacrawler.schema.Schema schema,
		TypeDefinition tableType, Column column, String overallNamespace, String namespace,
		DefaultSchema typeIndex, Connection connection, IOReporter reporter, Catalog catalog) {
	QName propertyName = new QName(unquote(column.getName()));

	// reuse an already created property definition if available
	ChildDefinition<?> child = tableType.getChild(propertyName);
	if (child != null) {
		return (DefaultPropertyDefinition) child;
	}

	// determine the column type (creating it if needed)
	TypeDefinition columnType = getOrCreateColumnType(column, overallNamespace, typeIndex,
			connection, tableType, reporter, catalog);
	SQLArray arrayInfo = columnType.getConstraint(SQLArray.class);

	DefaultPropertyDefinition property = new DefaultPropertyDefinition(propertyName, tableType,
			columnType);

	// use the column remarks as property description
	String remarks = column.getRemarks();
	if (remarks != null && !remarks.isEmpty()) {
		property.setDescription(remarks);
	}

	property.setConstraint(NillableFlag.get(column.isNullable()));

	// XXX Default value is read as string from the meta data.
	// This is probably not really a problem, but should be noted!
	// XXX In particular the default value can be a function call like for
	// example GETDATE().
	String defaultValue = column.getDefaultValue();
	if (arrayInfo.isArray() && arrayInfo.getDimension() <= 1) {
		if (arrayInfo.hasSize(0)) {
			// XXX what is appropriate as minimum?
			long lower = 0;
			long upper = arrayInfo.getSize(0);
			property.setConstraint(Cardinality.get(lower, upper));
		}
		else {
			// dimension size is not known (0)
			property.setConstraint(Cardinality.CC_ANY_NUMBER);
		}
	}
	else if (defaultValue != null) {
		property.setConstraint(new DefaultValue(defaultValue));
		property.setConstraint(Cardinality.CC_OPTIONAL);
	}
	else {
		property.setConstraint(Cardinality.CC_EXACTLY_ONCE);
	}

	// auto-increment flag (single call replaces the former if/else)
	property.setConstraint(AutoGenerated.get(column.isAutoIncremented()));

	// foreign key handling (keys can span multiple columns)
	if (column.isPartOfForeignKey()) {
		Column referenced = column.getReferencedColumn();
		// guard against a column referencing itself, which previously
		// caused a StackOverflow
		if (!(referenced.getParent().equals(column.getParent()) && referenced.equals(column))) {
			// The referenced table can live in a different schema, so the
			// referenced column's table must be created with the referenced
			// table's own schema and namespace, not this table's.
			String referencedNamespace = getReferencedTableNamespace(referenced.getParent(),
					overallNamespace);
			property.setConstraint(new Reference(
					getOrCreateTableType(referenced.getParent().getSchema(),
							referenced.getParent(), overallNamespace, referencedNamespace,
							typeIndex, connection, reporter, catalog)));
		}
	}

	return property;
}
Aggregations