Usage of eu.esdihumboldt.hale.io.jdbc.constraints.SQLType in project hale by halestudio: the method checkBindingAndSqlType of class AbstractDBTest.
/**
 * Checks that the binding of each data type read from the schema equals the
 * expected binding, and that each type's SQL type code is a valid
 * {@link java.sql.Types} constant.
 *
 * @param schema the schema that was read
 * @param map maps a data type name to the expected binding class, e.g. for a
 *            PostgreSQL database the expected binding class for the data
 *            type VARCHAR is {@code String.class}
 * @throws Exception if reflectively reading the int value of a static field
 *             of {@link java.sql.Types} fails
 */
protected void checkBindingAndSqlType(Schema schema, Map<String, Class<?>> map) throws Exception {
	// collect all JDBC SQL type codes declared as public fields in java.sql.Types
	final Map<String, Integer> sqlTypeMap = new HashMap<>();
	for (final Field f : Types.class.getFields()) {
		sqlTypeMap.put(f.getName(), f.getInt(null));
	}
	for (TypeDefinition td : schema.getTypes()) {
		for (ChildDefinition<?> cd : td.getChildren()) {
			PropertyDefinition property = cd.asProperty();
			String name = property.getPropertyType().getName().getLocalPart().toUpperCase();
			SQLType t = property.getPropertyType().getConstraint(SQLType.class);
			// the SQL type code must be one of the java.sql.Types constants
			// (Integer.valueOf instead of the deprecated new Integer(...) constructor)
			assertTrue(sqlTypeMap.containsValue(Integer.valueOf(t.getType())));
			Binding k = property.getPropertyType().getConstraint(Binding.class);
			if (map.containsKey(name)) {
				// an expected binding is specified for this type - compare
				assertEquals("Binding mismatch for type \"" + name + "\"", map.get(name), k.getBinding());
			}
			else {
				fail(MessageFormat.format("No expected binding specified for type {0} (SQL type {1}) - binding is {2}", name, t.getType(), k.getBinding()));
			}
		}
	}
}
Usage of eu.esdihumboldt.hale.io.jdbc.constraints.SQLType in project hale by halestudio: the method populateInsertStatementOrExecuteAutoIncStatement of class JDBCInstanceWriter.
/**
 * Populates a prepared insert statement with values from the given instance.
 * Checks if a property has the auto-increment constraint; if it has, the
 * original old id is mapped to the id that is auto-incremented while
 * inserting the value. This mapping is used when inserting foreign key
 * values associated with those auto-incremented column ids whose value has
 * been changed, so that insertion of the foreign key won't fail. Depending
 * on the auto-increment flag the statement is either executed directly or
 * added to the current batch.
 *
 * @param statement the insert statement
 * @param properties the properties to fill the statement with
 * @param instance the instance
 * @param reporter the reporter
 * @param conn the connection (used for geometry conversion for Oracle)
 * @throws SQLException if configuring the statement fails
 */
private void populateInsertStatementOrExecuteAutoIncStatement(PreparedStatement statement, Set<QName> properties, Instance instance, IOReporter reporter, Connection conn) throws SQLException {
	TypeDefinition type = instance.getDefinition();
	int index = 1;
	Object oldValue = null;
	boolean isAutoIncremented = false;
	for (QName propertyName : properties) {
		PropertyDefinition property = (PropertyDefinition) type.getChild(propertyName);
		Object[] values = instance.getProperty(propertyName);
		SQLType sqlType = property.getPropertyType().getConstraint(SQLType.class);
		if (!sqlType.isSet()) {
			reporter.error(new IOMessageImpl("SQL type not set. Please only export to schemas read from a database.", null));
			statement.setObject(index, null);
			// FIX: advance the parameter index before skipping this property,
			// otherwise every following parameter is written one position too
			// early (the column is part of the prepared statement)
			index++;
			continue;
		}
		SQLArray arrayInfo = property.getPropertyType().getConstraint(SQLArray.class);
		Object value;
		if (arrayInfo.isArray() && arrayInfo.getDimension() <= 1) {
			// one-dimensional array represented as multiple occurrence property
			value = (values == null) ? (new Object[0]) : (values);
		}
		else {
			// single value expected - warn if more than one is present
			if (values != null && values.length > 1) {
				reporter.warn(new IOMessageImpl("Multiple values for a property. Only exporting first.", null));
			}
			value = (values == null || values.length == 0) ? null : values[0];
		}
		AutoGenerated auto = property.getConstraint(AutoGenerated.class);
		if (!isWriteUnordered()) {
			if (auto.isEnabled()) {
				// remember the original id so it can be mapped to the
				// generated key after execution
				isAutoIncremented = true;
				if (value != null) {
					oldValue = value;
				}
				// NOTE(review): the parameter index is intentionally not
				// advanced here - presumably the auto-generated column is
				// excluded from the insert statement; confirm against the
				// statement builder
				continue;
			}
		}
		Reference ref = property.getConstraint(Reference.class);
		// guard against an empty referenced-types collection before accessing
		// the first element
		if (ref.getReferencedTypes() != null && !ref.getReferencedTypes().isEmpty()) {
			TypeDefinition td = (TypeDefinition) ref.getReferencedTypes().toArray()[0];
			Map<Object, Long> marshMallow = typAuto.get(td);
			if (marshMallow != null && value != null) {
				// look up the generated identifier for the referenced instance
				value = marshMallow.get(processLookupId(value));
			}
		}
		if (values == null || values.length == 0) {
			// XXX The default value could be a function call.
			// Better would be to leave the column out of the insert
			// statement, or set it to the SQL keyword "DEFAULT".
			DefaultValue defaultValue = property.getConstraint(DefaultValue.class);
			if (defaultValue.isSet()) {
				statement.setObject(index, defaultValue.getValue(), sqlType.getType());
			}
			else if (property.getConstraint(NillableFlag.class).isEnabled()) {
				statement.setNull(index, sqlType.getType());
			}
			else {
				// no default, not nillable, will not work...
				// set it to null here and let query fail (probably)
				// XXX maybe skip this insert?
				statement.setNull(index, sqlType.getType());
				reporter.warn(new IOMessageImpl("Property no value, not nillable, no default value, insert will probably fail.", null));
			}
		}
		else if (value == null) {
			statement.setNull(index, sqlType.getType());
		}
		else {
			setStatementParameter(statement, index, value, property, sqlType.getType(), reporter, conn);
		}
		index++;
	}
	if (isAutoIncremented) {
		// execute immediately so the generated key can be retrieved and mapped
		statement.execute();
		Long newValue = null;
		// FIX: close the generated-keys result set to avoid a resource leak
		try (ResultSet rs = statement.getGeneratedKeys()) {
			while (rs.next()) {
				newValue = rs.getLong(1);
			}
		}
		addIDMapping(type, oldValue, newValue);
	}
	else {
		statement.addBatch();
	}
}
Aggregations