Use of eu.esdihumboldt.hale.common.schema.model.constraint.property.AutoGenerated in project hale by halestudio: class JDBCInstanceWriter, method getInsertStatement.
/**
 * Create a prepared insert statement, based on the given type definition.
 * Currently, auto incremented fields are not inserted into the statement
 * (unless writing is unordered).
 *
 * @param type the type definition
 * @param properties the set properties of the instance for which this
 *            statement is
 * @param typeStatements the already created statements, used as a cache so
 *            statements are reused per type and property set
 * @param connection the database connection
 * @return the insert statement
 * @throws SQLException if creating the prepared statement fails
 */
private PreparedStatement getInsertStatement(TypeDefinition type, Set<QName> properties,
		Map<TypeDefinition, Map<Set<QName>, PreparedStatement>> typeStatements,
		Connection connection) throws SQLException {
	// one statement cache per type, keyed by the exact set of properties
	Map<Set<QName>, PreparedStatement> typeSpecificMap = typeStatements.get(type);
	if (typeSpecificMap == null) {
		typeSpecificMap = new HashMap<Set<QName>, PreparedStatement>();
		typeStatements.put(type, typeSpecificMap);
	}

	PreparedStatement result = typeSpecificMap.get(properties);
	if (result == null) {
		String tableName = type.getConstraint(DatabaseTable.class).getFullTableName();

		// create prepared statement SQL
		// (StringBuilder instead of StringBuffer: only used locally, no
		// synchronization needed)
		StringBuilder pSql = new StringBuilder();
		pSql.append("INSERT INTO ");
		pSql.append(tableName);
		pSql.append(" (");

		StringBuilder valuesSql = new StringBuilder();

		boolean first = true;
		for (QName property : properties) {
			AutoGenerated auto = ((PropertyDefinition) type.getChild(property))
					.getConstraint(AutoGenerated.class);
			// auto generated columns are left out of the statement
			// (if writing is ordered)
			if (!auto.isEnabled() || isWriteUnordered()) {
				if (first)
					first = false;
				else {
					pSql.append(", ");
					valuesSql.append(",");
				}
				// quote the column name to preserve case / special characters
				pSql.append('"').append(property.getLocalPart()).append('"');
				valuesSql.append('?');
			}
		}

		pSql.append(") VALUES (");
		pSql.append(valuesSql);
		pSql.append(")");

		// XXX Actually we don't necessarily need the auto generated keys,
		// we need the primary key!
		// XXX Statement.RETURN_GENERATED_KEYS does not work with batches
		// in PostgreSQL
		// Auto generated keys are returned, for creating a map with old
		// value and auto generated.
		// As it does not work with batches in PostGreSQL, each instance
		// which has auto incremented ids are written one at a time and
		// returned the auto generated key.
		result = connection.prepareStatement(pSql.toString(), Statement.RETURN_GENERATED_KEYS);
		typeSpecificMap.put(properties, result);
	}

	return result;
}
Use of eu.esdihumboldt.hale.common.schema.model.constraint.property.AutoGenerated in project hale by halestudio: class JDBCInstanceWriter, method populateInsertStatementOrExecuteAutoIncStatement.
/**
 * Populate a prepared insert statement with values from the given instance.
 * Checks if the property has auto incremental constraint, if it has then it
 * maps the original old id to the id that is auto incremented while
 * inserting the value. This mapping is used when inserting the foreign key
 * values associated with those auto incremented column ids whose value has
 * been changed. Thus, insertion of the foreign key wont fail. It will
 * either execute the statement directly or add it into the batches
 * depending upon the auto incremented flag.
 *
 * @param statement the insert statement
 * @param properties the properties to fill the statement with
 * @param instance the instance
 * @param reporter the reporter
 * @param conn Connection (used for Geometry conversion for oracle)
 * @throws SQLException if configuring the statement fails
 */
private void populateInsertStatementOrExecuteAutoIncStatement(PreparedStatement statement,
		Set<QName> properties, Instance instance, IOReporter reporter, Connection conn)
		throws SQLException {
	TypeDefinition type = instance.getDefinition();

	int index = 1;
	Object oldValue = null;
	boolean isAutoIncremented = false;
	for (QName propertyName : properties) {
		PropertyDefinition property = (PropertyDefinition) type.getChild(propertyName);
		Object[] values = instance.getProperty(propertyName);

		SQLType sqlType = property.getPropertyType().getConstraint(SQLType.class);
		if (!sqlType.isSet()) {
			reporter.error(new IOMessageImpl(
					"SQL type not set. Please only export to schemas read from a database.",
					null));
			statement.setObject(index, null);
			// FIX: advance the parameter index before skipping, otherwise the
			// next property overwrites this parameter slot and the statement
			// ends up with a trailing unset placeholder
			index++;
			continue;
		}

		SQLArray arrayInfo = property.getPropertyType().getConstraint(SQLArray.class);

		Object value;
		if (arrayInfo.isArray() && arrayInfo.getDimension() <= 1) {
			// array as multiple occurrence property
			value = (values == null) ? (new Object[0]) : (values);
		}
		else {
			// single value
			if (values != null && values.length > 1)
				reporter.warn(new IOMessageImpl(
						"Multiple values for a property. Only exporting first.", null));
			value = (values == null || values.length == 0) ? null : values[0];
		}

		AutoGenerated auto = property.getConstraint(AutoGenerated.class);
		if (!isWriteUnordered()) {
			// auto generated columns are not part of the prepared statement
			// when writing ordered (see getInsertStatement), so only remember
			// the original value for the old-id -> generated-id mapping
			if (auto.isEnabled()) {
				isAutoIncremented = true;
				if (value != null) {
					oldValue = value;
				}
				continue;
			}
		}

		Reference ref = property.getConstraint(Reference.class);
		if (ref.getReferencedTypes() != null) {
			TypeDefinition td = (TypeDefinition) ref.getReferencedTypes().toArray()[0];
			Map<Object, Long> marshMallow = typAuto.get(td);
			if (marshMallow != null && value != null) {
				// lookup identifier for reference
				value = marshMallow.get(processLookupId(value));
			}
		}

		if (values == null || values.length == 0) {
			// XXX The default value could be a function call.
			// Better would be to leave the column out of the insert
			// statement, or set it to the SQL keyword "DEFAULT".
			DefaultValue defaultValue = property.getConstraint(DefaultValue.class);
			if (defaultValue.isSet())
				statement.setObject(index, defaultValue.getValue(), sqlType.getType());
			else if (property.getConstraint(NillableFlag.class).isEnabled())
				statement.setNull(index, sqlType.getType());
			else {
				// no default, not nillable, will not work...
				// set it to null here and let query fail (probably)
				// XXX maybe skip this insert?
				statement.setNull(index, sqlType.getType());
				reporter.warn(new IOMessageImpl(
						"Property no value, not nillable, no default value, insert will probably fail.",
						null));
			}
		}
		else if (value == null)
			statement.setNull(index, sqlType.getType());
		else
			setStatementParameter(statement, index, value, property, sqlType.getType(),
					reporter, conn);

		index++;
	}

	if (isAutoIncremented) {
		// execute directly: generated keys are not returned for batches
		// in PostgreSQL (see getInsertStatement)
		statement.execute();
		Long newValue = null;
		// FIX: close the ResultSet to avoid leaking a database cursor
		try (ResultSet rs = statement.getGeneratedKeys()) {
			while (rs.next()) {
				newValue = rs.getLong(1);
			}
		}
		addIDMapping(type, oldValue, newValue);
	}
	else {
		statement.addBatch();
	}
}
Aggregations