use of eu.esdihumboldt.hale.common.schema.model.constraint.property.Reference in project hale by halestudio.
the class XLinkReferenceValidator method validatePropertyConstraint.
@Override
public void validatePropertyConstraint(Object[] values, PropertyConstraint constraint, PropertyDefinition property, InstanceValidationContext context, ValidationLocation location) throws ValidationException {
if (values == null) {
return;
}
Object contextObj = context.getContext(XLinkReferenceValidator.class);
XLinkReferenceContext ctx;
if (contextObj instanceof XLinkReferenceContext) {
ctx = (XLinkReferenceContext) contextObj;
} else {
ctx = new XLinkReferenceContext();
context.putContext(XLinkReferenceValidator.class, ctx);
}
// collect local references
Reference ref = property.getConstraint(Reference.class);
if (ref instanceof XLinkReference && ref.isReference()) {
for (Object value : values) {
if (value != null) {
String id = value.toString();
if (id != null && id.startsWith("#")) {
ctx.addLocalReference(id.substring(1), location);
}
}
}
}
// collect XML IDs
Unique unique = property.getConstraint(Unique.class);
if (unique instanceof XmlIdUnique && unique.isEnabled()) {
for (Object value : values) {
addIdentifier(value, ctx);
}
}
}
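The validator above only collects information: local references ("#id" values) and XML identifiers. The actual check that every local reference points to a collected ID would happen in a final validation step once all instances have been visited. A minimal sketch of what such a collecting context could look like (the class name XLinkReferenceContext is taken from the snippet, but the fields, methods and the reconciliation step below are assumptions, not the actual hale API):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class XLinkReferenceContext {

    private final Map<String, ValidationLocation> localReferences = new HashMap<>();
    private final Set<String> identifiers = new HashSet<>();

    // record a local reference ("#id" with the leading '#' already stripped)
    void addLocalReference(String id, ValidationLocation location) {
        localReferences.put(id, location);
    }

    // record an XML ID encountered in the data
    void addIdentifier(String id) {
        identifiers.add(id);
    }

    // after all instances were visited: references whose target ID was never seen
    Map<String, ValidationLocation> getUnresolvedReferences() {
        Map<String, ValidationLocation> unresolved = new HashMap<>(localReferences);
        unresolved.keySet().removeAll(identifiers);
        return unresolved;
    }
}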
use of eu.esdihumboldt.hale.common.schema.model.constraint.property.Reference in project hale by halestudio.
the class ReferenceFactory method restore.
@Override
public Reference restore(Value value, Definition<?> definition, TypeResolver typeIndex, ClassResolver resolver) throws Exception {
ValueProperties props = value.as(ValueProperties.class);
Reference ref = new Reference(props.get(P_IS_REF).as(Boolean.class, false));
Value types = props.get(P_TYPES);
if (types.isComplex()) {
ValueList list = types.as(ValueList.class);
if (list != null) {
for (Value entry : list) {
Optional<TypeDefinition> type = typeIndex.resolve(entry);
if (type.isPresent()) {
ref.addReferencedType(type.get());
} else {
throw new IllegalStateException("Could not resolve type definition for index " + entry);
}
}
}
}
return ref;
}
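Once restored, the constraint is queried like any other property constraint. A small usage sketch that relies only on the Reference API visible in the snippets on this page (the property variable stands for an arbitrary PropertyDefinition; the exact return type of getReferencedTypes is assumed):

Reference ref = property.getConstraint(Reference.class);
if (ref.isReference()) {
    // the types an instance referenced by this property may have (may be null if unrestricted)
    Collection<? extends TypeDefinition> referencedTypes = ref.getReferencedTypes();
    if (referencedTypes != null) {
        for (TypeDefinition referencedType : referencedTypes) {
            System.out.println("May reference instances of " + referencedType.getName());
        }
    }
}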
use of eu.esdihumboldt.hale.common.schema.model.constraint.property.Reference in project hale by halestudio.
the class JDBCInstanceWriter method populateInsertStatementOrExecuteAutoIncStatement.
/**
* Populates a prepared insert statement with values from the given instance.
* If a property carries the auto-increment constraint, the original id is
* mapped to the id generated by the database while inserting the value. This
* mapping is used when inserting foreign key values that refer to such
* auto-incremented columns, so that the foreign key insert won't fail. The
* statement is either executed directly or added to the batch, depending on
* whether an auto-incremented column was encountered.
*
* @param statement the insert statement
* @param properties the properties to fill the statement with
* @param instance the instance
* @param reporter the reporter
* @param conn the connection (used for geometry conversion for Oracle)
* @throws SQLException if configuring the statement fails
*/
private void populateInsertStatementOrExecuteAutoIncStatement(PreparedStatement statement, Set<QName> properties, Instance instance, IOReporter reporter, Connection conn) throws SQLException {
TypeDefinition type = instance.getDefinition();
int index = 1;
Object oldValue = null;
boolean isAutoIncremented = false;
for (QName propertyName : properties) {
PropertyDefinition property = (PropertyDefinition) type.getChild(propertyName);
Object[] values = instance.getProperty(propertyName);
SQLType sqlType = property.getPropertyType().getConstraint(SQLType.class);
if (!sqlType.isSet()) {
reporter.error(new IOMessageImpl("SQL type not set. Please only export to schemas read from a database.", null));
statement.setObject(index, null);
continue;
}
SQLArray arrayInfo = property.getPropertyType().getConstraint(SQLArray.class);
Object value;
if (arrayInfo.isArray() && arrayInfo.getDimension() <= 1) {
// array as multiple occurrence property
value = (values == null) ? (new Object[0]) : (values);
} else {
// single value
if (values != null && values.length > 1)
reporter.warn(new IOMessageImpl("Multiple values for a property. Only exporting first.", null));
value = (values == null || values.length == 0) ? null : values[0];
}
AutoGenerated auto = property.getConstraint(AutoGenerated.class);
if (!isWriteUnordered()) {
if (auto.isEnabled()) {
isAutoIncremented = true;
if (value != null) {
oldValue = value;
}
continue;
}
}
Reference ref = property.getConstraint(Reference.class);
if (ref.getReferencedTypes() != null) {
TypeDefinition td = (TypeDefinition) ref.getReferencedTypes().toArray()[0];
Map<Object, Long> marshMallow = typAuto.get(td);
if (marshMallow != null && value != null) {
// lookup identifier for reference
value = marshMallow.get(processLookupId(value));
}
}
if (values == null || values.length == 0) {
// XXX The default value could be a function call.
// Better would be to leave the column out of the insert
// statement, or set it to the SQL keyword "DEFAULT".
DefaultValue defaultValue = property.getConstraint(DefaultValue.class);
if (defaultValue.isSet())
statement.setObject(index, defaultValue.getValue(), sqlType.getType());
else if (property.getConstraint(NillableFlag.class).isEnabled())
statement.setNull(index, sqlType.getType());
else {
// no default, not nillable, will not work...
// set it to null here and let query fail (probably)
// XXX maybe skip this insert?
statement.setNull(index, sqlType.getType());
reporter.warn(new IOMessageImpl("Property no value, not nillable, no default value, insert will probably fail.", null));
}
} else if (value == null)
statement.setNull(index, sqlType.getType());
else
setStatementParameter(statement, index, value, property, sqlType.getType(), reporter, conn);
index++;
}
if (isAutoIncremented) {
statement.execute();
ResultSet rs = statement.getGeneratedKeys();
Long newValue = null;
while (rs.next()) {
newValue = rs.getLong(1);
}
addIDMapping(type, oldValue, newValue);
} else {
statement.addBatch();
}
}
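The method relies on two helpers that are not shown here: typAuto, a per-type map from the original id to the generated key, and addIDMapping, which records a new pair after an insert. A minimal sketch of how they could fit together (the field shape and the helper bodies are assumptions made for illustration, not the actual JDBCInstanceWriter code):

// assumed shape: per type, original identifier -> database-generated key
private final Map<TypeDefinition, Map<Object, Long>> typAuto = new HashMap<>();

// record the generated key for the original identifier of an inserted instance
private void addIDMapping(TypeDefinition type, Object oldValue, Long newValue) {
    typAuto.computeIfAbsent(type, t -> new HashMap<>()).put(processLookupId(oldValue), newValue);
}

// normalize a value before using it as a lookup key (sketch: use the string form)
private Object processLookupId(Object value) {
    return value == null ? null : value.toString();
}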
use of eu.esdihumboldt.hale.common.schema.model.constraint.property.Reference in project hale by halestudio.
the class JDBCSchemaReader method getOrCreateProperty.
/**
* Gets or creates a property definition for the given column. Its type
* definition is created, too, if necessary.
*
* @param schema the schema the table belongs to
* @param tableType the type definition of the parent table this column
* belongs to
* @param column the column to get or create a property definition for
* @param overallNamespace the database namespace
* @param namespace the schema namespace
* @param typeIndex the type index
* @param connection the database connection
* @param reporter the reporter
* @param catalog the catalog for access to other column types
* @return the property definition for the given column
*/
private DefaultPropertyDefinition getOrCreateProperty(schemacrawler.schema.Schema schema, TypeDefinition tableType, Column column, String overallNamespace, String namespace, DefaultSchema typeIndex, Connection connection, IOReporter reporter, Catalog catalog) {
QName name = new QName(unquote(column.getName()));
// check for existing property definition
ChildDefinition<?> existing = tableType.getChild(name);
if (existing != null) {
return (DefaultPropertyDefinition) existing;
}
// create new one
// determine the column type
TypeDefinition columnType = getOrCreateColumnType(column, overallNamespace, typeIndex, connection, tableType, reporter, catalog);
SQLArray arrayInfo = columnType.getConstraint(SQLArray.class);
// create the property
DefaultPropertyDefinition property = new DefaultPropertyDefinition(name, tableType, columnType);
// configure property
if (column.getRemarks() != null && !column.getRemarks().isEmpty()) {
property.setDescription(column.getRemarks());
}
property.setConstraint(NillableFlag.get(column.isNullable()));
// XXX Default value is read as string from the meta data.
// This is probably not really a problem, but should be noted!
// XXX In particular the default value can be a function call like for
// example GETDATE().
String defaultValue = column.getDefaultValue();
if (arrayInfo.isArray() && arrayInfo.getDimension() <= 1) {
// dimension is not known (0)
if (!arrayInfo.hasSize(0)) {
property.setConstraint(Cardinality.CC_ANY_NUMBER);
} else {
// XXX what is appropriate as minimum?
long min = 0;
long max = arrayInfo.getSize(0);
property.setConstraint(Cardinality.get(min, max));
}
} else if (defaultValue != null) {
property.setConstraint(new DefaultValue(defaultValue));
property.setConstraint(Cardinality.CC_OPTIONAL);
} else
property.setConstraint(Cardinality.CC_EXACTLY_ONCE);
// incremented or not
if (column.isAutoIncremented()) {
property.setConstraint(AutoGenerated.get(true));
} else {
property.setConstraint(AutoGenerated.get(false));
}
// handle foreign key references (a foreign key can span multiple columns)
if (column.isPartOfForeignKey()) {
Column referenced = column.getReferencedColumn();
// skip self-references, which would cause infinite recursion (StackOverflowError)
if (!(referenced.getParent().equals(column.getParent()) && referenced.equals(column))) {
// The referenced table can be in a different schema, so the type of the
// referenced column's table must be created with the referenced table's
// schema and namespace rather than the current ones.
String referencedNameSpace = getReferencedTableNamespace(referenced.getParent(), overallNamespace);
property.setConstraint(new Reference(getOrCreateTableType(referenced.getParent().getSchema(), referenced.getParent(), overallNamespace, referencedNameSpace, typeIndex, connection, reporter, catalog)));
}
}
return property;
}
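The Reference constraint created here is exactly what the JDBCInstanceWriter snippet above reads back when resolving foreign key values. A compact round-trip illustration using only calls that appear on this page (referencedTableType is a placeholder for the type obtained via getOrCreateTableType; schema reading and instance writing are of course separate steps in practice):

// schema side: mark the column property as referencing the parent table's type
property.setConstraint(new Reference(referencedTableType));

// writer side: resolve a foreign key value against the id mapping recorded for that type
Reference ref = property.getConstraint(Reference.class);
if (ref.getReferencedTypes() != null && !ref.getReferencedTypes().isEmpty()) {
    TypeDefinition td = (TypeDefinition) ref.getReferencedTypes().iterator().next();
    Map<Object, Long> idMapping = typAuto.get(td);
    // ... look up the generated key for the original id, as in
    // populateInsertStatementOrExecuteAutoIncStatement
}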
use of eu.esdihumboldt.hale.common.schema.model.constraint.property.Reference in project hale by halestudio.
the class AssignFromCollector method evaluate.
/**
* @see eu.esdihumboldt.hale.common.align.transformation.function.impl.AbstractSingleTargetPropertyTransformation#evaluate(java.lang.String,
* eu.esdihumboldt.hale.common.align.transformation.engine.TransformationEngine,
* com.google.common.collect.ListMultimap, java.lang.String,
* eu.esdihumboldt.hale.common.align.model.impl.PropertyEntityDefinition,
* java.util.Map,
* eu.esdihumboldt.hale.common.align.transformation.report.TransformationLog)
*/
@Override
protected Object evaluate(String transformationIdentifier, TransformationEngine engine, ListMultimap<String, PropertyValue> variables, String resultName, PropertyEntityDefinition resultProperty, Map<String, String> executionParameters, TransformationLog log) throws TransformationException, NoResultException {
// XXX check anchor?
final Collector mainCollector = (Collector) getExecutionContext().getTransformationContext().get(ContextHelpers.KEY_COLLECTOR);
if (mainCollector == null) {
throw new TransformationException("Fatal: No collector has been created yet. Check function priority.");
}
final ParameterValue collectorName = getParameterChecked(PARAMETER_COLLECTOR);
if (collectorName == null || collectorName.isEmpty()) {
throw new TransformationException("Fatal: No collector name was specified.");
}
final Collector collector = mainCollector.getAt(collectorName.getValue().toString());
if (collector == null) {
throw new TransformationException(MessageFormat.format("Error retrieving collector \"{0}\"", collectorName.getValue().toString()));
} else if (collector.values().isEmpty()) {
log.warn(new TransformationMessageImpl(getCell(), MessageFormat.format("Collector \"{0}\" contains no values. If this is unexpected, check the spelling of the collector name and the priority of the transformation function.", collectorName.getStringRepresentation()), null));
}
// Determine where to assign the collected values
final TypeDefinition resultPropertyType = resultProperty.getDefinition().getPropertyType();
final PropertyDefinition targetProperty;
final ResultStrategy resultStrategy;
if (resultPropertyType.getConstraint(HasValueFlag.class).isEnabled()) {
// The result property can take values, therefore assign directly to
// property
targetProperty = resultProperty.getDefinition();
// No instance creation is required in this case
resultStrategy = ResultStrategy.USE_VALUE;
} else {
// Find child element/attribute that can be assigned the reference
targetProperty = Optional.ofNullable(findReferenceChildProperty(resultPropertyType)).orElseThrow(() -> new TransformationException("Fatal: No child property could be found to assign a reference to."));
resultStrategy = ResultStrategy.BUILD_INSTANCE;
}
List<Object> collectedReferences = helper.extractCollectedValues(collector);
// Process collected values if target property is a reference, otherwise
// use plain values
final Function<Object, Object> referenceStrategy;
if (targetProperty.getConstraint(Reference.class).isReference()) {
final Reference referenceConstraint = targetProperty.getConstraint(Reference.class);
// Use the idToReference method to construct the reference
referenceStrategy = referenceConstraint::idToReference;
} else {
referenceStrategy = Function.identity();
}
MultiValue result = new MultiValue();
collectedReferences.forEach(ref -> result.add(resultStrategy.createResult(resultPropertyType, targetProperty, referenceStrategy.apply(ref))));
return result;
}
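The helper findReferenceChildProperty used above is not part of this snippet. A hypothetical sketch of how such a helper might look (not the actual hale implementation): it walks the children of the target type and returns the first property that is marked as a reference and can carry a value.

private PropertyDefinition findReferenceChildProperty(TypeDefinition type) {
    for (ChildDefinition<?> child : type.getChildren()) {
        PropertyDefinition property = child.asProperty();
        if (property != null
                && property.getConstraint(Reference.class).isReference()
                && property.getPropertyType().getConstraint(HasValueFlag.class).isEnabled()) {
            return property;
        }
    }
    // nothing suitable found; the caller treats null as an error
    return null;
}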