use of eu.esdihumboldt.hale.common.schema.model.DefinitionGroup in project hale by halestudio.
the class OGroup method configureDocument.
private void configureDocument(ORecordAbstract<?> document, ODatabaseRecord db, DefinitionGroup definition) {
    // document.setDatabase(db);
    if (document instanceof ODocument) {
        // reset class name
        ODocument doc = (ODocument) document;
        /*
         * Attention: Too long class names cause problems as file names will
         * be based on them.
         */
        String className = null;
        if (definition != null) {
            className = ONamespaceMap.encode(determineName(definition));
        } else if (doc.containsField(OSerializationHelper.BINARY_WRAPPER_FIELD) || doc.containsField(OSerializationHelper.FIELD_SERIALIZATION_TYPE)) {
            className = OSerializationHelper.BINARY_WRAPPER_CLASSNAME;
        }
        if (className != null) {
            OSchema schema = db.getMetadata().getSchema();
            if (!schema.existsClass(className)) {
                // if the class doesn't exist yet, create a physical cluster
                // manually for it
                int cluster = db.addCluster(className, CLUSTER_TYPE.PHYSICAL);
                schema.createClass(className, cluster);
            }
            doc.setClassName(className);
        }
        // configure children
        for (Entry<String, Object> field : doc) {
            List<ODocument> docs = new ArrayList<ODocument>();
            List<ORecordAbstract<?>> recs = new ArrayList<ORecordAbstract<?>>();
            if (field.getValue() instanceof Collection<?>) {
                for (Object value : (Collection<?>) field.getValue()) {
                    if (value instanceof ODocument && !getSpecialFieldNames().contains(field.getKey())) {
                        docs.add((ODocument) value);
                    } else if (value instanceof ORecordAbstract<?>) {
                        recs.add((ORecordAbstract<?>) value);
                    }
                }
            } else if (field.getValue() instanceof ODocument && !getSpecialFieldNames().contains(field.getKey())) {
                docs.add((ODocument) field.getValue());
            } else if (field.getValue() instanceof ORecordAbstract<?>) {
                recs.add((ORecordAbstract<?>) field.getValue());
            }
            if (definition != null) {
                for (ODocument valueDoc : docs) {
                    ChildDefinition<?> child = definition.getChild(decodeProperty(field.getKey()));
                    DefinitionGroup childGroup;
                    if (child.asProperty() != null) {
                        childGroup = child.asProperty().getPropertyType();
                    } else if (child.asGroup() != null) {
                        childGroup = child.asGroup();
                    } else {
                        throw new IllegalStateException("Document is associated neither with a property nor a property group.");
                    }
                    configureDocument(valueDoc, db, childGroup);
                }
            }
            for (ORecordAbstract<?> fieldRec : recs) {
                configureDocument(fieldRec, db, null);
            }
        }
    }
}
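The asProperty()/asGroup() branching above is the recurring way to descend from a ChildDefinition into the DefinitionGroup that describes its values; AbstractGeometrySchemaService below uses the same pattern. A minimal standalone sketch of that single step (the helper name is made up for illustration, it is not part of hale):

private static DefinitionGroup childAsGroup(ChildDefinition<?> child) {
    if (child.asProperty() != null) {
        // property: descend into its property type
        return child.asProperty().getPropertyType();
    }
    if (child.asGroup() != null) {
        // group (e.g. a choice or sequence): use it directly
        return child.asGroup();
    }
    throw new IllegalStateException("Child is neither a property nor a group");
}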
use of eu.esdihumboldt.hale.common.schema.model.DefinitionGroup in project hale by halestudio.
the class PropertyBean method createEntityDefinition.
/**
 * @see EntityBean#createEntityDefinition(TypeIndex, SchemaSpaceID)
 */
@Override
protected PropertyEntityDefinition createEntityDefinition(TypeIndex index, SchemaSpaceID schemaSpace) {
    TypeDefinition typeDef = index.getType(getTypeName());
    if (typeDef == null) {
        throw new IllegalStateException(MessageFormat.format("TypeDefinition for type {0} not found", getTypeName()));
    }
    List<ChildContext> path = new ArrayList<ChildContext>();
    DefinitionGroup parent = typeDef;
    for (ChildContextBean childContext : properties) {
        if (parent == null) {
            throw new IllegalStateException("Could not resolve property entity definition: child not present");
        }
        Pair<ChildDefinition<?>, List<ChildDefinition<?>>> childs = findChild(parent, childContext.getChildName());
        ChildDefinition<?> child = childs.getFirst();
        // if the child is still null throw an exception
        if (child == null) {
            throw new IllegalStateException("Could not resolve property entity definition: child not found");
        }
        if (childs.getSecond() != null) {
            for (ChildDefinition<?> pathElems : childs.getSecond()) {
                path.add(new ChildContext(childContext.getContextName(), childContext.getContextIndex(), createCondition(childContext.getConditionFilter()), pathElems));
            }
        }
        path.add(new ChildContext(childContext.getContextName(), childContext.getContextIndex(), createCondition(childContext.getConditionFilter()), child));
        if (child instanceof DefinitionGroup) {
            parent = (DefinitionGroup) child;
        } else if (child.asProperty() != null) {
            parent = child.asProperty().getPropertyType();
        } else {
            parent = null;
        }
    }
    return new PropertyEntityDefinition(typeDef, path, schemaSpace, FilterDefinitionManager.getInstance().parse(getFilter()));
}
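Stripped of contexts, conditions and filters, the same walk can be written directly against the schema model. A minimal sketch, assuming a type with a property address that in turn has a property street (all names are invented), that ChildContext tolerates null for context name, index and condition, and that a null filter is acceptable for PropertyEntityDefinition:

String ns = "http://www.example.com/schema"; // hypothetical namespace
List<QName> steps = Arrays.asList(new QName(ns, "address"), new QName(ns, "street"));
List<ChildContext> path = new ArrayList<ChildContext>();
DefinitionGroup parent = typeDef;
for (QName step : steps) {
    ChildDefinition<?> child = parent.getChild(step);
    if (child == null) {
        throw new IllegalStateException("Could not resolve path element " + step);
    }
    // null context name/index/condition stands for the default context (assumption)
    path.add(new ChildContext(null, null, null, child));
    parent = (child.asProperty() != null) ? child.asProperty().getPropertyType() : child.asGroup();
}
PropertyEntityDefinition property = new PropertyEntityDefinition(typeDef, path, SchemaSpaceID.TARGET, null);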
use of eu.esdihumboldt.hale.common.schema.model.DefinitionGroup in project hale by halestudio.
the class TypeStructureTray method createTargetSample.
/**
 * Create sample code for populating the target property.
 *
 * @param selection the selection in the tree viewer
 * @param types the types serving as input
 * @return the sample code
 */
protected String createTargetSample(ISelection selection, Collection<? extends TypeDefinition> types) {
    ITreeSelection treeSel = (ITreeSelection) selection;
    TreePath[] paths = treeSel.getPaths();
    if (paths != null && paths.length > 0) {
        // XXX for now only use the first path
        TreePath path = paths[0];
        DefinitionGroup parent;
        int startIndex = 0;
        List<PathTree> properties;
        // determine parent type
        if (path.getFirstSegment() instanceof TypeDefinition) {
            // XXX not supported yet
            return null;
        } else {
            // types are not in the tree, single type must be root
            parent = types.iterator().next();
            // build PathTrees from tree paths
            properties = PathTree.createPathTrees(Arrays.asList(paths), startIndex);
        }
        StringBuilder example = new StringBuilder();
        example.append(GroovyConstants.BINDING_TARGET);
        example.append(" {\n");
        int indentCount = 1;
        for (PathTree tree : properties) {
            InstanceBuilderCode.appendBuildProperties(example, indentCount, tree, parent, BUILDER_USE_BRACKETS);
        }
        example.append("}");
        return example.toString();
    }
    return GroovyConstants.BINDING_TARGET + " = {}";
}
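Only the outer shape of the returned sample is fixed here: the binding variable, an opening brace, one block per PathTree, and a closing brace; what goes inside each property block is produced by InstanceBuilderCode.appendBuildProperties. Purely as an illustration (property names invented, GroovyConstants.BINDING_TARGET rendered here as target), the result is a Groovy builder skeleton along these lines:

target {
    name(...)
    address {
        street(...)
    }
}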
use of eu.esdihumboldt.hale.common.schema.model.DefinitionGroup in project hale by halestudio.
the class AbstractGeometrySchemaService method generalizeGeometryProperty.
/**
 * Generalize the path to the geometry property for the given type. This
 * serves to prevent focusing on a single geometry property in a choice.
 *
 * @param type the type definition
 * @param geometryPath the geometry path
 * @return the generalized geometry path
 */
private List<QName> generalizeGeometryProperty(TypeDefinition type, List<QName> geometryPath) {
    // collect child definitions associated to path names
    List<ChildDefinition<?>> pathChildren = new ArrayList<ChildDefinition<?>>();
    DefinitionGroup parent = type;
    for (QName name : geometryPath) {
        ChildDefinition<?> child = parent.getChild(name);
        if (child == null) {
            // invalid path
            break;
        }
        pathChildren.add(child);
        if (child.asProperty() != null) {
            parent = child.asProperty().getPropertyType();
        } else if (child.asGroup() != null) {
            parent = child.asGroup();
        } else {
            throw new IllegalStateException("Invalid child definition");
        }
    }
    // remove geometry properties parented by a choice
    for (int i = pathChildren.size() - 1; i > 0; i--) {
        // peek at the previous item
        ChildDefinition<?> previous = pathChildren.get(i - 1);
        if (previous.asGroup() != null && previous.asGroup().getConstraint(ChoiceFlag.class).isEnabled()) {
            // previous item is a choice:
            // delete the current item
            pathChildren.remove(i);
            // and continue
        } else {
            // previous item is not a choice, keep the current item and stop
            break;
        }
    }
    // create a name list from the child list
    List<QName> names = new ArrayList<QName>(pathChildren.size());
    for (ChildDefinition<?> child : pathChildren) {
        names.add(child.getName());
    }
    return names;
}
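Walking backwards, an element is dropped whenever the element before it is a group with an enabled ChoiceFlag, so the returned path ends at the choice rather than at one of its alternatives. The same truncation on plain strings (names are invented; the choices set stands in for the ChoiceFlag check):

List<String> path = new ArrayList<String>(Arrays.asList("geometry", "GeometryChoice", "Point"));
Set<String> choices = Collections.singleton("GeometryChoice");
for (int i = path.size() - 1; i > 0; i--) {
    if (choices.contains(path.get(i - 1))) {
        // previous element is a choice: drop the concrete alternative
        path.remove(i);
    } else {
        break;
    }
}
// path is now [geometry, GeometryChoice]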
use of eu.esdihumboldt.hale.common.schema.model.DefinitionGroup in project hale by halestudio.
the class StreamGmlWriterTest method fillFeatureTest.
/**
 * Create a feature, fill it with values, write it as GML, validate the GML
 * and load the GML file again to compare the loaded values with the ones
 * that were written.
 *
 * @param elementName the element name of the feature type to use, if
 *            <code>null</code> a random element will be used
 * @param targetSchema the schema to use, the first element will be used for
 *            the type of the feature
 * @param values the values to set on the feature
 * @param testName the name of the test
 * @param srsName the SRS name
 * @param skipValueTest if the check for equality shall be skipped
 * @param expectWriteFail if the GML writing is expected to fail
 * @param windingOrderParam winding order parameter or <code>null</code>
 * @return the validation report or the GML writing report if writing is
 *         expected to fail
 * @throws Exception if any error occurs
 */
private IOReport fillFeatureTest(String elementName, URI targetSchema, Map<List<QName>, Object> values, String testName, String srsName, boolean skipValueTest, boolean expectWriteFail, EnumWindingOrderTypes windingOrderParam) throws Exception {
    // load the sample schema
    XmlSchemaReader reader = new XmlSchemaReader();
    reader.setSharedTypes(null);
    reader.setSource(new DefaultInputSupplier(targetSchema));
    IOReport schemaReport = reader.execute(null);
    assertTrue(schemaReport.isSuccess());
    XmlIndex schema = reader.getSchema();
    XmlElement element = null;
    if (elementName == null) {
        element = schema.getElements().values().iterator().next();
        if (element == null) {
            fail("No element found in the schema"); //$NON-NLS-1$
        }
    } else {
        for (XmlElement candidate : schema.getElements().values()) {
            if (candidate.getName().getLocalPart().equals(elementName)) {
                element = candidate;
                break;
            }
        }
        if (element == null) {
            fail("Element " + elementName + " not found in the schema"); //$NON-NLS-1$ //$NON-NLS-2$
        }
    }
    if (element == null) {
        throw new IllegalStateException();
    }
    // create feature
    MutableInstance feature = new DefaultInstance(element.getType(), null);
    // set some values
    for (Entry<List<QName>, Object> entry : values.entrySet()) {
        MutableGroup parent = feature;
        List<QName> properties = entry.getKey();
        for (int i = 0; i < properties.size() - 1; i++) {
            QName propertyName = properties.get(i);
            DefinitionGroup def = parent.getDefinition();
            Object[] vals = parent.getProperty(propertyName);
            if (vals != null && vals.length > 0) {
                Object value = vals[0];
                if (value instanceof MutableGroup) {
                    parent = (MutableGroup) value;
                } else {
                    MutableGroup child;
                    ChildDefinition<?> childDef = def.getChild(propertyName);
                    if (childDef.asProperty() != null || value != null) {
                        // create instance
                        child = new DefaultInstance(childDef.asProperty().getPropertyType(), null);
                    } else {
                        // create group
                        child = new DefaultGroup(childDef.asGroup());
                    }
                    if (value != null) {
                        // wrap value
                        ((MutableInstance) child).setValue(value);
                    }
                    parent = child;
                }
            }
        }
        parent.addProperty(properties.get(properties.size() - 1), entry.getValue());
    }
    InstanceCollection instances = new DefaultInstanceCollection(Collections.singleton(feature));
    // write to file
    InstanceWriter writer = new GmlInstanceWriter();
    if (windingOrderParam != null) {
        writer.setParameter(GeoInstanceWriter.PARAM_UNIFY_WINDING_ORDER, Value.of(windingOrderParam));
    }
    writer.setInstances(instances);
    DefaultSchemaSpace schemaSpace = new DefaultSchemaSpace();
    schemaSpace.addSchema(schema);
    writer.setTargetSchema(schemaSpace);
    File outFile = File.createTempFile(testName, ".gml"); //$NON-NLS-1$
    writer.setTarget(new FileIOSupplier(outFile));
    if (windingOrderParam != null && windingOrderParam == EnumWindingOrderTypes.counterClockwise) {
        assertTrue(writer.getParameter(GeoInstanceWriter.PARAM_UNIFY_WINDING_ORDER).as(EnumWindingOrderTypes.class) == EnumWindingOrderTypes.counterClockwise);
    }
    IOReport report = writer.execute(null); // new LogProgressIndicator());
    if (expectWriteFail) {
        assertFalse("Writing the GML output should not be successful", report.isSuccess());
        return report;
    } else {
        assertTrue("Writing the GML output not successful", report.isSuccess());
    }
    List<? extends Locatable> validationSchemas = writer.getValidationSchemas();
    System.out.println(outFile.getAbsolutePath());
    System.out.println(targetSchema.toString());
    // if (!DEL_TEMP_FILES && Desktop.isDesktopSupported()) {
    //     Desktop.getDesktop().open(outFile);
    // }
    IOReport valReport = validate(outFile.toURI(), validationSchemas);
    // load file
    InstanceCollection loaded = loadGML(outFile.toURI(), schema);
    ResourceIterator<Instance> it = loaded.iterator();
    try {
        assertTrue(it.hasNext());
        if (!skipValueTest) {
            Instance l = it.next();
            // test values
            for (Entry<List<QName>, Object> entry : values.entrySet()) {
                // XXX conversion?
                Object expected = entry.getValue();
                // String propertyPath = Joiner.on('.').join(Collections2.transform(entry.getKey(), new Function<QName, String>() {
                //
                //     @Override
                //     public String apply(QName input) {
                //         return input.toString();
                //     }
                // }));
                // Collection<Object> propValues = PropertyResolver.getValues(
                //         l, propertyPath, true);
                // assertEquals(1, propValues.size());
                // Object value = propValues.iterator().next();
                Collection<GeometryProperty<?>> geoms = GeometryUtil.getAllGeometries(l);
                assertEquals(1, geoms.size());
                Object value = geoms.iterator().next().getGeometry();
                if (expected instanceof Geometry && value instanceof Geometry) {
                    if (windingOrderParam == null || windingOrderParam == EnumWindingOrderTypes.noChanges) {
                        matchGeometries((Geometry) expected, (Geometry) value);
                    }
                    // Winding Order Test.
                    if (windingOrderParam != null) {
                        if (windingOrderParam == EnumWindingOrderTypes.counterClockwise) {
                            assertTrue(((Geometry) expected).getNumGeometries() == ((Geometry) value).getNumGeometries());
                            assertTrue(WindingOrder.isCounterClockwise((Geometry) value));
                        } else if (windingOrderParam == EnumWindingOrderTypes.clockwise) {
                            assertFalse(WindingOrder.isCounterClockwise((Geometry) value));
                        } else {
                            assertTrue(WindingOrder.isCounterClockwise((Geometry) value) == WindingOrder.isCounterClockwise((Geometry) expected));
                        }
                    } else {
                        // TODO check winding order is CCW
                        if (value instanceof Polygon || value instanceof MultiPolygon)
                            assertTrue(WindingOrder.isCounterClockwise((Geometry) value));
                    }
                } else {
                    assertEquals(expected.toString(), value.toString());
                }
            }
            assertFalse(it.hasNext());
        }
    } finally {
        it.close();
    }
    if (DEL_TEMP_FILES) {
        outFile.deleteOnExit();
    }
    return valReport;
}
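A sketch of how a test case might drive this helper, writing a single point geometry and checking the validation result. The element name, schema resource and property name are invented, and the point is built with the JTS GeometryFactory/Coordinate classes used by hale:

Map<List<QName>, Object> values = new HashMap<List<QName>, Object>();
Point point = new GeometryFactory().createPoint(new Coordinate(10.0, 52.0));
values.put(Arrays.asList(new QName("http://www.example.com/schema", "geometry")), point);
IOReport report = fillFeatureTest("ExampleFeature", // hypothetical element name
        getClass().getResource("/data/example.xsd").toURI(), // hypothetical schema resource
        values, "testPointGeometry", "EPSG:4326", false, false, null);
assertTrue(report.isSuccess());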