Use of eu.esdihumboldt.hale.io.xsd.reader.XmlSchemaReader in project hale by halestudio.
The class AppSchemaIsolatedWorkspacesMappingTest, method init.
@BeforeClass
public static void init() throws Exception {
    TestUtil.startConversionService();

    sourceSchemaSpace = new DefaultSchemaSpace();
    targetSchemaSpace = new DefaultSchemaSpace();

    Schema source = loadSchema(new HaleSchemaReader(), SOURCE_SCHEMA_PATH);
    assertNotNull(source);
    sourceSchemaSpace.addSchema(source);

    Schema target = loadSchema(new XmlSchemaReader(), TARGET_SCHEMA_STATIONS_PATH);
    assertNotNull(target);
    targetSchemaSpace.addSchema(target);

    // make sure MeasurementsType is included in the mapping relevant types
    List<TypeDefinition> mappingRelevantTypes = new ArrayList<>();
    mappingRelevantTypes.add(targetSchemaSpace.getType(MEASUREMENTS_QNAME));
    targetSchemaSpace.toggleMappingRelevant(mappingRelevantTypes);
    assertEquals(3, targetSchemaSpace.getMappingRelevantTypes().size());

    alignment = loadAlignment(new JaxbAlignmentReader(), ALIGNMENT_PATH);
    assertNotNull(alignment);
}
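The loadSchema helper used above is not part of this snippet. Judging from the reader usage shown further down on this page, it presumably wraps the usual SchemaReader sequence of setSource, execute and getSchema. The sketch below is an assumption about its shape, not the actual hale test code; the String parameter type and the URI handling of the path constant are illustrative.

private static Schema loadSchema(SchemaReader schemaReader, String path) throws Exception {
    // point the reader at the schema resource (path handling is assumed here)
    schemaReader.setSource(new DefaultInputSupplier(URI.create(path)));
    // run the reader without a progress indicator and check the report
    IOReport report = schemaReader.execute(null);
    assertTrue(report.isSuccess());
    // the parsed schema is available from the reader after execution
    return schemaReader.getSchema();
}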
Use of eu.esdihumboldt.hale.io.xsd.reader.XmlSchemaReader in project hale by halestudio.
The class StreamGmlWriterTest, method fillFeatureTest.
/**
 * Create a feature, fill it with values, write it as GML, validate the GML
 * and load the GML file again to compare the loaded values with the ones
 * that were written.
 *
 * @param elementName the element name of the feature type to use; if
 *            <code>null</code> a random element will be used
 * @param targetSchema the schema to use, the first element will be used
 *            for the type of the feature
 * @param values the values to set on the feature
 * @param testName the name of the test
 * @param srsName the SRS name
 * @param skipValueTest whether the check for equality shall be skipped
 * @param expectWriteFail whether the GML writing is expected to fail
 * @param windingOrderParam the winding order parameter, or <code>null</code>
 * @return the validation report, or the GML writing report if writing is
 *         expected to fail
 * @throws Exception if any error occurs
 */
private IOReport fillFeatureTest(String elementName, URI targetSchema,
        Map<List<QName>, Object> values, String testName, String srsName,
        boolean skipValueTest, boolean expectWriteFail,
        EnumWindingOrderTypes windingOrderParam) throws Exception {
    // load the sample schema
    XmlSchemaReader reader = new XmlSchemaReader();
    reader.setSharedTypes(null);
    reader.setSource(new DefaultInputSupplier(targetSchema));
    IOReport schemaReport = reader.execute(null);
    assertTrue(schemaReport.isSuccess());
    XmlIndex schema = reader.getSchema();

    XmlElement element = null;
    if (elementName == null) {
        element = schema.getElements().values().iterator().next();
        if (element == null) {
            fail("No element found in the schema"); //$NON-NLS-1$
        }
    }
    else {
        for (XmlElement candidate : schema.getElements().values()) {
            if (candidate.getName().getLocalPart().equals(elementName)) {
                element = candidate;
                break;
            }
        }
        if (element == null) {
            fail("Element " + elementName + " not found in the schema"); //$NON-NLS-1$ //$NON-NLS-2$
        }
    }
    if (element == null) {
        throw new IllegalStateException();
    }
    // create feature
    MutableInstance feature = new DefaultInstance(element.getType(), null);

    // set some values
    for (Entry<List<QName>, Object> entry : values.entrySet()) {
        MutableGroup parent = feature;
        List<QName> properties = entry.getKey();
        for (int i = 0; i < properties.size() - 1; i++) {
            QName propertyName = properties.get(i);
            DefinitionGroup def = parent.getDefinition();
            Object[] vals = parent.getProperty(propertyName);
            if (vals != null && vals.length > 0) {
                Object value = vals[0];
                if (value instanceof MutableGroup) {
                    parent = (MutableGroup) value;
                }
                else {
                    MutableGroup child;
                    ChildDefinition<?> childDef = def.getChild(propertyName);
                    if (childDef.asProperty() != null || value != null) {
                        // create instance
                        child = new DefaultInstance(childDef.asProperty().getPropertyType(), null);
                    }
                    else {
                        // create group
                        child = new DefaultGroup(childDef.asGroup());
                    }
                    if (value != null) {
                        // wrap value
                        ((MutableInstance) child).setValue(value);
                    }
                    parent = child;
                }
            }
        }
        parent.addProperty(properties.get(properties.size() - 1), entry.getValue());
    }
    InstanceCollection instances = new DefaultInstanceCollection(Collections.singleton(feature));

    // write to file
    InstanceWriter writer = new GmlInstanceWriter();
    if (windingOrderParam != null) {
        writer.setParameter(GeoInstanceWriter.PARAM_UNIFY_WINDING_ORDER, Value.of(windingOrderParam));
    }
    writer.setInstances(instances);
    DefaultSchemaSpace schemaSpace = new DefaultSchemaSpace();
    schemaSpace.addSchema(schema);
    writer.setTargetSchema(schemaSpace);
    File outFile = File.createTempFile(testName, ".gml"); //$NON-NLS-1$
    writer.setTarget(new FileIOSupplier(outFile));

    if (windingOrderParam != null && windingOrderParam == EnumWindingOrderTypes.counterClockwise) {
        assertTrue(writer.getParameter(GeoInstanceWriter.PARAM_UNIFY_WINDING_ORDER)
                .as(EnumWindingOrderTypes.class) == EnumWindingOrderTypes.counterClockwise);
    }

    IOReport report = writer.execute(null); // new LogProgressIndicator());
    if (expectWriteFail) {
        assertFalse("Writing the GML output should not be successful", report.isSuccess());
        return report;
    }
    else {
        assertTrue("Writing the GML output not successful", report.isSuccess());
    }

    List<? extends Locatable> validationSchemas = writer.getValidationSchemas();
    System.out.println(outFile.getAbsolutePath());
    System.out.println(targetSchema.toString());

    // if (!DEL_TEMP_FILES && Desktop.isDesktopSupported()) {
    //     Desktop.getDesktop().open(outFile);
    // }
    IOReport valReport = validate(outFile.toURI(), validationSchemas);

    // load file
    InstanceCollection loaded = loadGML(outFile.toURI(), schema);

    ResourceIterator<Instance> it = loaded.iterator();
    try {
        assertTrue(it.hasNext());

        if (!skipValueTest) {
            Instance l = it.next();

            // test values
            for (Entry<List<QName>, Object> entry : values.entrySet()) {
                // XXX conversion?
                Object expected = entry.getValue();
                // String propertyPath = Joiner.on('.').join(
                //         Collections2.transform(entry.getKey(), new Function<QName, String>() {
                //
                //             @Override
                //             public String apply(QName input) {
                //                 return input.toString();
                //             }
                //         }));
                // Collection<Object> propValues = PropertyResolver.getValues(l, propertyPath, true);
                // assertEquals(1, propValues.size());
                // Object value = propValues.iterator().next();
                Collection<GeometryProperty<?>> geoms = GeometryUtil.getAllGeometries(l);
                assertEquals(1, geoms.size());
                Object value = geoms.iterator().next().getGeometry();

                if (expected instanceof Geometry && value instanceof Geometry) {
                    if (windingOrderParam == null || windingOrderParam == EnumWindingOrderTypes.noChanges) {
                        matchGeometries((Geometry) expected, (Geometry) value);
                    }
                    // Winding Order Test.
                    if (windingOrderParam != null) {
                        if (windingOrderParam == EnumWindingOrderTypes.counterClockwise) {
                            assertTrue(((Geometry) expected).getNumGeometries() == ((Geometry) value)
                                    .getNumGeometries());
                            assertTrue(WindingOrder.isCounterClockwise((Geometry) value));
                        }
                        else if (windingOrderParam == EnumWindingOrderTypes.clockwise) {
                            assertFalse(WindingOrder.isCounterClockwise((Geometry) value));
                        }
                        else {
                            assertTrue(WindingOrder.isCounterClockwise((Geometry) value) == WindingOrder
                                    .isCounterClockwise((Geometry) expected));
                        }
                    }
                    else {
                        // TODO check winding order is CCW
                        if (value instanceof Polygon || value instanceof MultiPolygon)
                            assertTrue(WindingOrder.isCounterClockwise((Geometry) value));
                    }
                }
                else {
                    assertEquals(expected.toString(), value.toString());
                }
            }

            assertFalse(it.hasNext());
        }
    } finally {
        it.close();
    }

    if (DEL_TEMP_FILES) {
        outFile.deleteOnExit();
    }

    return valReport;
}
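A hedged example of how a concrete test case might call this helper; the element name, schema resource, property path and SRS code are illustrative placeholders, not values taken from the hale test suite.

@Test
public void testGeometryPrimitive() throws Exception {
    Map<List<QName>, Object> values = new HashMap<>();
    // path to the (hypothetical) geometry property of the target feature type
    GeometryFactory factory = new GeometryFactory();
    values.put(Collections.singletonList(new QName("http://example.org/test", "geometry")),
            factory.createPoint(new Coordinate(10.0, 10.0)));
    IOReport report = fillFeatureTest("TestFeature",
            getClass().getResource("/data/sample.xsd").toURI(), values,
            "testGeometryPrimitive", "EPSG:4326", false, false, null);
    assertTrue("Expected the validation to succeed", report.isSuccess());
}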
Use of eu.esdihumboldt.hale.io.xsd.reader.XmlSchemaReader in project hale by halestudio.
The class StreamGmlWriter, method findDefaultContainter.
/**
* Find the default container element.
*
* @param targetIndex the target type index
* @param reporter the reporter, may be <code>null</code>
* @return the container XML element or <code>null</code>
*/
protected XmlElement findDefaultContainter(XmlIndex targetIndex, IOReporter reporter) {
    if (useFeatureCollection) {
        // try to find FeatureCollection element
        Iterator<XmlElement> it = targetIndex.getElements().values().iterator();
        Collection<XmlElement> fcElements = new HashSet<XmlElement>();
        while (it.hasNext()) {
            XmlElement el = it.next();
            if (isFeatureCollection(el)) {
                fcElements.add(el);
            }
        }

        if (fcElements.isEmpty() && gmlNs != null && gmlNs.equals(NS_GML)) {
            // include WFS 1.0.0 for the FeatureCollection element
            try {
                URI location = StreamGmlWriter.class
                        .getResource("/schemas/wfs/1.0.0/WFS-basic.xsd").toURI();
                XmlSchemaReader schemaReader = new XmlSchemaReader();
                schemaReader.setSource(new DefaultInputSupplier(location));
                // FIXME to work with the extra schema it must be integrated
                // with the main schema
                // schemaReader.setSharedTypes(sharedTypes);
                IOReport report = schemaReader.execute(null);
                if (report.isSuccess()) {
                    XmlIndex wfsSchema = schemaReader.getSchema();

                    // look for FeatureCollection element
                    for (XmlElement el : wfsSchema.getElements().values()) {
                        if (isFeatureCollection(el)) {
                            fcElements.add(el);
                        }
                    }

                    // add as additional schema, replace location for verification
                    additionalSchemas.put(wfsSchema.getNamespace(), new SchemaDecorator(wfsSchema) {

                        @Override
                        public URI getLocation() {
                            return URI.create("http://schemas.opengis.net/wfs/1.0.0/WFS-basic.xsd");
                        }
                    });

                    // add namespace
                    GmlWriterUtil.addNamespace(writer, wfsSchema.getNamespace(), "wfs"); //$NON-NLS-1$
                }
            } catch (Exception e) {
                log.warn("Using WFS schema for the FeatureCollection definition failed", e); //$NON-NLS-1$
            }
        }

        if (fcElements.isEmpty() && reporter != null) {
            reporter.warn(new IOMessageImpl(
                    "No element describing a FeatureCollection found", null)); //$NON-NLS-1$
        }
        else {
            // select fc element TODO prioritized selection (root element parameters)
            XmlElement fcElement = fcElements.iterator().next();
            log.info("Found " + fcElements.size() + " possible FeatureCollection elements" //$NON-NLS-1$ //$NON-NLS-2$
                    + ", using element " + fcElement.getName()); //$NON-NLS-1$
            return fcElement;
        }
    }

    return null;
}
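The isFeatureCollection check and GmlWriterUtil.addNamespace are hale utilities whose implementations are not shown on this page. As a rough illustration of what registering the wfs prefix amounts to, the following sketch uses a plain javax.xml.stream.XMLStreamWriter; the actual hale utility may handle prefix collisions and other details differently.

static void addNamespace(XMLStreamWriter writer, String namespace, String preferredPrefix)
        throws XMLStreamException {
    // only bind the prefix if the namespace is not already associated with one
    if (writer.getNamespaceContext().getPrefix(namespace) == null) {
        writer.setPrefix(preferredPrefix, namespace);
    }
}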
Use of eu.esdihumboldt.hale.io.xsd.reader.XmlSchemaReader in project hale by halestudio.
The class AbstractHandlerTest, method loadXMLInstances.
/**
* Load an instance collection from a GML file.
*
* @param schemaLocation the GML application schema location
* @param xmlLocation the GML file location
* @return the instance collection
* @throws IOException if reading schema or instances failed
* @throws IOProviderConfigurationException if the I/O providers were not
* configured correctly
*/
public static InstanceCollection loadXMLInstances(URI schemaLocation, URI xmlLocation)
        throws IOException, IOProviderConfigurationException {
    SchemaReader reader = new XmlSchemaReader();
    reader.setSharedTypes(null);
    reader.setSource(new DefaultInputSupplier(schemaLocation));
    IOReport schemaReport = reader.execute(null);
    assertTrue(schemaReport.isSuccess());
    Schema sourceSchema = reader.getSchema();

    InstanceReader instanceReader = new GmlInstanceReader();
    instanceReader.setSource(new DefaultInputSupplier(xmlLocation));
    instanceReader.setSourceSchema(sourceSchema);
    IOReport instanceReport = instanceReader.execute(null);
    assertTrue(instanceReport.isSuccess());

    return instanceReader.getInstances();
}
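A hedged usage example for this helper; the schema and GML locations are placeholders. Note that the returned collection is iterated with a ResourceIterator that has to be closed, as also done in the fillFeatureTest snippet above.

InstanceCollection instances = loadXMLInstances(
        URI.create("file:///path/to/schema.xsd"),
        URI.create("file:///path/to/data.gml"));
ResourceIterator<Instance> it = instances.iterator();
try {
    while (it.hasNext()) {
        Instance instance = it.next();
        // inspect each loaded instance, e.g. print its type
        System.out.println(instance.getDefinition().getDisplayName());
    }
} finally {
    // close the iterator to release the underlying resources
    it.close();
}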
Use of eu.esdihumboldt.hale.io.xsd.reader.XmlSchemaReader in project hale by halestudio.
The class FilterTest, method loadXMLInstances.
private InstanceCollection loadXMLInstances(URI schemaLocation, URI xmlLocation)
        throws IOException, IOProviderConfigurationException {
    SchemaReader reader = new XmlSchemaReader();
    reader.setSharedTypes(null);
    reader.setSource(new DefaultInputSupplier(schemaLocation));
    IOReport schemaReport = reader.execute(null);
    assertTrue(schemaReport.isSuccess());
    Schema sourceSchema = reader.getSchema();

    InstanceReader instanceReader = new XmlInstanceReader();
    instanceReader.setSource(new DefaultInputSupplier(xmlLocation));
    instanceReader.setSourceSchema(sourceSchema);
    IOReport instanceReport = instanceReader.execute(null);
    assertTrue(instanceReport.isSuccess());

    return instanceReader.getInstances();
}
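The only difference to the AbstractHandlerTest variant above is that an XmlInstanceReader is used instead of a GmlInstanceReader, which presumably reads generic XML documents against the schema rather than GML feature collections.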