Use of eu.esdihumboldt.hale.common.core.io.supplier.DefaultInputSupplier in project hale by halestudio.
Class CSVSchemaReaderTest, method testRead3.
/**
 * Test for given property names and property types, with a point as the
 * decimal separator.
 *
 * @throws Exception the Exception thrown if the test fails
 */
@Test
public void testRead3() throws Exception {
    String props = "A,B,C,D,E";
    CSVSchemaReader schemaReader = new CSVSchemaReader();
    schemaReader.setSource(new DefaultInputSupplier(getClass().getResource("/data/test3-pointdecimal.csv").toURI()));
    schemaReader.setParameter(CommonSchemaConstants.PARAM_TYPENAME, Value.of("TestTyp"));
    schemaReader.setParameter(CSVSchemaReader.PARAM_PROPERTY, Value.of(props));
    schemaReader.setParameter(CSVSchemaReader.PARAM_PROPERTYTYPE, Value.of("java.lang.Integer,java.lang.String,java.lang.Float,java.lang.Float,java.lang.String"));
    schemaReader.setParameter(CSVSchemaReader.PARAM_SEPARATOR, Value.of(";"));
    schemaReader.setParameter(CSVSchemaReader.PARAM_QUOTE, null);
    schemaReader.setParameter(CSVSchemaReader.PARAM_ESCAPE, null);
    schemaReader.setParameter(CSVSchemaReader.PARAM_DECIMAL, Value.of("."));
    IOReport report = schemaReader.execute(new LogProgressIndicator());
    assertTrue(report.isSuccess());
    Schema schema = schemaReader.getSchema();
    assertEquals(1, schema.getMappingRelevantTypes().size());
    TypeDefinition type = schema.getMappingRelevantTypes().iterator().next();
    assertTrue(type.getName().getLocalPart().equals("TestTyp"));
    Iterator<? extends ChildDefinition<?>> it = type.getChildren().iterator();
    while (it.hasNext()) {
        assertTrue(props.contains(it.next().getName().getLocalPart()));
    }
}
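For orientation, DefaultInputSupplier wraps a URI and supplies the input stream that readers such as CSVSchemaReader consume via setSource. A minimal sketch of using it directly, assuming it sits inside a test method like the one above (so java.net.URI and java.io.InputStream are available and checked exceptions may propagate):

URI location = getClass().getResource("/data/test3-pointdecimal.csv").toURI();
DefaultInputSupplier supplier = new DefaultInputSupplier(location);
try (InputStream in = supplier.getInput()) {
    // consume the stream, e.g. hand it to an I/O provider
}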
Use of eu.esdihumboldt.hale.common.core.io.supplier.DefaultInputSupplier in project hale by halestudio.
Class CSVSchemaReaderTest, method testRead.
/**
 * Test for given property names and property types
 *
 * @throws Exception the Exception thrown if the test fails
 */
@Test
public void testRead() throws Exception {
    String props = "muh,kuh,bla,blub";
    CSVSchemaReader schemaReader = new CSVSchemaReader();
    schemaReader.setSource(new DefaultInputSupplier(getClass().getResource("/data/test1.csv").toURI()));
    schemaReader.setParameter(CommonSchemaConstants.PARAM_TYPENAME, Value.of("TestTyp"));
    schemaReader.setParameter(CSVSchemaReader.PARAM_PROPERTY, Value.of(props));
    schemaReader.setParameter(CSVSchemaReader.PARAM_PROPERTYTYPE, Value.of("java.lang.String,java.lang.String,java.lang.String,java.lang.String"));
    schemaReader.setParameter(CSVSchemaReader.PARAM_SEPARATOR, null);
    schemaReader.setParameter(CSVSchemaReader.PARAM_QUOTE, null);
    schemaReader.setParameter(CSVSchemaReader.PARAM_ESCAPE, null);
    IOReport report = schemaReader.execute(new LogProgressIndicator());
    assertTrue(report.isSuccess());
    Schema schema = schemaReader.getSchema();
    assertEquals(1, schema.getMappingRelevantTypes().size());
    TypeDefinition type = schema.getMappingRelevantTypes().iterator().next();
    assertTrue(type.getName().getLocalPart().equals("TestTyp"));
    Iterator<? extends ChildDefinition<?>> it = type.getChildren().iterator();
    while (it.hasNext()) {
        assertTrue(props.contains(it.next().getName().getLocalPart()));
    }
}
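Note that props.contains(...) accepts any substring of the comma-separated list, not just complete property names. A stricter variant, sketched with only the API already used above (and assuming getChildren() returns a collection, as the iterator usage suggests):

Set<String> expectedNames = new HashSet<>(Arrays.asList(props.split(",")));
for (ChildDefinition<?> child : type.getChildren()) {
    // every child must match one of the expected names exactly
    assertTrue(expectedNames.contains(child.getName().getLocalPart()));
}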
Use of eu.esdihumboldt.hale.common.core.io.supplier.DefaultInputSupplier in project hale by halestudio.
Class FilterTest, method loadXML.
@Before
public void loadXML() throws Exception {
    if (!init) {
        SchemaReader reader = new XmlSchemaReader();
        reader.setSharedTypes(null);
        reader.setSource(new DefaultInputSupplier(getClass().getResource("/testdata/inspire3/HydroPhysicalWaters.xsd").toURI()));
        IOReport report = reader.execute(null);
        assertTrue(report.isSuccess());
        Schema schema = reader.getSchema();
        StreamGmlReader instanceReader = new GmlInstanceReader();
        instanceReader.setSource(new DefaultInputSupplier(getClass().getResource("/testdata/out/transformWrite_ERM_HPW.gml").toURI()));
        instanceReader.setSourceSchema(schema);
        instanceReader.validate();
        report = instanceReader.execute(null);
        assertTrue(report.isSuccess());
        FilterTest.complexinstances = instanceReader.getInstances();
        assertFalse(FilterTest.complexinstances.isEmpty());
        init = true;
    }
}
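The init flag keeps the expensive schema and GML load from repeating, since @Before runs before every test. With JUnit 4 (which the @Before/@Test annotations above indicate), the same effect could be had with a class-level fixture; a sketch under the assumption that complexinstances is a static field, as the FilterTest.complexinstances access suggests:

@BeforeClass
public static void loadXmlOnce() throws Exception {
    SchemaReader reader = new XmlSchemaReader();
    reader.setSharedTypes(null);
    // FilterTest.class replaces getClass() because this method is static
    reader.setSource(new DefaultInputSupplier(FilterTest.class.getResource("/testdata/inspire3/HydroPhysicalWaters.xsd").toURI()));
    IOReport report = reader.execute(null);
    assertTrue(report.isSuccess());
    StreamGmlReader instanceReader = new GmlInstanceReader();
    instanceReader.setSource(new DefaultInputSupplier(FilterTest.class.getResource("/testdata/out/transformWrite_ERM_HPW.gml").toURI()));
    instanceReader.setSourceSchema(reader.getSchema());
    instanceReader.validate();
    report = instanceReader.execute(null);
    assertTrue(report.isSuccess());
    complexinstances = instanceReader.getInstances();
    assertFalse(complexinstances.isEmpty());
}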
Use of eu.esdihumboldt.hale.common.core.io.supplier.DefaultInputSupplier in project hale by halestudio.
Class FilterTest, method simpleSchemaTestCQL.
@Test
public void simpleSchemaTestCQL() throws Exception {
    ShapeSchemaReader schemaReader = new ShapeSchemaReader();
    schemaReader.setSource(new DefaultInputSupplier(getClass().getResource("/testdata/GN_Point/GN_Point.shp").toURI()));
    schemaReader.validate();
    IOReport report = schemaReader.execute(null);
    assertTrue(report.isSuccess());
    Schema schema = schemaReader.getSchema();
    ShapeInstanceReader instanceReader = new ShapeInstanceReader();
    instanceReader.setSource(new DefaultInputSupplier(getClass().getResource("/testdata/GN_Point/GN_Point.shp").toURI()));
    instanceReader.setSourceSchema(schema);
    instanceReader.validate();
    report = instanceReader.execute(null);
    assertTrue(report.isSuccess());
    InstanceCollection instances = instanceReader.getInstances();
    assertFalse(instances.isEmpty());
    ResourceIterator<Instance> ri = instances.iterator();
    try {
        boolean foundIt = false;
        boolean stayFalse = false;
        boolean stayFalseToo = false;
        Filter cqlfilter = new FilterGeoCqlImpl("NEV = 'Piritulus'");
        Filter foulfilter = new FilterGeoCqlImpl("HERP = 'DERP'");
        Filter foulfilter1 = new FilterGeoCqlImpl("NEV = 'HURR'");
        while (ri.hasNext()) {
            Instance inst = ri.next();
            assertNotNull(inst);
            if (cqlfilter.match(inst)) {
                foundIt = true;
            }
            if (foulfilter.match(inst)) {
                stayFalse = true;
            }
            if (foulfilter1.match(inst)) {
                stayFalseToo = true;
            }
        }
        assertTrue(foundIt);
        assertFalse(stayFalse);
        assertFalse(stayFalseToo);
    } finally {
        ri.close();
    }
}
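Both FilterTest snippets follow the same load pattern: read the schema, then read instances against it. A sketch of that pattern factored into a helper, assuming the concrete readers can be handled through HALE's SchemaReader and InstanceReader interfaces and that those interfaces expose exactly the calls used above:

private static InstanceCollection loadInstances(SchemaReader schemaReader, InstanceReader instanceReader, URI schemaLocation, URI dataLocation) throws Exception {
    schemaReader.setSource(new DefaultInputSupplier(schemaLocation));
    IOReport report = schemaReader.execute(null);
    assertTrue(report.isSuccess());
    instanceReader.setSource(new DefaultInputSupplier(dataLocation));
    instanceReader.setSourceSchema(schemaReader.getSchema());
    report = instanceReader.execute(null);
    assertTrue(report.isSuccess());
    return instanceReader.getInstances();
}

// e.g. loadInstances(new ShapeSchemaReader(), new ShapeInstanceReader(), shpUri, shpUri)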
Use of eu.esdihumboldt.hale.common.core.io.supplier.DefaultInputSupplier in project hale by halestudio.
Class AbstractBaseAlignmentLoader, method generatePrefixMapping.
/**
 * Function to fill the prefixMapping and alignmentToInfo maps.
 *
 * @param start the main alignment representation
 * @param prefixMapping gets filled with a mapping from local to global
 *            prefixes
 * @param alignmentToInfo gets filled with a mapping from base alignment
 *            representations to prefixes and URIs
 * @param updater the location updater to use for base alignments
 * @param reporter the reporter
 * @throws IOException if a base alignment couldn't be loaded
 */
private void generatePrefixMapping(A start, Map<A, Map<String, String>> prefixMapping, Map<A, AlignmentInfo> alignmentToInfo, PathUpdate updater, IOReporter reporter) throws IOException {
    // XXX What if the project file path changes?
    // The alignment is a project file, so it is in the same directory.
    URI currentAbsolute = updater.getNewLocation();
    Map<String, URI> base = getBases(start);
    // also add a mapping for this alignment itself, in case the same URI is
    // defined for two prefixes
    prefixMapping.put(start, new HashMap<String, String>());
    // set of already seen URIs
    Set<URI> knownURIs = new HashSet<URI>();
    // reverse map of base
    Map<URI, String> uriToPrefix = new HashMap<URI, String>();
    // queue of base alignments to process
    LinkedList<URIPair> queue = new LinkedList<URIPair>();
    // check base for duplicates, and invert it for later
    for (Entry<String, URI> baseEntry : base.entrySet()) {
        URI rawBaseURI = baseEntry.getValue();
        URI usedBaseURI = updater.findLocation(rawBaseURI, true, false, true);
        if (usedBaseURI == null) {
            throw new IOException("Couldn't load an included alignment (" + rawBaseURI + "). File not found.", null);
        }
        URI absoluteBaseURI = usedBaseURI;
        if (!absoluteBaseURI.isAbsolute())
            absoluteBaseURI = currentAbsolute.resolve(absoluteBaseURI);
        if (knownURIs.contains(absoluteBaseURI)) {
            reporter.warn(new IOMessageImpl("The same base alignment (" + rawBaseURI + ") was included twice.", null));
            prefixMapping.get(start).put(baseEntry.getKey(), uriToPrefix.get(absoluteBaseURI));
        } else {
            knownURIs.add(absoluteBaseURI);
            prefixMapping.get(start).put(baseEntry.getKey(), baseEntry.getKey());
            uriToPrefix.put(absoluteBaseURI, baseEntry.getKey());
            queue.add(new URIPair(absoluteBaseURI, usedBaseURI));
        }
    }
    // find all alignments to load (also missing ones) and load the beans
    while (!queue.isEmpty()) {
        URIPair baseURI = queue.pollFirst();
        A baseA;
        try {
            baseA = loadAlignment(new DefaultInputSupplier(baseURI.absoluteURI).getInput(), reporter);
        } catch (IOException e) {
            throw new IOException("Couldn't load an included alignment (" + baseURI + ").", e);
        }
        // add to alignment info map
        alignmentToInfo.put(baseA, new AlignmentInfo(uriToPrefix.get(baseURI.absoluteURI), baseURI));
        prefixMapping.put(baseA, new HashMap<String, String>());
        // load "missing" base alignments, too, and add prefix mappings
        for (Entry<String, URI> baseEntry : getBases(baseA).entrySet()) {
            // rawURI may be relative
            URI rawURI = baseEntry.getValue();
            URI absoluteURI = baseURI.absoluteURI.resolve(rawURI);
            // try the updater again; it might help, and it shows whether the
            // file is readable
            absoluteURI = updater.findLocation(absoluteURI, true, false, false);
            if (absoluteURI == null)
                throw new IOException("Couldn't find an included alignment (" + rawURI + ").");
            URI usedURI = absoluteURI;
            // keep the used URI relative if both the base alignment's used URI
            // and the raw URI are relative
            if (!baseURI.usedURI.isAbsolute() && !rawURI.isAbsolute())
                usedURI = IOUtils.getRelativePath(absoluteURI, currentAbsolute);
            if (!knownURIs.contains(absoluteURI)) {
                reporter.info(new IOMessageImpl("A base alignment referenced another base alignment (" + absoluteURI + ") that was not yet known. It is now included, too.", null));
                queue.add(new URIPair(absoluteURI, usedURI));
                knownURIs.add(absoluteURI);
                String prefix = generatePrefix(base.keySet());
                base.put(prefix, usedURI);
                uriToPrefix.put(absoluteURI, prefix);
                prefixMapping.get(start).put(prefix, prefix);
            }
            // add prefix mapping
            prefixMapping.get(baseA).put(baseEntry.getKey(), uriToPrefix.get(absoluteURI));
        }
    }
}
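URIPair is not shown in this excerpt. Judging from its use above (constructed from the resolved absolute URI plus the URI as actually referenced, and read through the fields absoluteURI and usedURI), it is presumably a small value holder along these lines; this is a hypothetical reconstruction, not the project's actual source:

private static class URIPair {
    final URI absoluteURI; // fully resolved location of the base alignment
    final URI usedURI; // location as referenced, possibly relative

    URIPair(URI absoluteURI, URI usedURI) {
        this.absoluteURI = absoluteURI;
        this.usedURI = usedURI;
    }
    // the real class may also override toString(), given the error message above
}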