Use of eu.esdihumboldt.hale.common.core.io.supplier.DefaultInputSupplier in project hale by halestudio.
The class StreamGmlWriterTest, method validate.
/**
* Validate an XML file against the given schemas.
*
* @param xmlLocation the location of the XML file
* @param validationSchemas schemas needed for validation
* @return the validation report
* @throws IOProviderConfigurationException if the validator is not
* configured correctly
* @throws IOException if I/O operations fail
*/
public static IOReport validate(URI xmlLocation, List<? extends Locatable> validationSchemas)
        throws IOProviderConfigurationException, IOException {
    XmlInstanceValidator validator = new XmlInstanceValidator();
    validator.setSchemas(validationSchemas.toArray(new Locatable[validationSchemas.size()]));
    validator.setSource(new DefaultInputSupplier(xmlLocation));
    return validator.execute(null);
}
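A minimal usage sketch (not part of the hale sources; the file locations are hypothetical). Since DefaultInputSupplier implements Locatable, it can double as the schema reference:

// hypothetical locations of the written instance file and its schema
URI xml = new File("out/result.gml").toURI();
URI xsd = new File("schemas/target.xsd").toURI();

// a DefaultInputSupplier is itself a Locatable, so it can serve as a validation schema
IOReport report = validate(xml, Collections.singletonList(new DefaultInputSupplier(xsd)));
assertTrue("Validation of the written file failed", report.isSuccess());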
Use of eu.esdihumboldt.hale.common.core.io.supplier.DefaultInputSupplier in project hale by halestudio.
The class StreamGmlWriter, method findDefaultContainter.
/**
* Find the default container element.
*
* @param targetIndex the target type index
* @param reporter the reporter, may be <code>null</code>
* @return the container XML element or <code>null</code>
*/
protected XmlElement findDefaultContainter(XmlIndex targetIndex, IOReporter reporter) {
    if (useFeatureCollection) {
        // try to find a FeatureCollection element
        Iterator<XmlElement> it = targetIndex.getElements().values().iterator();
        Collection<XmlElement> fcElements = new HashSet<XmlElement>();
        while (it.hasNext()) {
            XmlElement el = it.next();
            if (isFeatureCollection(el)) {
                fcElements.add(el);
            }
        }

        if (fcElements.isEmpty() && gmlNs != null && gmlNs.equals(NS_GML)) {
            // include WFS 1.0.0 for the FeatureCollection element
            try {
                URI location = StreamGmlWriter.class
                        .getResource("/schemas/wfs/1.0.0/WFS-basic.xsd").toURI();
                XmlSchemaReader schemaReader = new XmlSchemaReader();
                schemaReader.setSource(new DefaultInputSupplier(location));
                // FIXME to work with the extra schema it must be integrated
                // with the main schema
                // schemaReader.setSharedTypes(sharedTypes);
                IOReport report = schemaReader.execute(null);

                if (report.isSuccess()) {
                    XmlIndex wfsSchema = schemaReader.getSchema();

                    // look for the FeatureCollection element
                    for (XmlElement el : wfsSchema.getElements().values()) {
                        if (isFeatureCollection(el)) {
                            fcElements.add(el);
                        }
                    }

                    // add as additional schema, replace location for verification
                    additionalSchemas.put(wfsSchema.getNamespace(), new SchemaDecorator(wfsSchema) {

                        @Override
                        public URI getLocation() {
                            return URI.create("http://schemas.opengis.net/wfs/1.0.0/WFS-basic.xsd");
                        }
                    });

                    // add namespace
                    GmlWriterUtil.addNamespace(writer, wfsSchema.getNamespace(), "wfs"); //$NON-NLS-1$
                }
            } catch (Exception e) {
                log.warn("Using WFS schema for the FeatureCollection definition failed", e); //$NON-NLS-1$
            }
        }

        if (fcElements.isEmpty()) {
            // nothing found - only report if a reporter is present
            // (guards against a NoSuchElementException on the empty selection)
            if (reporter != null) {
                reporter.warn(new IOMessageImpl("No element describing a FeatureCollection found", //$NON-NLS-1$
                        null));
            }
        }
        else {
            // select fc element TODO prioritized selection (root element parameters)
            XmlElement fcElement = fcElements.iterator().next();
            log.info("Found " + fcElements.size() + " possible FeatureCollection elements" //$NON-NLS-1$
                    + ", using element " + fcElement.getName()); //$NON-NLS-1$
            return fcElement;
        }
    }

    return null;
}
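The supplier used above is not tied to schema reading; a minimal sketch (not from the hale sources) of opening the underlying stream directly, assuming location resolves as in the method above:

// open the supplied resource; getInput() may throw IOException
LocatableInputSupplier<? extends InputStream> supplier = new DefaultInputSupplier(location);
try (InputStream in = supplier.getInput()) {
    // hand the stream to a parser or copy it to a file
}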
Use of eu.esdihumboldt.hale.common.core.io.supplier.DefaultInputSupplier in project hale by halestudio.
The class ExamplesContent, method getMappingContent.
/**
* Get the mapping documentation content for an example project.
*
* @param projectId the project ID
* @return the mapping documentation content stream or <code>null</code>
*/
private InputStream getMappingContent(String projectId) {
    if (!mappingDocExportInitialized) {
        mappingDocExport = HaleIO.createIOProvider(AlignmentWriter.class, null, ID_MAPPING_EXPORT);

        if (mappingDocExport == null) {
            log.error("Could not create mapping documentation exporter.");
        }

        mappingDocExportInitialized = true;
    }

    if (mappingDocExport == null) {
        // no mapping documentation export possible
        return null;
    }

    if (tempMappingDir == null) {
        tempMappingDir = Files.createTempDir();
        tempMappingDir.deleteOnExit();
    }

    // the file of the mapping documentation
    File mappingDoc = new File(tempMappingDir, projectId + ".html");

    if (!mappingDoc.exists()) {
        ATransaction trans = log.begin("Generate example mapping documentation");
        try {
            // create the mapping documentation
            ExampleProject exampleProject = ExampleProjectExtension.getInstance().get(projectId);
            final Project project = (Project) exampleProject.getInfo();

            // determine the alignment location - contained in the project
            // file, not a resource
            URI alignmentLoc = exampleProject.getAlignmentLocation();
            if (alignmentLoc == null) {
                // no alignment present
                return null;
            }

            // store configurations per action ID
            Multimap<String, IOConfiguration> confs = HashMultimap.create();
            for (IOConfiguration conf : project.getResources()) {
                confs.put(conf.getActionId(), conf);
            }

            // load source schemas
            LoadSchemaAdvisor source = new LoadSchemaAdvisor(SchemaSpaceID.SOURCE);
            for (IOConfiguration conf : confs.get(SchemaIO.ACTION_LOAD_SOURCE_SCHEMA)) {
                source.setConfiguration(conf);
                executeProvider(source, conf.getProviderId(), null);
            }

            // load target schemas
            LoadSchemaAdvisor target = new LoadSchemaAdvisor(SchemaSpaceID.TARGET);
            for (IOConfiguration conf : confs.get(SchemaIO.ACTION_LOAD_TARGET_SCHEMA)) {
                target.setConfiguration(conf);
                executeProvider(target, conf.getProviderId(), null);
            }

            // load alignment - manual loading is needed, as we can't rely
            // on the environment alignment advisor
            DefaultInputSupplier alignmentIn = new DefaultInputSupplier(alignmentLoc);
            AlignmentReader reader = HaleIO.findIOProvider(AlignmentReader.class, alignmentIn,
                    alignmentLoc.getPath());
            LoadAlignmentAdvisor alignmentAdvisor = new LoadAlignmentAdvisor(null,
                    source.getSchemaSpace(), target.getSchemaSpace(), exampleProject.getUpdater());
            reader.setSource(alignmentIn);
            executeProvider(alignmentAdvisor, null, reader);
            Alignment alignment = alignmentAdvisor.getAlignment();

            if (alignment != null) {
                // save the alignment documentation
                synchronized (mappingDocExport) { // only a single instance
                    mappingDocExport.setAlignment(alignment);
                    mappingDocExport.setTarget(new FileIOSupplier(mappingDoc));
                    if (mappingDocExport instanceof ProjectInfoAware) {
                        ProjectInfo smallInfo = new ProjectInfo() {

                            @Override
                            public String getName() {
                                return project.getName();
                            }

                            @Override
                            public Date getModified() {
                                return null;
                            }

                            @Override
                            public Version getHaleVersion() {
                                return null;
                            }

                            @Override
                            public String getDescription() {
                                return project.getDescription();
                            }

                            @Override
                            public Date getCreated() {
                                return null;
                            }

                            @Override
                            public String getAuthor() {
                                return project.getAuthor();
                            }
                        };
                        ((ProjectInfoAware) mappingDocExport).setProjectInfo(smallInfo);
                    }
                    mappingDocExport.execute(null);
                }
                mappingDoc.deleteOnExit();
            }
        } catch (Throwable e) {
            log.error("Error generating mapping documentation for example project", e);
            return null;
        } finally {
            trans.end();
        }
    }

    if (mappingDoc.exists()) {
        try {
            return new FileInputStream(mappingDoc);
        } catch (FileNotFoundException e) {
            return null;
        }
    }
    else {
        return null;
    }
}
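The reader lookup in the middle of this method is a general pattern; a minimal standalone sketch (not from the hale sources; the alignment location is hypothetical), including the null check the method above omits:

// hypothetical alignment location; any URI the DefaultInputSupplier can resolve works
URI alignmentUri = new File("example.halex.alignment.xml").toURI();
DefaultInputSupplier in = new DefaultInputSupplier(alignmentUri);

// let hale match a reader based on content and file name
AlignmentReader reader = HaleIO.findIOProvider(AlignmentReader.class, in, alignmentUri.getPath());
if (reader == null) {
    throw new IllegalStateException("No alignment reader found for " + alignmentUri);
}
reader.setSource(in);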
Use of eu.esdihumboldt.hale.common.core.io.supplier.DefaultInputSupplier in project hale by halestudio.
The class GenerateDefaults, method loadSchema.
private void loadSchema() throws IOProviderConfigurationException, IOException {
    System.out.println("Loading schema...");
    LocatableInputSupplier<? extends InputStream> schemaIn = new DefaultInputSupplier(
            context.getSchema());
    SchemaReader schemaReader = HaleIO.findIOProvider(SchemaReader.class, schemaIn,
            context.getSchema().getPath());
    schemaReader.setSource(schemaIn);
    IOReport report = schemaReader.execute(new NullProgressIndicator());
    if (!report.isSuccess() || !report.getErrors().isEmpty()) {
        throw new IllegalStateException("Failed to load schema");
    }
    schema = schemaReader.getSchema();
}
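Once loaded, the schema can be inspected directly; a minimal sketch (not from the hale sources, assuming the TypeIndex API with getMappingRelevantTypes() is available on the loaded schema):

// list the mapping-relevant types of the loaded schema
for (TypeDefinition type : schema.getMappingRelevantTypes()) {
    System.out.println(type.getName().getLocalPart());
}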
Use of eu.esdihumboldt.hale.common.core.io.supplier.DefaultInputSupplier in project hale by halestudio.
The class GenerateDuplicates, method loadProject.
private boolean loadProject() throws IOException {
    final AtomicBoolean success = new AtomicBoolean(true);
    LocatableInputSupplier<? extends InputStream> projectIn = new DefaultInputSupplier(
            context.getProject());
    ProjectTransformationEnvironment env = new ProjectTransformationEnvironment("sample",
            projectIn, new ReportHandler() {

                @Override
                public void publishReport(Report<?> report) {
                    if (report.isSuccess() && report.getErrors().isEmpty()) {
                        System.out.println(report.getSummary());
                    }
                    else {
                        System.err.println("Error loading project: " + report.getSummary());
                        success.set(false);
                    }
                }
            });

    if (success.get()) {
        this.sourceSchema = env.getSourceSchema();
        this.targetSchema = env.getTargetSchema();
        this.examples = env.getAlignment();
        return true;
    }
    return false;
}
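Since ReportHandler declares a single method, the anonymous class can be replaced by a lambda on Java 8+; a sketch of an equivalent handler (same behavior, not from the hale sources):

ReportHandler handler = report -> {
    if (report.isSuccess() && report.getErrors().isEmpty()) {
        System.out.println(report.getSummary());
    }
    else {
        System.err.println("Error loading project: " + report.getSummary());
        success.set(false);
    }
};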