
Example 1 with IOProviderConfigurationException

Use of eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException in project hale by halestudio.

The class SchemaBuilderReader, method execute:

@Override
protected IOReport execute(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
    progress.begin("Run schema builder", ProgressIndicator.UNKNOWN);
    try {
        CompilerConfiguration compilerConfiguration = new CompilerConfiguration();
        compilerConfiguration.setScriptBaseClass(DelegatingScript.class.getName());
        // Configure the GroovyShell and pass the compiler configuration.
        GroovyShell shell = new GroovyShell(getClass().getClassLoader(), new Binding(), compilerConfiguration);
        DelegatingScript script;
        try (InputStream in = getSource().getInput();
            InputStreamReader reader = new InputStreamReader(in, getCharset())) {
            script = (DelegatingScript) shell.parse(reader);
        }
        SchemaBuilder builder = new SchemaBuilder();
        script.setDelegate(builder);
        Object res = script.run();
        if (res == null) {
            throw new IllegalStateException("Null returned by script");
        } else if (res instanceof Schema) {
            schema = (Schema) res;
        } else if (res instanceof TypeIndex) {
            DefaultSchema s = new DefaultSchema(null, getSource().getLocation());
            for (TypeDefinition type : ((TypeIndex) res).getTypes()) {
                s.addType(type);
            }
            schema = s;
        } else if (res instanceof TypeDefinition) {
            DefaultSchema s = new DefaultSchema(null, getSource().getLocation());
            s.addType((TypeDefinition) res);
            schema = s;
        } else {
            throw new IllegalStateException("Unrecognised return type: " + res.getClass().getName());
        }
        reporter.setSuccess(true);
    } catch (Exception e) {
        reporter.setSuccess(false);
        reporter.error("Error running schema builder", e);
    } finally {
        progress.end();
    }
    return reporter;
}
Also used: Binding (groovy.lang.Binding), InputStreamReader (java.io.InputStreamReader), InputStream (java.io.InputStream), Schema (eu.esdihumboldt.hale.common.schema.model.Schema), DefaultSchema (eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema), GroovyShell (groovy.lang.GroovyShell), IOProviderConfigurationException (eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException), IOException (java.io.IOException), TypeDefinition (eu.esdihumboldt.hale.common.schema.model.TypeDefinition), TypeIndex (eu.esdihumboldt.hale.common.schema.model.TypeIndex), DelegatingScript (groovy.util.DelegatingScript), CompilerConfiguration (org.codehaus.groovy.control.CompilerConfiguration), SchemaBuilder (eu.esdihumboldt.hale.common.schema.groovy.SchemaBuilder)
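
For context, a minimal driver for the reader above might look as follows. This is a hypothetical sketch, not code from the hale sources: the no-argument constructor, setSource(...), validate(), getSchema() and the script location are assumed from the common hale ImportProvider/SchemaReader pattern visible in the other examples.

import java.net.URI;

import eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException;
import eu.esdihumboldt.hale.common.core.io.report.IOReport;
import eu.esdihumboldt.hale.common.core.io.supplier.DefaultInputSupplier;
import eu.esdihumboldt.hale.common.schema.model.Schema;

public class SchemaBuilderReaderExample {

    public static void main(String[] args) throws Exception {
        // import for SchemaBuilderReader omitted (its package is not shown in the example above)
        SchemaBuilderReader reader = new SchemaBuilderReader();
        // placeholder location of a Groovy schema builder script
        reader.setSource(new DefaultInputSupplier(URI.create("file:///data/schema.groovy")));
        try {
            reader.validate(); // configuration check, may throw IOProviderConfigurationException
            IOReport report = reader.execute(null); // no progress indicator, as in ProjectValidator below
            if (report.isSuccess()) {
                Schema schema = reader.getSchema();
                System.out.println("Loaded " + schema.getTypes().size() + " type(s)");
            }
        } catch (IOProviderConfigurationException e) {
            // raised when the provider is not configured correctly, e.g. a missing or broken source
            System.err.println("Configuration problem: " + e.getMessage());
        }
    }
}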

Example 2 with IOProviderConfigurationException

Use of eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException in project hale by halestudio.

The class SchematronInstanceValidator, method execute:

@Override
protected IOReport execute(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
    URI schematronLoc = getSchematronLocation();
    if (schematronLoc == null) {
        throw new IOProviderConfigurationException("Providing a schematron file is required");
    }
    progress.begin("Performing Schematron validation", ProgressIndicator.UNKNOWN);
    final InputStream sourceInput = this.getSource().getInput();
    if (sourceInput == null) {
        throw new RuntimeException("No input for Schematron validator");
    }
    final Source xmlSource = new StreamSource(sourceInput);
    final DefaultInputSupplier schematronInputSupplier = new DefaultInputSupplier(schematronLoc);
    final InputStream schematronInput = schematronInputSupplier.getInput();
    if (schematronInput == null) {
        throw new RuntimeException("No rules input for Schematron validator");
    }
    final Source schematronSource = new StreamSource(schematronInput);
    try {
        final SchematronValidator validator = new SchematronValidator(schematronSource);
        final Result result = validator.validate(xmlSource, /* svrlReport */ true);
        final StringWriter reportWriter = new StringWriter();
        SchematronUtils.convertValidatorResult(result, reportWriter);
        reporter.setSuccess(!validator.ruleViolationsDetected());
        if (validator.ruleViolationsDetected()) {
            SchematronReportParser parser = new SchematronReportParser(reportWriter.toString());
            parser.reportFailedAssertions(reporter);
        }
    } catch (Exception e) {
        reporter.error(new IOMessageImpl("Error running schematron validation", e));
        reporter.setSuccess(false);
    } finally {
        schematronInput.close();
        progress.end();
    }
    return reporter;
}
Also used: SchematronReportParser (eu.esdihumboldt.hale.io.schematron.util.SchematronReportParser), DefaultInputSupplier (eu.esdihumboldt.hale.common.core.io.supplier.DefaultInputSupplier), InputStream (java.io.InputStream), StreamSource (javax.xml.transform.stream.StreamSource), Source (javax.xml.transform.Source), IOMessageImpl (eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl), URI (java.net.URI), IOProviderConfigurationException (eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException), IOException (java.io.IOException), Result (javax.xml.transform.Result), StringWriter (java.io.StringWriter), SchematronValidator (org.opengis.cite.validation.SchematronValidator)
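
The part that actually involves IOProviderConfigurationException is the up-front guard: a missing schematron location is a configuration problem and is thrown, whereas failures during the validation run itself are only recorded on the reporter. A condensed, hypothetical sketch of that guard (class and method name are illustrative; the exception message is taken from the code above):

import java.net.URI;

import eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException;

// Hypothetical helper mirroring the check at the top of execute(...).
final class SchematronConfigCheck {

    private SchematronConfigCheck() {
        // no instances
    }

    static URI requireSchematronLocation(URI schematronLoc) throws IOProviderConfigurationException {
        if (schematronLoc == null) {
            // configuration errors are thrown, not just reported
            throw new IOProviderConfigurationException("Providing a schematron file is required");
        }
        return schematronLoc;
    }
}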

Example 3 with IOProviderConfigurationException

Use of eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException in project hale by halestudio.

The class ShapeSchemaReader, method loadFromSource:

@Override
protected Schema loadFromSource(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
    progress.begin(Messages.getString("ShapeSchemaProvider.1"), ProgressIndicator.UNKNOWN); //$NON-NLS-1$
    // DataStore store = new ShapefileDataStoreFactory().createDataStore(location.toURL());
    // DataStore store = FileDataStoreFinder.getDataStore(getSource().getLocation().toURL());
    ShapefileDataStore store = new ShapefileDataStore(getSource().getLocation().toURL());
    store.setCharset(getCharset());
    // TODO namespace from configuration parameter?!
    String namespace = ShapefileConstants.SHAPEFILE_NS;
    DefaultSchema schema = new DefaultSchema(namespace, getSource().getLocation());
    progress.setCurrentTask(Messages.getString("ShapeSchemaProvider.2")); //$NON-NLS-1$
    // create type for augmented filename property
    QName filenameTypeName = new QName(SHAPEFILE_AUGMENT_NS, "filenameType");
    TypeDefinition filenameType = null;
    if (getSharedTypes() != null) {
        filenameType = getSharedTypes().getType(filenameTypeName);
    }
    if (filenameType == null) {
        DefaultTypeDefinition fnt = new DefaultTypeDefinition(filenameTypeName);
        fnt.setConstraint(MappableFlag.DISABLED);
        fnt.setConstraint(MappingRelevantFlag.DISABLED);
        fnt.setConstraint(Binding.get(String.class));
        fnt.setConstraint(HasValueFlag.ENABLED);
        filenameType = fnt;
    }
    // build type definitions based on Schema extracted by geotools
    for (Name name : store.getNames()) {
        SimpleFeatureType sft = store.getSchema(name);
        try {
            // create type definition
            DefaultTypeDefinition type = new DefaultTypeDefinition(new QName(namespace, sft.getName().getLocalPart()));
            // constraints on main type
            type.setConstraint(MappingRelevantFlag.ENABLED);
            type.setConstraint(MappableFlag.ENABLED);
            type.setConstraint(HasValueFlag.DISABLED);
            type.setConstraint(AbstractFlag.DISABLED);
            type.setConstraint(Binding.get(Instance.class));
            for (AttributeDescriptor ad : sft.getAttributeDescriptors()) {
                DefaultPropertyDefinition property = new DefaultPropertyDefinition(new QName(ad.getLocalName()), type, getTypeFromAttributeType(ad.getType(), schema, namespace));
                // set constraints on property
                // nillable
                property.setConstraint(NillableFlag.get(ad.isNillable()));
                // cardinality
                property.setConstraint(Cardinality.get(ad.getMinOccurs(), ad.getMaxOccurs()));
                // set metadata
                property.setLocation(getSource().getLocation());
            }
            // add additional filename property
            // String filename = sft.getName().getLocalPart();
            DefaultPropertyDefinition property = new DefaultPropertyDefinition(new QName(SHAPEFILE_AUGMENT_NS, AUGMENTED_PROPERTY_FILENAME), type, filenameType);
            property.setConstraint(Cardinality.CC_EXACTLY_ONCE);
            property.setConstraint(NillableFlag.ENABLED);
            schema.addType(type);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
        progress.setCurrentTask(MessageFormat.format(
                Messages.getString("ShapeSchemaProvider.7"), sft.getTypeName())); //$NON-NLS-1$
    }
    reporter.setSuccess(true);
    return schema;
}
Also used: DefaultPropertyDefinition (eu.esdihumboldt.hale.common.schema.model.impl.DefaultPropertyDefinition), ShapefileDataStore (org.geotools.data.shapefile.ShapefileDataStore), Instance (eu.esdihumboldt.hale.common.instance.model.Instance), QName (javax.xml.namespace.QName), AttributeDescriptor (org.opengis.feature.type.AttributeDescriptor), Name (org.opengis.feature.type.Name), IOProviderConfigurationException (eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException), IOException (java.io.IOException), TypeDefinition (eu.esdihumboldt.hale.common.schema.model.TypeDefinition), DefaultTypeDefinition (eu.esdihumboldt.hale.common.schema.model.impl.DefaultTypeDefinition), SimpleFeatureType (org.opengis.feature.simple.SimpleFeatureType), DefaultSchema (eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema)
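
As with the other providers, the reader is configured with a source and a character set before loadFromSource runs. The following driver is a hypothetical sketch: the no-argument constructor, setSource(...), setCharset(...) and getSchema() are assumed from the common hale ImportProvider/SchemaReader pattern, and the .shp location is a placeholder.

import java.net.URI;
import java.nio.charset.StandardCharsets;

import eu.esdihumboldt.hale.common.core.io.report.IOReport;
import eu.esdihumboldt.hale.common.core.io.supplier.DefaultInputSupplier;
import eu.esdihumboldt.hale.common.schema.model.Schema;
import eu.esdihumboldt.hale.common.schema.model.TypeDefinition;

public class ShapeSchemaReaderExample {

    public static void main(String[] args) throws Exception {
        // import for ShapeSchemaReader omitted (its package is not shown in the example above)
        ShapeSchemaReader reader = new ShapeSchemaReader();
        reader.setSource(new DefaultInputSupplier(URI.create("file:///data/rivers.shp"))); // placeholder path
        reader.setCharset(StandardCharsets.UTF_8); // passed on to the shapefile data store, see store.setCharset above
        IOReport report = reader.execute(null); // no progress indicator, as in ProjectValidator below
        if (report.isSuccess()) {
            Schema schema = reader.getSchema();
            // iterate over the type definitions created from the shapefile schema
            for (TypeDefinition type : schema.getTypes()) {
                System.out.println(type.getName());
            }
        }
    }
}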

Example 4 with IOProviderConfigurationException

Use of eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException in project hale by halestudio.

The class ProjectValidator, method execute:

/**
 * @see eu.esdihumboldt.hale.common.core.io.impl.AbstractIOProvider#execute(eu.esdihumboldt.hale.common.core.io.ProgressIndicator,
 *      eu.esdihumboldt.hale.common.core.io.report.IOReporter)
 */
@Override
protected IOReport execute(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
    ValidatorConfigurationService service = getServiceProvider().getService(ValidatorConfigurationService.class);
    if (service == null) {
        reporter.setSuccess(false);
        throw new RuntimeException("Unable to find validator configurations");
    }
    Collection<IOProviderDescriptor> validators = new ArrayList<>();
    validators.addAll(HaleIO.getProviderFactories(ConfigurableInstanceValidator.class));
    List<ValidatorConfiguration> configurations = service.getConfigurations();
    progress.begin("Performing project validation", configurations.size());
    reporter.setSuccess(true);
    SubtaskProgressIndicator subProgress = new SubtaskProgressIndicator(progress);
    int i = 0;
    for (ValidatorConfiguration configuration : configurations) {
        for (IOProviderDescriptor validatorFactory : HaleIO.filterFactoriesByConfigurationType(validators, configuration.getContentType())) {
            try {
                // Assert that the validator can validate the exported
                // content type, skip otherwise
                boolean compatible = validatorFactory.getSupportedTypes().stream().anyMatch(type -> getContentType().isKindOf(type));
                if (!compatible) {
                    reporter.info(new IOMessageImpl(MessageFormat.format("Validator \"{0}\" skipped: cannot validate exported content type \"{1}\"", validatorFactory.getIdentifier(), getContentType().getId()), null));
                    continue;
                }
                ConfigurableInstanceValidator validator = (ConfigurableInstanceValidator) validatorFactory.createExtensionObject();
                subProgress.begin(MessageFormat.format("Executing project validator ({0}/{1})", ++i, configurations.size()), ProgressIndicator.UNKNOWN);
                validator.setSchemas(getSchemas());
                validator.setSource(getSource());
                validator.setContentType(getContentType());
                validator.setServiceProvider(getServiceProvider());
                validator.configure(configuration);
                validator.validate();
                IOReport result = validator.execute(null);
                if (result != null) {
                    reporter.importMessages(result);
                    if (!result.isSuccess()) {
                        reporter.setSuccess(false);
                    }
                }
            } catch (Exception e) {
                reporter.error(new IOMessageImpl("Error running project validator", e));
                reporter.setSuccess(false);
            }
            subProgress.end();
            progress.advance(1);
        }
    }
    progress.end();
    return reporter;
}
Also used: IOProviderDescriptor (eu.esdihumboldt.hale.common.core.io.extension.IOProviderDescriptor), ArrayList (java.util.ArrayList), IOMessageImpl (eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl), IOReport (eu.esdihumboldt.hale.common.core.io.report.IOReport), SubtaskProgressIndicator (eu.esdihumboldt.hale.common.core.io.impl.SubtaskProgressIndicator), IOProviderConfigurationException (eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException), IOException (java.io.IOException), ValidatorConfigurationService (eu.esdihumboldt.hale.io.validation.service.ValidatorConfigurationService)
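
The bookkeeping around each sub-validator follows a simple rule: all messages of the sub-report are copied into the project-level reporter, and overall success survives only if every sub-report succeeded. A hypothetical extraction of that idiom (class and method names are illustrative; the calls themselves appear in the loop above, and the IOReporter import is assumed to live next to IOReport):

import eu.esdihumboldt.hale.common.core.io.report.IOReport;
import eu.esdihumboldt.hale.common.core.io.report.IOReporter;

final class ReportMerge {

    private ReportMerge() {
        // no instances
    }

    static void mergeSubReport(IOReporter reporter, IOReport result) {
        if (result == null) {
            return; // validator produced no report, nothing to merge
        }
        // copy the sub-validator's messages into the project-level report
        reporter.importMessages(result);
        // a single failed validator marks the whole project validation as failed
        if (!result.isSuccess()) {
            reporter.setSuccess(false);
        }
    }
}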

Example 5 with IOProviderConfigurationException

Use of eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException in project hale by halestudio.

The class PartitioningWFSWriter, method execute:

@Override
protected IOReport execute(final ProgressIndicator progress, final IOReporter reporter) throws IOProviderConfigurationException, IOException {
    progress.begin("Upload to WFS-T", IProgressMonitor.UNKNOWN);
    try {
        progress.setCurrentTask("Partitioning data");
        // create the partitioner
        InstanceCollectionPartitioner partitioner = StreamGmlWriter.getPartitioner(this, reporter);
        // partition the graph
        int threshold = getParameter(PARAM_INSTANCES_THRESHOLD).as(Integer.class, DEFAULT_INSTANCES_THRESHOLD);
        try (ResourceIterator<InstanceCollection> parts = partitioner.partition(getInstances(), threshold, reporter)) {
            if (partitioner.requiresImmediateConsumption()) {
                // handle all parts right here, one after another
                int partCount = 0;
                boolean failed = false;
                if (parts.hasNext()) {
                    while (parts.hasNext() && !progress.isCanceled()) {
                        partCount++;
                        SubtaskProgressIndicator partitionProgress = new SubtaskProgressIndicator(progress);
                        partitionProgress.begin("Assembling part " + partCount, ProgressIndicator.UNKNOWN);
                        InstanceCollection part = parts.next();
                        partitionProgress.end();
                        progress.setCurrentTask("Upload part " + partCount + ((part.hasSize()) ? (" (" + part.size() + " instances)") : ("")));
                        IOReport report = uploadInstances(part, reporter, new SubtaskProgressIndicator(progress));
                        if (!report.isSuccess()) {
                            failed = true;
                            reporter.error("Upload of part {0} - {1}", partCount, report.getSummary());
                        } else {
                            reporter.info("Upload of part {0} - {1}", partCount, report.getSummary());
                        }
                    }
                    reporter.setSuccess(!failed && reporter.getErrors().isEmpty());
                    if (!reporter.isSuccess()) {
                        reporter.setSummary("Errors during upload to WFS-T, please see the report.");
                    } else {
                        reporter.setSummary("Successfully uploaded data via WFS-T");
                    }
                } else {
                    reporter.setSuccess(false);
                    reporter.setSummary("Partitioning yielded no instances to upload");
                }
            } else {
                // can start requests with a separate thread (potentially multiple
                // threads, but tests with WFSes show that this usually is
                // too much to handle for the service)
                int partCount = 0;
                final AtomicBoolean failed = new AtomicBoolean();
                if (parts.hasNext()) {
                    ExecutorService requestThread = Executors.newSingleThreadExecutor();
                    while (parts.hasNext() && !progress.isCanceled()) {
                        partCount++;
                        // only used for first partitioning
                        SubtaskProgressIndicator partitionProgress = new SubtaskProgressIndicator(progress);
                        if (partCount == 1)
                            partitionProgress.begin("Assembling part " + partCount, ProgressIndicator.UNKNOWN);
                        final InstanceCollection part = parts.next(); // not safe
                        if (partCount == 1)
                            partitionProgress.end();
                        progress.setCurrentTask("Upload part " + partCount + ((part.hasSize()) ? (" (" + part.size() + " instances)") : ("")));
                        final int currentPart = partCount;
                        requestThread.submit(new Runnable() {

                            @Override
                            public void run() {
                                try {
                                    IOReport report = uploadInstances(part, reporter, new SubtaskProgressIndicator(progress));
                                    if (!report.isSuccess()) {
                                        failed.set(true);
                                        reporter.error(new IOMessageImpl("Upload of part " + currentPart + " - " + report.getSummary(), null));
                                    } else {
                                        reporter.info(new IOMessageImpl("Upload of part " + currentPart + " - " + report.getSummary(), null));
                                    }
                                } catch (Exception e) {
                                    failed.set(true);
                                    reporter.error(new IOMessageImpl("Upload of part " + currentPart + " failed", e));
                                }
                            }
                        });
                    }
                    // wait for requests completion
                    requestThread.shutdown();
                    if (!requestThread.awaitTermination(24, TimeUnit.HOURS)) {
                        reporter.error(new IOMessageImpl("Timeout reached waiting for completion of WFS requests", null));
                    }
                    reporter.setSuccess(!failed.get() && reporter.getErrors().isEmpty());
                    if (!reporter.isSuccess()) {
                        reporter.setSummary("Errors during upload to WFS-T, please see the report.");
                    } else {
                        reporter.setSummary("Successfully uploaded data via WFS-T");
                    }
                } else {
                    reporter.setSuccess(false);
                    reporter.setSummary("Partitioning yielded no instances to upload");
                }
            }
        }
    } catch (Exception e) {
        reporter.error(new IOMessageImpl("Error during attempt to upload to WFS-T", e));
        reporter.setSuccess(false);
    } finally {
        progress.end();
    }
    return reporter;
}
Also used: InstanceCollectionPartitioner (eu.esdihumboldt.hale.common.instance.tools.InstanceCollectionPartitioner), InstanceCollection (eu.esdihumboldt.hale.common.instance.model.InstanceCollection), IOMessageImpl (eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl), IOReport (eu.esdihumboldt.hale.common.core.io.report.IOReport), SubtaskProgressIndicator (eu.esdihumboldt.hale.common.core.io.impl.SubtaskProgressIndicator), IOProviderConfigurationException (eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException), IOException (java.io.IOException), AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean), ExecutorService (java.util.concurrent.ExecutorService)
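
Stripped of the hale-specific reporting, the threaded branch boils down to a single-threaded executor that serializes the uploads plus a generous termination timeout. A plain-Java sketch of that skeleton, independent of the hale API (PartUpload and uploadAll are placeholder names):

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

public class SequentialUploadSketch {

    /** Placeholder for uploading one part; stands in for uploadInstances(...) above. */
    interface PartUpload {
        boolean upload() throws Exception;
    }

    public static boolean uploadAll(List<PartUpload> parts) throws InterruptedException {
        AtomicBoolean failed = new AtomicBoolean();
        // a single thread: tests with WFS services showed that parallel requests are too much for them
        ExecutorService requestThread = Executors.newSingleThreadExecutor();
        for (PartUpload part : parts) {
            requestThread.submit(() -> {
                try {
                    if (!part.upload()) {
                        failed.set(true);
                    }
                } catch (Exception e) {
                    failed.set(true);
                }
            });
        }
        requestThread.shutdown();
        // wait for the queued uploads to finish; a timeout counts as failure
        if (!requestThread.awaitTermination(24, TimeUnit.HOURS)) {
            failed.set(true);
        }
        return !failed.get();
    }
}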

Aggregations

IOProviderConfigurationException (eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException): 44 usages
IOException (java.io.IOException): 38 usages
IOMessageImpl (eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl): 27 usages
InputStream (java.io.InputStream): 14 usages
URI (java.net.URI): 13 usages
TypeDefinition (eu.esdihumboldt.hale.common.schema.model.TypeDefinition): 7 usages
IOReport (eu.esdihumboldt.hale.common.core.io.report.IOReport): 6 usages
InstanceCollection (eu.esdihumboldt.hale.common.instance.model.InstanceCollection): 6 usages
DefaultSchema (eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema): 6 usages
File (java.io.File): 6 usages
OutputStream (java.io.OutputStream): 6 usages
ArrayList (java.util.ArrayList): 6 usages
DefaultTypeDefinition (eu.esdihumboldt.hale.common.schema.model.impl.DefaultTypeDefinition): 5 usages
QName (javax.xml.namespace.QName): 5 usages
IOProviderDescriptor (eu.esdihumboldt.hale.common.core.io.extension.IOProviderDescriptor): 4 usages
DefaultPropertyDefinition (eu.esdihumboldt.hale.common.schema.model.impl.DefaultPropertyDefinition): 4 usages
InputStreamReader (java.io.InputStreamReader): 4 usages
Connection (java.sql.Connection): 4 usages
SQLException (java.sql.SQLException): 4 usages
PathUpdate (eu.esdihumboldt.hale.common.core.io.PathUpdate): 3 usages