Usage of eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException in project hale (halestudio): the ShapeInstanceReader.execute method.
/**
 * Reads instances from a Shapefile. The schema type to map the features to is
 * either resolved from the {@code PARAM_TYPENAME} parameter or auto-detected
 * by comparing the Shapefile structure against the source schema types.
 *
 * @see AbstractIOProvider#execute(ProgressIndicator, IOReporter)
 */
@Override
protected IOReport execute(ProgressIndicator progress, IOReporter reporter)
		throws IOProviderConfigurationException, IOException {
	// $NON-NLS-1$
	progress.begin(Messages.getString("ShapeSchemaProvider.1"), ProgressIndicator.UNKNOWN);

	ShapefileDataStore store = new ShapefileDataStore(getSource().getLocation().toURL());
	store.setCharset(getCharset());

	progress.setCurrentTask("Extracting shape instances");

	// try to resolve the configured type name against the source schema
	String typename = getParameter(PARAM_TYPENAME).as(String.class);
	TypeDefinition defaultType = null;
	if (typename != null && !typename.isEmpty()) {
		try {
			defaultType = getSourceSchema().getType(QName.valueOf(typename));
		} catch (Exception e) {
			// ignore - fall through to the namespace-qualified attempt
		}
		if (defaultType == null) {
			// check if the type name was supplied w/o namespace
			// (guarded by the typename check above - previously this ran even
			// for a null typename and relied on a caught exception)
			try {
				defaultType = getSourceSchema()
						.getType(new QName(ShapefileConstants.SHAPEFILE_NS, typename));
			} catch (Exception e) {
				// ignore
				// TODO report?
			}
		}
	}

	if (defaultType == null) {
		reporter.info(new IOMessageImpl(
				"No type name supplied as parameter, trying to auto-detect the schema type.",
				null));

		// determine the structure of the Shapefile data
		TypeDefinition dataType = ShapeSchemaReader.readShapeType(getSource());
		if (dataType == null) {
			throw new IOException("Could not read shapefile structure information");
		}

		// prefer a schema type matching the store's first type name, if any
		// (previously an unguarded iterator().next() that failed with
		// NoSuchElementException on an empty store)
		String preferredName = null;
		for (Name name : store.getNames()) {
			if (name != null) {
				preferredName = name.getLocalPart();
			}
			break; // only the first name is relevant
		}

		Pair<TypeDefinition, Integer> tp = getMostCompatibleShapeType(getSourceSchema(),
				dataType, preferredName);
		if (tp == null) {
			throw new IOProviderConfigurationException(
					"No schema type specified and auto-detection failed");
		}
		defaultType = tp.getFirst();
		// FIX: message typo "Auto-deteted" -> "Auto-detected"
		reporter.info(new IOMessageImpl(MessageFormat.format(
				"Auto-detected {0} as schema type, with a {1}% compatibility rating.",
				defaultType.getName(), tp.getSecond()), null));
	}

	Map<TypeDefinition, InstanceCollection> collections = new HashMap<>();
	// create a collection for each type in the store
	for (Name name : store.getNames()) {
		SimpleFeatureSource features = store.getFeatureSource(name);

		// defensive: defaultType should always be set at this point
		TypeDefinition type = defaultType;
		if (type == null) {
			QName typeName = new QName(ShapefileConstants.SHAPEFILE_NS, name.getLocalPart());
			type = getSourceSchema().getType(typeName);
		}

		collections.put(type, new ShapesInstanceCollection(features, type, getCrsProvider(),
				name.getLocalPart()));
	}

	instances = new PerTypeInstanceCollection(collections);

	reporter.setSuccess(true);
	return reporter;
}
Usage of eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException in project hale (halestudio): the ValidatorConfigurationReaderImpl.execute method.
/**
 * Reads the validator configuration (validation rule) from the configured
 * source location.
 *
 * @see eu.esdihumboldt.hale.common.core.io.impl.AbstractIOProvider#execute(eu.esdihumboldt.hale.common.core.io.ProgressIndicator,
 *      eu.esdihumboldt.hale.common.core.io.report.IOReporter)
 */
@Override
protected IOReport execute(ProgressIndicator progress, IOReporter reporter)
		throws IOProviderConfigurationException, IOException {
	progress.begin("Loading validator configuration.", ProgressIndicator.UNKNOWN);

	final URI sourceLocation = getSource().getLocation();
	if (sourceLocation == null) {
		throw new IOProviderConfigurationException(
				"No source location provided when trying to read validator configuration.");
	}

	final DefaultInputSupplier validationRuleInputSupplier = new DefaultInputSupplier(
			sourceLocation);
	final InputStream validationRuleInput = validationRuleInputSupplier.getInput();
	if (validationRuleInput == null) {
		throw new IOProviderConfigurationException("Cannot read validator configuration.");
	}

	// try-with-resources closes the stream on every path (replaces the
	// deprecated IOUtils.closeQuietly)
	try (InputStream input = validationRuleInput) {
		// XXX UTF 8 encoding is assumed here. The actual encoding should be
		// detected or be configurable
		configuration = new ValidatorConfiguration(
				IOUtils.toString(input, StandardCharsets.UTF_8), sourceLocation,
				getContentType());
		reporter.setSuccess(true);
	} catch (Exception e) {
		// wrap with context about the source that failed
		throw new IOProviderConfigurationException(
				MessageFormat.format("Could not read validation rule from '{0}': {1}",
						sourceLocation.toString(), e.getMessage()),
				e);
	}

	progress.setCurrentTask("Validation rule loaded.");
	return reporter;
}
Usage of eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException in project hale (halestudio): the CSVSchemaReader.loadFromSource method.
/**
 * Loads the schema for a CSV file. The type name must be supplied via the
 * {@code PARAM_TYPENAME} parameter; property names default to the entries of
 * the file's first line and property types default to
 * {@code java.lang.String}.
 *
 * @param progress the progress indicator
 * @param reporter the reporter to use for errors
 * @return the loaded schema, or <code>null</code> if loading failed
 */
@Override
protected Schema loadFromSource(ProgressIndicator progress, IOReporter reporter)
		throws IOProviderConfigurationException, IOException {
	// $NON-NLS-1$
	progress.begin("Load CSV schema", ProgressIndicator.UNKNOWN);

	String namespace = CSVFileIO.CSVFILE_NS;
	DefaultSchema schema = new DefaultSchema(namespace, getSource().getLocation());

	CSVReader reader = CSVUtil.readFirst(this);
	try {
		// initializes the first line of the table (names of the columns)
		firstLine = reader.readNext();

		// create type definition
		String typename = getParameter(CommonSchemaConstants.PARAM_TYPENAME).as(String.class);
		if (typename == null || typename.isEmpty()) {
			reporter.setSuccess(false);
			reporter.error(new IOMessageImpl("No Typename was set", null));
			return null;
		}
		DefaultTypeDefinition type = new DefaultTypeDefinition(new QName(typename));

		// constraints on main type
		type.setConstraint(MappingRelevantFlag.ENABLED);
		type.setConstraint(MappableFlag.ENABLED);
		type.setConstraint(HasValueFlag.DISABLED);
		type.setConstraint(AbstractFlag.DISABLED);

		// set metadata for main type
		type.setLocation(getSource().getLocation());

		// property types: either configured or defaulting to String per column
		String[] comboSelections;
		if (getParameter(PARAM_PROPERTYTYPE).isEmpty()) {
			// FIX: build the defaults directly instead of joining into a
			// comma-separated string and re-splitting it (which failed with
			// StringIndexOutOfBoundsException for an empty first line)
			comboSelections = new String[firstLine.length];
			Arrays.fill(comboSelections, "java.lang.String");
		}
		else {
			comboSelections = getParameter(PARAM_PROPERTYTYPE).as(String.class).split(",");
		}

		// property names: either configured or taken from the first line
		String[] properties;
		if (getParameter(PARAM_PROPERTY).isEmpty()) {
			properties = firstLine;
		}
		else {
			properties = getParameter(PARAM_PROPERTY).as(String.class).split(",");
		}

		// fail if the number of configured names or types differs from the
		// number of entries in the first line
		if ((firstLine.length != properties.length && properties.length != 0)
				|| (firstLine.length != comboSelections.length
						&& comboSelections.length != 0)) {
			fail("Not the same number of entries for property names, property types and words in the first line of the file");
		}

		for (int i = 0; i < comboSelections.length; i++) {
			PropertyType propertyType = PropertyTypeExtension.getInstance()
					.getFactory(comboSelections[i]).createExtensionObject();

			DefaultPropertyDefinition property = new DefaultPropertyDefinition(
					new QName(properties[i]), type, propertyType.getTypeDefinition());

			// set constraints on property
			// FIXME nillable should be configurable per field
			// (see also CSVInstanceReader)
			property.setConstraint(NillableFlag.ENABLED);
			// cardinality
			property.setConstraint(Cardinality.CC_EXACTLY_ONCE);

			// set metadata for property
			property.setLocation(getSource().getLocation());
		}

		// skip the first line when reading instances if it only repeats the
		// property names
		boolean skip = Arrays.equals(properties, firstLine);

		type.setConstraint(new CSVConfiguration(CSVUtil.getSep(this), CSVUtil.getQuote(this),
				CSVUtil.getEscape(this), skip));

		schema.addType(type);
	} catch (Exception ex) {
		reporter.error(new IOMessageImpl("Cannot load csv schema", ex));
		reporter.setSuccess(false);
		return null;
	} finally {
		// FIX: the reader was never closed before (resource leak)
		reader.close();
	}

	reporter.setSuccess(true);
	return schema;
}
Usage of eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException in project hale (halestudio): the XsltExport.execute method.
/**
 * Generates an XSLT transformation from the alignment and writes it to the
 * configured target. A temporary directory is used for the generation
 * templates and cleaned up afterwards.
 *
 * @see AbstractIOProvider#execute(ProgressIndicator, IOReporter)
 */
@Override
protected IOReport execute(ProgressIndicator progress, IOReporter reporter)
		throws IOProviderConfigurationException, IOException {
	// FIX: use the JDK facility instead of Guava's deprecated and insecure
	// Files.createTempDir() (see CVE-2020-8908)
	File templateDir = java.nio.file.Files.createTempDirectory("hale-xslt").toFile();

	progress.begin("Generate XSLT", ProgressIndicator.UNKNOWN);
	try {
		log.info("Template directory: " + templateDir.getAbsolutePath());

		XmlIndex targetIndex = StreamGmlWriter.getXMLIndex(getTargetSchema());
		if (targetIndex == null) {
			throw new IllegalStateException("Target schema contains no XML schema");
		}

		XmlIndex sourceIndex = StreamGmlWriter.getXMLIndex(getSourceSchema());
		if (sourceIndex == null) {
			throw new IllegalStateException("Source schema contains no XML schema");
		}

		// allow subclasses to initialize based on the resolved schemas
		init(sourceIndex, targetIndex);

		XmlElement containerElement = StreamGmlWriter.getConfiguredContainerElement(this,
				targetIndex);
		if (containerElement == null) {
			throw new IllegalStateException("No target container element specified");
		}

		XsltGenerator generator = new XsltGenerator(templateDir, getAlignment(), sourceIndex,
				targetIndex, reporter, progress, containerElement, getSourceContext(),
				projectInfo) {

			@Override
			protected void writeContainerIntro(XMLStreamWriter writer,
					XsltGenerationContext context) throws XMLStreamException, IOException {
				// delegate to the enclosing provider (hook for subclasses)
				XsltExport.this.writeContainerIntro(writer, context);
			}
		};
		return generator.write(getTarget());
	} catch (Exception e) {
		reporter.error(new IOMessageImpl("XSLT generation failed", e));
		reporter.setSuccess(false);
		return reporter;
	} finally {
		progress.end();

		// clean up the temporary template directory
		try {
			FileUtils.deleteDirectory(templateDir);
		} catch (Exception e) {
			// failure to delete the directory is not fatal
			log.warn("Failed to delete temporary directory", e);
		}
	}
}
Usage of eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException in project hale (halestudio): the DefaultProjectReader.execute method.
/**
 * Loads a project either from a project archive (ZIP) or from a plain project
 * XML file, then resolves and loads the external project files referenced by
 * the project, updating their locations relative to the new project location.
 *
 * @see AbstractIOProvider#execute(ProgressIndicator, IOReporter)
 */
@Override
protected IOReport execute(ProgressIndicator progress, IOReporter reporter)
		throws IOProviderConfigurationException, IOException {
	progress.begin("Load project", ProgressIndicator.UNKNOWN);
	try {
		InputStream in = getSource().getInput();
		if (archive) {
			// read from archive; try-with-resources ensures the stream is
			// closed on every path
			try (ZipInputStream zip = new ZipInputStream(new BufferedInputStream(in))) {
				ZipEntry entry;
				while ((entry = zip.getNextEntry()) != null) {
					String name = entry.getName();
					progress.setCurrentTask(MessageFormat.format("Load {0}", name));
					if (name.equals(ProjectIO.PROJECT_FILE)) {
						try {
							setProjectChecked(Project.load(new EntryInputStream(zip)), reporter);
						} catch (Exception e) {
							// fail if main project file cannot be loaded
							throw new IOProviderConfigurationException(
									"Source is no valid project archive", e);
						}
					}
					else {
						ProjectFile file = getProjectFiles().get(name);
						if (file != null) {
							try {
								file.load(new EntryInputStream(zip));
							} catch (Exception e) {
								reporter.error(new IOMessageImpl(
										"Error while loading project file {0}, file will be reset.",
										e, -1, -1, name));
								// reset file so it is in a consistent state
								file.reset();
							}
						}
					}
				}
			}
		}
		else {
			// read from XML
			try {
				setProjectChecked(Project.load(in), reporter);
			} catch (Exception e) {
				// fail if main project file cannot be loaded
				throw new IOProviderConfigurationException("Source is no valid project file", e);
			} finally {
				in.close();
			}
		}

		// determine the location the project was previously saved to, to be
		// able to update relative resource paths
		URI oldProjectLocation;
		if (getProject().getSaveConfiguration() == null) {
			oldProjectLocation = getSource().getLocation();
		}
		else {
			oldProjectLocation = URI.create(getProject().getSaveConfiguration()
					.getProviderConfiguration().get(ExportProvider.PARAM_TARGET)
					.as(String.class));
		}
		PathUpdate update = new PathUpdate(oldProjectLocation, getSource().getLocation());

		// check if there are any external project files listed
		if (getProjectFiles() != null) {
			// only if project files set at all
			for (ProjectFileInfo fileInfo : getProject().getProjectFiles()) {
				ProjectFile projectFile = getProjectFiles().get(fileInfo.getName());
				if (projectFile != null) {
					URI location = fileInfo.getLocation();
					location = update.findLocation(location, false,
							DefaultInputSupplier.SCHEME_LOCAL
									.equals(getSource().getLocation().getScheme()),
							false);
					if (location == null && getSource().getLocation() != null) {
						// 1st try: appending file name to project location
						try {
							URI candidate = new URI(
									getSource().getLocation().toString() + "."
											+ fileInfo.getName());
							if (HaleIO.testStream(candidate, true)) {
								location = candidate;
							}
						} catch (URISyntaxException e) {
							// ignore
						}

						// 2nd try: file name next to project
						// FIX: was "location != null", which skipped this
						// fallback exactly when the 1st try had failed (and
						// could overwrite a successful 1st try)
						if (location == null) {
							try {
								String projectLoc = getSource().getLocation().toString();
								int index = projectLoc.lastIndexOf('/');
								if (index > 0) {
									URI candidate = new URI(projectLoc.substring(0, index + 1)
											+ fileInfo.getName());
									if (HaleIO.testStream(candidate, true)) {
										location = candidate;
									}
								}
							} catch (URISyntaxException e) {
								// ignore
							}
						}
					}

					boolean fileSuccess = false;
					if (location != null) {
						// load the project file from the resolved location
						// (removed a pointless catch-and-rethrow wrapper)
						try (InputStream input = new DefaultInputSupplier(location).getInput()) {
							projectFile.load(input);
							fileSuccess = true;
						} catch (Exception e) {
							reporter.error(new IOMessageImpl("Loading project file failed", e));
						}
					}

					if (!fileSuccess) {
						reporter.error(new IOMessageImpl(
								"Error while loading project file {0}, file will be reset.",
								null, -1, -1, fileInfo.getName()));
						projectFile.reset();
					}
				}
				else {
					reporter.info(new IOMessageImpl(
							"No handler for external project file {0} found.", null, -1, -1,
							fileInfo.getName()));
				}
			}
		}

		// clear project infos
		/*
		 * XXX was there any particular reason why this was done? I suspect it
		 * was done so when saving the project this information is not saved
		 * again as-is, but on the basis of actual files written. However, this
		 * case is handled in the project writer already.
		 *
		 * As this information is in fact necessary when trying to identify
		 * certain files like the alignment, clearing the list of project files
		 * was commented out.
		 */
		// getProject().getProjectFiles().clear();

		reporter.setSuccess(true);
		return reporter;
	} finally {
		// previously only reached on the success path; now runs always
		progress.end();
	}
}
Aggregations: end of the collected usage examples for IOProviderConfigurationException.