use of eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl in project hale by halestudio.
the class StreamGmlWriter method findDefaultContainter.
/**
* Find the default container element.
*
* @param targetIndex the target type index
* @param reporter the reporter, may be <code>null</code>
* @return the container XML element or <code>null</code>
*/
protected XmlElement findDefaultContainter(XmlIndex targetIndex, IOReporter reporter) {
if (useFeatureCollection) {
// try to find FeatureCollection element
Iterator<XmlElement> it = targetIndex.getElements().values().iterator();
Collection<XmlElement> fcElements = new HashSet<XmlElement>();
while (it.hasNext()) {
XmlElement el = it.next();
if (isFeatureCollection(el)) {
fcElements.add(el);
}
}
if (fcElements.isEmpty() && gmlNs != null && gmlNs.equals(NS_GML)) {
// include the WFS 1.0.0 schema for its FeatureCollection element
try {
URI location = StreamGmlWriter.class.getResource("/schemas/wfs/1.0.0/WFS-basic.xsd").toURI();
XmlSchemaReader schemaReader = new XmlSchemaReader();
schemaReader.setSource(new DefaultInputSupplier(location));
// FIXME to work with the extra schema it must be integrated with the main schema
// schemaReader.setSharedTypes(sharedTypes);
IOReport report = schemaReader.execute(null);
if (report.isSuccess()) {
XmlIndex wfsSchema = schemaReader.getSchema();
// look for FeatureCollection element
for (XmlElement el : wfsSchema.getElements().values()) {
if (isFeatureCollection(el)) {
fcElements.add(el);
}
}
// add as additional schema, replace location for verification
additionalSchemas.put(wfsSchema.getNamespace(), new SchemaDecorator(wfsSchema) {
@Override
public URI getLocation() {
return URI.create("http://schemas.opengis.net/wfs/1.0.0/WFS-basic.xsd");
}
});
// add namespace
GmlWriterUtil.addNamespace(writer, wfsSchema.getNamespace(), "wfs"); //$NON-NLS-1$
}
} catch (Exception e) {
log.warn("Using WFS schema for the FeatureCollection definition failed", e); //$NON-NLS-1$
}
}
if (fcElements.isEmpty() && reporter != null) {
reporter.warn(new IOMessageImpl("No element describing a FeatureCollection found", null)); //$NON-NLS-1$
} else {
// select fc element TODO prioritized selection (root element parameters)
XmlElement fcElement = fcElements.iterator().next();
log.info("Found " + fcElements.size() + " possible FeatureCollection elements" //$NON-NLS-1$ //$NON-NLS-2$
+ ", using element " + fcElement.getName()); //$NON-NLS-1$
return fcElement;
}
}
return null;
}
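Every snippet on this page follows the same reporting idiom: build an IOMessageImpl from a message string and an optional cause, then hand it to the IOReporter. Below is a minimal sketch of that idiom, assuming only the constructor and reporter methods visible in the snippets; the class, method and variable names are placeholders.

import eu.esdihumboldt.hale.common.core.io.report.IOReporter;
import eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl;

public class ReportingSketch {

    // placeholder method illustrating the reporting idiom used throughout this page
    public static void reportExamples(IOReporter reporter, Exception cause) {
        // warning without a cause
        reporter.warn(new IOMessageImpl("No element describing a FeatureCollection found", null));
        // error carrying the triggering exception, marking the overall task as failed
        reporter.error(new IOMessageImpl("Operation failed", cause));
        reporter.setSuccess(false);
    }
}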
use of eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl in project hale by halestudio.
the class CSVSchemaReader method loadFromSource.
@Override
protected Schema loadFromSource(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
progress.begin("Load CSV schema", ProgressIndicator.UNKNOWN); //$NON-NLS-1$
String namespace = CSVFileIO.CSVFILE_NS;
DefaultSchema schema = new DefaultSchema(namespace, getSource().getLocation());
CSVReader reader = CSVUtil.readFirst(this);
try {
// initializes the first line of the table (names of the columns)
firstLine = reader.readNext();
// create type definition
String typename = getParameter(CommonSchemaConstants.PARAM_TYPENAME).as(String.class);
if (typename == null || typename.isEmpty()) {
reporter.setSuccess(false);
reporter.error(new IOMessageImpl("No Typename was set", null));
return null;
}
DefaultTypeDefinition type = new DefaultTypeDefinition(new QName(typename));
// constraints on main type
type.setConstraint(MappingRelevantFlag.ENABLED);
type.setConstraint(MappableFlag.ENABLED);
type.setConstraint(HasValueFlag.DISABLED);
type.setConstraint(AbstractFlag.DISABLED);
// set metadata for main type
type.setLocation(getSource().getLocation());
StringBuffer defaultPropertyTypeBuffer = new StringBuffer();
String[] comboSelections;
if (getParameter(PARAM_PROPERTYTYPE).isEmpty()) {
for (int i = 0; i < firstLine.length; i++) {
defaultPropertyTypeBuffer.append("java.lang.String");
defaultPropertyTypeBuffer.append(",");
}
defaultPropertyTypeBuffer.deleteCharAt(defaultPropertyTypeBuffer.lastIndexOf(","));
String combs = defaultPropertyTypeBuffer.toString();
comboSelections = combs.split(",");
} else {
comboSelections = getParameter(PARAM_PROPERTYTYPE).as(String.class).split(",");
}
String[] properties;
if (getParameter(PARAM_PROPERTY).isEmpty()) {
properties = firstLine;
} else {
properties = getParameter(PARAM_PROPERTY).as(String.class).split(",");
}
// fail if the number of property names or property types does not match the number of entries in the first line
if ((firstLine.length != properties.length && properties.length != 0) || (firstLine.length != comboSelections.length && comboSelections.length != 0)) {
fail("Not the same number of entries for property names, property types and words in the first line of the file");
}
for (int i = 0; i < comboSelections.length; i++) {
PropertyType propertyType;
propertyType = PropertyTypeExtension.getInstance().getFactory(comboSelections[i]).createExtensionObject();
DefaultPropertyDefinition property = new DefaultPropertyDefinition(new QName(properties[i]), type, propertyType.getTypeDefinition());
// set constraints on property
// property.setConstraint(NillableFlag.DISABLED); // nillable
property.setConstraint(NillableFlag.ENABLED); // nillable FIXME should be configurable per field (see also CSVInstanceReader)
// cardinality
property.setConstraint(Cardinality.CC_EXACTLY_ONCE);
// set metadata for property
property.setLocation(getSource().getLocation());
}
boolean skip = Arrays.equals(properties, firstLine);
type.setConstraint(new CSVConfiguration(CSVUtil.getSep(this), CSVUtil.getQuote(this), CSVUtil.getEscape(this), skip));
schema.addType(type);
} catch (Exception ex) {
reporter.error(new IOMessageImpl("Cannot load csv schema", ex));
reporter.setSuccess(false);
return null;
}
reporter.setSuccess(true);
return schema;
}
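The consistency check in loadFromSource splits both parameters on commas and fails if the counts do not match the number of entries in the first line of the file. Below is a standalone illustration of that check with hypothetical values; the comma-separated format and the type identifiers other than java.lang.String are assumptions inferred from the split(",") calls above, not documented parameter values.

public class CsvParameterCheckSketch {

    public static void main(String[] args) {
        // hypothetical first line of a CSV file and matching parameter values
        String[] firstLine = { "id", "name", "height" };
        String[] properties = "id,name,height".split(",");
        String[] comboSelections = "java.lang.Integer,java.lang.String,java.lang.Double".split(",");

        // mirrors the check in loadFromSource: all three counts must agree
        boolean mismatch = (firstLine.length != properties.length && properties.length != 0)
                || (firstLine.length != comboSelections.length && comboSelections.length != 0);
        System.out.println(mismatch); // false -> the schema can be built
    }
}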
use of eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl in project hale by halestudio.
the class CSVInstanceWriter method execute.
/**
* @see eu.esdihumboldt.hale.common.core.io.impl.AbstractIOProvider#execute(eu.esdihumboldt.hale.common.core.io.ProgressIndicator,
* eu.esdihumboldt.hale.common.core.io.report.IOReporter)
*/
@Override
protected IOReport execute(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
boolean solveNestedProperties = getParameter(InstanceTableIOConstants.SOLVE_NESTED_PROPERTIES).as(Boolean.class, false);
// XXX what does "solve nested properties" mean?
// get separator, quote and escape characters
sep = CSVUtil.getSep(this);
quote = CSVUtil.getQuote(this);
esc = CSVUtil.getEscape(this);
List<String> headerRow = new ArrayList<String>();
// get the parameter to get the type definition
String exportType = getParameter(InstanceTableIOConstants.EXPORT_TYPE).as(String.class);
QName selectedTypeName = null;
if (exportType != null && !exportType.equals("") && !exportType.equals(" ")) {
selectedTypeName = QName.valueOf(exportType);
}
// get all instances of the selected Type
InstanceCollection instances = getInstanceCollection(selectedTypeName);
Iterator<Instance> instanceIterator = instances.iterator();
Instance instance = null;
try {
instance = instanceIterator.next();
} catch (NoSuchElementException e) {
reporter.error(new IOMessageImpl("There are no instances for the selected type.", e));
return reporter;
}
// get definition of the current instance (only properties with this definition type will be written to the csv file)
TypeDefinition definition = instance.getDefinition();
// the csv file initially has no header row, so the instances are first written to a temp directory
File tempDir = Files.createTempDir();
File tempFile = new File(tempDir, "tempInstances.csv");
// write instances to csv file (without header)
CSVWriter writer = new CSVWriter(new OutputStreamWriter(new FileOutputStream(tempFile)), sep, quote, esc);
writeLine(solveNestedProperties, headerRow, instance, writer);
while (instanceIterator.hasNext()) {
Instance nextInst = instanceIterator.next();
if (nextInst.getDefinition().equals(definition)) {
writeLine(solveNestedProperties, headerRow, nextInst, writer);
}
}
writer.close();
// header is only complete after all properties have been processed
// prepend it to the temp file content and write the result to the output
insertHeader(tempFile, getTarget().getOutput(), headerRow);
FileUtils.deleteDirectory(tempDir);
reporter.setSuccess(true);
return reporter;
}
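Because the header row is only complete after every instance has been processed, the rows are first written to a temporary file and the header is inserted afterwards. The insertHeader method itself is not shown on this page; the following is only a hedged sketch of what such a helper could look like, not the hale implementation.

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.List;

public class InsertHeaderSketch {

    // hypothetical helper: prepend the collected header row to the rows already
    // written to the temp file and copy everything to the real target
    public static void insertHeader(File tempFile, OutputStream target, List<String> headerRow,
            char sep) throws IOException {
        try (Writer out = new OutputStreamWriter(target, StandardCharsets.UTF_8);
                BufferedReader in = new BufferedReader(
                        new InputStreamReader(new FileInputStream(tempFile), StandardCharsets.UTF_8))) {
            out.write(String.join(String.valueOf(sep), headerRow));
            out.write(System.lineSeparator());
            String line;
            while ((line = in.readLine()) != null) {
                out.write(line);
                out.write(System.lineSeparator());
            }
        }
    }
}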
use of eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl in project hale by halestudio.
the class XsltExport method execute.
@Override
protected IOReport execute(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
File templateDir = Files.createTempDir();
progress.begin("Generate XSLT", ProgressIndicator.UNKNOWN);
try {
log.info("Template directory: " + templateDir.getAbsolutePath());
XmlIndex targetIndex = StreamGmlWriter.getXMLIndex(getTargetSchema());
if (targetIndex == null) {
throw new IllegalStateException("Target schema contains no XML schema");
}
XmlIndex sourceIndex = StreamGmlWriter.getXMLIndex(getSourceSchema());
if (sourceIndex == null) {
throw new IllegalStateException("Source schema contains no XML schema");
}
init(sourceIndex, targetIndex);
XmlElement containerElement = StreamGmlWriter.getConfiguredContainerElement(this, targetIndex);
if (containerElement == null) {
throw new IllegalStateException("No target container element specified");
}
XsltGenerator generator = new XsltGenerator(templateDir, getAlignment(), sourceIndex, targetIndex, reporter, progress, containerElement, getSourceContext(), projectInfo) {
@Override
protected void writeContainerIntro(XMLStreamWriter writer, XsltGenerationContext context) throws XMLStreamException, IOException {
XsltExport.this.writeContainerIntro(writer, context);
}
};
return generator.write(getTarget());
} catch (Exception e) {
reporter.error(new IOMessageImpl("XSLT generation failed", e));
reporter.setSuccess(false);
return reporter;
} finally {
progress.end();
try {
FileUtils.deleteDirectory(templateDir);
} catch (Exception e) {
// failure to delete the directory is not fatal
log.warn("Failed to delete temporary directory", e);
}
}
}
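XsltExport uses the same temporary-directory lifecycle as CSVInstanceWriter above: create the directory up front, work inside it and delete it in a finally block so a failed export leaves nothing behind. Below is a minimal standalone sketch of that pattern, using Guava's Files.createTempDir and commons-io's FileUtils.deleteDirectory as in the snippet; class and file names are placeholders.

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

import com.google.common.io.Files;

public class TempDirLifecycleSketch {

    public static void main(String[] args) throws IOException {
        File templateDir = Files.createTempDir();
        try {
            // work with files inside the directory, e.g. generated XSL fragments
            File fragment = new File(templateDir, "container.xsl"); // placeholder file name
            FileUtils.writeStringToFile(fragment, "<!-- generated content -->", "UTF-8");
        } finally {
            try {
                FileUtils.deleteDirectory(templateDir);
            } catch (IOException e) {
                // failure to delete the directory is not fatal
                System.err.println("Failed to delete temporary directory: " + e);
            }
        }
    }
}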
use of eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl in project hale by halestudio.
the class XsltGenerator method write.
/**
* Generate the XSLT transformation and write it to the given target.
*
* @param target the target output supplier
* @return the report
* @throws Exception if an unrecoverable error occurs during the process
*/
public IOReport write(LocatableOutputSupplier<? extends OutputStream> target) throws Exception {
Template root = ve.getTemplate(Templates.ROOT, "UTF-8");
VelocityContext context = XslTransformationUtil.createStrictVelocityContext();
// project info
context.put("info", ProjectXslInfo.getInfo(projectInfo));
// collects IDs of type cells
Set<String> typeIds = new HashSet<String>();
// type cells
for (Cell typeCell : alignment.getTypeCells()) {
if (typeCell.getTransformationMode() != TransformationMode.disabled) {
// ignore disabled cells
Entity targetEntity = CellUtil.getFirstEntity(typeCell.getTarget());
if (targetEntity != null) {
// assign identifiers for type transformations
String targetName = targetEntity.getDefinition().getDefinition().getName().getLocalPart();
String id = cellIdentifiers.getId(typeCell, targetName);
typeIds.add(id);
} else {
reporter.warn(new IOMessageImpl("Ignoring type relation without target type", null));
}
}
}
// collects IDs of type cells mapped to target element names
Map<String, QName> targetElements = new HashMap<String, QName>();
// container
File container = new File(workDir, "container.xsl");
progress.setCurrentTask("Generating container");
generateContainer(typeIds, container, targetElements);
Set<String> passiveCellIds = new HashSet<String>(typeIds);
progress.setCurrentTask("Generate type transformations");
// templates for all active cells
for (Entry<String, QName> entry : targetElements.entrySet()) {
// generate XSL fragments for type transformations
String id = entry.getKey();
QName elementName = entry.getValue();
Cell typeCell = cellIdentifiers.getObject(id);
// this is not a passive cell
passiveCellIds.remove(id);
XmlElement targetElement = targetSchema.getElements().get(elementName);
String filename = "_" + id + ".xsl";
File file = new File(workDir, filename);
includes.add(filename);
generateTypeTransformation(id, targetElement, typeCell, file);
}
// all passive cell templates
for (String passiveId : passiveCellIds) {
Cell typeCell = cellIdentifiers.getObject(passiveId);
String filename = "_" + passiveId + ".xsl";
File file = new File(workDir, filename);
includes.add(filename);
// XXX dummy target element
XmlElement targetElement = new XmlElement(new QName(NS_XSL_DEFINITIONS, "dummy"), null, null);
generateTypeTransformation(passiveId, targetElement, typeCell, file);
// for passive cells no variables should be created
typeIds.remove(passiveId);
}
// namespaces that occur in addition to the fixed namespaces
Map<String, String> additionalNamespaces = new HashMap<String, String>(prefixes.asMap());
for (String fixedPrefix : FIXED_PREFIXES.keySet()) {
additionalNamespaces.remove(fixedPrefix);
}
context.put("additionalNamespaces", additionalNamespaces);
// types cells
/*
* The type identifiers are used as variable names to store the results of
* the equally named templates.
*/
context.put("targets", typeIds);
// includes
// TODO check if files to include are actually there?
context.put("includes", includes);
OutputStream out = target.getOutput();
XMLPrettyPrinter printer = new XMLPrettyPrinter(out);
Future<?> ready = printer.start();
Writer writer = new OutputStreamWriter(printer, "UTF-8");
try {
root.merge(context, writer);
writer.flush();
} finally {
writer.close();
ready.get();
out.close();
}
reporter.setSuccess(reporter.getErrors().isEmpty());
return reporter;
}
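The final step of write(...) is plain Velocity usage: values are put into a VelocityContext and the root template is merged into a writer. Below is a minimal sketch of that idiom without the hale-specific root template and XMLPrettyPrinter; the inline template and context values are placeholders, not the hale setup.

import java.io.StringWriter;
import java.util.Arrays;

import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.Velocity;

public class VelocityMergeSketch {

    public static void main(String[] args) {
        Velocity.init();

        // placeholder context values standing in for the type cell ids and includes
        VelocityContext context = new VelocityContext();
        context.put("targets", Arrays.asList("cell1", "cell2"));
        context.put("includes", Arrays.asList("_cell1.xsl", "_cell2.xsl"));

        // placeholder inline template; the real root template ships with hale
        String template = "#foreach($inc in $includes)<xsl:include href=\"$inc\"/>\n#end";

        StringWriter writer = new StringWriter();
        Velocity.evaluate(context, writer, "sketch", template);
        System.out.println(writer);
    }
}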