Use of eu.esdihumboldt.hale.common.instance.io.InstanceWriter in project hale by halestudio.
The class Transformation, method transform.
/**
 * Transform the instances provided through the given instance readers and
 * supply the result to the given instance writer.
 *
 * @param sources the instance readers
 * @param target the target instance writer
 * @param environment the transformation environment
 * @param reportHandler the report handler
 * @param processId the identifier for the transformation process, may be
 *            <code>null</code> if grouping the jobs to a job family is not
 *            necessary
 * @param validators the instance validators, may be <code>null</code> or
 *            empty
 * @param filterDefinition the {@link InstanceFilterDefinition} to use as a
 *            filter, may be <code>null</code>
 * @return the future representing the successful completion of the
 *         transformation (note that a successful completion doesn't
 *         necessarily mean there weren't any internal transformation errors)
 */
public static ListenableFuture<Boolean> transform(List<InstanceReader> sources,
        InstanceWriter target, final TransformationEnvironment environment,
        final ReportHandler reportHandler, Object processId,
        Collection<InstanceValidator> validators, InstanceFilterDefinition filterDefinition) {
    final IOAdvisor<InstanceReader> loadDataAdvisor = new AbstractIOAdvisor<InstanceReader>() {

        /**
         * @see IOAdvisor#prepareProvider(IOProvider)
         */
        @Override
        public void prepareProvider(InstanceReader provider) {
            super.prepareProvider(provider);
            provider.setSourceSchema(environment.getSourceSchema());
        }

        /**
         * @see AbstractIOAdvisor#updateConfiguration(IOProvider)
         */
        @Override
        public void updateConfiguration(InstanceReader provider) {
            super.updateConfiguration(provider);
            if (environment instanceof ProjectTransformationEnvironment) {
                // set project CRS manager as CRS provider
                /*
                 * Resource based CRS settings will however not work, as the
                 * resource identifiers will not match
                 */
                provider.setCRSProvider(new ProjectCRSManager(provider, null,
                        ((ProjectTransformationEnvironment) environment).getProject()));
            }
        }
    };
    loadDataAdvisor.setServiceProvider(environment);
    loadDataAdvisor.setActionId(InstanceIO.ACTION_LOAD_SOURCE_DATA);

    List<InstanceCollection> sourceList = Lists.transform(sources,
            new Function<InstanceReader, InstanceCollection>() {

                @Override
                public InstanceCollection apply(@Nullable InstanceReader input) {
                    try {
                        HeadlessIO.executeProvider(input, loadDataAdvisor, null, reportHandler);
                        // XXX progress?!
                    } catch (IOException e) {
                        throw new IllegalStateException("Failed to load source data", e);
                    }
                    return input.getInstances();
                }
            });

    // Apply Filter
    InstanceCollection sourceCollection = applyFilter(sourceList, filterDefinition);

    final TransformationSink targetSink;
    try {
        targetSink = TransformationSinkExtension.getInstance()
                .createSink(!target.isPassthrough());
        targetSink.setTypes(environment.getTargetSchema());

        // add validation to sink
        // XXX for now default validation if env variable is set
        String env = System.getenv("HALE_TRANSFORMATION_INTERNAL_VALIDATION");
        if (env != null && env.equalsIgnoreCase("true")) {
            targetSink.addValidator(
                    new DefaultTransformedInstanceValidator(reportHandler, environment));
        }
    } catch (Exception e) {
        throw new IllegalStateException("Error creating target sink", e);
    }

    IOAdvisor<InstanceWriter> saveDataAdvisor = new AbstractIOAdvisor<InstanceWriter>() {

        /**
         * @see IOAdvisor#prepareProvider(IOProvider)
         */
        @Override
        public void prepareProvider(InstanceWriter provider) {
            super.prepareProvider(provider);
            // set target schema
            provider.setTargetSchema(environment.getTargetSchema());
            // set instances to export
            provider.setInstances(targetSink.getInstanceCollection());
        }
    };
    saveDataAdvisor.setServiceProvider(environment);
    saveDataAdvisor.setActionId(InstanceIO.ACTION_SAVE_TRANSFORMED_DATA);
    saveDataAdvisor.prepareProvider(target);
    saveDataAdvisor.updateConfiguration(target);

    ExportJob exportJob = new ExportJob(targetSink, target, saveDataAdvisor, reportHandler);

    // no validation
    ValidationJob validationJob = null;
    if (validators != null && !validators.isEmpty()) {
        validationJob = new ValidationJob(validators, reportHandler, target, environment);
    }

    return transform(sourceCollection, targetSink, exportJob, validationJob,
            environment.getAlignment(), environment.getSourceSchema(), reportHandler,
            environment, processId);
}
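For context, a minimal usage sketch of this method: sourceReader, targetWriter, environment and reportHandler are hypothetical objects assumed to be configured elsewhere, and the trailing null arguments follow the javadoc above (no process id, no extra validators, no filter).

// Minimal usage sketch (hypothetical setup); assumes the surrounding method
// declares "throws Exception".
List<InstanceReader> sources = Collections.singletonList(sourceReader);
ListenableFuture<Boolean> result = Transformation.transform(sources, targetWriter,
        environment, reportHandler,
        null, // no job family grouping needed
        null, // no additional instance validators
        null); // no instance filter
boolean completed = result.get(); // blocks until the transformation finishes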
Use of eu.esdihumboldt.hale.common.instance.io.InstanceWriter in project hale by halestudio.
The class CSVInstanceWriterTest, method writeCsvToFile.
private boolean writeCsvToFile(File tmpFile, boolean skipFirstLine, Value sep, Value quo,
        Value esc, InstanceCollection instances) throws Exception {
    // configure the CSV instance writer
    InstanceWriter writer = new CSVInstanceWriter();
    IContentType contentType = HalePlatform.getContentTypeManager()
            .getContentType("eu.esdihumboldt.hale.io.csv");
    writer.setParameter(InstanceTableIOConstants.SOLVE_NESTED_PROPERTIES,
            Value.of(skipFirstLine));
    writer.setParameter(CSVSchemaReader.PARAM_SEPARATOR, sep);
    writer.setParameter(CSVSchemaReader.PARAM_QUOTE, quo);
    writer.setParameter(CSVSchemaReader.PARAM_ESCAPE, esc);
    writer.setInstances(instances);

    // write instances to a temporary CSV file
    writer.setTarget(new FileIOSupplier(tmpFile));
    writer.setContentType(contentType);
    IOReport report = writer.execute(null);
    return report.isSuccess();
}
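A sketch of how this helper might be invoked from within the same test class; the instances collection is assumed to be built elsewhere in the test, and the separator, quote and escape values are illustrative choices.

// Hypothetical invocation; 'instances' is an InstanceCollection prepared by
// the test.
File tmpFile = File.createTempFile("csv-writer-test", ".csv");
boolean success = writeCsvToFile(tmpFile, true, Value.of(","), Value.of("\""),
        Value.of("\\"), instances);
assertTrue("Writing the CSV file should succeed", success);
tmpFile.deleteOnExit();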
Use of eu.esdihumboldt.hale.common.instance.io.InstanceWriter in project hale by halestudio.
The class StreamGmlWriterTest, method fillFeatureTest.
/**
 * Create a feature, fill it with values, write it as GML, validate the GML
 * and load the GML file again to compare the loaded values with the ones
 * that were written.
 *
 * @param elementName the element name of the feature type to use, if
 *            <code>null</code> an arbitrary element will be used
 * @param targetSchema the schema to use, the first element will be used for
 *            the type of the feature
 * @param values the values to set on the feature
 * @param testName the name of the test
 * @param srsName the SRS name
 * @param skipValueTest if the check for equality shall be skipped
 * @param expectWriteFail if the GML writing is expected to fail
 * @param windingOrderParam the winding order parameter, may be <code>null</code>
 * @return the validation report, or the GML writing report if writing is
 *         expected to fail
 * @throws Exception if any error occurs
 */
private IOReport fillFeatureTest(String elementName, URI targetSchema,
        Map<List<QName>, Object> values, String testName, String srsName,
        boolean skipValueTest, boolean expectWriteFail,
        EnumWindingOrderTypes windingOrderParam) throws Exception {
    // load the sample schema
    XmlSchemaReader reader = new XmlSchemaReader();
    reader.setSharedTypes(null);
    reader.setSource(new DefaultInputSupplier(targetSchema));
    IOReport schemaReport = reader.execute(null);
    assertTrue(schemaReport.isSuccess());
    XmlIndex schema = reader.getSchema();

    // determine the element to use for the feature type
    XmlElement element = null;
    if (elementName == null) {
        element = schema.getElements().values().iterator().next();
        if (element == null) {
            fail("No element found in the schema"); //$NON-NLS-1$
        }
    }
    else {
        for (XmlElement candidate : schema.getElements().values()) {
            if (candidate.getName().getLocalPart().equals(elementName)) {
                element = candidate;
                break;
            }
        }
        if (element == null) {
            fail("Element " + elementName + " not found in the schema"); //$NON-NLS-1$ //$NON-NLS-2$
        }
    }
    if (element == null) {
        throw new IllegalStateException();
    }

    // create feature
    MutableInstance feature = new DefaultInstance(element.getType(), null);

    // set some values
    for (Entry<List<QName>, Object> entry : values.entrySet()) {
        MutableGroup parent = feature;
        List<QName> properties = entry.getKey();
        for (int i = 0; i < properties.size() - 1; i++) {
            QName propertyName = properties.get(i);
            DefinitionGroup def = parent.getDefinition();
            Object[] vals = parent.getProperty(propertyName);
            if (vals != null && vals.length > 0) {
                Object value = vals[0];
                if (value instanceof MutableGroup) {
                    parent = (MutableGroup) value;
                }
                else {
                    MutableGroup child;
                    ChildDefinition<?> childDef = def.getChild(propertyName);
                    if (childDef.asProperty() != null || value != null) {
                        // create instance
                        child = new DefaultInstance(
                                childDef.asProperty().getPropertyType(), null);
                    }
                    else {
                        // create group
                        child = new DefaultGroup(childDef.asGroup());
                    }
                    if (value != null) {
                        // wrap value
                        ((MutableInstance) child).setValue(value);
                    }
                    parent = child;
                }
            }
        }
        parent.addProperty(properties.get(properties.size() - 1), entry.getValue());
    }

    InstanceCollection instances = new DefaultInstanceCollection(
            Collections.singleton(feature));

    // write to file
    InstanceWriter writer = new GmlInstanceWriter();
    if (windingOrderParam != null) {
        writer.setParameter(GeoInstanceWriter.PARAM_UNIFY_WINDING_ORDER,
                Value.of(windingOrderParam));
    }
    writer.setInstances(instances);
    DefaultSchemaSpace schemaSpace = new DefaultSchemaSpace();
    schemaSpace.addSchema(schema);
    writer.setTargetSchema(schemaSpace);
    File outFile = File.createTempFile(testName, ".gml"); //$NON-NLS-1$
    writer.setTarget(new FileIOSupplier(outFile));

    if (windingOrderParam != null
            && windingOrderParam == EnumWindingOrderTypes.counterClockwise) {
        assertTrue(writer.getParameter(GeoInstanceWriter.PARAM_UNIFY_WINDING_ORDER)
                .as(EnumWindingOrderTypes.class) == EnumWindingOrderTypes.counterClockwise);
    }

    IOReport report = writer.execute(null); // new LogProgressIndicator());
    if (expectWriteFail) {
        assertFalse("Writing the GML output should not be successful", report.isSuccess());
        return report;
    }
    else {
        assertTrue("Writing the GML output was not successful", report.isSuccess());
    }

    List<? extends Locatable> validationSchemas = writer.getValidationSchemas();
    System.out.println(outFile.getAbsolutePath());
    System.out.println(targetSchema.toString());
    // if (!DEL_TEMP_FILES && Desktop.isDesktopSupported()) {
    //     Desktop.getDesktop().open(outFile);
    // }

    IOReport valReport = validate(outFile.toURI(), validationSchemas);

    // load file
    InstanceCollection loaded = loadGML(outFile.toURI(), schema);
    ResourceIterator<Instance> it = loaded.iterator();
    try {
        assertTrue(it.hasNext());
        if (!skipValueTest) {
            Instance l = it.next();
            // test values
            for (Entry<List<QName>, Object> entry : values.entrySet()) {
                // XXX conversion?
                Object expected = entry.getValue();
                // String propertyPath = Joiner.on('.').join(Collections2.transform(
                //         entry.getKey(), new Function<QName, String>() {
                //
                //             @Override
                //             public String apply(QName input) {
                //                 return input.toString();
                //             }
                //         }));
                // Collection<Object> propValues = PropertyResolver.getValues(
                //         l, propertyPath, true);
                // assertEquals(1, propValues.size());
                // Object value = propValues.iterator().next();
                Collection<GeometryProperty<?>> geoms = GeometryUtil.getAllGeometries(l);
                assertEquals(1, geoms.size());
                Object value = geoms.iterator().next().getGeometry();
                if (expected instanceof Geometry && value instanceof Geometry) {
                    if (windingOrderParam == null
                            || windingOrderParam == EnumWindingOrderTypes.noChanges) {
                        matchGeometries((Geometry) expected, (Geometry) value);
                    }
                    // winding order test
                    if (windingOrderParam != null) {
                        if (windingOrderParam == EnumWindingOrderTypes.counterClockwise) {
                            assertTrue(((Geometry) expected)
                                    .getNumGeometries() == ((Geometry) value)
                                            .getNumGeometries());
                            assertTrue(WindingOrder.isCounterClockwise((Geometry) value));
                        }
                        else if (windingOrderParam == EnumWindingOrderTypes.clockwise) {
                            assertFalse(WindingOrder.isCounterClockwise((Geometry) value));
                        }
                        else {
                            assertTrue(WindingOrder.isCounterClockwise(
                                    (Geometry) value) == WindingOrder
                                            .isCounterClockwise((Geometry) expected));
                        }
                    }
                    else {
                        // TODO check winding order is CCW
                        if (value instanceof Polygon || value instanceof MultiPolygon) {
                            assertTrue(WindingOrder.isCounterClockwise((Geometry) value));
                        }
                    }
                }
                else {
                    assertEquals(expected.toString(), value.toString());
                }
            }
            assertFalse(it.hasNext());
        }
    } finally {
        it.close();
    }

    if (DEL_TEMP_FILES) {
        outFile.deleteOnExit();
    }

    return valReport;
}
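A hedged sketch of a call to this helper from within the same test class; the schema location, namespace, property name and geometry value are illustrative assumptions, not taken from the actual test suite.

// Hypothetical invocation; the .xsd resource, QName and point geometry are
// illustrative only.
Map<List<QName>, Object> values = new HashMap<>();
Point point = new GeometryFactory().createPoint(new Coordinate(10.0, 53.0));
values.put(Arrays.asList(new QName("http://example.com/test", "geometry")), point);
IOReport report = fillFeatureTest("TestFeature",
        getClass().getResource("/data/sample.xsd").toURI(), values, "testWritePoint",
        "EPSG:4326", false, false, null);
assertTrue(report.isSuccess());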
Use of eu.esdihumboldt.hale.common.instance.io.InstanceWriter in project hale by halestudio.
The class IOReferenceContent, method getInputStream.
@Override
public InputStream getInputStream(String pluginID, String href, Locale locale) {
    if (href.startsWith(IO_PROVIDERS_TOPIC_PATH)) {
        // it is an I/O provider reference
        String providerId = href.substring(IO_PROVIDERS_TOPIC_PATH.length());
        // strip everything after a ?
        int ind = providerId.indexOf('?');
        if (ind >= 0) {
            providerId = providerId.substring(0, ind);
        }
        // strip the .*htm? ending
        if (providerId.endsWith("html") || providerId.endsWith("htm")) {
            providerId = providerId.substring(0, providerId.lastIndexOf('.'));
        }
        try {
            return getIOProviderContent(providerId);
        } catch (Exception e) {
            log.error("Error creating instance I/O info page.", e);
            return null;
        }
    }
    else if (href.startsWith(OVERVIEW_TOPIC_PATH)) {
        // should be an I/O provider overview by type

        // extract the provider type name
        String providerType = href.substring(OVERVIEW_TOPIC_PATH.length());
        // strip everything after a ?
        int ind = providerType.indexOf('?');
        if (ind >= 0) {
            providerType = providerType.substring(0, ind);
        }
        // strip the .*htm? ending
        if (providerType.endsWith("html") || providerType.endsWith("htm")) {
            providerType = providerType.substring(0, providerType.lastIndexOf('.'));
        }
        Class<? extends IOProvider> providerClass = null;
        switch (providerType) {
        case "InstanceReader":
            providerClass = InstanceReader.class;
            break;
        case "InstanceWriter":
            providerClass = InstanceWriter.class;
            break;
        case "InstanceValidator":
            providerClass = InstanceValidator.class;
            break;
        }
        if (providerClass != null) {
            final Class<? extends IOProvider> provider = providerClass;
            try {
                return getContentFromTemplate("overview." + providerType,
                        TEMPLATE_OVERVIEW, new Callable<VelocityContext>() {

                            @Override
                            public VelocityContext call() throws Exception {
                                VelocityContext context = new VelocityContext();
                                // getProviderFactories returns a
                                // Collection<IOProviderDescriptor>
                                Collection<IOProviderDescriptor> writers = HaleIO
                                        .getProviderFactories(provider);
                                context.put("providers", writers);
                                context.put("providerType", provider.getSimpleName());
                                return context;
                            }
                        });
            } catch (Exception e) {
                log.error("Error creating provider overview", e);
                return null;
            }
        }
        return null;
    }
    return null;
}
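The identifier-stripping logic is duplicated across both branches; as a design note, it could be factored into a small helper along these lines (a sketch, not part of the original class):

// Sketch of a shared helper for stripping the query string and the
// .html/.htm suffix from a topic href (hypothetical, not in the original).
private static String stripTopicHref(String href, String topicPath) {
    String id = href.substring(topicPath.length());
    int ind = id.indexOf('?');
    if (ind >= 0) {
        // strip everything after a ?
        id = id.substring(0, ind);
    }
    if (id.endsWith("html") || id.endsWith("htm")) {
        // strip the .html/.htm ending
        id = id.substring(0, id.lastIndexOf('.'));
    }
    return id;
}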
Use of eu.esdihumboldt.hale.common.instance.io.InstanceWriter in project hale by halestudio.
The class ProjectTransformationEnvironment, method init.
/**
 * Initialize the environment based on the loaded project.
 *
 * @param project the project
 */
protected void init(Project project) {
    // export presets
    for (Entry<String, IOConfiguration> preset : project.getExportConfigurations()
            .entrySet()) {
        IOConfiguration conf = preset.getValue();
        if (InstanceIO.ACTION_SAVE_TRANSFORMED_DATA.equals(conf.getActionId())) {
            // configuration for data export
            IOConfiguration c = conf.clone();

            // check provider
            IOProviderDescriptor factory = HaleIO.findIOProviderFactory(
                    InstanceWriter.class, null, c.getProviderId());
            if (factory != null) {
                String name = preset.getKey();
                if (Strings.isNullOrEmpty(name)) {
                    name = factory.getDisplayName();
                }
                exportPresets.put(name, c);
            }
            else {
                log.error("I/O provider for export preset not found.");
            }
        }
    }

    // export templates
    Collection<IOProviderDescriptor> writerFactories = HaleIO
            .getProviderFactories(InstanceWriter.class);
    for (IOProviderDescriptor factory : writerFactories) {
        try {
            InstanceWriter writer = (InstanceWriter) factory.createExtensionObject();
            writer.setTargetSchema(getTargetSchema());
            writer.checkCompatibility();

            IOConfiguration conf = new IOConfiguration();
            conf.setActionId(InstanceIO.ACTION_SAVE_TRANSFORMED_DATA);
            conf.setProviderId(factory.getIdentifier());
            exportTemplates.put(factory.getDisplayName(), conf);
        } catch (IOProviderConfigurationException e) {
            // ignore writers that are not compatible with the target schema
        } catch (Exception e) {
            log.error("Error initializing instance writer for testing compatibility", e);
        }
    }
}
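For illustration, the same compatibility check can be used to probe a single writer; a minimal sketch, assuming a surrounding method that declares "throws Exception" and a targetSchema available in scope (the provider id below is a hypothetical example):

// Sketch: probing one writer for target-schema compatibility.
IOProviderDescriptor descriptor = HaleIO.findIOProviderFactory(InstanceWriter.class,
        null, "eu.esdihumboldt.hale.io.gml.writer");
if (descriptor != null) {
    InstanceWriter writer = (InstanceWriter) descriptor.createExtensionObject();
    writer.setTargetSchema(targetSchema); // target schema assumed available
    try {
        writer.checkCompatibility();
        // compatible: the writer could be offered as an export template
    } catch (IOProviderConfigurationException e) {
        // not compatible with the target schema, skip this writer
    }
}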