Use of eu.esdihumboldt.hale.common.schema.model.TypeDefinition in project hale by halestudio.
The class InspireInstanceWriter, method writeAdditionalElements.
/**
 * @see StreamGmlWriter#writeAdditionalElements(XMLStreamWriter,
 *      TypeDefinition, IOReporter)
 */
@Override
protected void writeAdditionalElements(XMLStreamWriter writer,
        TypeDefinition containerDefinition, IOReporter reporter) throws XMLStreamException {
    super.writeAdditionalElements(writer, containerDefinition, reporter);

    // determine INSPIRE identifier and metadata names
    Path<Definition<?>> localIdPath = new DefinitionAccessor(containerDefinition)
            .findChildren("identifier").findChildren("Identifier").findChildren("localId")
            .eval(false);
    QName identifierName = localIdPath.getElements().get(1).getName();
    Definition<?> internalIdentifierDef = localIdPath.getElements().get(2);
    QName internalIdentifierName = internalIdentifierDef.getName();
    QName localIdName = localIdPath.getElements().get(3).getName();

    Path<Definition<?>> namespacePath = new DefinitionAccessor(internalIdentifierDef)
            .findChildren("namespace").eval(false);
    QName namespaceName = namespacePath.getElements().get(1).getName();

    Path<Definition<?>> metadataPath = new DefinitionAccessor(containerDefinition)
            .findChildren("metadata").eval(false);
    QName metadataName = metadataPath.getElements().get(1).getName();

    // write INSPIRE identifier
    writer.writeStartElement(identifierName.getNamespaceURI(), identifierName.getLocalPart());
    writer.writeStartElement(internalIdentifierName.getNamespaceURI(),
            internalIdentifierName.getLocalPart());
    writer.writeStartElement(localIdName.getNamespaceURI(), localIdName.getLocalPart());
    writer.writeCharacters(getParameter(PARAM_SPATIAL_DATA_SET_LOCALID).as(String.class, ""));
    writer.writeEndElement();
    writer.writeStartElement(namespaceName.getNamespaceURI(), namespaceName.getLocalPart());
    writer.writeCharacters(getParameter(PARAM_SPATIAL_DATA_SET_NAMESPACE).as(String.class, ""));
    writer.writeEndElement();
    writer.writeEndElement();
    writer.writeEndElement();

    // write metadata
    writer.writeStartElement(metadataName.getNamespaceURI(), metadataName.getLocalPart());

    // retrieve metadata element (if any)
    Element metadataElement = getParameter(PARAM_SPATIAL_DATA_SET_METADATA_DOM).as(Element.class);
    // metadata from file (if any)
    if (metadataElement == null) {
        String metadataFile = getParameter(PARAM_SPATIAL_DATA_SET_METADATA_FILE).as(String.class);
        if (metadataFile != null && !metadataFile.isEmpty()) {
            try (InputStream input = new BufferedInputStream(
                    new FileInputStream(new File(metadataFile)))) {
                metadataElement = findMetadata(input, reporter);
            } catch (IOException e) {
                reporter.warn(new IOMessageImpl("Could not load specified metadata file.", e));
            }
        }
    }

    if (metadataElement != null) {
        try {
            writeElement(metadataElement, writer);
        } catch (TransformerException e) {
            reporter.warn(new IOMessageImpl("Couldn't include specified metadata file.", e));
        }
    }
    else {
        writer.writeAttribute(XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, "nil", "true");
    }

    writer.writeEndElement();
}
Use of eu.esdihumboldt.hale.common.schema.model.TypeDefinition in project hale by halestudio.
The class StreamGmlWriter, method write.
/**
 * Write the given instances to an {@link XMLStreamWriter}.<br>
 * <br>
 * Use {@link #createWriter(OutputStream, IOReporter)} to create a properly
 * configured writer for this method.
 *
 * @param instances the instance collection to write
 * @param writer the writer to write the instances to
 * @param progress the progress indicator
 * @param reporter the reporter
 * @see #createWriter(OutputStream, IOReporter)
 */
protected void write(InstanceCollection instances, PrefixAwareStreamWriter writer,
        ProgressIndicator progress, IOReporter reporter) {
    this.writer = writer;

    try {
        final SubtaskProgressIndicator sub = new SubtaskProgressIndicator(progress) {

            @Override
            protected String getCombinedTaskName(String taskName, String subtaskName) {
                return taskName + " (" + subtaskName + ")";
            }
        };
        progress = sub;
        progress.begin(getTaskName(), instances.size());

        XmlElement container = findDefaultContainter(targetIndex, reporter);
        TypeDefinition containerDefinition = (container == null) ? null : container.getType();
        QName containerName = (container == null) ? null : container.getName();

        if (containerDefinition == null) {
            XmlElement containerElement = getConfiguredContainerElement(this, getXMLIndex());
            containerDefinition = containerElement.getType();
            containerName = containerElement.getName();
        }

        if (containerDefinition == null || containerName == null) {
            throw new IllegalStateException("No root element/container found");
        }

        /*
         * Add schema for container to validation schemas, if the namespace
         * differs from the main namespace or additional schemas.
         *
         * Needed for validation based on schemaLocation attribute.
         */
        if (!containerName.getNamespaceURI().equals(targetIndex.getNamespace())
                && !additionalSchemas.containsKey(containerName.getNamespaceURI())) {
            try {
                @SuppressWarnings("null")
                final URI containerSchemaLoc = stripFragment(container.getLocation());
                if (containerSchemaLoc != null) {
                    addValidationSchema(containerName.getNamespaceURI(), new Locatable() {

                        @Override
                        public URI getLocation() {
                            return containerSchemaLoc;
                        }
                    }, null);
                }
            } catch (Exception e) {
                reporter.error(new IOMessageImpl(
                        "Could not determine location of container definition", e));
            }
        }

        // additional schema namespace prefixes
        for (Entry<String, String> schemaNs : additionalSchemaPrefixes.entrySet()) {
            GmlWriterUtil.addNamespace(writer, schemaNs.getKey(), schemaNs.getValue());
        }

        writer.writeStartDocument();
        if (documentWrapper != null) {
            documentWrapper.startWrap(writer, reporter);
        }
        GmlWriterUtil.writeStartElement(writer, containerName);

        // generate mandatory id attribute (for feature collection)
        String containerId = getParameter(PARAM_CONTAINER_ID).as(String.class);
        GmlWriterUtil.writeID(writer, containerDefinition, null, false, containerId);

        // write schema locations
        StringBuffer locations = new StringBuffer();
        String noNamespaceLocation = null;
        if (targetIndex.getNamespace() != null && !targetIndex.getNamespace().isEmpty()) {
            locations.append(targetIndex.getNamespace());
            locations.append(" "); //$NON-NLS-1$
            locations.append(targetIndex.getLocation().toString());
        }
        else {
            noNamespaceLocation = targetIndex.getLocation().toString();
        }
        for (Entry<String, Locatable> schema : additionalSchemas.entrySet()) {
            if (schema.getKey() != null && !schema.getKey().isEmpty()) {
                if (locations.length() > 0) {
                    locations.append(" "); //$NON-NLS-1$
                }
                locations.append(schema.getKey());
                locations.append(" "); //$NON-NLS-1$
                locations.append(schema.getValue().getLocation().toString());
            }
            else {
                noNamespaceLocation = schema.getValue().getLocation().toString();
            }
        }
        if (locations.length() > 0) {
            writer.writeAttribute(SCHEMA_INSTANCE_NS, "schemaLocation", locations.toString()); //$NON-NLS-1$
        }
        if (noNamespaceLocation != null) {
            writer.writeAttribute(SCHEMA_INSTANCE_NS, "noNamespaceSchemaLocation", //$NON-NLS-1$
                    noNamespaceLocation);
        }

        writeAdditionalElements(writer, containerDefinition, reporter);

        // write the instances
        ResourceIterator<Instance> itInstance = instances.iterator();
        try {
            Map<TypeDefinition, DefinitionPath> paths = new HashMap<TypeDefinition, DefinitionPath>();

            long lastUpdate = 0;
            int count = 0;
            Descent lastDescent = null;
            while (itInstance.hasNext() && !progress.isCanceled()) {
                Instance instance = itInstance.next();
                TypeDefinition type = instance.getDefinition();

                /*
                 * Skip all objects that are no features when writing to a
                 * GML feature collection.
                 */
                boolean skip = useFeatureCollection && !GmlWriterUtil.isFeatureType(type);
                if (skip) {
                    progress.advance(1);
                    continue;
                }

                // get stored definition path for the type
                DefinitionPath defPath;
                if (paths.containsKey(type)) {
                    // get the stored path, may be null
                    defPath = paths.get(type);
                }
                else {
                    // determine a valid definition path in the container
                    defPath = findMemberAttribute(containerDefinition, containerName, type);
                    // store path (may be null)
                    paths.put(type, defPath);
                }

                if (defPath != null) {
                    // write the feature
                    lastDescent = Descent.descend(writer, defPath, lastDescent, false);
                    writeMember(instance, type, reporter);
                }
                else {
                    reporter.warn(new IOMessageImpl(MessageFormat.format(
                            "No compatible member attribute for type {0} found in root element {1}, one instance was skipped",
                            type.getDisplayName(), containerName.getLocalPart()), null));
                }

                progress.advance(1);
                count++;

                long now = System.currentTimeMillis();
                // only update every 100 milliseconds
                if (now - lastUpdate > 100 || !itInstance.hasNext()) {
                    lastUpdate = now;
                    sub.subTask(String.valueOf(count) + " instances");
                }
            }

            if (lastDescent != null) {
                lastDescent.close();
            }
        } finally {
            itInstance.close();
        }

        writer.writeEndElement(); // FeatureCollection

        if (documentWrapper != null) {
            documentWrapper.endWrap(writer, reporter);
        }
        writer.writeEndDocument();
        writer.close();

        reporter.setSuccess(reporter.getErrors().isEmpty());
    } catch (Exception e) {
        reporter.error(new IOMessageImpl(e.getLocalizedMessage(), e));
        reporter.setSuccess(false);
    } finally {
        progress.end();
    }
}
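Per the Javadoc, the writer passed to this method should come from createWriter. A minimal calling sketch, e.g. from within a StreamGmlWriter subclass (out, instances, progress and reporter are assumed to be in scope; an illustration, not verified against the full hale API):

// Sketch: create a properly configured writer for the target stream,
// then stream the instances; write() sets the report's success flag itself.
PrefixAwareStreamWriter xmlWriter = createWriter(out, reporter);
write(instances, xmlWriter, progress, reporter);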
Use of eu.esdihumboldt.hale.common.schema.model.TypeDefinition in project hale by halestudio.
The class SpatiaLiteTestSuite, method instanceWriterTest.
/**
 * Test - reads data from a source SpatiaLite database, writes it to a
 * target SpatiaLite database and checks the results.
 *
 * @throws Exception if an error occurs
 */
public void instanceWriterTest() throws Exception {
    if (!isSpatiaLiteExtensionAvailable()) {
        log.info("Skipping test because SpatiaLite extension is not available");
        return;
    }

    Map<String, Object> propertyMap = new HashMap<String, Object>();
    for (int i = 0; i < SOUURCE_TYPE_PROPERTY_NAMES.length; i++) {
        String key = SOUURCE_TYPE_PROPERTY_NAMES[i];
        Object value = SOUURCE_TYPE_PROPERTY_VALUES[i];
        propertyMap.put(key, value);
    }

    // ****** read Schema ******//
    Schema schema = readSchema(getSourceTempFilePath());
    assertNotNull(schema);
    assertEquals(1, schema.getMappingRelevantTypes().size());

    // Test properties
    TypeDefinition schemaType = schema.getMappingRelevantTypes().iterator().next();
    // Check every property for existence
    checkType(schemaType, SOUURCE_TYPE_LOCAL_NAME, propertyMap.keySet());

    // ****** read Instances ******//
    InstanceCollection instances = readInstances(schema, getSourceTempFilePath());
    assertTrue(instances.hasSize());
    assertEquals(SOURCE_INSTANCES_COUNT, instances.size());
    checkInstances(instances, propertyMap);

    // ****** write Instances ******//
    // check target DB is empty
    InstanceCollection targetInstances = readInstances(schema, getTargetTempFilePath());
    assertTrue(targetInstances.hasSize());
    assertEquals(0, targetInstances.size());

    writeInstances(schema, getTargetTempFilePath(), instances);

    // re-read instances to check they were written correctly
    targetInstances = readInstances(schema, getTargetTempFilePath());
    assertTrue(targetInstances.hasSize());
    assertEquals(SOURCE_INSTANCES_COUNT, targetInstances.size());
    checkInstances(targetInstances, propertyMap);
}
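The checkType helper is not shown in this excerpt. A hypothetical minimal version, for illustration only, could compare the type's local name and collect the child property names via the TypeDefinition API used elsewhere on this page:

// Hypothetical sketch of the helper called above (the real implementation is not shown):
private void checkType(TypeDefinition type, String localName, Set<String> propertyNames) {
    assertEquals(localName, type.getName().getLocalPart());
    // collect the local names of all child properties of the type
    Set<String> found = new HashSet<>();
    for (ChildDefinition<?> child : type.getChildren()) {
        found.add(child.getName().getLocalPart());
    }
    assertTrue("Missing expected properties", found.containsAll(propertyNames));
}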
Use of eu.esdihumboldt.hale.common.schema.model.TypeDefinition in project hale by halestudio.
The class AbstractDBTest, method checkBindingAndSqlType.
/**
 * Checks that the binding of each data type read from the schema matches
 * the expected binding.
 *
 * @param schema the schema that was read
 * @param map maps a data type name to the binding class expected for it,
 *            e.g. for a PostgreSQL database the expected binding for the
 *            data type VARCHAR is String.class
 * @throws Exception if reading a static int field from {@link Types} via
 *             reflection fails
 */
protected void checkBindingAndSqlType(Schema schema, Map<String, Class<?>> map)
        throws Exception {
    final Map<String, Integer> sqlTypeMap = new HashMap<>();
    // collect the names and codes of all SQL type constants in java.sql.Types
    for (final Field f : Types.class.getFields()) {
        sqlTypeMap.put(f.getName(), f.getInt(null));
    }

    for (TypeDefinition td : schema.getTypes()) {
        for (ChildDefinition<?> cd : td.getChildren()) {
            PropertyDefinition property = cd.asProperty();
            String name = property.getPropertyType().getName().getLocalPart().toUpperCase();

            SQLType t = property.getPropertyType().getConstraint(SQLType.class);
            assertTrue(sqlTypeMap.containsValue(Integer.valueOf(t.getType())));

            Binding k = property.getPropertyType().getConstraint(Binding.class);
            // check that the type is mapped to the expected binding
            if (map.containsKey(name))
                assertEquals(map.get(name), k.getBinding());
            else
                fail(MessageFormat.format(
                        "No expected binding specified for type {0} (SQL type {1}) - binding is {2}",
                        name, t.getType(), k.getBinding()));
        }
    }
}
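A typical call supplies the expected-binding map described in the Javadoc. An illustrative invocation for a PostgreSQL schema (the VARCHAR entry follows the Javadoc example; the other entries are assumptions about PostgreSQL bindings):

Map<String, Class<?>> expected = new HashMap<>();
expected.put("VARCHAR", String.class); // per the Javadoc example
expected.put("INT4", Integer.class);   // assumption: int4 binds to Integer
expected.put("FLOAT8", Double.class);  // assumption: float8 binds to Double
checkBindingAndSqlType(schema, expected);

Since the method calls fail() for any type name missing from the map, the map must cover every property type that occurs in the schema.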
Use of eu.esdihumboldt.hale.common.schema.model.TypeDefinition in project hale by halestudio.
The class SQLSchemaPage, method updateState.
private boolean updateState(boolean runQuery) {
    boolean typeValid = false;
    boolean sqlValid = false;
    String error = null;
    String message = null;

    if (typeName != null) {
        // check type name
        String type = typeName.getText();
        typeValid = type != null && !type.isEmpty();
        if (typeValid) {
            // check if the name already exists in the source schema
            SchemaService schemas = HaleUI.getServiceProvider().getService(SchemaService.class);
            if (schemas != null) {
                TypeDefinition existing = schemas.getSchemas(SchemaSpaceID.SOURCE)
                        .getType(new QName(SQLSchemaReader.NAMESPACE, type));
                if (existing != null) {
                    typeValid = false;
                    error = "An SQL query with this name already exists";
                }
            }
            // also test for specific characters?
        }
    }

    if (sqlQuery != null) {
        // check SQL query
        String sql = sqlQuery.getText();
        sqlValid = sql != null && !sql.isEmpty();
        if (sqlValid) {
            String processedQuery;
            try {
                processedQuery = JDBCUtil.replaceVariables(sql, HaleUI.getServiceProvider());
            } catch (Exception e) {
                error = e.getLocalizedMessage();
                sqlValid = false;
                processedQuery = null;
            }

            // check if processed SQL query can be executed
            if (runQuery && processedQuery != null) {
                ImportProvider provider = getWizard().getProvider();
                if (provider != null && provider instanceof JDBCProvider) {
                    Connection connection = null;
                    try {
                        try {
                            connection = ((JDBCProvider) provider).getConnection();
                        } catch (SQLException e) {
                            sqlValid = false;
                            error = "Could not establish database connection: "
                                    + e.getLocalizedMessage();
                        }

                        if (connection != null) {
                            try {
                                Statement statement = JDBCUtil.createReadStatement(connection, 1);
                                try {
                                    ResultSet result = statement.executeQuery(processedQuery);
                                    int columnCount = result.getMetaData().getColumnCount();
                                    if (columnCount <= 0) {
                                        sqlValid = false;
                                        error = "Query result does not have any columns";
                                    }
                                    else {
                                        if (columnCount == 1) {
                                            message = "Successfully tested query. It yields a result with a single column.";
                                        }
                                        else {
                                            message = MessageFormat.format(
                                                    "Successfully tested query. It yields a result with {0} columns.",
                                                    columnCount);
                                        }
                                    }
                                } catch (SQLException e) {
                                    sqlValid = false;
                                    error = "Error querying database: " + e.getMessage();
                                } finally {
                                    statement.close();
                                }
                            } catch (SQLException e) {
                                sqlValid = false;
                                error = "Could not create database statement: " + e.getMessage();
                            }
                        }
                    } finally {
                        if (connection != null) {
                            try {
                                connection.close();
                            } catch (SQLException e) {
                                // ignore
                            }
                        }
                    }
                }
            }
        }
    }

    boolean complete = typeValid && sqlValid;
    if (complete) {
        error = null;
    }
    else if (!typeValid && error == null) {
        error = "Please provide a name for the query";
    }
    else if (error == null) {
        error = "Please specify the SQL query to use";
    }

    setMessage(message);
    setErrorMessage(error);
    setPageComplete(complete);
    return complete;
}
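The query test above manages its JDBC resources with nested try/finally blocks, which lets it report connection, statement and query failures separately. As a sketch, the same check could be collapsed with try-with-resources (Java 7+) at the cost of that distinction; this is an alternative formulation, not the project's code:

// Sketch: same validation with try-with-resources; connection, statement and
// result set are closed automatically in reverse order.
try (Connection connection = ((JDBCProvider) provider).getConnection();
        Statement statement = JDBCUtil.createReadStatement(connection, 1);
        ResultSet result = statement.executeQuery(processedQuery)) {
    int columnCount = result.getMetaData().getColumnCount();
    sqlValid = columnCount > 0; // validate and build the message as above
} catch (SQLException e) {
    sqlValid = false;
    error = "Database error: " + e.getLocalizedMessage();
}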