Use of eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl in project hale by halestudio:
class StreamGmlWriter, method write.
/**
 * Write the given instances to an {@link XMLStreamWriter}.<br>
 * <br>
 * Use {@link #createWriter(OutputStream, IOReporter)} to create a properly
 * configured writer for this method.
 *
 * @param instances the instance collection
 * @param writer the writer to write the instances to
 * @param reporter the reporter
 * @param progress the progress
 * @see #createWriter(OutputStream, IOReporter)
 */
protected void write(InstanceCollection instances, PrefixAwareStreamWriter writer, ProgressIndicator progress, IOReporter reporter) {
    this.writer = writer;
    try {
        // wrap the given progress indicator so sub-task names (instance
        // counts, see below) are appended to the main task name
        final SubtaskProgressIndicator sub = new SubtaskProgressIndicator(progress) {

            @Override
            protected String getCombinedTaskName(String taskName, String subtaskName) {
                return taskName + " (" + subtaskName + ")";
            }
        };
        progress = sub;
        progress.begin(getTaskName(), instances.size());

        // determine the root element (container): first try the default
        // container from the target schema, then a configured container
        XmlElement container = findDefaultContainter(targetIndex, reporter);
        TypeDefinition containerDefinition = (container == null) ? (null) : (container.getType());
        QName containerName = (container == null) ? (null) : (container.getName());
        if (containerDefinition == null) {
            XmlElement containerElement = getConfiguredContainerElement(this, getXMLIndex());
            containerDefinition = containerElement.getType();
            containerName = containerElement.getName();
        }
        if (containerDefinition == null || containerName == null) {
            // without a root element no document can be written
            throw new IllegalStateException("No root element/container found");
        }

        /*
         * Add schema for container to validation schemas, if the namespace
         * differs from the main namespace or additional schemas.
         *
         * Needed for validation based on schemaLocation attribute.
         */
        if (!containerName.getNamespaceURI().equals(targetIndex.getNamespace()) && !additionalSchemas.containsKey(containerName.getNamespaceURI())) {
            try {
                // container cannot be null here, otherwise containerName
                // would be null as well
                @SuppressWarnings("null")
                final URI containerSchemaLoc = stripFragment(container.getLocation());
                if (containerSchemaLoc != null) {
                    addValidationSchema(containerName.getNamespaceURI(), new Locatable() {

                        @Override
                        public URI getLocation() {
                            return containerSchemaLoc;
                        }
                    }, null);
                }
            } catch (Exception e) {
                reporter.error(new IOMessageImpl("Could not determine location of container definition", e));
            }
        }

        // additional schema namespace prefixes
        for (Entry<String, String> schemaNs : additionalSchemaPrefixes.entrySet()) {
            GmlWriterUtil.addNamespace(writer, schemaNs.getKey(), schemaNs.getValue());
        }

        writer.writeStartDocument();
        if (documentWrapper != null) {
            // allow a configured wrapper to write elements around the
            // container (e.g. a WFS transaction envelope)
            documentWrapper.startWrap(writer, reporter);
        }
        GmlWriterUtil.writeStartElement(writer, containerName);

        // generate mandatory id attribute (for feature collection)
        String containerId = getParameter(PARAM_CONTAINER_ID).as(String.class);
        GmlWriterUtil.writeID(writer, containerDefinition, null, false, containerId);

        // write schema locations: collect "namespace location" pairs for
        // xsi:schemaLocation; schemas without a namespace go into
        // xsi:noNamespaceSchemaLocation instead
        StringBuffer locations = new StringBuffer();
        String noNamespaceLocation = null;
        if (targetIndex.getNamespace() != null && !targetIndex.getNamespace().isEmpty()) {
            locations.append(targetIndex.getNamespace());
            locations.append(" "); //$NON-NLS-1$
            locations.append(targetIndex.getLocation().toString());
        } else {
            noNamespaceLocation = targetIndex.getLocation().toString();
        }
        for (Entry<String, Locatable> schema : additionalSchemas.entrySet()) {
            if (schema.getKey() != null && !schema.getKey().isEmpty()) {
                if (locations.length() > 0) {
                    locations.append(" "); //$NON-NLS-1$
                }
                locations.append(schema.getKey());
                locations.append(" "); //$NON-NLS-1$
                locations.append(schema.getValue().getLocation().toString());
            } else {
                noNamespaceLocation = schema.getValue().getLocation().toString();
            }
        }
        if (locations.length() > 0) {
            writer.writeAttribute(SCHEMA_INSTANCE_NS, "schemaLocation", locations.toString()); //$NON-NLS-1$
        }
        if (noNamespaceLocation != null) {
            writer.writeAttribute(SCHEMA_INSTANCE_NS, "noNamespaceSchemaLocation", //$NON-NLS-1$
                    noNamespaceLocation);
        }

        writeAdditionalElements(writer, containerDefinition, reporter);

        // write the instances
        ResourceIterator<Instance> itInstance = instances.iterator();
        try {
            // cache of the member attribute path per instance type (a null
            // value means no compatible path was found for that type)
            Map<TypeDefinition, DefinitionPath> paths = new HashMap<TypeDefinition, DefinitionPath>();
            long lastUpdate = 0;
            int count = 0;
            Descent lastDescent = null;
            while (itInstance.hasNext() && !progress.isCanceled()) {
                Instance instance = itInstance.next();
                TypeDefinition type = instance.getDefinition();
                /*
                 * Skip all objects that are no features when writing to a
                 * GML feature collection.
                 */
                boolean skip = useFeatureCollection && !GmlWriterUtil.isFeatureType(type);
                if (skip) {
                    progress.advance(1);
                    continue;
                }
                // get stored definition path for the type
                DefinitionPath defPath;
                if (paths.containsKey(type)) {
                    // get the stored path, may be null
                    defPath = paths.get(type);
                } else {
                    // determine a valid definition path in the container
                    defPath = findMemberAttribute(containerDefinition, containerName, type);
                    // store path (may be null)
                    paths.put(type, defPath);
                }
                if (defPath != null) {
                    // write the feature
                    lastDescent = Descent.descend(writer, defPath, lastDescent, false);
                    writeMember(instance, type, reporter);
                } else {
                    reporter.warn(new IOMessageImpl(MessageFormat.format("No compatible member attribute for type {0} found in root element {1}, one instance was skipped", type.getDisplayName(), containerName.getLocalPart()), null));
                }
                progress.advance(1);
                count++;
                long now = System.currentTimeMillis();
                // only update every 100 milliseconds
                if (now - lastUpdate > 100 || !itInstance.hasNext()) {
                    lastUpdate = now;
                    sub.subTask(String.valueOf(count) + " instances");
                }
            }
            // close any elements still open from the last descent
            if (lastDescent != null) {
                lastDescent.close();
            }
        } finally {
            itInstance.close();
        }

        // FeatureCollection
        writer.writeEndElement();
        if (documentWrapper != null) {
            documentWrapper.endWrap(writer, reporter);
        }
        writer.writeEndDocument();
        writer.close();
        // the export is regarded successful if no errors were reported
        reporter.setSuccess(reporter.getErrors().isEmpty());
    } catch (Exception e) {
        reporter.error(new IOMessageImpl(e.getLocalizedMessage(), e));
        reporter.setSuccess(false);
    } finally {
        progress.end();
    }
}
Use of eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl in project hale by halestudio:
class StreamGeometryWriter, method write.
/**
 * Write a geometry to a stream for a GML document.
 *
 * Tries to find a structural match for the geometry type in the target
 * property type; if none is found, compatible geometry conversions and
 * finally the generic geometry type are tried. Matches are cached per
 * property type / geometry class combination.
 *
 * @param writer the XML stream writer
 * @param geometry the geometry
 * @param property the geometry property
 * @param srsName the SRS name of a common SRS for the whole document, may
 *            be <code>null</code>
 * @param report the reporter
 * @param decimalFormatter a decimal formatter to format geometry
 *            coordinates
 * @throws XMLStreamException if any error occurs writing the geometry
 */
public void write(XMLStreamWriter writer, Geometry geometry, PropertyDefinition property, String srsName, IOReporter report, DecimalFormat decimalFormatter) throws XMLStreamException {
    // write eventual required id
    GmlWriterUtil.writeRequiredID(writer, property.getPropertyType(), null, false);
    // write any srsName attribute on the parent element
    writeSrsName(writer, property.getPropertyType(), geometry, srsName);
    // write any srsDimension attribute on the parent element
    writeSrsDimension(writer, property.getPropertyType(), geometry);
    if (simplifyGeometry) {
        // reduce a collection holding a single geometry to that geometry
        if (geometry instanceof GeometryCollection && ((GeometryCollection) geometry).getNumGeometries() == 1) {
            geometry = geometry.getGeometryN(0);
        }
    }
    Class<? extends Geometry> geomType = geometry.getClass();
    // remember if we already found a solution to this problem
    List<DefinitionPath> preferredPaths = restoreCandidate(property.getPropertyType(), geomType);
    if (preferredPaths == null) {
        // find candidates
        List<DefinitionPath> candidates = findCandidates(property, geomType);
        // if no candidate found, try with compatible geometries
        Class<? extends Geometry> originalType = geomType;
        Geometry originalGeometry = geometry;
        ConversionLadder ladder = GeometryConverterRegistry.getInstance().createLadder(geometry);
        while (candidates.isEmpty() && ladder.hasNext()) {
            geometry = ladder.next();
            geomType = geometry.getClass();
            log.info("Possible structure for writing " + originalType.getSimpleName() //$NON-NLS-1$
                    + " not found, trying " + geomType.getSimpleName() + " instead"); //$NON-NLS-1$ //$NON-NLS-2$
            List<DefinitionPath> candPaths = restoreCandidate(property.getPropertyType(), geomType);
            if (candPaths != null) {
                // use stored candidates
                candidates = new ArrayList<>(candPaths);
            } else {
                candidates = findCandidates(property, geomType);
            }
        }
        if (candidates.isEmpty()) {
            // also try the generic geometry type
            geometry = originalGeometry;
            geomType = Geometry.class;
            log.info("Possible structure for writing " + originalType.getSimpleName() //$NON-NLS-1$
                    + " not found, trying the generic geometry type instead"); //$NON-NLS-1$
            List<DefinitionPath> candPaths = restoreCandidate(property.getPropertyType(), geomType);
            if (candPaths != null) {
                // use stored candidates
                // (fixed: previously copied the empty `candidates` list
                // instead of the restored `candPaths`, discarding the cache)
                candidates = new ArrayList<>(candPaths);
            } else {
                candidates = findCandidates(property, geomType);
            }
            // remember generic match for later
            storeCandidate(property.getPropertyType(), originalType, sortPreferredCandidates(candidates, geomType));
        }
        for (DefinitionPath candidate : candidates) {
            log.info("Geometry structure match: " + geomType.getSimpleName() + " - " + candidate); //$NON-NLS-1$ //$NON-NLS-2$
        }
        if (candidates.isEmpty()) {
            // no structural match at all: fall back to writing the WKT
            // representation as element text
            log.error("No geometry structure match for " + originalType.getSimpleName() //$NON-NLS-1$
                    + " found, writing WKT " + "representation instead"); //$NON-NLS-1$ //$NON-NLS-2$
            writer.writeCharacters(originalGeometry.toText());
            return;
        }
        // determine preferred candidate
        preferredPaths = sortPreferredCandidates(candidates, geomType);
        // remember for later
        storeCandidate(property.getPropertyType(), geomType, preferredPaths);
    }
    DefinitionPath path = selectValidPath(preferredPaths, geometry);
    if (path != null) {
        // write geometry
        writeGeometry(writer, geometry, path, srsName, decimalFormatter);
    } else {
        report.error(new IOMessageImpl("No valid path found for encoding geometry, geometry is skipped.", null));
    }
}
Use of eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl in project hale by halestudio:
class JDBCInstanceWriter, method execute.
/**
 * Writes the configured instances to the target database, either in the
 * order they arrive or grouped by type so that foreign key constraints are
 * not violated.
 */
@Override
protected IOReport execute(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
    InstanceCollection instances = getInstances();
    Connection conn = null;
    try {
        // obtain the database connection; a failure here aborts the
        // export with a dedicated summary message
        try {
            conn = getConnection();
        } catch (Exception e) {
            reporter.error(new IOMessageImpl(e.getLocalizedMessage(), e));
            reporter.setSuccess(false);
            reporter.setSummary("Failed to connect to database.");
            return reporter;
        }

        if (!isWriteUnordered()) {
            // insert type by type, in an order that respects dependencies
            // between types (to avoid violating constraints)
            for (TypeDefinition type : getSortedSchemas(getTargetSchema().getMappingRelevantTypes())) {
                writeInstances(conn, instances.select(new TypeFilter(type)), progress, reporter);
            }
        } else {
            // insert instances as they come in
            writeInstances(conn, instances, progress, reporter);
        }
        reporter.setSuccess(true);
    } catch (Exception e) {
        reporter.error(new IOMessageImpl(e.getLocalizedMessage(), e));
        reporter.setSuccess(false);
        reporter.setSummary("Saving instances to database failed.");
    } finally {
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
                // a failure to close is irrelevant for the report
            }
        }
        progress.end();
    }
    return reporter;
}
Use of eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl in project hale by halestudio:
class SQLSchemaReader, method loadFromSource.
/**
 * Reads the schema of the result of a configured SQL query, representing
 * the query result as a single type in the returned schema.
 */
@Override
protected Schema loadFromSource(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
    DefaultSchema typeIndex = null;
    // the SQL query may be provided either as Text (multi-line) or as a
    // plain String parameter
    String query = null;
    Text text = getParameter(PARAM_SQL).as(Text.class);
    if (text != null) {
        query = text.getText();
    }
    if (query == null) {
        query = getParameter(PARAM_SQL).as(String.class);
    }
    if (query == null) {
        reporter.setSuccess(false);
        reporter.setSummary("No SQL query specified");
        return null;
    }
    // the name under which the query result type is registered
    String typename = getParameter(PARAM_TYPE_NAME).as(String.class);
    if (typename == null) {
        reporter.setSuccess(false);
        reporter.setSummary("Name of the type that the SQL query should be represented as must be specified");
        return null;
    }
    progress.begin("Read SQL query schema", ProgressIndicator.UNKNOWN);
    Connection connection = null;
    try {
        // connect to the database
        try {
            connection = getConnection();
        } catch (Exception e) {
            reporter.error(new IOMessageImpl(e.getLocalizedMessage(), e));
            reporter.setSuccess(false);
            reporter.setSummary("Failed to connect to database.");
            return null;
        }
        // try to use a read-only connection; if the driver does not
        // support it (the connection has been created at this point),
        // ignore the failure instead of reporting it
        try {
            connection.setReadOnly(true);
        } catch (SQLException e) {
            // ignore
            // reporter.warn(new IOMessageImpl(e.getLocalizedMessage(), e));
        }
        connection.setAutoCommit(false);
        // get advisor
        JDBCSchemaReaderAdvisor advisor = SchemaReaderAdvisorExtension.getInstance().getAdvisor(connection);
        // determine quotes character
        @SuppressWarnings("unused")
        String quotes = determineQuoteString(connection);
        // FIXME not actually used here or in JDBC schema reader
        URI jdbcURI = getSource().getLocation();
        String dbNamespace = determineNamespace(jdbcURI, advisor);
        String namespace = NAMESPACE;
        // configure SchemaCrawler to retrieve only the information needed
        // here, i.e. the column data types
        SchemaCrawlerOptions options = new SchemaCrawlerOptions();
        SchemaInfoLevel level = new SchemaInfoLevel();
        level.setTag("hale");
        // these are enabled by default, we don't need them (yet)
        level.setRetrieveSchemaCrawlerInfo(false);
        level.setRetrieveJdbcDriverInfo(false);
        level.setRetrieveDatabaseInfo(false);
        level.setRetrieveTables(false);
        level.setRetrieveTableColumns(false);
        level.setRetrieveForeignKeys(false);
        // set what we need
        level.setRetrieveColumnDataTypes(true);
        level.setRetrieveUserDefinedColumnDataTypes(true);
        options.setSchemaInfoLevel(level);
        if (advisor != null) {
            advisor.configureSchemaCrawler(options);
        }
        final Catalog database = SchemaCrawlerUtility.getCatalog(connection, options);
        // create the type index
        typeIndex = new DefaultSchema(dbNamespace, jdbcURI);
        Statement st = null;
        try {
            st = JDBCUtil.createReadStatement(connection, 1);
            // support project variables
            String processedQuery = JDBCUtil.replaceVariables(query, getServiceProvider());
            // execute the query to learn the structure of its results
            ResultSet result = st.executeQuery(processedQuery);
            // the query represents a type
            // get the type definition
            TypeDefinition type = addTableType(query, namespace, typeIndex, connection, reporter, typename);
            // create a property for every column of the result set
            ResultsColumns additionalInfo = SchemaCrawlerUtility.getResultColumns(result);
            for (final ResultsColumn column : additionalInfo.getColumns()) {
                getOrCreateProperty(type, column, namespace, typeIndex, connection, reporter, database);
            }
        } finally {
            // closing the statement also closes its open result set
            if (st != null) {
                st.close();
            }
        }
        reporter.setSuccess(true);
    } catch (Exception e) {
        throw new IOProviderConfigurationException("Failed to read database schema", e);
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException e) {
                // ignore
            }
        }
        progress.end();
    }
    return typeIndex;
}
Use of eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl in project hale by halestudio:
class JacksonMapper, method isValidJSON.
/**
 * Validates a JSON stream by parsing it completely.
 *
 * @param in the JSON stream
 * @param reporter the reporter
 * @return true if valid, else false
 */
public boolean isValidJSON(final InputStream in, IOReporter reporter) {
    boolean valid = false;
    JsonParser parser = null;
    try {
        parser = new ObjectMapper().getJsonFactory().createJsonParser(in);
        // consume all tokens; any syntax error raises an exception
        while (parser.nextToken() != null) {
            //
        }
        valid = true;
    } catch (Exception e) {
        reporter.error(new IOMessageImpl("Produced invalid JSON output", e));
    } finally {
        // fixed: the parser was previously never closed (resource leak)
        // NOTE(review): with Jackson's default AUTO_CLOSE_SOURCE this also
        // closes the passed-in stream, which is fully consumed at this
        // point anyway — confirm callers do not reuse the stream
        if (parser != null) {
            try {
                parser.close();
            } catch (Exception ignored) {
                // closing failures do not affect the validation result
            }
        }
    }
    return valid;
}
Aggregations