Use of eu.esdihumboldt.hale.common.schema.model.TypeDefinition in project hale (halestudio):
class JDBCInstanceWriter, method getSortedSchemas.
/**
 * Retrieves the target schema types and orders them by their references, so
 * that inserting instances in the returned order should not trigger
 * referential-integrity constraint violations.
 * <p>
 * <b>Sorting algorithm</b>
 * </p>
 * <ol>
 * <li>Create a new <code>set</code> that will hold the sorted types.</li>
 * <li>Create a visited-type map that records which types already had their
 * references examined (re-examining a type could loop forever on cyclic
 * references).</li>
 * <li>For each type to be sorted:
 * <ol>
 * <li>If the type is already in the visited map, its references were
 * handled before — leave the current recursion.</li>
 * <li>Otherwise inspect the type's child definitions for referenced types
 * and recurse into each referenced type first.</li>
 * <li>A type whose children reference nothing is added to the sorted set
 * before the types that depend on it.</li>
 * <li>Finally add the type itself.</li>
 * </ol>
 * </li>
 * <li>Collect all resolved types in the sorted set.</li>
 * </ol>
 *
 * @param collection types to be sorted
 *
 * @return the types of the schema, ordered by their references
 */
public Set<TypeDefinition> getSortedSchemas(Collection<? extends TypeDefinition> collection) {
	Set<TypeDefinition> ordered = new LinkedHashSet<TypeDefinition>();
	// reset the bookkeeping map used by getReferencesTd to break cycles
	visitedType = new HashMap<TypeDefinition, Boolean>();
	for (TypeDefinition candidate : collection) {
		if (ordered.contains(candidate)) {
			// already pulled in as a reference of an earlier type
			continue;
		}
		List<TypeDefinition> resolved = getReferencesTd(candidate);
		if (resolved != null) {
			ordered.addAll(resolved);
		}
	}
	return ordered;
}
Use of eu.esdihumboldt.hale.common.schema.model.TypeDefinition in project hale (halestudio):
class JDBCInstanceWriter, method execute.
/**
 * Writes the instances to the database. Unless unordered writing is
 * enabled, the instances are written per type, with the types sorted by
 * their references so inserts do not violate integrity constraints.
 *
 * @param progress the progress indicator
 * @param reporter the reporter collecting success state and messages
 * @return the reporter, with success/summary set accordingly
 */
@Override
protected IOReport execute(ProgressIndicator progress, IOReporter reporter)
		throws IOProviderConfigurationException, IOException {
	InstanceCollection instances = getInstances();
	Connection connection = null;
	try {
		// establish the database connection; a failure here is reported
		// as its own summary and aborts the write
		try {
			connection = getConnection();
		} catch (Exception e) {
			reporter.error(new IOMessageImpl(e.getLocalizedMessage(), e));
			reporter.setSuccess(false);
			reporter.setSummary("Failed to connect to database.");
			return reporter;
		}

		if (!isWriteUnordered()) {
			// write per type, in reference order, to avoid
			// integrity constraint violations on insert
			Set<TypeDefinition> orderedTypes = getSortedSchemas(
					getTargetSchema().getMappingRelevantTypes());
			for (TypeDefinition type : orderedTypes) {
				writeInstances(connection, instances.select(new TypeFilter(type)), progress,
						reporter);
			}
		}
		else {
			// write the instances in the order they come in
			writeInstances(connection, instances, progress, reporter);
		}

		reporter.setSuccess(true);
	} catch (Exception e) {
		reporter.error(new IOMessageImpl(e.getLocalizedMessage(), e));
		reporter.setSuccess(false);
		reporter.setSummary("Saving instances to database failed.");
	} finally {
		if (connection != null) {
			try {
				connection.close();
			} catch (SQLException e) {
				// ignore - nothing sensible to do on close failure
			}
		}
		progress.end();
	}
	return reporter;
}
Use of eu.esdihumboldt.hale.common.schema.model.TypeDefinition in project hale (halestudio):
class SQLSchemaReader, method addTableType.
/**
 * Creates the type definition representing a SQL query, or returns the
 * existing definition if the schema already contains a type of that name.
 *
 * @param query the SQL query
 * @param namespace the namespace for the type
 * @param types the schema to add the type to
 * @param connection the database connection
 * @param reporter the reporter
 * @param typename the name to use for the type
 *
 * @return the type definition for the given query
 */
private TypeDefinition addTableType(String query, String namespace, DefaultSchema types,
		Connection connection, IOReporter reporter, String typename) {
	QName qualifiedName = new QName(namespace, unquote(typename));

	// reuse an already registered type of the same name
	TypeDefinition existing = types.getType(qualifiedName);
	if (existing != null) {
		return existing;
	}

	// build a new type carrying the query both as description and constraint
	DefaultTypeDefinition queryType = new DefaultTypeDefinition(qualifiedName);
	queryType.setDescription(query);
	queryType.setConstraint(new SQLQuery(query));
	// mappable and mapping-relevant, but without a direct value
	queryType.setConstraint(MappableFlag.ENABLED);
	queryType.setConstraint(MappingRelevantFlag.ENABLED);
	queryType.setConstraint(HasValueFlag.DISABLED);

	types.addType(queryType);
	return queryType;
}
Use of eu.esdihumboldt.hale.common.schema.model.TypeDefinition in project hale (halestudio):
class SQLSchemaReader, method loadFromSource.
@Override
protected Schema loadFromSource(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
DefaultSchema typeIndex = null;
// determine the SQL query: prefer the Text parameter value, fall back to String
String query = null;
Text text = getParameter(PARAM_SQL).as(Text.class);
if (text != null) {
query = text.getText();
}
if (query == null) {
query = getParameter(PARAM_SQL).as(String.class);
}
if (query == null) {
reporter.setSuccess(false);
reporter.setSummary("No SQL query specified");
return null;
}
// the name under which the query result type is exposed in the schema
String typename = getParameter(PARAM_TYPE_NAME).as(String.class);
if (typename == null) {
reporter.setSuccess(false);
reporter.setSummary("Name of the type that the SQL query should be represented as must be specified");
return null;
}
progress.begin("Read SQL query schema", ProgressIndicator.UNKNOWN);
Connection connection = null;
try {
// connect to the database
try {
connection = getConnection();
} catch (Exception e) {
reporter.error(new IOMessageImpl(e.getLocalizedMessage(), e));
reporter.setSuccess(false);
reporter.setSummary("Failed to connect to database.");
return null;
}
// try to make the connection read-only; not all drivers support this,
// so a failure is ignored (the connection has been created) — a warning
// could be reported instead
try {
connection.setReadOnly(true);
} catch (SQLException e) {
// ignore
// reporter.warn(new IOMessageImpl(e.getLocalizedMessage(), e));
}
connection.setAutoCommit(false);
// get advisor
JDBCSchemaReaderAdvisor advisor = SchemaReaderAdvisorExtension.getInstance().getAdvisor(connection);
// determine quotes character
@SuppressWarnings("unused") String quotes = determineQuoteString(connection);
// FIXME not actually used here or in JDBC schema reader
URI jdbcURI = getSource().getLocation();
String dbNamespace = determineNamespace(jdbcURI, advisor);
String namespace = NAMESPACE;
// configure SchemaCrawler: only retrieve what is needed for column types
SchemaCrawlerOptions options = new SchemaCrawlerOptions();
SchemaInfoLevel level = new SchemaInfoLevel();
level.setTag("hale");
// these are enabled by default, we don't need them (yet)
level.setRetrieveSchemaCrawlerInfo(false);
level.setRetrieveJdbcDriverInfo(false);
level.setRetrieveDatabaseInfo(false);
level.setRetrieveTables(false);
level.setRetrieveTableColumns(false);
level.setRetrieveForeignKeys(false);
// set what we need
level.setRetrieveColumnDataTypes(true);
level.setRetrieveUserDefinedColumnDataTypes(true);
options.setSchemaInfoLevel(level);
if (advisor != null) {
advisor.configureSchemaCrawler(options);
}
final Catalog database = SchemaCrawlerUtility.getCatalog(connection, options);
// create the type index
typeIndex = new DefaultSchema(dbNamespace, jdbcURI);
Statement st = null;
try {
st = JDBCUtil.createReadStatement(connection, 1);
// support project variables
String processedQuery = JDBCUtil.replaceVariables(query, getServiceProvider());
// execute the query once to learn the shape of its result set
ResultSet result = st.executeQuery(processedQuery);
// the query represents a type
// get the type definition
TypeDefinition type = addTableType(query, namespace, typeIndex, connection, reporter, typename);
// derive a property per result column (closing st also closes result)
ResultsColumns additionalInfo = SchemaCrawlerUtility.getResultColumns(result);
for (final ResultsColumn column : additionalInfo.getColumns()) {
getOrCreateProperty(type, column, namespace, typeIndex, connection, reporter, database);
}
} finally {
if (st != null) {
st.close();
}
}
reporter.setSuccess(true);
} catch (Exception e) {
throw new IOProviderConfigurationException("Failed to read database schema", e);
} finally {
if (connection != null) {
try {
connection.close();
} catch (SQLException e) {
// ignore
}
}
progress.end();
}
return typeIndex;
}
Use of eu.esdihumboldt.hale.common.schema.model.TypeDefinition in project hale (halestudio):
class DefinitionInstanceTreeViewer, method setInput.
/**
 * Sets the type and instances shown in the tree: one column for the type
 * structure plus one numbered column per instance.
 *
 * @see InstanceViewer#setInput(TypeDefinition, Iterable)
 */
@Override
public void setInput(TypeDefinition type, Iterable<Instance> instances) {
	// dispose columns left over from a previous input
	TreeColumn[] oldColumns = treeViewer.getTree().getColumns();
	if (oldColumns != null) {
		for (TreeColumn oldColumn : oldColumns) {
			oldColumn.dispose();
		}
		labelProviders.clear();
	}

	if (type != null) {
		// collect the union of metadata names over all instances and hand
		// it to the tree viewer together with the type
		Set<String> metaNames = new HashSet<String>();
		for (Instance instance : instances) {
			for (String metaName : instance.getMetaDataNames()) {
				metaNames.add(metaName);
			}
		}
		treeViewer.setInput(new Pair<TypeDefinition, Set<String>>(type, metaNames));
	}
	else {
		treeViewer.setInput(Collections.emptySet());
	}

	Layout layout = treeViewer.getTree().getParent().getLayout();

	// first column: the type structure
	if (type != null) {
		TreeViewerColumn typeColumn = new TreeViewerColumn(treeViewer, SWT.LEFT);
		typeColumn.getColumn().setText(type.getDisplayName());
		typeColumn.setLabelProvider(new TreeColumnViewerLabelProvider(
				new DefinitionMetaCompareLabelProvider(treeViewer)));
		if (layout instanceof TreeColumnLayout) {
			((TreeColumnLayout) layout).setColumnData(typeColumn.getColumn(),
					new ColumnWeightData(1));
		}
	}

	// one additional column per instance
	int columnIndex = 1;
	if (instances != null) {
		List<Instance> shownInstances = new ArrayList<Instance>();
		for (Instance instance : instances) {
			TreeViewerColumn instanceColumn = new TreeViewerColumn(treeViewer, SWT.LEFT);
			// columns are simply numbered; there is no usable identifier
			instanceColumn.getColumn().setText(String.valueOf(columnIndex));
			DefinitionInstanceLabelProvider labelProvider = new DefinitionInstanceLabelProvider(
					instance);
			labelProviders.put(columnIndex, labelProvider);
			instanceColumn.setLabelProvider(labelProvider);
			if (layout instanceof TreeColumnLayout) {
				((TreeColumnLayout) layout).setColumnData(instanceColumn.getColumn(),
						new ColumnWeightData(1));
			}
			shownInstances.add(instance);
			columnIndex++;
		}
		((MetadataCompareActionProvider) maep).setInput(shownInstances, labelProviders);
	}

	treeViewer.refresh();
	treeViewer.getTree().getParent().layout(true, true);
	selectionProvider.updateSelection(instances);
	// auto-expand so attributes/metadata are visible
	treeViewer.expandToLevel(2);
}
Aggregations