
Example 1 with DefaultSchema

Use of eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema in project hale by halestudio.

Class SchemaBuilderReader, method execute:

@Override
protected IOReport execute(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
    progress.begin("Run schema builder", ProgressIndicator.UNKNOWN);
    try {
        CompilerConfiguration compilerConfiguration = new CompilerConfiguration();
        compilerConfiguration.setScriptBaseClass(DelegatingScript.class.getName());
        // Configure the GroovyShell and pass the compiler configuration.
        GroovyShell shell = new GroovyShell(getClass().getClassLoader(), new Binding(), compilerConfiguration);
        DelegatingScript script;
        try (InputStream in = getSource().getInput();
            InputStreamReader reader = new InputStreamReader(in, getCharset())) {
            script = (DelegatingScript) shell.parse(reader);
        }
        SchemaBuilder builder = new SchemaBuilder();
        script.setDelegate(builder);
        Object res = script.run();
        if (res == null) {
            throw new IllegalStateException("Null returned by script");
        } else if (res instanceof Schema) {
            schema = (Schema) res;
        } else if (res instanceof TypeIndex) {
            DefaultSchema s = new DefaultSchema(null, getSource().getLocation());
            for (TypeDefinition type : ((TypeIndex) res).getTypes()) {
                s.addType(type);
            }
            schema = s;
        } else if (res instanceof TypeDefinition) {
            DefaultSchema s = new DefaultSchema(null, getSource().getLocation());
            s.addType((TypeDefinition) res);
            schema = s;
        } else {
            throw new IllegalStateException("Unrecognised return type: " + res.getClass().getName());
        }
        reporter.setSuccess(true);
    } catch (Exception e) {
        reporter.setSuccess(false);
        reporter.error("Error running schema builder", e);
    } finally {
        progress.end();
    }
    return reporter;
}
Also used : Binding(groovy.lang.Binding) InputStreamReader(java.io.InputStreamReader) InputStream(java.io.InputStream) DefaultSchema(eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema) Schema(eu.esdihumboldt.hale.common.schema.model.Schema) GroovyShell(groovy.lang.GroovyShell) IOProviderConfigurationException(eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException) IOException(java.io.IOException) TypeDefinition(eu.esdihumboldt.hale.common.schema.model.TypeDefinition) TypeIndex(eu.esdihumboldt.hale.common.schema.model.TypeIndex) DelegatingScript(groovy.util.DelegatingScript) DefaultSchema(eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema) CompilerConfiguration(org.codehaus.groovy.control.CompilerConfiguration) SchemaBuilder(eu.esdihumboldt.hale.common.schema.groovy.SchemaBuilder)
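
For reference, the result-handling branch above can be pulled out into a small standalone helper. This is a minimal sketch that uses only the types visible in this example; the class and method names (SchemaResultHelper, normalizeToSchema) are hypothetical and not part of hale.

import java.net.URI;

import eu.esdihumboldt.hale.common.schema.model.Schema;
import eu.esdihumboldt.hale.common.schema.model.TypeDefinition;
import eu.esdihumboldt.hale.common.schema.model.TypeIndex;
import eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema;

class SchemaResultHelper {

    // Wrap an arbitrary schema builder result into a Schema, mirroring the
    // dispatch in the execute method above. The helper itself is hypothetical.
    static Schema normalizeToSchema(Object res, URI location) {
        if (res == null) {
            throw new IllegalStateException("Null returned by script");
        }
        if (res instanceof Schema) {
            // the script already produced a complete schema
            return (Schema) res;
        }
        if (res instanceof TypeIndex) {
            // wrap all types of the index into a new schema
            DefaultSchema s = new DefaultSchema(null, location);
            for (TypeDefinition type : ((TypeIndex) res).getTypes()) {
                s.addType(type);
            }
            return s;
        }
        if (res instanceof TypeDefinition) {
            // wrap the single type into a new schema
            DefaultSchema s = new DefaultSchema(null, location);
            s.addType((TypeDefinition) res);
            return s;
        }
        throw new IllegalStateException("Unrecognised return type: " + res.getClass().getName());
    }
}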

Example 2 with DefaultSchema

Use of eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema in project hale by halestudio.

Class ShapeSchemaReader, method loadFromSource:

@Override
protected Schema loadFromSource(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
    progress.begin(Messages.getString("ShapeSchemaProvider.1"), ProgressIndicator.UNKNOWN); // $NON-NLS-1$
    // DataStore store = new ShapefileDataStoreFactory().createDataStore(location.toURL());
    // DataStore store = FileDataStoreFinder.getDataStore(getSource().getLocation().toURL());
    ShapefileDataStore store = new ShapefileDataStore(getSource().getLocation().toURL());
    store.setCharset(getCharset());
    // TODO namespace from configuration parameter?!
    String namespace = ShapefileConstants.SHAPEFILE_NS;
    DefaultSchema schema = new DefaultSchema(namespace, getSource().getLocation());
    progress.setCurrentTask(Messages.getString("ShapeSchemaProvider.2")); // $NON-NLS-1$
    // create type for augmented filename property
    QName filenameTypeName = new QName(SHAPEFILE_AUGMENT_NS, "filenameType");
    TypeDefinition filenameType = null;
    if (getSharedTypes() != null) {
        filenameType = getSharedTypes().getType(filenameTypeName);
    }
    if (filenameType == null) {
        DefaultTypeDefinition fnt = new DefaultTypeDefinition(filenameTypeName);
        fnt.setConstraint(MappableFlag.DISABLED);
        fnt.setConstraint(MappingRelevantFlag.DISABLED);
        fnt.setConstraint(Binding.get(String.class));
        fnt.setConstraint(HasValueFlag.ENABLED);
        filenameType = fnt;
    }
    // build type definitions based on Schema extracted by geotools
    for (Name name : store.getNames()) {
        SimpleFeatureType sft = store.getSchema(name);
        try {
            // create type definition
            DefaultTypeDefinition type = new DefaultTypeDefinition(new QName(namespace, sft.getName().getLocalPart()));
            // constraints on main type
            type.setConstraint(MappingRelevantFlag.ENABLED);
            type.setConstraint(MappableFlag.ENABLED);
            type.setConstraint(HasValueFlag.DISABLED);
            type.setConstraint(AbstractFlag.DISABLED);
            type.setConstraint(Binding.get(Instance.class));
            for (AttributeDescriptor ad : sft.getAttributeDescriptors()) {
                DefaultPropertyDefinition property = new DefaultPropertyDefinition(new QName(ad.getLocalName()), type, getTypeFromAttributeType(ad.getType(), schema, namespace));
                // set constraints on property
                // nillable
                property.setConstraint(NillableFlag.get(ad.isNillable()));
                // cardinality
                property.setConstraint(Cardinality.get(ad.getMinOccurs(), ad.getMaxOccurs()));
                // set metadata
                property.setLocation(getSource().getLocation());
            }
            // add additional filename property
            // String filename = sft.getName().getLocalPart();
            DefaultPropertyDefinition property = new DefaultPropertyDefinition(new QName(SHAPEFILE_AUGMENT_NS, AUGMENTED_PROPERTY_FILENAME), type, filenameType);
            property.setConstraint(Cardinality.CC_EXACTLY_ONCE);
            property.setConstraint(NillableFlag.ENABLED);
            schema.addType(type);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
        progress.setCurrentTask(MessageFormat.format(
                Messages.getString("ShapeSchemaProvider.7"), sft.getTypeName())); // $NON-NLS-1$
    }
    reporter.setSuccess(true);
    return schema;
}
Also used : DefaultPropertyDefinition(eu.esdihumboldt.hale.common.schema.model.impl.DefaultPropertyDefinition) ShapefileDataStore(org.geotools.data.shapefile.ShapefileDataStore) Instance(eu.esdihumboldt.hale.common.instance.model.Instance) QName(javax.xml.namespace.QName) AttributeDescriptor(org.opengis.feature.type.AttributeDescriptor) IOProviderConfigurationException(eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException) IOException(java.io.IOException) DefaultTypeDefinition(eu.esdihumboldt.hale.common.schema.model.impl.DefaultTypeDefinition) TypeDefinition(eu.esdihumboldt.hale.common.schema.model.TypeDefinition) Name(org.opengis.feature.type.Name) QName(javax.xml.namespace.QName) DefaultTypeDefinition(eu.esdihumboldt.hale.common.schema.model.impl.DefaultTypeDefinition) SimpleFeatureType(org.opengis.feature.simple.SimpleFeatureType) DefaultSchema(eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema)
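
The constraint pattern used above can be seen in isolation: one mappable feature type with a single string-valued, nillable property added to a DefaultSchema. This is a minimal sketch; the namespace, type and property names are placeholders, and the import locations of the constraint classes are assumed since they do not appear in the snippet's import list.

import java.net.URI;

import javax.xml.namespace.QName;

import eu.esdihumboldt.hale.common.schema.model.constraint.property.Cardinality;
import eu.esdihumboldt.hale.common.schema.model.constraint.property.NillableFlag;
import eu.esdihumboldt.hale.common.schema.model.constraint.type.Binding;
import eu.esdihumboldt.hale.common.schema.model.constraint.type.HasValueFlag;
import eu.esdihumboldt.hale.common.schema.model.constraint.type.MappableFlag;
import eu.esdihumboldt.hale.common.schema.model.constraint.type.MappingRelevantFlag;
import eu.esdihumboldt.hale.common.schema.model.impl.DefaultPropertyDefinition;
import eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema;
import eu.esdihumboldt.hale.common.schema.model.impl.DefaultTypeDefinition;

class MinimalSchemaSketch {

    static DefaultSchema buildExampleSchema(URI location) {
        // placeholder namespace, analogous to ShapefileConstants.SHAPEFILE_NS above
        String namespace = "http://example.org/schema";
        DefaultSchema schema = new DefaultSchema(namespace, location);

        // mappable feature type without a direct value
        DefaultTypeDefinition type = new DefaultTypeDefinition(new QName(namespace, "ExampleFeature"));
        type.setConstraint(MappingRelevantFlag.ENABLED);
        type.setConstraint(MappableFlag.ENABLED);

        // simple property type bound to String values
        DefaultTypeDefinition stringType = new DefaultTypeDefinition(new QName(namespace, "stringType"));
        stringType.setConstraint(Binding.get(String.class));
        stringType.setConstraint(HasValueFlag.ENABLED);

        // exactly-once, nillable property attached to the feature type
        DefaultPropertyDefinition name = new DefaultPropertyDefinition(new QName("name"), type, stringType);
        name.setConstraint(Cardinality.CC_EXACTLY_ONCE);
        name.setConstraint(NillableFlag.ENABLED);

        schema.addType(type);
        return schema;
    }
}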

Example 3 with DefaultSchema

Use of eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema in project hale by halestudio.

Class XLSSchemaReader, method loadFromSource:

@Override
protected Schema loadFromSource(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
    sheetNum = getParameter(InstanceTableIOConstants.SHEET_INDEX).as(int.class, 0);
    progress.begin("Load XLS/XLSX schema", ProgressIndicator.UNKNOWN);
    String namespace = "http://www.esdi-humboldt.eu/hale/xls";
    DefaultSchema schema = new DefaultSchema(namespace, getSource().getLocation());
    AnalyseXLSSchemaTable analyser;
    try {
        analyser = new AnalyseXLSSchemaTable(getSource().getLocation(), sheetNum);
        header = analyser.getHeader();
        // create type definition
        String typename = getParameter(CommonSchemaConstants.PARAM_TYPENAME).as(String.class);
        if (typename == null || typename.isEmpty()) {
            reporter.setSuccess(false);
            reporter.error(new IOMessageImpl("No Typename was set", null));
            return null;
        }
        DefaultTypeDefinition type = new DefaultTypeDefinition(new QName(typename));
        // constraints on main type
        type.setConstraint(MappingRelevantFlag.ENABLED);
        type.setConstraint(MappableFlag.ENABLED);
        type.setConstraint(HasValueFlag.DISABLED);
        type.setConstraint(AbstractFlag.DISABLED);
        // set metadata for main type
        type.setLocation(getSource().getLocation());
        StringBuffer defaultPropertyTypeBuffer = new StringBuffer();
        String[] comboSelections;
        if (getParameter(PARAM_PROPERTYTYPE).isEmpty()) {
            for (int i = 0; i < header.size(); i++) {
                defaultPropertyTypeBuffer.append("java.lang.String");
                defaultPropertyTypeBuffer.append(",");
            }
            defaultPropertyTypeBuffer.deleteCharAt(defaultPropertyTypeBuffer.lastIndexOf(","));
            String combs = defaultPropertyTypeBuffer.toString();
            comboSelections = combs.split(",");
        } else {
            comboSelections = getParameter(PARAM_PROPERTYTYPE).as(String.class).split(",");
        }
        String[] properties;
        if (getParameter(PARAM_PROPERTY).isEmpty()) {
            properties = header.toArray(new String[0]);
        } else {
            properties = getParameter(PARAM_PROPERTY).as(String.class).split(",");
        }
        // fail if the number of property names or property types does not match
        // the number of entries in the first line of the file
        if ((header.size() != properties.length && properties.length != 0) || (header.size() != comboSelections.length && comboSelections.length != 0)) {
            fail("Not the same number of entries for property names, property types and words in the first line of the file");
        }
        for (int i = 0; i < comboSelections.length; i++) {
            PropertyType propertyType = PropertyTypeExtension.getInstance().getFactory(comboSelections[i]).createExtensionObject();
            DefaultPropertyDefinition property = new DefaultPropertyDefinition(new QName(properties[i]), type, propertyType.getTypeDefinition());
            configureProperty(property);
        }
        boolean skip = Arrays.equals(properties, header.toArray(new String[0]));
        type.setConstraint(new CSVConfiguration(CSVUtil.getSep(this), CSVUtil.getQuote(this), CSVUtil.getEscape(this), skip));
        schema.addType(type);
    } catch (Exception e) {
        reporter.error(new IOMessageImpl("Cannot load xls/xlsx schema", e));
        reporter.setSuccess(false);
        return null;
    }
    reporter.setSuccess(true);
    return schema;
}
Also used : DefaultPropertyDefinition(eu.esdihumboldt.hale.common.schema.model.impl.DefaultPropertyDefinition) AnalyseXLSSchemaTable(eu.esdihumboldt.hale.io.xls.AnalyseXLSSchemaTable) QName(javax.xml.namespace.QName) IOMessageImpl(eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl) PropertyType(eu.esdihumboldt.hale.io.csv.PropertyType) IOProviderConfigurationException(eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException) IOException(java.io.IOException) DefaultTypeDefinition(eu.esdihumboldt.hale.common.schema.model.impl.DefaultTypeDefinition) CSVConfiguration(eu.esdihumboldt.hale.io.csv.reader.internal.CSVConfiguration) DefaultSchema(eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema)
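
One small aside on the default-property-type branch above: it builds a comma-separated string only to split it again. The same result, one "java.lang.String" entry per header column, can be produced directly; a minimal sketch with a hypothetical helper name.

import java.util.Collections;
import java.util.List;

class DefaultPropertyTypes {

    // One "java.lang.String" entry per header column, equivalent to the
    // StringBuffer/split round trip above for a non-empty header.
    static String[] forHeader(List<String> header) {
        return Collections.nCopies(header.size(), "java.lang.String").toArray(new String[0]);
    }
}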

Example 4 with DefaultSchema

Use of eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema in project hale by halestudio.

Class SQLSchemaReader, method loadFromSource:

@Override
protected Schema loadFromSource(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
    DefaultSchema typeIndex = null;
    String query = null;
    Text text = getParameter(PARAM_SQL).as(Text.class);
    if (text != null) {
        query = text.getText();
    }
    if (query == null) {
        query = getParameter(PARAM_SQL).as(String.class);
    }
    if (query == null) {
        reporter.setSuccess(false);
        reporter.setSummary("No SQL query specified");
        return null;
    }
    String typename = getParameter(PARAM_TYPE_NAME).as(String.class);
    if (typename == null) {
        reporter.setSuccess(false);
        reporter.setSummary("Name of the type that the SQL query should be represented as must be specified");
        return null;
    }
    progress.begin("Read SQL query schema", ProgressIndicator.UNKNOWN);
    Connection connection = null;
    try {
        // connect to the database
        try {
            connection = getConnection();
        } catch (Exception e) {
            reporter.error(new IOMessageImpl(e.getLocalizedMessage(), e));
            reporter.setSuccess(false);
            reporter.setSummary("Failed to connect to database.");
            return null;
        }
        // try to set the connection to read-only; if that fails (e.g. because of how the
        // connection has been created), report a warning message instead
        try {
            connection.setReadOnly(true);
        } catch (SQLException e) {
            // ignore
            // reporter.warn(new IOMessageImpl(e.getLocalizedMessage(), e));
        }
        connection.setAutoCommit(false);
        // get advisor
        JDBCSchemaReaderAdvisor advisor = SchemaReaderAdvisorExtension.getInstance().getAdvisor(connection);
        // determine quotes character
        @SuppressWarnings("unused") String quotes = determineQuoteString(connection);
        // FIXME not actually used here or in JDBC schema reader
        URI jdbcURI = getSource().getLocation();
        String dbNamespace = determineNamespace(jdbcURI, advisor);
        String namespace = NAMESPACE;
        SchemaCrawlerOptions options = new SchemaCrawlerOptions();
        SchemaInfoLevel level = new SchemaInfoLevel();
        level.setTag("hale");
        // these are enabled by default, we don't need them (yet)
        level.setRetrieveSchemaCrawlerInfo(false);
        level.setRetrieveJdbcDriverInfo(false);
        level.setRetrieveDatabaseInfo(false);
        level.setRetrieveTables(false);
        level.setRetrieveTableColumns(false);
        level.setRetrieveForeignKeys(false);
        // set what we need
        level.setRetrieveColumnDataTypes(true);
        level.setRetrieveUserDefinedColumnDataTypes(true);
        options.setSchemaInfoLevel(level);
        if (advisor != null) {
            advisor.configureSchemaCrawler(options);
        }
        final Catalog database = SchemaCrawlerUtility.getCatalog(connection, options);
        // create the type index
        typeIndex = new DefaultSchema(dbNamespace, jdbcURI);
        Statement st = null;
        try {
            st = JDBCUtil.createReadStatement(connection, 1);
            // support project variables
            String processedQuery = JDBCUtil.replaceVariables(query, getServiceProvider());
            ResultSet result = st.executeQuery(processedQuery);
            // the query represents a type
            // get the type definition
            TypeDefinition type = addTableType(query, namespace, typeIndex, connection, reporter, typename);
            ResultsColumns additionalInfo = SchemaCrawlerUtility.getResultColumns(result);
            for (final ResultsColumn column : additionalInfo.getColumns()) {
                getOrCreateProperty(type, column, namespace, typeIndex, connection, reporter, database);
            }
        } finally {
            if (st != null) {
                st.close();
            }
        }
        reporter.setSuccess(true);
    } catch (Exception e) {
        throw new IOProviderConfigurationException("Failed to read database schema", e);
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException e) {
                // ignore
            }
        }
        progress.end();
    }
    return typeIndex;
}
Also used : SchemaInfoLevel(schemacrawler.schemacrawler.SchemaInfoLevel) SQLException(java.sql.SQLException) Statement(java.sql.Statement) Connection(java.sql.Connection) IOMessageImpl(eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl) JDBCSchemaReaderAdvisor(eu.esdihumboldt.hale.io.jdbc.extension.JDBCSchemaReaderAdvisor) Text(eu.esdihumboldt.hale.common.core.io.Text) JDBCUtil.determineQuoteString(eu.esdihumboldt.hale.io.jdbc.JDBCUtil.determineQuoteString) SchemaCrawlerOptions(schemacrawler.schemacrawler.SchemaCrawlerOptions) URI(java.net.URI) IOProviderConfigurationException(eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException) SQLException(java.sql.SQLException) IOException(java.io.IOException) Catalog(schemacrawler.schema.Catalog) DefaultTypeDefinition(eu.esdihumboldt.hale.common.schema.model.impl.DefaultTypeDefinition) TypeDefinition(eu.esdihumboldt.hale.common.schema.model.TypeDefinition) ResultsColumns(schemacrawler.schema.ResultsColumns) IOProviderConfigurationException(eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException) DefaultSchema(eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema) ResultSet(java.sql.ResultSet) ResultsColumn(schemacrawler.schema.ResultsColumn)
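
The statement handling above uses explicit finally blocks; the same query step could also be written with try-with-resources. This is only a sketch of that style: the original obtains its statement via JDBCUtil.createReadStatement, and the metadata and type creation calls are elided here.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

class QueryMetadataSketch {

    // Hypothetical helper: execute the processed query and let the caller
    // inspect the result (in the original, via SchemaCrawlerUtility.getResultColumns).
    static void runQueryForMetadata(Connection connection, String processedQuery) throws SQLException {
        try (Statement st = connection.createStatement();
                ResultSet result = st.executeQuery(processedQuery)) {
            // e.g. SchemaCrawlerUtility.getResultColumns(result)
        }
    }
}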

Example 5 with DefaultSchema

Use of eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema in project hale by halestudio.

Class JDBCSchemaReader, method loadFromSource:

@Override
protected Schema loadFromSource(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
    DefaultSchema typeIndex = null;
    progress.begin("Read database schema", ProgressIndicator.UNKNOWN);
    Connection connection = null;
    try {
        // connect to the database
        try {
            connection = getConnection();
        } catch (Exception e) {
            reporter.error(new IOMessageImpl(e.getLocalizedMessage(), e));
            reporter.setSuccess(false);
            reporter.setSummary("Failed to connect to database.");
            return null;
        }
        // try to set the connection to read-only; if that fails (e.g. because of how the
        // connection has been created), report a warning message instead
        try {
            connection.setReadOnly(true);
        } catch (SQLException e) {
            // ignore
            // reporter.warn(new IOMessageImpl(e.getLocalizedMessage(), e));
        }
        URI jdbcURI = getSource().getLocation();
        final SchemaCrawlerOptions options = new SchemaCrawlerOptions();
        SchemaInfoLevel level = new SchemaInfoLevel();
        level.setTag("hale");
        // these are enabled by default, we don't need them (yet)
        level.setRetrieveSchemaCrawlerInfo(false);
        level.setRetrieveJdbcDriverInfo(false);
        level.setRetrieveDatabaseInfo(false);
        // set what we need
        level.setRetrieveTables(true);
        level.setRetrieveColumnDataTypes(true);
        level.setRetrieveUserDefinedColumnDataTypes(true);
        // to get table column information, which also includes primary keys
        level.setRetrieveTableColumns(true);
        // to get linking information
        level.setRetrieveForeignKeys(true);
        // level.setRetrieveIndices(true); // to get info about UNIQUE indices for validation
        // XXX For some advanced info / DBMS specific info we'll need a
        // properties file. See Config & InformationSchemaViews.
        level.setTag("hale");
        if (getParameter(SCHEMAS).as(String.class) != null) {
            String schemas = getParameter(SCHEMAS).as(String.class).replace(',', '|');
            options.setSchemaInclusionRule(new RegularExpressionInclusionRule(schemas));
        }
        if (SchemaSpaceID.SOURCE.equals(getSchemaSpace())) {
            // show views and tables
            final List<String> tableTypesWanted = Arrays.asList("TABLE", "VIEW", "MATERIALIZED VIEW");
            // try to determine table types supported by the JDBC connection
            final List<String> tableTypeSupported = new ArrayList<>();
            try {
                ResultSet rs = connection.getMetaData().getTableTypes();
                while (rs.next()) {
                    String tableType = rs.getString(1);
                    tableTypeSupported.add(tableType);
                }
            } catch (Throwable t) {
                // Ignore, try with wanted list
                reporter.warn(new IOMessageImpl(MessageFormat.format("Could not determine supported table types for connection: {0}", t.getMessage()), t));
                tableTypeSupported.addAll(tableTypesWanted);
            }
            options.setTableTypes(tableTypesWanted.stream().filter(tt -> tableTypeSupported.contains(tt)).collect(Collectors.toList()));
        } else {
            // only show tables
            options.setTableTypes(Arrays.asList("TABLE"));
        }
        options.setSchemaInfoLevel(level);
        // get advisor
        // XXX should be created once, and used in other places if
        // applicable
        JDBCSchemaReaderAdvisor advisor = SchemaReaderAdvisorExtension.getInstance().getAdvisor(connection);
        if (advisor != null) {
            advisor.configureSchemaCrawler(options);
        }
        final Catalog database = SchemaCrawlerUtility.getCatalog(connection, options);
        @SuppressWarnings("unused") String quotes = JDBCUtil.determineQuoteString(connection);
        // FIXME not actually used here or in SQL schema reader
        String overallNamespace = JDBCUtil.determineNamespace(jdbcURI, advisor);
        // create the type index
        typeIndex = new DefaultSchema(overallNamespace, jdbcURI);
        for (final schemacrawler.schema.Schema schema : database.getSchemas()) {
            // each schema represents a namespace
            String namespace;
            if (overallNamespace.isEmpty()) {
                namespace = unquote(schema.getName());
            } else {
                namespace = overallNamespace;
                if (schema.getName() != null) {
                    namespace += ":" + unquote(schema.getName());
                }
            }
            for (final Table table : database.getTables(schema)) {
                // each table is a type
                // get the type definition
                TypeDefinition type = getOrCreateTableType(schema, table, overallNamespace, namespace, typeIndex, connection, reporter, database);
                // get ResultSetMetaData for extra info about columns (e.g. auto increment)
                ResultsColumns additionalInfo = null;
                Statement stmt = null;
                try {
                    stmt = connection.createStatement();
                    // quote the table (and schema) name if required
                    String fullTableName = getQuotedValue(table.getName());
                    if (schema.getName() != null) {
                        fullTableName = getQuotedValue(schema.getName()) + "." + fullTableName;
                    }
                    ResultSet rs = stmt.executeQuery("SELECT * FROM " + fullTableName + " WHERE 1 = 0");
                    additionalInfo = SchemaCrawlerUtility.getResultColumns(rs);
                } catch (SQLException sqle) {
                    reporter.warn(new IOMessageImpl("Couldn't retrieve additional column meta data.", sqle));
                } finally {
                    if (stmt != null) {
                        try {
                            stmt.close();
                        } catch (SQLException e) {
                            // ignore
                        }
                    }
                }
                // create property definitions for each column
                for (final Column column : table.getColumns()) {
                    DefaultPropertyDefinition property = getOrCreateProperty(schema, type, column, overallNamespace, namespace, typeIndex, connection, reporter, database);
                    // XXX does not work for example for PostgreSQL
                    if (additionalInfo != null) {
                        // ResultsColumns does not quote the column names, in
                        // contrast to every other place
                        ResultsColumn rc = additionalInfo.getColumn(unquote(column.getName()));
                        if (rc != null && rc.isAutoIncrement())
                            property.setConstraint(AutoIncrementFlag.get(true));
                    }
                }
            }
        }
        reporter.setSuccess(true);
    } catch (SchemaCrawlerException e) {
        throw new IOProviderConfigurationException("Failed to read database schema", e);
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException e) {
                // ignore
            }
        }
        progress.end();
    }
    return typeIndex;
}
Also used : DefaultPropertyDefinition(eu.esdihumboldt.hale.common.schema.model.impl.DefaultPropertyDefinition) SchemaInfoLevel(schemacrawler.schemacrawler.SchemaInfoLevel) RegularExpressionInclusionRule(schemacrawler.schemacrawler.RegularExpressionInclusionRule) SQLException(java.sql.SQLException) IOMessageImpl(eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl) ArrayList(java.util.ArrayList) URI(java.net.URI) DefaultTypeDefinition(eu.esdihumboldt.hale.common.schema.model.impl.DefaultTypeDefinition) TypeDefinition(eu.esdihumboldt.hale.common.schema.model.TypeDefinition) ResultsColumn(schemacrawler.schema.ResultsColumn) Column(schemacrawler.schema.Column) BaseColumn(schemacrawler.schema.BaseColumn) IndexColumn(schemacrawler.schema.IndexColumn) SchemaCrawlerException(schemacrawler.schemacrawler.SchemaCrawlerException) DefaultSchema(eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema) ResultSet(java.sql.ResultSet) ResultsColumn(schemacrawler.schema.ResultsColumn) Table(schemacrawler.schema.Table) DatabaseTable(eu.esdihumboldt.hale.io.jdbc.constraints.DatabaseTable) Statement(java.sql.Statement) Connection(java.sql.Connection) JDBCSchemaReaderAdvisor(eu.esdihumboldt.hale.io.jdbc.extension.JDBCSchemaReaderAdvisor) SchemaCrawlerOptions(schemacrawler.schemacrawler.SchemaCrawlerOptions) IOProviderConfigurationException(eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException) SchemaCrawlerException(schemacrawler.schemacrawler.SchemaCrawlerException) SQLException(java.sql.SQLException) IOException(java.io.IOException) Catalog(schemacrawler.schema.Catalog) ResultsColumns(schemacrawler.schema.ResultsColumns) IOProviderConfigurationException(eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException)
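
The per-schema namespace rule applied above can be isolated as a small helper; a minimal sketch where the helper name is hypothetical and the caller is expected to pass the already unquoted schema name (the original calls its own unquote method).

class NamespaceRule {

    // Mirror the branch above: fall back to the schema name when no overall
    // namespace was determined, otherwise append the schema name to it.
    static String namespaceFor(String overallNamespace, String unquotedSchemaName) {
        if (overallNamespace.isEmpty()) {
            return unquotedSchemaName;
        }
        String namespace = overallNamespace;
        if (unquotedSchemaName != null) {
            namespace += ":" + unquotedSchemaName;
        }
        return namespace;
    }
}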

Aggregations

DefaultSchema (eu.esdihumboldt.hale.common.schema.model.impl.DefaultSchema) 7
IOProviderConfigurationException (eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException) 6
DefaultTypeDefinition (eu.esdihumboldt.hale.common.schema.model.impl.DefaultTypeDefinition) 6
IOException (java.io.IOException) 6
TypeDefinition (eu.esdihumboldt.hale.common.schema.model.TypeDefinition) 5
IOMessageImpl (eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl) 4
DefaultPropertyDefinition (eu.esdihumboldt.hale.common.schema.model.impl.DefaultPropertyDefinition) 4
QName (javax.xml.namespace.QName) 4
PropertyType (eu.esdihumboldt.hale.io.csv.PropertyType) 2
JDBCSchemaReaderAdvisor (eu.esdihumboldt.hale.io.jdbc.extension.JDBCSchemaReaderAdvisor) 2
URI (java.net.URI) 2
Connection (java.sql.Connection) 2
ResultSet (java.sql.ResultSet) 2
SQLException (java.sql.SQLException) 2
Statement (java.sql.Statement) 2
Catalog (schemacrawler.schema.Catalog) 2
CSVReader (au.com.bytecode.opencsv.CSVReader) 1
BaseAlignmentCell (eu.esdihumboldt.hale.common.align.model.BaseAlignmentCell) 1
Cell (eu.esdihumboldt.hale.common.align.model.Cell) 1
ModifiableCell (eu.esdihumboldt.hale.common.align.model.ModifiableCell) 1