Search in sources :

Example 21 with DatasourceServiceException

use of org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException in project data-access by pentaho.

the class DSWDatasourceServiceImpl method deleteLogicalModel.

/**
 * Deletes the logical model(s) for the given domain, cleaning up any associated CSV
 * staging table, uploaded CSV file, and Mondrian catalog, and removes the domain itself
 * once it holds no more models.
 *
 * NOTE(review): the {@code modelName} parameter is not referenced anywhere in this
 * method — the models to remove are resolved from the domain's perspectives instead.
 * Confirm whether that is intentional.
 *
 * @param domainId  id of the metadata domain containing the model
 * @param modelName name of the model to delete (currently unused — see note above)
 * @return {@code true} when the model is gone (or was already gone); {@code false} when
 *         the caller lacks data-access permission
 * @throws DatasourceServiceException when catalog or domain-repository cleanup fails
 */
public boolean deleteLogicalModel(String domainId, String modelName) throws DatasourceServiceException {
    if (!hasDataAccessPermission()) {
        // $NON-NLS-1$
        logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0001_PERMISSION_DENIED"));
        return false;
    }
    String catalogRef = null;
    String targetTable = null;
    try {
        // first load the model
        Domain domain = getMetadataDomainRepository().getDomain(domainId);
        if (domain == null) {
            // The domain is already gone — nothing left to delete.
            return true;
        }
        ModelerWorkspace model = createModelerWorkspace();
        model.setDomain(domain);
        LogicalModel logicalModel = model.getLogicalModel(ModelerPerspective.ANALYSIS);
        if (logicalModel == null) {
            logicalModel = model.getLogicalModel(ModelerPerspective.REPORTING);
        }
        LogicalModel logicalModelRep = model.getLogicalModel(ModelerPerspective.REPORTING);
        if (logicalModel == null) {
            // logicalModel falls back to the reporting model above, so it is null only when
            // neither perspective exists. Previously this fell through to an NPE on
            // logicalModel.getProperty(...) below.
            return true;
        }
        // CSV related data is bound to the reporting model, so perform additional clean up here
        if (logicalModelRep != null) {
            String modelState = (String) logicalModelRep.getProperty(LM_PROP_DATASOURCE_MODEL);
            // TODO: use the edit story's stored info to do this
            if ("CSV".equals(logicalModelRep.getProperty(LM_PROP_DATASOURCE_TYPE)) || "true".equalsIgnoreCase((String) logicalModelRep.getProperty(LogicalModel.PROPERTY_TARGET_TABLE_STAGED))) {
                targetTable = ((SqlPhysicalTable) domain.getPhysicalModels().get(0).getPhysicalTables().get(0)).getTargetTable();
                DatasourceDTO datasource = null;
                if (modelState != null) {
                    datasource = deSerializeModelState(modelState);
                }
                if (datasource != null) {
                    CsvTransformGenerator csvTransformGenerator = new CsvTransformGenerator(datasource.getCsvModelInfo(), AgileHelper.getDatabaseMeta());
                    try {
                        csvTransformGenerator.dropTable(targetTable);
                    } catch (CsvTransformGeneratorException e) {
                        // table might not be there, it's OK that is what we were trying to do anyway
                        logger.warn(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_UNABLE_TO_DROP_TABLE", targetTable, domainId, e.getLocalizedMessage()), // $NON-NLS-1$
                        e);
                    }
                    // Remove the uploaded CSV file that backed the staged table, if any.
                    String fileName = datasource.getCsvModelInfo().getFileInfo().getFilename();
                    FileUtils fileService = new FileUtils();
                    if (fileName != null) {
                        fileService.deleteFile(fileName);
                    }
                }
            }
        }
        // if associated mondrian file, delete
        if (logicalModel.getProperty(LM_PROP_MONDRIAN_CATALOG_REF) != null) {
            // remove Mondrian schema
            IMondrianCatalogService service = PentahoSystem.get(IMondrianCatalogService.class, null);
            catalogRef = (String) logicalModel.getProperty(LM_PROP_MONDRIAN_CATALOG_REF);
            // check if the model is not already removed
            if (service.getCatalog(catalogRef, PentahoSessionHolder.getSession()) != null) {
                service.removeCatalog(catalogRef, PentahoSessionHolder.getSession());
            }
        }
        getMetadataDomainRepository().removeModel(domainId, logicalModel.getId());
        // The reporting model may be a distinct model — remove it too if so.
        if (logicalModelRep != null && !logicalModelRep.getId().equals(logicalModel.getId())) {
            getMetadataDomainRepository().removeModel(domainId, logicalModelRep.getId());
        }
        // get updated domain
        domain = getMetadataDomainRepository().getDomain(domainId);
        if (domain == null) {
            // already deleted
            return true;
        }
        // Drop the whole domain once it no longer contains any logical models.
        if (domain.getLogicalModels() == null || domain.getLogicalModels().isEmpty()) {
            getMetadataDomainRepository().removeDomain(domainId);
        }
    } catch (MondrianCatalogServiceException me) {
        logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0020_UNABLE_TO_DELETE_CATALOG", catalogRef, domainId, me.getLocalizedMessage()), // $NON-NLS-1$
        me);
        throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceImpl.ERROR_0020_UNABLE_TO_DELETE_CATALOG", catalogRef, domainId, me.getLocalizedMessage()), // $NON-NLS-1$
        me);
    } catch (DomainStorageException dse) {
        // NOTE(review): the logged key (ERROR_0017) and the thrown key (ERROR_0016) differ —
        // confirm against the message bundle which key is intended.
        logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0017_UNABLE_TO_STORE_DOMAIN", domainId, dse.getLocalizedMessage()), // $NON-NLS-1$
        dse);
        throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceImpl.ERROR_0016_UNABLE_TO_STORE_DOMAIN", domainId, dse.getLocalizedMessage()), // $NON-NLS-1$
        dse);
    } catch (DomainIdNullException dne) {
        logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_DOMAIN_IS_NULL", dne.getLocalizedMessage()), // $NON-NLS-1$
        dne);
        throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_DOMAIN_IS_NULL", dne.getLocalizedMessage()), // $NON-NLS-1$
        dne);
    }
    return true;
}
Also used : CsvTransformGeneratorException(org.pentaho.platform.dataaccess.datasource.wizard.models.CsvTransformGeneratorException) FileUtils(org.pentaho.platform.dataaccess.datasource.wizard.csv.FileUtils) DomainIdNullException(org.pentaho.metadata.repository.DomainIdNullException) DatasourceDTO(org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO) IMondrianCatalogService(org.pentaho.platform.plugin.action.mondrian.catalog.IMondrianCatalogService) LogicalModel(org.pentaho.metadata.model.LogicalModel) MondrianCatalogServiceException(org.pentaho.platform.plugin.action.mondrian.catalog.MondrianCatalogServiceException) DomainStorageException(org.pentaho.metadata.repository.DomainStorageException) CsvTransformGenerator(org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator) Domain(org.pentaho.metadata.model.Domain) ModelerWorkspace(org.pentaho.agilebi.modeler.ModelerWorkspace) DatasourceServiceException(org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException)

Example 22 with DatasourceServiceException

use of org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException in project data-access by pentaho.

the class MultitableDatasourceService method getTableFields.

/**
 * Returns the field (column) names of the given table by running the database's
 * field-discovery query with a one-row limit and reading the result metadata.
 *
 * @param table      table whose fields to list
 * @param connection connection descriptor used to build the {@link DatabaseMeta}
 * @return the table's field names, in result-set order
 * @throws DatasourceServiceException when the connection cannot be resolved or the query fails
 */
public List<String> getTableFields(String table, IDatabaseConnection connection) throws DatasourceServiceException {
    try {
        DatabaseMeta databaseMeta = this.getDatabaseMeta(connection);
        Database database = new Database(null, databaseMeta);
        database.connect();
        try {
            String query = databaseMeta.getSQLQueryFields(table);
            // Limit to a single row: only the result metadata (field names) is needed.
            database.setQueryLimit(1);
            database.getRows(query, 1);
            String[] tableFields = database.getReturnRowMeta().getFieldNames();
            return Arrays.asList(tableFields);
        } finally {
            // Previously disconnect() was skipped when the query threw, leaking the
            // connection; always release it.
            database.disconnect();
        }
    } catch (KettleDatabaseException e) {
        logger.error(e);
        throw new DatasourceServiceException(e);
    } catch (ConnectionServiceException e) {
        logger.error(e);
        throw new DatasourceServiceException(e);
    }
}
Also used : ConnectionServiceException(org.pentaho.platform.dataaccess.datasource.wizard.service.ConnectionServiceException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) Database(org.pentaho.di.core.database.Database) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) DatasourceServiceException(org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException)

Example 23 with DatasourceServiceException

use of org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException in project data-access by pentaho.

the class MultitableDatasourceService method getDatabaseTables.

/**
 * Lists all tables and views visible on the given connection, optionally restricted to a
 * schema. For data-services connections, streaming tables are filtered out.
 *
 * @param connection connection descriptor used to build the {@link DatabaseMeta}
 * @param schema     schema to list from, or null for all schemas
 * @return table names followed by view names
 * @throws DatasourceServiceException when the connection cannot be resolved or listing fails
 */
public List<String> getDatabaseTables(IDatabaseConnection connection, String schema) throws DatasourceServiceException {
    try {
        DatabaseMeta databaseMeta = this.getDatabaseMeta(connection);
        Database database = new Database(null, databaseMeta);
        database.connect();
        try {
            // For data-services connections, exclude streaming tables; a plain HashMap
            // replaces the previous double-brace (anonymous subclass) initialization.
            HashMap<String, String> filterOptions = null;
            if (this.isDataServicesConnection(connection)) {
                filterOptions = new HashMap<String, String>();
                filterOptions.put("STREAMING", "N");
            }
            String[] tableNames = database.getTablenames(schema, true, filterOptions);
            List<String> tables = new ArrayList<String>();
            tables.addAll(Arrays.asList(tableNames));
            tables.addAll(Arrays.asList(database.getViews(schema, true)));
            return tables;
        } finally {
            // Previously disconnect() was skipped when listing threw, leaking the
            // connection; always release it.
            database.disconnect();
        }
    } catch (KettleDatabaseException e) {
        logger.error("Error creating database object", e);
        throw new DatasourceServiceException(e);
    } catch (ConnectionServiceException e) {
        logger.error("Error getting database meta", e);
        throw new DatasourceServiceException(e);
    }
}
Also used : ConnectionServiceException(org.pentaho.platform.dataaccess.datasource.wizard.service.ConnectionServiceException) HashMap(java.util.HashMap) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) Database(org.pentaho.di.core.database.Database) ArrayList(java.util.ArrayList) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) DatasourceServiceException(org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException)

Example 24 with DatasourceServiceException

use of org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException in project data-access by pentaho.

the class InMemoryDSWDatasourceServiceImpl method getGeoContext.

/**
 * Builds a {@link GeoContext} from the sample properties file used by tests
 * ({@code target/test-classes/geoContextSample.properties}).
 *
 * @return the geo context created from the sample properties
 * @throws DatasourceServiceException when the properties file cannot be read or the
 *         context cannot be created
 */
public GeoContext getGeoContext() throws DatasourceServiceException {
    FileInputStream propsStream = null;
    try {
        Properties props = new Properties();
        propsStream = new FileInputStream(new File("target/test-classes/geoContextSample.properties"));
        props.load(propsStream);
        return GeoContextFactory.create(new GeoContextPropertiesProvider(props));
    } catch (ModelerException e) {
        throw new DatasourceServiceException(e);
    } catch (IOException e) {
        // FileNotFoundException is an IOException, so this covers both the missing-file
        // and read-failure cases. Previously read failures were swallowed with
        // printStackTrace() and null was returned; surface them via the declared
        // exception type instead.
        throw new DatasourceServiceException(e);
    } finally {
        // The stream was previously never closed, leaking a file handle.
        if (propsStream != null) {
            try {
                propsStream.close();
            } catch (IOException ignored) {
                // best-effort close; nothing more can be done here
            }
        }
    }
}
Also used : ModelerException(org.pentaho.agilebi.modeler.ModelerException) GeoContextPropertiesProvider(org.pentaho.agilebi.modeler.geo.GeoContextPropertiesProvider) FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException) Properties(java.util.Properties) File(java.io.File) FileInputStream(java.io.FileInputStream) GeoContext(org.pentaho.agilebi.modeler.geo.GeoContext) DatasourceServiceException(org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException)

Example 25 with DatasourceServiceException

use of org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException in project data-access by pentaho.

the class InMemoryDSWDatasourceServiceImpl method generateLogicalModel.

/**
 * Generates a logical model for a SQL query and returns the resulting business data:
 * the business columns, their types, and a sample of preview rows.
 *
 * @param modelName      name to give the generated model
 * @param connectionName name of the connection to run the query against
 * @param dbType         database type of the connection
 * @param query          SQL query to model
 * @param previewLimit   maximum number of preview rows to fetch (parsed as an int)
 * @return the generated domain together with the preview data
 * @throws DatasourceServiceException when query validation or model generation fails
 */
public BusinessData generateLogicalModel(String modelName, String connectionName, String dbType, String query, String previewLimit) throws DatasourceServiceException {
    try {
        // Validate the query first; throws QueryValidationException on failure.
        executeQuery(connectionName, query, previewLimit);
        // Security is enabled when any role or user restriction is configured.
        // (primitive boolean — no reason to box here)
        boolean securityEnabled = (getPermittedRoleList() != null && !getPermittedRoleList().isEmpty()) || (getPermittedUserList() != null && !getPermittedUserList().isEmpty());
        SerializedResultSet resultSet = DatasourceInMemoryServiceHelper.getSerializeableResultSet(connectionName, query, Integer.parseInt(previewLimit), null);
        SQLModelGenerator sqlModelGenerator = new SQLModelGenerator(modelName, connectionName, dbType, resultSet.getColumnTypes(), resultSet.getColumns(), query, securityEnabled, getPermittedRoleList(), getPermittedUserList(), getDefaultAcls(), "joe");
        Domain domain = sqlModelGenerator.generate();
        return new BusinessData(domain, resultSet.getData());
    } catch (SQLModelGeneratorException smge) {
        // NOTE(review): the logged key (ERROR_0016) and the thrown key (ERROR_0015) differ —
        // confirm against the message bundle which one is intended.
        logger.error(Messages.getErrorString("InMemoryDatasourceServiceImpl.ERROR_0016_UNABLE_TO_GENERATE_MODEL", smge.getLocalizedMessage()), smge);
        throw new DatasourceServiceException(Messages.getErrorString("InMemoryDatasourceServiceImpl.ERROR_0015_UNABLE_TO_GENERATE_MODEL"), // $NON-NLS-1$
        smge);
    } catch (QueryValidationException e) {
        logger.error(Messages.getErrorString("InMemoryDatasourceServiceImpl.ERROR_0009_QUERY_VALIDATION_FAILED", e.getLocalizedMessage()), // $NON-NLS-1$
        e);
        throw new DatasourceServiceException(Messages.getErrorString("InMemoryDatasourceServiceImpl.ERROR_0009_QUERY_VALIDATION_FAILED", e.getLocalizedMessage()), // $NON-NLS-1$
        e);
    }
}
Also used : BusinessData(org.pentaho.platform.dataaccess.datasource.beans.BusinessData) SQLModelGeneratorException(org.pentaho.metadata.util.SQLModelGeneratorException) SQLModelGenerator(org.pentaho.metadata.util.SQLModelGenerator) QueryValidationException(org.pentaho.platform.dataaccess.datasource.wizard.service.QueryValidationException) SerializedResultSet(org.pentaho.platform.dataaccess.datasource.beans.SerializedResultSet) Domain(org.pentaho.metadata.model.Domain) DatasourceServiceException(org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException)

Aggregations

DatasourceServiceException (org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException)26 ConnectionServiceException (org.pentaho.platform.dataaccess.datasource.wizard.service.ConnectionServiceException)12 SQLException (java.sql.SQLException)10 ModelerException (org.pentaho.agilebi.modeler.ModelerException)10 Domain (org.pentaho.metadata.model.Domain)10 SerializedResultSet (org.pentaho.platform.dataaccess.datasource.beans.SerializedResultSet)10 MondrianCatalogServiceException (org.pentaho.platform.plugin.action.mondrian.catalog.MondrianCatalogServiceException)9 DomainIdNullException (org.pentaho.metadata.repository.DomainIdNullException)8 DomainStorageException (org.pentaho.metadata.repository.DomainStorageException)8 SqlQueriesNotSupportedException (org.pentaho.platform.dataaccess.datasource.wizard.service.SqlQueriesNotSupportedException)8 DomainAlreadyExistsException (org.pentaho.metadata.repository.DomainAlreadyExistsException)7 QueryValidationException (org.pentaho.platform.dataaccess.datasource.wizard.service.QueryValidationException)7 IOException (java.io.IOException)6 ModelerWorkspace (org.pentaho.agilebi.modeler.ModelerWorkspace)6 ArrayList (java.util.ArrayList)5 KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException)5 LogicalModel (org.pentaho.metadata.model.LogicalModel)5 SQLModelGeneratorException (org.pentaho.metadata.util.SQLModelGeneratorException)5 FileNotFoundException (java.io.FileNotFoundException)4 DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta)4