Usage of org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException in the pentaho/data-access project.
From class DSWDatasourceServiceImpl, method deleteLogicalModel:
/**
 * Deletes the logical model identified by {@code domainId}, cleaning up any CSV
 * staging table and uploaded file bound to the reporting model, removing an
 * associated Mondrian catalog, and finally removing the domain itself once it
 * contains no more logical models.
 *
 * @param domainId  id of the metadata domain containing the model
 * @param modelName name of the model (currently not used by the lookup logic below;
 *                  models are resolved via the ModelerWorkspace perspectives)
 * @return {@code true} when the model was deleted (or was already gone);
 *         {@code false} when the caller lacks data-access permission
 * @throws DatasourceServiceException when catalog removal or domain-repository
 *         operations fail
 */
public boolean deleteLogicalModel( String domainId, String modelName ) throws DatasourceServiceException {
  if ( !hasDataAccessPermission() ) {
    logger.error( Messages.getErrorString( "DatasourceServiceImpl.ERROR_0001_PERMISSION_DENIED" ) ); // $NON-NLS-1$
    return false;
  }
  String catalogRef = null;
  String targetTable = null;
  try {
    // first load the model
    Domain domain = getMetadataDomainRepository().getDomain( domainId );
    ModelerWorkspace model = createModelerWorkspace();
    model.setDomain( domain );
    // prefer the ANALYSIS model; fall back to REPORTING when absent
    LogicalModel logicalModel = model.getLogicalModel( ModelerPerspective.ANALYSIS );
    if ( logicalModel == null ) {
      logicalModel = model.getLogicalModel( ModelerPerspective.REPORTING );
    }
    LogicalModel logicalModelRep = model.getLogicalModel( ModelerPerspective.REPORTING );
    // CSV related data is bound to the reporting model so we need to perform some
    // additional clean up here (staging table and uploaded source file)
    if ( logicalModelRep != null ) {
      String modelState = (String) logicalModelRep.getProperty( LM_PROP_DATASOURCE_MODEL );
      // TODO: use the edit story's stored info to do this
      if ( "CSV".equals( logicalModelRep.getProperty( LM_PROP_DATASOURCE_TYPE ) )
          || "true".equalsIgnoreCase( (String) logicalModelRep.getProperty( LogicalModel.PROPERTY_TARGET_TABLE_STAGED ) ) ) {
        targetTable =
            ( (SqlPhysicalTable) domain.getPhysicalModels().get( 0 ).getPhysicalTables().get( 0 ) ).getTargetTable();
        DatasourceDTO datasource = null;
        if ( modelState != null ) {
          datasource = deSerializeModelState( modelState );
        }
        if ( datasource != null ) {
          CsvTransformGenerator csvTransformGenerator =
              new CsvTransformGenerator( datasource.getCsvModelInfo(), AgileHelper.getDatabaseMeta() );
          try {
            csvTransformGenerator.dropTable( targetTable );
          } catch ( CsvTransformGeneratorException e ) {
            // table might not be there, it's OK that is what we were trying to do anyway
            logger.warn( Messages.getErrorString( "DatasourceServiceImpl.ERROR_0019_UNABLE_TO_DROP_TABLE",
                targetTable, domainId, e.getLocalizedMessage() ), e ); // $NON-NLS-1$
          }
          String fileName = datasource.getCsvModelInfo().getFileInfo().getFilename();
          FileUtils fileService = new FileUtils();
          if ( fileName != null ) {
            fileService.deleteFile( fileName );
          }
        }
      }
    }
    // BUGFIX: both perspective lookups may return null; the original code
    // dereferenced logicalModel unconditionally here and could throw an NPE.
    if ( logicalModel != null ) {
      // if associated mondrian file, delete
      if ( logicalModel.getProperty( LM_PROP_MONDRIAN_CATALOG_REF ) != null ) {
        // remove Mondrian schema
        IMondrianCatalogService service = PentahoSystem.get( IMondrianCatalogService.class, null );
        catalogRef = (String) logicalModel.getProperty( LM_PROP_MONDRIAN_CATALOG_REF );
        // check if the model is not already removed
        if ( service.getCatalog( catalogRef, PentahoSessionHolder.getSession() ) != null ) {
          service.removeCatalog( catalogRef, PentahoSessionHolder.getSession() );
        }
      }
      getMetadataDomainRepository().removeModel( domainId, logicalModel.getId() );
    }
    // remove the reporting model too, unless it is the same model we just removed
    if ( logicalModelRep != null
        && ( logicalModel == null || !logicalModelRep.getId().equals( logicalModel.getId() ) ) ) {
      getMetadataDomainRepository().removeModel( domainId, logicalModelRep.getId() );
    }
    // get updated domain
    domain = getMetadataDomainRepository().getDomain( domainId );
    if ( domain == null ) {
      // already deleted
      return true;
    }
    // drop the whole domain once it holds no more models
    if ( domain.getLogicalModels() == null || domain.getLogicalModels().isEmpty() ) {
      getMetadataDomainRepository().removeDomain( domainId );
    }
  } catch ( MondrianCatalogServiceException me ) {
    logger.error( Messages.getErrorString( "DatasourceServiceImpl.ERROR_0020_UNABLE_TO_DELETE_CATALOG",
        catalogRef, domainId, me.getLocalizedMessage() ), me ); // $NON-NLS-1$
    throw new DatasourceServiceException( Messages.getErrorString( "DatasourceServiceImpl.ERROR_0020_UNABLE_TO_DELETE_CATALOG",
        catalogRef, domainId, me.getLocalizedMessage() ), me ); // $NON-NLS-1$
  } catch ( DomainStorageException dse ) {
    // NOTE(review): the log uses key ERROR_0017 but the thrown exception uses
    // ERROR_0016 — confirm which key is intended; keys left untouched here.
    logger.error( Messages.getErrorString( "DatasourceServiceImpl.ERROR_0017_UNABLE_TO_STORE_DOMAIN",
        domainId, dse.getLocalizedMessage() ), dse ); // $NON-NLS-1$
    throw new DatasourceServiceException( Messages.getErrorString( "DatasourceServiceImpl.ERROR_0016_UNABLE_TO_STORE_DOMAIN",
        domainId, dse.getLocalizedMessage() ), dse ); // $NON-NLS-1$
  } catch ( DomainIdNullException dne ) {
    logger.error( Messages.getErrorString( "DatasourceServiceImpl.ERROR_0019_DOMAIN_IS_NULL",
        dne.getLocalizedMessage() ), dne ); // $NON-NLS-1$
    throw new DatasourceServiceException( Messages.getErrorString( "DatasourceServiceImpl.ERROR_0019_DOMAIN_IS_NULL",
        dne.getLocalizedMessage() ), dne ); // $NON-NLS-1$
  }
  return true;
}
Usage of org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException in the pentaho/data-access project.
From class MultitableDatasourceService, method getTableFields:
/**
 * Returns the column names of the given table by issuing a single-row probe
 * query through a temporary Kettle {@link Database} connection. Only the
 * result-row metadata is inspected; the row data itself is discarded.
 *
 * @param table      name of the table to describe
 * @param connection connection definition used to build the DatabaseMeta
 * @return the table's field names, in result-set order
 * @throws DatasourceServiceException when the connection cannot be resolved or
 *         the probe query fails
 */
public List<String> getTableFields( String table, IDatabaseConnection connection ) throws DatasourceServiceException {
  try {
    DatabaseMeta databaseMeta = this.getDatabaseMeta( connection );
    Database database = new Database( null, databaseMeta );
    database.connect();
    try {
      String query = databaseMeta.getSQLQueryFields( table );
      // Setting the query limit to 1 before executing the query — we only need
      // the result metadata, not the data
      database.setQueryLimit( 1 );
      database.getRows( query, 1 );
      String[] tableFields = database.getReturnRowMeta().getFieldNames();
      return Arrays.asList( tableFields );
    } finally {
      // BUGFIX: the original only disconnected on the success path, leaking the
      // connection whenever the query or metadata access threw
      database.disconnect();
    }
  } catch ( KettleDatabaseException e ) {
    logger.error( e );
    throw new DatasourceServiceException( e );
  } catch ( ConnectionServiceException e ) {
    logger.error( e );
    throw new DatasourceServiceException( e );
  }
}
Usage of org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException in the pentaho/data-access project.
From class MultitableDatasourceService, method getDatabaseTables:
/**
 * Lists all table and view names in the given schema, using a temporary Kettle
 * {@link Database} connection. For Pentaho Data Services connections, streaming
 * data services are excluded via the {@code STREAMING=N} lookup property.
 *
 * @param connection connection definition used to build the DatabaseMeta
 * @param schema     schema to enumerate (may be null for the default schema)
 * @return table names followed by view names
 * @throws DatasourceServiceException when the connection cannot be resolved or
 *         enumeration fails
 */
public List<String> getDatabaseTables( IDatabaseConnection connection, String schema ) throws DatasourceServiceException {
  try {
    DatabaseMeta databaseMeta = this.getDatabaseMeta( connection );
    Database database = new Database( null, databaseMeta );
    database.connect();
    try {
      // Build the lookup properties explicitly instead of the original
      // double-brace anonymous-subclass idiom (which captures a reference to
      // the enclosing instance)
      HashMap<String, String> lookupProps = null;
      if ( this.isDataServicesConnection( connection ) ) {
        lookupProps = new HashMap<String, String>();
        lookupProps.put( "STREAMING", "N" );
      }
      String[] tableNames = database.getTablenames( schema, true, lookupProps );
      List<String> tables = new ArrayList<String>();
      tables.addAll( Arrays.asList( tableNames ) );
      tables.addAll( Arrays.asList( database.getViews( schema, true ) ) );
      return tables;
    } finally {
      // BUGFIX: the original only disconnected on the success path, leaking the
      // connection whenever getTablenames/getViews threw
      database.disconnect();
    }
  } catch ( KettleDatabaseException e ) {
    logger.error( "Error creating database object", e );
    throw new DatasourceServiceException( e );
  } catch ( ConnectionServiceException e ) {
    logger.error( "Error getting database meta", e );
    throw new DatasourceServiceException( e );
  }
}
Usage of org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException in the pentaho/data-access project.
From class InMemoryDSWDatasourceServiceImpl, method getGeoContext:
/**
 * Loads the sample geo-context properties used by this in-memory test
 * implementation from {@code target/test-classes/geoContextSample.properties}.
 *
 * @return the GeoContext built from the sample properties file
 * @throws DatasourceServiceException when the file cannot be read or the
 *         GeoContext cannot be created
 */
public GeoContext getGeoContext() throws DatasourceServiceException {
  // try-with-resources: the original never closed the FileInputStream
  try ( FileInputStream propsStream =
      new FileInputStream( new File( "target/test-classes/geoContextSample.properties" ) ) ) {
    Properties props = new Properties();
    props.load( propsStream );
    return GeoContextFactory.create( new GeoContextPropertiesProvider( props ) );
  } catch ( ModelerException e ) {
    throw new DatasourceServiceException( e );
  } catch ( IOException e ) {
    // BUGFIX: IOException was previously swallowed (printStackTrace + return
    // null); surface it to the caller instead. FileNotFoundException is an
    // IOException subtype, so its original wrap-and-throw behavior is kept.
    throw new DatasourceServiceException( e );
  }
}
Usage of org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException in the pentaho/data-access project.
From class InMemoryDSWDatasourceServiceImpl, method generateLogicalModel:
/**
 * Generates the business data for a SQL query: the business columns, their
 * types, and sample preview data.
 *
 * @param modelName      name for the generated model
 * @param connectionName name of the connection to run the query against
 * @param dbType         database type of the connection
 * @param query          SQL query that defines the model's columns
 * @param previewLimit   maximum number of preview rows, as a decimal string
 * @return the generated domain together with the preview data
 * @throws DatasourceServiceException when the query fails validation or model
 *         generation fails
 */
public BusinessData generateLogicalModel( String modelName, String connectionName, String dbType, String query,
    String previewLimit ) throws DatasourceServiceException {
  try {
    executeQuery( connectionName, query, previewLimit );
    // security is enabled when any role or user has been explicitly permitted
    boolean securityEnabled =
        ( getPermittedRoleList() != null && !getPermittedRoleList().isEmpty() )
            || ( getPermittedUserList() != null && !getPermittedUserList().isEmpty() );
    SerializedResultSet resultSet =
        DatasourceInMemoryServiceHelper.getSerializeableResultSet( connectionName, query,
            Integer.parseInt( previewLimit ), null );
    SQLModelGenerator sqlModelGenerator =
        new SQLModelGenerator( modelName, connectionName, dbType, resultSet.getColumnTypes(), resultSet.getColumns(),
            query, securityEnabled, getPermittedRoleList(), getPermittedUserList(), getDefaultAcls(), "joe" );
    Domain domain = sqlModelGenerator.generate();
    return new BusinessData( domain, resultSet.getData() );
  } catch ( SQLModelGeneratorException smge ) {
    // NOTE(review): the log uses key ERROR_0016 but the thrown exception uses
    // ERROR_0015 — confirm which key is intended; keys left untouched here.
    logger.error( Messages.getErrorString( "InMemoryDatasourceServiceImpl.ERROR_0016_UNABLE_TO_GENERATE_MODEL",
        smge.getLocalizedMessage() ), smge );
    throw new DatasourceServiceException( Messages.getErrorString( "InMemoryDatasourceServiceImpl.ERROR_0015_UNABLE_TO_GENERATE_MODEL" ), // $NON-NLS-1$
        smge );
  } catch ( QueryValidationException e ) {
    logger.error( Messages.getErrorString( "InMemoryDatasourceServiceImpl.ERROR_0009_QUERY_VALIDATION_FAILED",
        e.getLocalizedMessage() ), e ); // $NON-NLS-1$
    throw new DatasourceServiceException( Messages.getErrorString( "InMemoryDatasourceServiceImpl.ERROR_0009_QUERY_VALIDATION_FAILED",
        e.getLocalizedMessage() ), e ); // $NON-NLS-1$
  }
}
Aggregations