Usage of org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO in the pentaho/data-access project: class DSWDatasourceServiceImpl, method deSerializeModelState.
/**
 * Deserializes a previously serialized {@link DatasourceDTO} model state from its XStream XML form.
 *
 * @param dtoStr XStream XML produced when the datasource model state was serialized; must have
 *               a {@code DatasourceDTO} root element
 * @return the deserialized {@code DatasourceDTO}
 * @throws DatasourceServiceException if {@code dtoStr} is null or is not a valid serialized
 *                                    {@code DatasourceDTO} document
 */
public DatasourceDTO deSerializeModelState(String dtoStr) throws DatasourceServiceException {
  // Null guard first: the original called startsWith() directly and would NPE on null input.
  if (dtoStr == null
      || !dtoStr.startsWith("<org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO>")
      || !dtoStr.endsWith("</org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO>")) {
    logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0025_STRING_FOR_DESERIALIZATION_IS_NOT_VALID")); //$NON-NLS-1$
    throw new DatasourceServiceException(
        Messages.getErrorString("DatasourceServiceImpl.ERROR_0025_STRING_FOR_DESERIALIZATION_IS_NOT_VALID")); //$NON-NLS-1$
  }
  XStream xs = new XStream();
  xs.setClassLoader(DatasourceDTO.class.getClassLoader());
  // SECURITY: checking the root element alone does not stop XStream gadget-chain attacks
  // mounted through nested elements (CVE-2013-7285 class of issues). Restrict deserialization
  // to the wizard model package via an explicit allowlist.
  XStream.setupDefaultSecurity(xs);
  xs.allowTypesByWildcard(new String[] { "org.pentaho.platform.dataaccess.datasource.wizard.models.*" });
  return (DatasourceDTO) xs.fromXML(dtoStr);
}
Usage of org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO in the pentaho/data-access project: class DSWDatasourceServiceImpl, method deleteLogicalModel.
/**
 * Deletes the logical model(s) of the given domain, along with any associated artifacts:
 * a staged CSV target table and its uploaded source file, and any Mondrian catalog
 * referenced by the model. If removing the model leaves the domain empty, the domain
 * itself is removed.
 *
 * <p>Note: {@code modelName} is currently unused by this implementation; deletion is
 * keyed entirely off {@code domainId}. Kept for interface compatibility.
 *
 * @param domainId  id of the metadata domain whose model is being deleted
 * @param modelName name of the model (unused, see note above)
 * @return {@code true} on success or when the domain/model is already gone;
 *         {@code false} if the caller lacks data-access permission
 * @throws DatasourceServiceException if catalog removal or domain storage operations fail
 */
public boolean deleteLogicalModel(String domainId, String modelName) throws DatasourceServiceException {
  if (!hasDataAccessPermission()) {
    logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0001_PERMISSION_DENIED")); //$NON-NLS-1$
    return false;
  }
  String catalogRef = null;
  String targetTable = null;
  try {
    // First load the model.
    Domain domain = getMetadataDomainRepository().getDomain(domainId);
    if (domain == null) {
      // FIX: original only checked for a null domain after deletion and would NPE here.
      // A missing domain means there is nothing left to delete.
      return true;
    }
    ModelerWorkspace model = createModelerWorkspace();
    model.setDomain(domain);
    LogicalModel logicalModel = model.getLogicalModel(ModelerPerspective.ANALYSIS);
    if (logicalModel == null) {
      logicalModel = model.getLogicalModel(ModelerPerspective.REPORTING);
    }
    LogicalModel logicalModelRep = model.getLogicalModel(ModelerPerspective.REPORTING);
    if (logicalModel == null) {
      // FIX: original dereferenced logicalModel unconditionally below and would NPE when
      // neither an ANALYSIS nor a REPORTING model exists. Treat as already deleted.
      return true;
    }
    // CSV related data is bound to the reporting model, so perform additional clean-up here.
    if (logicalModelRep != null) {
      String modelState = (String) logicalModelRep.getProperty(LM_PROP_DATASOURCE_MODEL);
      // TODO: use the edit story's stored info to do this
      if ("CSV".equals(logicalModelRep.getProperty(LM_PROP_DATASOURCE_TYPE))
          || "true".equalsIgnoreCase((String) logicalModelRep.getProperty(LogicalModel.PROPERTY_TARGET_TABLE_STAGED))) {
        targetTable = ((SqlPhysicalTable) domain.getPhysicalModels().get(0).getPhysicalTables().get(0)).getTargetTable();
        DatasourceDTO datasource = null;
        if (modelState != null) {
          datasource = deSerializeModelState(modelState);
        }
        if (datasource != null) {
          CsvTransformGenerator csvTransformGenerator =
              new CsvTransformGenerator(datasource.getCsvModelInfo(), AgileHelper.getDatabaseMeta());
          try {
            csvTransformGenerator.dropTable(targetTable);
          } catch (CsvTransformGeneratorException e) {
            // Table might not be there; that's OK — dropping it is what we were trying to do anyway.
            logger.warn(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_UNABLE_TO_DROP_TABLE", //$NON-NLS-1$
                targetTable, domainId, e.getLocalizedMessage()), e);
          }
          String fileName = datasource.getCsvModelInfo().getFileInfo().getFilename();
          FileUtils fileService = new FileUtils();
          if (fileName != null) {
            fileService.deleteFile(fileName);
          }
        }
      }
    }
    // If there is an associated Mondrian catalog, delete it.
    if (logicalModel.getProperty(LM_PROP_MONDRIAN_CATALOG_REF) != null) {
      IMondrianCatalogService service = PentahoSystem.get(IMondrianCatalogService.class, null);
      catalogRef = (String) logicalModel.getProperty(LM_PROP_MONDRIAN_CATALOG_REF);
      // Check that the catalog has not already been removed.
      if (service.getCatalog(catalogRef, PentahoSessionHolder.getSession()) != null) {
        service.removeCatalog(catalogRef, PentahoSessionHolder.getSession());
      }
    }
    getMetadataDomainRepository().removeModel(domainId, logicalModel.getId());
    if (logicalModelRep != null && !logicalModelRep.getId().equals(logicalModel.getId())) {
      getMetadataDomainRepository().removeModel(domainId, logicalModelRep.getId());
    }
    // Re-fetch the domain to see whether removing the model(s) emptied it.
    domain = getMetadataDomainRepository().getDomain(domainId);
    if (domain == null) {
      // Already deleted.
      return true;
    }
    if (domain.getLogicalModels() == null || domain.getLogicalModels().isEmpty()) {
      getMetadataDomainRepository().removeDomain(domainId);
    }
  } catch (MondrianCatalogServiceException me) {
    logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0020_UNABLE_TO_DELETE_CATALOG", //$NON-NLS-1$
        catalogRef, domainId, me.getLocalizedMessage()), me);
    throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceImpl.ERROR_0020_UNABLE_TO_DELETE_CATALOG", //$NON-NLS-1$
        catalogRef, domainId, me.getLocalizedMessage()), me);
  } catch (DomainStorageException dse) {
    // FIX: original logged ERROR_0017 but threw ERROR_0016 for the same failure;
    // use the same key for both so log and exception messages agree.
    logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0017_UNABLE_TO_STORE_DOMAIN", //$NON-NLS-1$
        domainId, dse.getLocalizedMessage()), dse);
    throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceImpl.ERROR_0017_UNABLE_TO_STORE_DOMAIN", //$NON-NLS-1$
        domainId, dse.getLocalizedMessage()), dse);
  } catch (DomainIdNullException dne) {
    logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_DOMAIN_IS_NULL", //$NON-NLS-1$
        dne.getLocalizedMessage()), dne);
    throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_DOMAIN_IS_NULL", //$NON-NLS-1$
        dne.getLocalizedMessage()), dne);
  }
  return true;
}
Aggregations