Use of org.pentaho.metadata.repository.DomainStorageException in project pentaho-platform by pentaho.
From the class SolutionFolderIT, method testOneLocaleFileDiscovery:
/**
 * Tests MetadataDomainRepository.getLocalePropertyFilenames() when one xmi resource is in the metadata folder with
 * one property file.
 * Update: also tests Domain.getLocaleCodes() when a new locale is added to an existing domain.
 */
@Test
public void testOneLocaleFileDiscovery() {
  try {
    final Domain steelWheels = loadDomain(STEEL_WHEELS, "./" + LEGACY_XMI_FILENAME);
    // get a list of locale codes
    final int initialLocaleSize = steelWheels.getLocaleCodes().length;
    // add a new locale
    steelWheels.addLocale(new LocaleType("en_US", "Test locale"));
    int localizationNewSize = steelWheels.getLocaleCodes().length;
    // we expect the size of the previous list + 1
    assertNotNull(localizationNewSize);
    assertEquals(localizationNewSize, initialLocaleSize + 1);
  } catch (IOException ioe) {
    fail(ioe.getMessage());
  } catch (DomainStorageException dse) {
    fail(dse.getMessage());
  } catch (Exception e) {
    e.printStackTrace();
  }
}
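For context on where DomainStorageException originates, here is a minimal sketch of persisting a Domain through IMetadataDomainRepository. The storeDomain(Domain, boolean) signature and the checked exceptions it declares are assumed from the pentaho-metadata API, and DomainStoreExample / storeOrOverwrite are hypothetical names, not part of either project:

import org.pentaho.metadata.model.Domain;
import org.pentaho.metadata.repository.DomainAlreadyExistsException;
import org.pentaho.metadata.repository.DomainIdNullException;
import org.pentaho.metadata.repository.DomainStorageException;
import org.pentaho.metadata.repository.IMetadataDomainRepository;

// Hypothetical helper illustrating the storeDomain() call that declares DomainStorageException.
public class DomainStoreExample {

  // Persists a domain, translating the repository's checked exceptions; the
  // boolean argument requests an overwrite if a domain with the same id exists.
  public static void storeOrOverwrite(IMetadataDomainRepository repository, Domain domain) {
    try {
      repository.storeDomain(domain, true);
    } catch (DomainIdNullException e) {
      throw new IllegalArgumentException("Domain has no id set", e);
    } catch (DomainAlreadyExistsException e) {
      throw new IllegalStateException("Domain exists and could not be overwritten", e);
    } catch (DomainStorageException e) {
      // thrown when the repository cannot persist the domain (e.g. serialization or I/O failure)
      throw new RuntimeException("Unable to store domain " + domain.getId(), e);
    }
  }
}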
Use of org.pentaho.metadata.repository.DomainStorageException in project data-access by pentaho.
From the class DSWDatasourceServiceImpl, method deleteLogicalModel:
public boolean deleteLogicalModel(String domainId, String modelName) throws DatasourceServiceException {
  if (!hasDataAccessPermission()) {
    // $NON-NLS-1$
    logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0001_PERMISSION_DENIED"));
    return false;
  }
  String catalogRef = null;
  String targetTable = null;
  try {
    // first load the model
    Domain domain = getMetadataDomainRepository().getDomain(domainId);
    ModelerWorkspace model = createModelerWorkspace();
    model.setDomain(domain);
    LogicalModel logicalModel = model.getLogicalModel(ModelerPerspective.ANALYSIS);
    if (logicalModel == null) {
      logicalModel = model.getLogicalModel(ModelerPerspective.REPORTING);
    }
    LogicalModel logicalModelRep = model.getLogicalModel(ModelerPerspective.REPORTING);
    // CSV-related data is bound to the reporting model, so some additional cleanup is needed here
    if (logicalModelRep != null) {
      String modelState = (String) logicalModelRep.getProperty(LM_PROP_DATASOURCE_MODEL);
      // TODO: use the edit story's stored info to do this
      if ("CSV".equals(logicalModelRep.getProperty(LM_PROP_DATASOURCE_TYPE))
          || "true".equalsIgnoreCase((String) logicalModelRep.getProperty(LogicalModel.PROPERTY_TARGET_TABLE_STAGED))) {
        targetTable = ((SqlPhysicalTable) domain.getPhysicalModels().get(0).getPhysicalTables().get(0)).getTargetTable();
        DatasourceDTO datasource = null;
        if (modelState != null) {
          datasource = deSerializeModelState(modelState);
        }
        if (datasource != null) {
          CsvTransformGenerator csvTransformGenerator =
              new CsvTransformGenerator(datasource.getCsvModelInfo(), AgileHelper.getDatabaseMeta());
          try {
            csvTransformGenerator.dropTable(targetTable);
          } catch (CsvTransformGeneratorException e) {
            // the table might not be there; that's OK, dropping it is what we were trying to do anyway
            logger.warn(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_UNABLE_TO_DROP_TABLE", targetTable, domainId, e.getLocalizedMessage()), // $NON-NLS-1$
                e);
          }
          String fileName = datasource.getCsvModelInfo().getFileInfo().getFilename();
          FileUtils fileService = new FileUtils();
          if (fileName != null) {
            fileService.deleteFile(fileName);
          }
        }
      }
    }
    // if there is an associated mondrian file, delete it
    if (logicalModel.getProperty(LM_PROP_MONDRIAN_CATALOG_REF) != null) {
      // remove the Mondrian schema
      IMondrianCatalogService service = PentahoSystem.get(IMondrianCatalogService.class, null);
      catalogRef = (String) logicalModel.getProperty(LM_PROP_MONDRIAN_CATALOG_REF);
      // check whether the catalog has already been removed
      if (service.getCatalog(catalogRef, PentahoSessionHolder.getSession()) != null) {
        service.removeCatalog(catalogRef, PentahoSessionHolder.getSession());
      }
    }
    getMetadataDomainRepository().removeModel(domainId, logicalModel.getId());
    if (logicalModelRep != null && !logicalModelRep.getId().equals(logicalModel.getId())) {
      getMetadataDomainRepository().removeModel(domainId, logicalModelRep.getId());
    }
    // get the updated domain
    domain = getMetadataDomainRepository().getDomain(domainId);
    if (domain == null) {
      // already deleted
      return true;
    }
    if (domain.getLogicalModels() == null || domain.getLogicalModels().isEmpty()) {
      getMetadataDomainRepository().removeDomain(domainId);
    }
  } catch (MondrianCatalogServiceException me) {
    logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0020_UNABLE_TO_DELETE_CATALOG", catalogRef, domainId, me.getLocalizedMessage()), // $NON-NLS-1$
        me);
    throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceImpl.ERROR_0020_UNABLE_TO_DELETE_CATALOG", catalogRef, domainId, me.getLocalizedMessage()), // $NON-NLS-1$
        me);
  } catch (DomainStorageException dse) {
    logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0017_UNABLE_TO_STORE_DOMAIN", domainId, dse.getLocalizedMessage()), // $NON-NLS-1$
        dse);
    throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceImpl.ERROR_0016_UNABLE_TO_STORE_DOMAIN", domainId, dse.getLocalizedMessage()), // $NON-NLS-1$
        dse);
  } catch (DomainIdNullException dne) {
    logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_DOMAIN_IS_NULL", dne.getLocalizedMessage()), // $NON-NLS-1$
        dne);
    throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_DOMAIN_IS_NULL", dne.getLocalizedMessage()), // $NON-NLS-1$
        dne);
  }
  return true;
}
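The repository calls that can raise DomainStorageException in the method above come down to removeModel() followed by a conditional removeDomain() once no logical models remain. A minimal sketch of that pattern follows; ModelCleanup and removeModelAndMaybeDomain are hypothetical names, and the throws clauses are assumed from the IMetadataDomainRepository interface, so treat this as an illustration rather than project code:

import org.pentaho.metadata.model.Domain;
import org.pentaho.metadata.repository.DomainIdNullException;
import org.pentaho.metadata.repository.DomainStorageException;
import org.pentaho.metadata.repository.IMetadataDomainRepository;

// Hypothetical helper condensing the remove-model / remove-empty-domain pattern used above.
public final class ModelCleanup {

  private ModelCleanup() {
  }

  // Removes a single logical model and, if the domain is left with no logical
  // models (or was deleted concurrently), drops or skips the domain itself.
  public static void removeModelAndMaybeDomain(IMetadataDomainRepository repository, String domainId,
      String modelId) throws DomainIdNullException, DomainStorageException {
    repository.removeModel(domainId, modelId);
    Domain domain = repository.getDomain(domainId);
    if (domain == null) {
      // the domain was already removed elsewhere; nothing left to clean up
      return;
    }
    if (domain.getLogicalModels() == null || domain.getLogicalModels().isEmpty()) {
      // no logical models remain, so drop the whole domain
      repository.removeDomain(domainId);
    }
  }
}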