Use of org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator in project data-access by pentaho:
class CsvDatasourceServiceImpl, method generateDomain.
public FileTransformStats generateDomain(DatasourceDTO datasourceDto) throws Exception {
  checkPermissions();
  synchronized (lock) {
    ModelInfo modelInfo = datasourceDto.getCsvModelInfo();
    IPentahoSession pentahoSession = null;
    try {
      pentahoSession = PentahoSessionHolder.getSession();
      KettleSystemListener.environmentInit(pentahoSession);
      String statsKey =
          FileTransformStats.class.getSimpleName() + "_" + modelInfo.getFileInfo().getTmpFilename(); // $NON-NLS-1$
      FileTransformStats stats = new FileTransformStats();
      pentahoSession.setAttribute(statsKey, stats);
      CsvTransformGenerator csvTransformGenerator =
          new CsvTransformGenerator(modelInfo, AgileHelper.getDatabaseMeta());
      csvTransformGenerator.setTransformStats(stats);
      try {
        csvTransformGenerator.dropTable(modelInfo.getStageTableName());
      } catch (CsvTransformGeneratorException e) {
        // This is OK: the table may not have existed.
        logger.info("Could not drop table before staging"); // $NON-NLS-1$
      }
      csvTransformGenerator.createOrModifyTable(pentahoSession);
      // No longer need to truncate the table since we dropped it a few lines up, so just pass false.
      csvTransformGenerator.loadTable(false, pentahoSession, true);
      ArrayList<String> combinedErrors = new ArrayList<String>(modelInfo.getCsvInputErrors());
      combinedErrors.addAll(modelInfo.getTableOutputErrors());
      if (stats.getErrors() != null && stats.getErrors().size() > 0) {
        stats.getErrors().addAll(combinedErrors);
      } else {
        stats.setErrors(combinedErrors);
      }
      // Wait until the row load is done.
      while (!stats.isRowsFinished()) {
        Thread.sleep(200);
      }
      modelerWorkspace.setDomain(modelerService.generateCSVDomain(modelInfo));
      modelerWorkspace.getWorkspaceHelper().autoModelFlat(modelerWorkspace);
      modelerWorkspace.getWorkspaceHelper().autoModelRelationalFlat(modelerWorkspace);
      modelerWorkspace.setModelName(modelInfo.getDatasourceName());
      modelerWorkspace.getWorkspaceHelper().populateDomain(modelerWorkspace);
      Domain workspaceDomain = modelerWorkspace.getDomain();
      XStream xstream = new XStream();
      String serializedDto = xstream.toXML(datasourceDto);
      workspaceDomain.getLogicalModels().get(0).setProperty("datasourceModel", serializedDto);
      workspaceDomain.getLogicalModels().get(0).setProperty("DatasourceType", "CSV");
      prepareForSerialization(workspaceDomain);
      modelerService.serializeModels(workspaceDomain, modelerWorkspace.getModelName());
      stats.setDomain(modelerWorkspace.getDomain());
      return stats;
    } catch (Exception e) {
      logger.error(e.getMessage());
      throw e;
    } finally {
      if (pentahoSession != null) {
        pentahoSession.destroy();
      }
    }
  }
}
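For orientation, a minimal caller sketch for generateDomain follows. This is an illustration, not code from the project: constructing the service directly and the DTO values shown are assumptions, since in the actual platform the DTO is populated by the CSV import wizard.

// Hypothetical caller sketch: stage an uploaded CSV and inspect the
// resulting transform statistics.
DatasourceDTO dto = new DatasourceDTO();            // assumed to be filled in by the upload wizard
dto.setDatasourceName("SalesData");                 // hypothetical datasource name
CsvDatasourceServiceImpl service = new CsvDatasourceServiceImpl();
try {
  // generateDomain blocks (polling stats.isRowsFinished()) until loading finishes.
  FileTransformStats stats = service.generateDomain(dto);
  if (stats.getErrors() != null && !stats.getErrors().isEmpty()) {
    for (String error : stats.getErrors()) {
      System.err.println("CSV staging error: " + error);
    }
  }
} catch (Exception e) {
  // generateDomain declares throws Exception; staging or modeling failures surface here.
  e.printStackTrace();
}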
Use of org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator in project data-access by pentaho:
class DSWDatasourceServiceImpl, method prepareForSerializaton.
public void prepareForSerializaton(Domain domain) {
  /*
   * This method is responsible for cleaning up legacy information when
   * changing datasource types and also manages CSV files for CSV-based
   * datasources.
   */
  String relativePath = PentahoSystem.getSystemSetting("file-upload-defaults/relative-path", // $NON-NLS-1$
      String.valueOf(FileUtils.DEFAULT_RELATIVE_UPLOAD_FILE_PATH));
  String path = PentahoSystem.getApplicationContext().getSolutionPath(relativePath);
  LogicalModel logicalModel = domain.getLogicalModels().get(0);
  String modelState = (String) logicalModel.getProperty("datasourceModel");
  if (modelState != null) {
    XStream xs = new XStream();
    DatasourceDTO datasource = (DatasourceDTO) xs.fromXML(modelState);
    CsvFileInfo csvFileInfo = datasource.getCsvModelInfo().getFileInfo();
    String csvFileName = csvFileInfo.getFilename();
    if (csvFileName != null) {
      // Cleanup logic when updating from a CSV datasource to a SQL datasource.
      csvFileInfo.setFilename(null);
      csvFileInfo.setTmpFilename(null);
      csvFileInfo.setFriendlyFilename(null);
      csvFileInfo.setContents(null);
      csvFileInfo.setEncoding(null);
      // Delete the CSV file.
      File csvFile = new File(path + File.separatorChar + csvFileName);
      if (csvFile.exists()) {
        csvFile.delete();
      }
      // Drop the staging database table.
      CsvTransformGenerator csvTransformGenerator =
          new CsvTransformGenerator(datasource.getCsvModelInfo(), AgileHelper.getDatabaseMeta());
      try {
        csvTransformGenerator.dropTable(datasource.getCsvModelInfo().getStageTableName());
      } catch (CsvTransformGeneratorException e) {
        logger.error(e);
      }
    }
    // Update the datasourceModel property with the new model state.
    modelState = xs.toXML(datasource);
    logicalModel.setProperty("datasourceModel", modelState);
  }
}
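The method above relies on the datasourceModel property holding an XStream-serialized DatasourceDTO. A minimal sketch of that round trip, with a hypothetical DTO value, might look like this:

// Hypothetical round-trip sketch: the "datasourceModel" property stores the
// DTO as XStream XML; prepareForSerializaton re-reads it, nulls out the CSV
// file references, drops the staging table, and writes the updated XML back.
XStream xs = new XStream();
DatasourceDTO dto = new DatasourceDTO();
dto.setDatasourceName("SalesData");                       // hypothetical value
String modelState = xs.toXML(dto);                        // what gets stored on the logical model
DatasourceDTO restored = (DatasourceDTO) xs.fromXML(modelState);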
Use of org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator in project data-access by pentaho:
class DSWDatasourceServiceImpl, method deleteLogicalModel.
public boolean deleteLogicalModel(String domainId, String modelName) throws DatasourceServiceException {
  if (!hasDataAccessPermission()) {
    logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0001_PERMISSION_DENIED")); // $NON-NLS-1$
    return false;
  }
  String catalogRef = null;
  String targetTable = null;
  try {
    // First, load the model.
    Domain domain = getMetadataDomainRepository().getDomain(domainId);
    ModelerWorkspace model = createModelerWorkspace();
    model.setDomain(domain);
    LogicalModel logicalModel = model.getLogicalModel(ModelerPerspective.ANALYSIS);
    if (logicalModel == null) {
      logicalModel = model.getLogicalModel(ModelerPerspective.REPORTING);
    }
    LogicalModel logicalModelRep = model.getLogicalModel(ModelerPerspective.REPORTING);
    // CSV-related data is bound to the reporting model, so some additional cleanup is needed here.
    if (logicalModelRep != null) {
      String modelState = (String) logicalModelRep.getProperty(LM_PROP_DATASOURCE_MODEL);
      // TODO: use the edit story's stored info to do this
      if ("CSV".equals(logicalModelRep.getProperty(LM_PROP_DATASOURCE_TYPE))
          || "true".equalsIgnoreCase((String) logicalModelRep.getProperty(LogicalModel.PROPERTY_TARGET_TABLE_STAGED))) {
        targetTable = ((SqlPhysicalTable) domain.getPhysicalModels().get(0).getPhysicalTables().get(0)).getTargetTable();
        DatasourceDTO datasource = null;
        if (modelState != null) {
          datasource = deSerializeModelState(modelState);
        }
        if (datasource != null) {
          CsvTransformGenerator csvTransformGenerator =
              new CsvTransformGenerator(datasource.getCsvModelInfo(), AgileHelper.getDatabaseMeta());
          try {
            csvTransformGenerator.dropTable(targetTable);
          } catch (CsvTransformGeneratorException e) {
            // The table might not be there; that is OK, since dropping it is what we were trying to do anyway.
            logger.warn(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_UNABLE_TO_DROP_TABLE", // $NON-NLS-1$
                targetTable, domainId, e.getLocalizedMessage()), e);
          }
          String fileName = datasource.getCsvModelInfo().getFileInfo().getFilename();
          FileUtils fileService = new FileUtils();
          if (fileName != null) {
            fileService.deleteFile(fileName);
          }
        }
      }
    }
    // If there is an associated Mondrian schema, delete it.
    if (logicalModel.getProperty(LM_PROP_MONDRIAN_CATALOG_REF) != null) {
      IMondrianCatalogService service = PentahoSystem.get(IMondrianCatalogService.class, null);
      catalogRef = (String) logicalModel.getProperty(LM_PROP_MONDRIAN_CATALOG_REF);
      // Skip removal if the catalog has already been removed.
      if (service.getCatalog(catalogRef, PentahoSessionHolder.getSession()) != null) {
        service.removeCatalog(catalogRef, PentahoSessionHolder.getSession());
      }
    }
    getMetadataDomainRepository().removeModel(domainId, logicalModel.getId());
    if (logicalModelRep != null && !logicalModelRep.getId().equals(logicalModel.getId())) {
      getMetadataDomainRepository().removeModel(domainId, logicalModelRep.getId());
    }
    // Get the updated domain.
    domain = getMetadataDomainRepository().getDomain(domainId);
    if (domain == null) {
      // Already deleted.
      return true;
    }
    if (domain.getLogicalModels() == null || domain.getLogicalModels().isEmpty()) {
      getMetadataDomainRepository().removeDomain(domainId);
    }
  } catch (MondrianCatalogServiceException me) {
    logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0020_UNABLE_TO_DELETE_CATALOG", // $NON-NLS-1$
        catalogRef, domainId, me.getLocalizedMessage()), me);
    throw new DatasourceServiceException(Messages.getErrorString(
        "DatasourceServiceImpl.ERROR_0020_UNABLE_TO_DELETE_CATALOG", // $NON-NLS-1$
        catalogRef, domainId, me.getLocalizedMessage()), me);
  } catch (DomainStorageException dse) {
    logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0017_UNABLE_TO_STORE_DOMAIN", // $NON-NLS-1$
        domainId, dse.getLocalizedMessage()), dse);
    throw new DatasourceServiceException(Messages.getErrorString(
        "DatasourceServiceImpl.ERROR_0016_UNABLE_TO_STORE_DOMAIN", // $NON-NLS-1$
        domainId, dse.getLocalizedMessage()), dse);
  } catch (DomainIdNullException dne) {
    logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_DOMAIN_IS_NULL", // $NON-NLS-1$
        dne.getLocalizedMessage()), dne);
    throw new DatasourceServiceException(Messages.getErrorString(
        "DatasourceServiceImpl.ERROR_0019_DOMAIN_IS_NULL", dne.getLocalizedMessage()), dne); // $NON-NLS-1$
  }
  return true;
}
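A hedged usage sketch for deleteLogicalModel follows. The domain and model identifiers are placeholders, and in practice the service would be obtained through the platform's wiring rather than constructed directly.

// Hypothetical usage: delete a DSW model and let the service clean up any
// staged CSV table, uploaded file, and Mondrian catalog behind it.
DSWDatasourceServiceImpl service = new DSWDatasourceServiceImpl();
try {
  // Placeholder identifiers: the domain id and the logical model name within it.
  boolean deleted = service.deleteLogicalModel("SalesData.xmi", "MODEL_1");
  if (!deleted) {
    System.err.println("Deletion refused: caller lacks data-access permission");
  }
} catch (DatasourceServiceException e) {
  // Thrown when Mondrian catalog removal or domain storage operations fail.
  e.printStackTrace();
}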