
Example 1 with CsvTransformGenerator

Use of org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator in project data-access by pentaho.

In the class CsvDatasourceServiceImpl, the method generateDomain:

public FileTransformStats generateDomain(DatasourceDTO datasourceDto) throws Exception {
    checkPermissions();
    synchronized (lock) {
        ModelInfo modelInfo = datasourceDto.getCsvModelInfo();
        IPentahoSession pentahoSession = null;
        try {
            pentahoSession = PentahoSessionHolder.getSession();
            KettleSystemListener.environmentInit(pentahoSession);
            String statsKey = FileTransformStats.class.getSimpleName() + "_" + modelInfo.getFileInfo().getTmpFilename(); // $NON-NLS-1$
            FileTransformStats stats = new FileTransformStats();
            pentahoSession.setAttribute(statsKey, stats);
            CsvTransformGenerator csvTransformGenerator = new CsvTransformGenerator(modelInfo, AgileHelper.getDatabaseMeta());
            csvTransformGenerator.setTransformStats(stats);
            try {
                csvTransformGenerator.dropTable(modelInfo.getStageTableName());
            } catch (CsvTransformGeneratorException e) {
                // this is ok, the table may not have existed.
                logger.info("Could not drop table before staging"); // $NON-NLS-1$
            }
            csvTransformGenerator.createOrModifyTable(pentahoSession);
            // no longer need to truncate the table since we dropped it a few lines up, so just pass false
            csvTransformGenerator.loadTable(false, pentahoSession, true);
            ArrayList<String> combinedErrors = new ArrayList<String>(modelInfo.getCsvInputErrors());
            combinedErrors.addAll(modelInfo.getTableOutputErrors());
            if (stats.getErrors() != null && stats.getErrors().size() > 0) {
                stats.getErrors().addAll(combinedErrors);
            } else {
                stats.setErrors(combinedErrors);
            }
            // wait until it is done
            while (!stats.isRowsFinished()) {
                Thread.sleep(200);
            }
            modelerWorkspace.setDomain(modelerService.generateCSVDomain(modelInfo));
            modelerWorkspace.getWorkspaceHelper().autoModelFlat(modelerWorkspace);
            modelerWorkspace.getWorkspaceHelper().autoModelRelationalFlat(modelerWorkspace);
            modelerWorkspace.setModelName(modelInfo.getDatasourceName());
            modelerWorkspace.getWorkspaceHelper().populateDomain(modelerWorkspace);
            Domain workspaceDomain = modelerWorkspace.getDomain();
            XStream xstream = new XStream();
            String serializedDto = xstream.toXML(datasourceDto);
            workspaceDomain.getLogicalModels().get(0).setProperty("datasourceModel", serializedDto);
            workspaceDomain.getLogicalModels().get(0).setProperty("DatasourceType", "CSV");
            prepareForSerialization(workspaceDomain);
            modelerService.serializeModels(workspaceDomain, modelerWorkspace.getModelName());
            stats.setDomain(modelerWorkspace.getDomain());
            return stats;
        } catch (Exception e) {
            logger.error(e.getMessage());
            throw e;
        } finally {
            if (pentahoSession != null) {
                pentahoSession.destroy();
            }
        }
    }
}
Also used : ModelInfo(org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo) CsvTransformGeneratorException(org.pentaho.platform.dataaccess.datasource.wizard.models.CsvTransformGeneratorException) IPentahoSession(org.pentaho.platform.api.engine.IPentahoSession) XStream(com.thoughtworks.xstream.XStream) CsvTransformGenerator(org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator) ArrayList(java.util.ArrayList) Domain(org.pentaho.metadata.model.Domain) DatasourceServiceException(org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException) IOException(java.io.IOException) FileNotFoundException(java.io.FileNotFoundException) CsvTransformGeneratorException(org.pentaho.platform.dataaccess.datasource.wizard.models.CsvTransformGeneratorException) FileTransformStats(org.pentaho.platform.dataaccess.datasource.wizard.sources.csv.FileTransformStats)
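
The example above drives the whole domain-generation flow; the staging part of it (drop the stage table if present, create it, then load it) can be read in isolation. The sketch below is illustrative only and assumes just the CsvTransformGenerator calls shown above: the class and method names (CsvStagingSketch, stageCsv) are hypothetical, and the DatabaseMeta parameter stands in for the AgileHelper.getDatabaseMeta() call used in the example.

import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.platform.api.engine.IPentahoSession;
import org.pentaho.platform.dataaccess.datasource.wizard.models.CsvTransformGeneratorException;
import org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo;
import org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator;
import org.pentaho.platform.dataaccess.datasource.wizard.sources.csv.FileTransformStats;

// Hypothetical helper class; not part of the data-access project.
public class CsvStagingSketch {

    // Reproduces the drop/create/load staging sequence from the example above.
    public static FileTransformStats stageCsv(ModelInfo modelInfo, DatabaseMeta databaseMeta,
            IPentahoSession session) throws Exception {
        FileTransformStats stats = new FileTransformStats();
        CsvTransformGenerator generator = new CsvTransformGenerator(modelInfo, databaseMeta);
        generator.setTransformStats(stats);
        try {
            // Dropping a staging table that was never created fails; that is expected and ignored.
            generator.dropTable(modelInfo.getStageTableName());
        } catch (CsvTransformGeneratorException e) {
            // The table did not exist yet; nothing to clean up.
        }
        generator.createOrModifyTable(session);    // (re)create the staging table
        generator.loadTable(false, session, true); // truncate = false: the table was just dropped
        return stats;
    }
}

A caller would obtain the session from PentahoSessionHolder.getSession() and the DatabaseMeta from AgileHelper.getDatabaseMeta(), exactly as the example does.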

Example 2 with CsvTransformGenerator

Use of org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator in project data-access by pentaho.

In the class DSWDatasourceServiceImpl, the method prepareForSerializaton:

public void prepareForSerializaton(Domain domain) {
    /*
     * This method is responsible for cleaning up legacy information when
     * changing datasource types and also manages CSV files for CSV based
     * datasources.
     */
    String relativePath = PentahoSystem.getSystemSetting("file-upload-defaults/relative-path", // $NON-NLS-1$
            String.valueOf(FileUtils.DEFAULT_RELATIVE_UPLOAD_FILE_PATH));
    String path = PentahoSystem.getApplicationContext().getSolutionPath(relativePath);
    LogicalModel logicalModel = domain.getLogicalModels().get(0);
    String modelState = (String) logicalModel.getProperty("datasourceModel");
    if (modelState != null) {
        XStream xs = new XStream();
        DatasourceDTO datasource = (DatasourceDTO) xs.fromXML(modelState);
        CsvFileInfo csvFileInfo = datasource.getCsvModelInfo().getFileInfo();
        String csvFileName = csvFileInfo.getFilename();
        if (csvFileName != null) {
            // Cleanup logic when updating from CSV datasource to SQL
            // datasource.
            csvFileInfo.setFilename(null);
            csvFileInfo.setTmpFilename(null);
            csvFileInfo.setFriendlyFilename(null);
            csvFileInfo.setContents(null);
            csvFileInfo.setEncoding(null);
            // Delete CSV file.
            File csvFile = new File(path + File.separatorChar + csvFileName);
            if (csvFile.exists()) {
                csvFile.delete();
            }
            // Delete STAGING database table.
            CsvTransformGenerator csvTransformGenerator = new CsvTransformGenerator(datasource.getCsvModelInfo(), AgileHelper.getDatabaseMeta());
            try {
                csvTransformGenerator.dropTable(datasource.getCsvModelInfo().getStageTableName());
            } catch (CsvTransformGeneratorException e) {
                logger.error(e);
            }
        }
        // Update datasourceModel with the new modelState
        modelState = xs.toXML(datasource);
        logicalModel.setProperty("datasourceModel", modelState);
    }
}
Also used : LogicalModel(org.pentaho.metadata.model.LogicalModel) CsvFileInfo(org.pentaho.platform.dataaccess.datasource.wizard.models.CsvFileInfo) CsvTransformGeneratorException(org.pentaho.platform.dataaccess.datasource.wizard.models.CsvTransformGeneratorException) XStream(com.thoughtworks.xstream.XStream) CsvTransformGenerator(org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator) DatasourceDTO(org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO) File(java.io.File)
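
Since the serialized DatasourceDTO is stored on the logical model as the "datasourceModel" property, the read-modify-write pattern in this method can be summarized in a few lines. The sketch below is illustrative (the class and method names are hypothetical) and uses only the XStream round trip and LogicalModel property calls shown above.

import com.thoughtworks.xstream.XStream;
import org.pentaho.metadata.model.LogicalModel;
import org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO;

// Hypothetical illustration class; not part of the data-access project.
public class DatasourceModelRoundTripSketch {

    // Reads the serialized DatasourceDTO off the logical model and, after any in-place
    // changes to the DTO, writes the updated XML back to the same property.
    public static void updateDatasourceModel(LogicalModel logicalModel) {
        String modelState = (String) logicalModel.getProperty("datasourceModel");
        if (modelState == null) {
            return; // nothing stored on this model
        }
        XStream xs = new XStream();
        DatasourceDTO datasource = (DatasourceDTO) xs.fromXML(modelState);
        // ... mutate the DTO here, e.g. clear stale CSV file references as the method above does ...
        logicalModel.setProperty("datasourceModel", xs.toXML(datasource));
    }
}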

Example 3 with CsvTransformGenerator

Use of org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator in project data-access by pentaho.

In the class DSWDatasourceServiceImpl, the method deleteLogicalModel:

public boolean deleteLogicalModel(String domainId, String modelName) throws DatasourceServiceException {
    if (!hasDataAccessPermission()) {
        logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0001_PERMISSION_DENIED")); // $NON-NLS-1$
        return false;
    }
    String catalogRef = null;
    String targetTable = null;
    try {
        // first load the model
        Domain domain = getMetadataDomainRepository().getDomain(domainId);
        ModelerWorkspace model = createModelerWorkspace();
        model.setDomain(domain);
        LogicalModel logicalModel = model.getLogicalModel(ModelerPerspective.ANALYSIS);
        if (logicalModel == null) {
            logicalModel = model.getLogicalModel(ModelerPerspective.REPORTING);
        }
        LogicalModel logicalModelRep = model.getLogicalModel(ModelerPerspective.REPORTING);
        // CSV-related data is bound to the reporting model, so some additional cleanup is needed here
        if (logicalModelRep != null) {
            String modelState = (String) logicalModelRep.getProperty(LM_PROP_DATASOURCE_MODEL);
            // TODO: use the edit story's stored info to do this
            if ("CSV".equals(logicalModelRep.getProperty(LM_PROP_DATASOURCE_TYPE)) || "true".equalsIgnoreCase((String) logicalModelRep.getProperty(LogicalModel.PROPERTY_TARGET_TABLE_STAGED))) {
                targetTable = ((SqlPhysicalTable) domain.getPhysicalModels().get(0).getPhysicalTables().get(0)).getTargetTable();
                DatasourceDTO datasource = null;
                if (modelState != null) {
                    datasource = deSerializeModelState(modelState);
                }
                if (datasource != null) {
                    CsvTransformGenerator csvTransformGenerator = new CsvTransformGenerator(datasource.getCsvModelInfo(), AgileHelper.getDatabaseMeta());
                    try {
                        csvTransformGenerator.dropTable(targetTable);
                    } catch (CsvTransformGeneratorException e) {
                        // The table might not be there; that's OK, since dropping it is what we were trying to do anyway.
                        logger.warn(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_UNABLE_TO_DROP_TABLE", targetTable, domainId, e.getLocalizedMessage()), e); // $NON-NLS-1$
                    }
                    String fileName = datasource.getCsvModelInfo().getFileInfo().getFilename();
                    FileUtils fileService = new FileUtils();
                    if (fileName != null) {
                        fileService.deleteFile(fileName);
                    }
                }
            }
        }
        // if associated mondrian file, delete
        if (logicalModel.getProperty(LM_PROP_MONDRIAN_CATALOG_REF) != null) {
            // remove Mondrian schema
            IMondrianCatalogService service = PentahoSystem.get(IMondrianCatalogService.class, null);
            catalogRef = (String) logicalModel.getProperty(LM_PROP_MONDRIAN_CATALOG_REF);
            // check if the model is not already removed
            if (service.getCatalog(catalogRef, PentahoSessionHolder.getSession()) != null) {
                service.removeCatalog(catalogRef, PentahoSessionHolder.getSession());
            }
        }
        getMetadataDomainRepository().removeModel(domainId, logicalModel.getId());
        if (logicalModelRep != null && !logicalModelRep.getId().equals(logicalModel.getId())) {
            getMetadataDomainRepository().removeModel(domainId, logicalModelRep.getId());
        }
        // get updated domain
        domain = getMetadataDomainRepository().getDomain(domainId);
        if (domain == null) {
            // already deleted
            return true;
        }
        if (domain.getLogicalModels() == null || domain.getLogicalModels().isEmpty()) {
            getMetadataDomainRepository().removeDomain(domainId);
        }
    } catch (MondrianCatalogServiceException me) {
        logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0020_UNABLE_TO_DELETE_CATALOG", catalogRef, domainId, me.getLocalizedMessage()), // $NON-NLS-1$
        me);
        throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceImpl.ERROR_0020_UNABLE_TO_DELETE_CATALOG", catalogRef, domainId, me.getLocalizedMessage()), // $NON-NLS-1$
        me);
    } catch (DomainStorageException dse) {
        logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0017_UNABLE_TO_STORE_DOMAIN", domainId, dse.getLocalizedMessage()), // $NON-NLS-1$
        dse);
        throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceImpl.ERROR_0016_UNABLE_TO_STORE_DOMAIN", domainId, dse.getLocalizedMessage()), // $NON-NLS-1$
        dse);
    } catch (DomainIdNullException dne) {
        logger.error(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_DOMAIN_IS_NULL", dne.getLocalizedMessage()), // $NON-NLS-1$
        dne);
        throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceImpl.ERROR_0019_DOMAIN_IS_NULL", dne.getLocalizedMessage()), // $NON-NLS-1$
        dne);
    }
    return true;
}
Also used : CsvTransformGeneratorException(org.pentaho.platform.dataaccess.datasource.wizard.models.CsvTransformGeneratorException) FileUtils(org.pentaho.platform.dataaccess.datasource.wizard.csv.FileUtils) DomainIdNullException(org.pentaho.metadata.repository.DomainIdNullException) DatasourceDTO(org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO) IMondrianCatalogService(org.pentaho.platform.plugin.action.mondrian.catalog.IMondrianCatalogService) LogicalModel(org.pentaho.metadata.model.LogicalModel) MondrianCatalogServiceException(org.pentaho.platform.plugin.action.mondrian.catalog.MondrianCatalogServiceException) DomainStorageException(org.pentaho.metadata.repository.DomainStorageException) CsvTransformGenerator(org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator) Domain(org.pentaho.metadata.model.Domain) ModelerWorkspace(org.pentaho.agilebi.modeler.ModelerWorkspace) DatasourceServiceException(org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException)
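
The cleanup performed for CSV-backed models (drop the staging table, tolerate a failed drop, delete the uploaded file) follows a defensive pattern that can be stated compactly. The sketch below is illustrative only: CsvCleanupSketch and cleanUp are hypothetical names, and it reuses just the CsvTransformGenerator and FileUtils calls shown in the example.

import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.platform.dataaccess.datasource.wizard.csv.FileUtils;
import org.pentaho.platform.dataaccess.datasource.wizard.models.CsvTransformGeneratorException;
import org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO;
import org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator;

// Hypothetical cleanup helper; not part of the data-access project.
public class CsvCleanupSketch {

    // Drops the staging table (tolerating "table not found") and deletes the uploaded CSV
    // file if one is recorded on the DTO, mirroring the defensive pattern above.
    public static void cleanUp(DatasourceDTO datasource, String targetTable, DatabaseMeta databaseMeta)
            throws Exception {
        CsvTransformGenerator generator =
                new CsvTransformGenerator(datasource.getCsvModelInfo(), databaseMeta);
        try {
            generator.dropTable(targetTable);
        } catch (CsvTransformGeneratorException e) {
            // The table may already be gone; dropping it was the goal, so keep going.
        }
        String fileName = datasource.getCsvModelInfo().getFileInfo().getFilename();
        if (fileName != null) {
            new FileUtils().deleteFile(fileName); // remove the uploaded CSV, as in the example
        }
    }
}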

Aggregations

CsvTransformGeneratorException (org.pentaho.platform.dataaccess.datasource.wizard.models.CsvTransformGeneratorException): 3
CsvTransformGenerator (org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator): 3
XStream (com.thoughtworks.xstream.XStream): 2
Domain (org.pentaho.metadata.model.Domain): 2
LogicalModel (org.pentaho.metadata.model.LogicalModel): 2
DatasourceDTO (org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO): 2
DatasourceServiceException (org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException): 2
File (java.io.File): 1
FileNotFoundException (java.io.FileNotFoundException): 1
IOException (java.io.IOException): 1
ArrayList (java.util.ArrayList): 1
ModelerWorkspace (org.pentaho.agilebi.modeler.ModelerWorkspace): 1
DomainIdNullException (org.pentaho.metadata.repository.DomainIdNullException): 1
DomainStorageException (org.pentaho.metadata.repository.DomainStorageException): 1
IPentahoSession (org.pentaho.platform.api.engine.IPentahoSession): 1
FileUtils (org.pentaho.platform.dataaccess.datasource.wizard.csv.FileUtils): 1
CsvFileInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.CsvFileInfo): 1
ModelInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo): 1
FileTransformStats (org.pentaho.platform.dataaccess.datasource.wizard.sources.csv.FileTransformStats): 1
IMondrianCatalogService (org.pentaho.platform.plugin.action.mondrian.catalog.IMondrianCatalogService): 1