
Example 16 with ModelInfo

Use of org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo in project data-access by pentaho.

From the class ModelerServiceIT, method domainForCsvDatasource_GeneratedWithCsvDatasourceImportStrategy.

@Test
public void domainForCsvDatasource_GeneratedWithCsvDatasourceImportStrategy() throws Exception {
    ModelInfo modelInfo = new ModelInfo();
    ColumnInfo[] columnInfos = new ColumnInfo[] { createColumnInfo("id", "title") };
    modelInfo.setColumns(columnInfos);
    modelerService = spy(modelerService);
    DatabaseMeta dbMeta = mock(DatabaseMeta.class);
    doReturn(dbMeta).when(modelerService).getDatabaseMeta();
    TableModelerSource source = mock(TableModelerSource.class);
    doReturn(source).when(modelerService).createTableModelerSource(any(DatabaseMeta.class), anyString(), anyString(), anyString());
    modelerService.generateCSVDomain(modelInfo);
    verify(modelerService).toColumns(columnInfos);
    // the key assertion: the domain is generated with CsvDatasourceImportStrategy
    verify(source).generateDomain(any(CsvDatasourceImportStrategy.class));
}
Also used: TableModelerSource (org.pentaho.agilebi.modeler.util.TableModelerSource), ModelInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo), CsvDatasourceImportStrategy (org.pentaho.metadata.automodel.importing.strategy.CsvDatasourceImportStrategy), ColumnInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.ColumnInfo), DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta), Test (org.junit.Test)
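
The test above calls a createColumnInfo helper that is not part of the snippet. A minimal sketch of such a helper, assuming ColumnInfo exposes plain setId and setTitle setters (an assumption, since the helper itself is not shown), could look like this:

private ColumnInfo createColumnInfo(String id, String title) {
    // Hypothetical helper: the setId/setTitle setters are assumed, not confirmed by the snippet.
    ColumnInfo columnInfo = new ColumnInfo();
    columnInfo.setId(id);       // column identifier carried into the generated domain
    columnInfo.setTitle(title); // human-readable column title
    return columnInfo;
}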

Example 17 with ModelInfo

Use of org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo in project data-access by pentaho.

From the class CsvDatasourceServiceImplTest, method stageFile_CsvFile.

@Test
public void stageFile_CsvFile() throws Exception {
    String filename = "stageFile_CsvFile.csv";
    File file = createTmpCsvFile(filename);
    file.deleteOnExit();
    try {
        ModelInfo modelInfo = service.stageFile(filename, ",", "\n", true, "utf-8");
        CsvFileInfo fileInfo = modelInfo.getFileInfo();
        assertEquals("One header row", 1, fileInfo.getHeaderRows());
        assertEquals("Header + content row", 2, fileInfo.getContents().size());
        assertEquals(filename, fileInfo.getTmpFilename());
    } finally {
        file.delete();
    }
}
Also used: CsvFileInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.CsvFileInfo), ModelInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo), Matchers.anyString (org.mockito.Matchers.anyString), File (java.io.File), Test (org.junit.Test)
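
The createTmpCsvFile helper used above is likewise not shown. A sketch of what it might do, writing exactly one header row and one content row so the assertions hold; the staging directory here (java.io.tmpdir) is an assumption and would have to match wherever stageFile actually reads uploaded files from:

private File createTmpCsvFile(String filename) throws IOException {
    // Hypothetical helper: the staging directory is assumed; stageFile may look elsewhere.
    File stagingDir = new File(System.getProperty("java.io.tmpdir"));
    File csvFile = new File(stagingDir, filename);
    try (PrintWriter writer = new PrintWriter(csvFile, "utf-8")) {
        writer.println("id,name"); // one header row
        writer.println("1,first"); // one content row, so getContents() has two lines
    }
    return csvFile;
}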

Example 18 with ModelInfo

Use of org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo in project data-access by pentaho.

From the class DSWDatasourceServiceImplTest, method testGenerateQueryDomain.

private void testGenerateQueryDomain(String modelName, String query, List<String> roleList, List<String> userList) throws DatasourceServiceException {
    ModelInfo modelInfo = mock(ModelInfo.class);
    when(modelInfo.getFileInfo()).thenReturn(mock(CsvFileInfo.class));
    DatasourceDTO datasourceDTO = new DatasourceDTO();
    datasourceDTO.setConnectionName(CONNECTION_NAME);
    datasourceDTO.setDatasourceName(CONNECTION_NAME);
    datasourceDTO.setCsvModelInfo(modelInfo);
    DatabaseConnection connectionSpy = spy(new DatabaseConnection());
    connectionSpy.setName(CONNECTION_NAME);
    connectionSpy.setDatabaseName("[database name 接続 <;>!@#$%^&*()_-=+.,]");
    connectionSpy.setDatabasePort("123456");
    connectionSpy.setHostname("[hostname 接続 <;>!@#$%^&*()_-=+.,]");
    connectionSpy.setPassword("[password 接続 <;>!@#$%^&*()_-=+.,]");
    connectionSpy.setUsername("[username 接続 <;>!@#$%^&*()_-=+.,]");
    connectionSpy.setDatabaseType(mock(IDatabaseType.class));
    doReturn(modelerService).when(dswService).createModelerService();
    doReturn(true).when(dswService).hasDataAccessPermission();
    doReturn(roleList).when(dswService).getPermittedRoleList();
    doReturn(userList).when(dswService).getPermittedUserList();
    doReturn(null).when(dswService).getGeoContext();
    doReturn(1).when(dswService).getDefaultAcls();
    QueryDatasourceSummary summary = dswService.generateQueryDomain(modelName, query, connectionSpy, datasourceDTO);
    try {
        verify(dswService).executeQuery("[connection &#25509;&#32154; &lt;;&gt;!@#$%^&amp;*()_-=+.,]", query, "1");
    } catch (Exception e) {
        e.printStackTrace();
    }
    verify(connectionSpy).setName("[connection &#25509;&#32154; &lt;;&gt;!@#$%^&amp;*()_-=+.,]");
    verify(connectionSpy).setDatabaseName("[database name &#25509;&#32154; &lt;;&gt;!@#$%^&amp;*()_-=+.,]");
    verify(connectionSpy, times(2)).setDatabasePort("123456");
    verify(connectionSpy).setHostname("[hostname &#25509;&#32154; &lt;;&gt;!@#$%^&amp;*()_-=+.,]");
    verify(connectionSpy).setPassword("[password &#25509;&#32154; &lt;;&gt;!@#$%^&amp;*()_-=+.,]");
    verify(connectionSpy).setUsername("[username &#25509;&#32154; &lt;;&gt;!@#$%^&amp;*()_-=+.,]");
    assertNotNull(summary);
    assertNotNull(summary.getDomain());
    assertEquals(CONNECTION_NAME, summary.getDomain().getId());
}
Also used: IDatabaseType (org.pentaho.database.model.IDatabaseType), CsvFileInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.CsvFileInfo), QueryDatasourceSummary (org.pentaho.platform.dataaccess.datasource.wizard.sources.query.QueryDatasourceSummary), ModelInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo), DatabaseConnection (org.pentaho.database.model.DatabaseConnection), IDatabaseConnection (org.pentaho.database.model.IDatabaseConnection), DatasourceDTO (org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO), ModelerException (org.pentaho.agilebi.modeler.ModelerException), DomainStorageException (org.pentaho.metadata.repository.DomainStorageException), DatasourceServiceException (org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException), DomainIdNullException (org.pentaho.metadata.repository.DomainIdNullException), SQLException (java.sql.SQLException), SqlQueriesNotSupportedException (org.pentaho.platform.dataaccess.datasource.wizard.service.SqlQueriesNotSupportedException), MondrianCatalogServiceException (org.pentaho.platform.plugin.action.mondrian.catalog.MondrianCatalogServiceException), DomainAlreadyExistsException (org.pentaho.metadata.repository.DomainAlreadyExistsException)
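
testGenerateQueryDomain above is a private helper rather than a test; in the test class it would be driven by @Test methods supplying different role and user lists. A hypothetical caller (the model name, query string, and empty ACL lists are illustrative only) might look like:

@Test
public void generateQueryDomain_withEmptyAclLists() throws DatasourceServiceException {
    // Illustrative invocation of the private helper shown above; arguments are placeholders.
    testGenerateQueryDomain("modelName", "select * from fake_table",
        Collections.<String>emptyList(), Collections.<String>emptyList());
}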

Example 19 with ModelInfo

Use of org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo in project data-access by pentaho.

From the class CsvDatasourceServiceImpl, method generateDomain.

public FileTransformStats generateDomain(DatasourceDTO datasourceDto) throws Exception {
    checkPermissions();
    synchronized (lock) {
        ModelInfo modelInfo = datasourceDto.getCsvModelInfo();
        IPentahoSession pentahoSession = null;
        try {
            pentahoSession = PentahoSessionHolder.getSession();
            KettleSystemListener.environmentInit(pentahoSession);
            String statsKey = FileTransformStats.class.getSimpleName() + "_" + modelInfo.getFileInfo().getTmpFilename(); // $NON-NLS-1$
            FileTransformStats stats = new FileTransformStats();
            pentahoSession.setAttribute(statsKey, stats);
            CsvTransformGenerator csvTransformGenerator = new CsvTransformGenerator(modelInfo, AgileHelper.getDatabaseMeta());
            csvTransformGenerator.setTransformStats(stats);
            try {
                csvTransformGenerator.dropTable(modelInfo.getStageTableName());
            } catch (CsvTransformGeneratorException e) {
                // this is OK; the table may not have existed
                logger.info("Could not drop table before staging"); // $NON-NLS-1$
            }
            csvTransformGenerator.createOrModifyTable(pentahoSession);
            // no longer need to truncate the table since we dropped it a few lines up, so just pass false
            csvTransformGenerator.loadTable(false, pentahoSession, true);
            ArrayList<String> combinedErrors = new ArrayList<String>(modelInfo.getCsvInputErrors());
            combinedErrors.addAll(modelInfo.getTableOutputErrors());
            if (stats.getErrors() != null && stats.getErrors().size() > 0) {
                stats.getErrors().addAll(combinedErrors);
            } else {
                stats.setErrors(combinedErrors);
            }
            // wait until the row loading is done
            while (!stats.isRowsFinished()) {
                Thread.sleep(200);
            }
            modelerWorkspace.setDomain(modelerService.generateCSVDomain(modelInfo));
            modelerWorkspace.getWorkspaceHelper().autoModelFlat(modelerWorkspace);
            modelerWorkspace.getWorkspaceHelper().autoModelRelationalFlat(modelerWorkspace);
            modelerWorkspace.setModelName(modelInfo.getDatasourceName());
            modelerWorkspace.getWorkspaceHelper().populateDomain(modelerWorkspace);
            Domain workspaceDomain = modelerWorkspace.getDomain();
            XStream xstream = new XStream();
            String serializedDto = xstream.toXML(datasourceDto);
            workspaceDomain.getLogicalModels().get(0).setProperty("datasourceModel", serializedDto);
            workspaceDomain.getLogicalModels().get(0).setProperty("DatasourceType", "CSV");
            prepareForSerialization(workspaceDomain);
            modelerService.serializeModels(workspaceDomain, modelerWorkspace.getModelName());
            stats.setDomain(modelerWorkspace.getDomain());
            return stats;
        } catch (Exception e) {
            logger.error(e.getMessage());
            throw e;
        } finally {
            if (pentahoSession != null) {
                pentahoSession.destroy();
            }
        }
    }
}
Also used: ModelInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo), CsvTransformGeneratorException (org.pentaho.platform.dataaccess.datasource.wizard.models.CsvTransformGeneratorException), IPentahoSession (org.pentaho.platform.api.engine.IPentahoSession), XStream (com.thoughtworks.xstream.XStream), CsvTransformGenerator (org.pentaho.platform.dataaccess.datasource.wizard.service.agile.CsvTransformGenerator), ArrayList (java.util.ArrayList), Domain (org.pentaho.metadata.model.Domain), DatasourceServiceException (org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException), IOException (java.io.IOException), FileNotFoundException (java.io.FileNotFoundException), FileTransformStats (org.pentaho.platform.dataaccess.datasource.wizard.sources.csv.FileTransformStats)
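
The while loop above polls stats.isRowsFinished() with no upper bound, so a stalled transform would block the request thread indefinitely. A sketch of a bounded variant of that wait; the ten-minute ceiling and the IllegalStateException are illustrative choices, not behavior taken from the project:

// Drop-in replacement for the unbounded polling loop (illustrative only).
// TimeUnit is java.util.concurrent.TimeUnit.
long deadline = System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(10);
while (!stats.isRowsFinished()) {
    if (System.currentTimeMillis() > deadline) {
        throw new IllegalStateException("Timed out waiting for the CSV staging rows to finish loading");
    }
    Thread.sleep(200);
}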

Example 20 with ModelInfo

Use of org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo in project data-access by pentaho.

From the class CsvUtils, method getFileContents.

public ModelInfo getFileContents(String project, String name, String delimiter, String enclosure, int rows, boolean isFirstRowHeader, String encoding) throws Exception {
    String path;
    if (name.endsWith(".tmp")) { // $NON-NLS-1$
        path = PentahoSystem.getApplicationContext().getSolutionPath(TMP_FILE_PATH);
    } else {
        String relativePath = PentahoSystem.getSystemSetting("file-upload-defaults/relative-path", // $NON-NLS-1$
            String.valueOf(DEFAULT_RELATIVE_UPLOAD_FILE_PATH));
        path = PentahoSystem.getApplicationContext().getSolutionPath(relativePath);
    }
    String fileLocation = path + name;
    ModelInfo result = new ModelInfo();
    CsvFileInfo fileInfo = new CsvFileInfo();
    fileInfo.setTmpFilename(name);
    result.setFileInfo(fileInfo);
    fileInfo.setContents(getLinesList(fileLocation, rows, encoding));
    fileInfo.setDelimiter(delimiter);
    fileInfo.setEnclosure(enclosure);
    fileInfo.setHeaderRows(0);
    // now try to generate some columns
    return result;
}
Also used: CsvFileInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.CsvFileInfo), ModelInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo)
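
A hedged usage sketch of getFileContents: the file name, delimiter, enclosure, preview row count, and encoding are illustrative, and the call assumes both that CsvUtils can be instantiated directly and that PentahoSystem is already initialized so the solution-path lookups succeed:

// Illustrative only: arguments are placeholders, and PentahoSystem must already be initialized.
CsvUtils csvUtils = new CsvUtils();
ModelInfo preview = csvUtils.getFileContents("project", "orders.csv", ",", "\"", 10, true, "utf-8");
CsvFileInfo fileInfo = preview.getFileInfo();
System.out.println("Previewed " + fileInfo.getContents().size() + " line(s) of " + fileInfo.getTmpFilename());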

Aggregations

ModelInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo): 23
IPentahoSession (org.pentaho.platform.api.engine.IPentahoSession): 12
StandaloneSession (org.pentaho.platform.engine.core.system.StandaloneSession): 11
CsvTransformGeneratorException (org.pentaho.platform.dataaccess.datasource.wizard.models.CsvTransformGeneratorException): 10
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 6
CsvFileInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.CsvFileInfo): 6
Test (org.junit.Test): 4
File (java.io.File): 3
ColumnInfo (org.pentaho.platform.dataaccess.datasource.wizard.models.ColumnInfo): 3
DatasourceServiceException (org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException): 3
XStream (com.thoughtworks.xstream.XStream): 2
FileNotFoundException (java.io.FileNotFoundException): 2
IOException (java.io.IOException): 2
Date (java.util.Date): 2
Matchers.anyString (org.mockito.Matchers.anyString): 2
Database (org.pentaho.di.core.database.Database): 2
CsvUtils (org.pentaho.platform.dataaccess.datasource.wizard.csv.CsvUtils): 2
DataRow (org.pentaho.platform.dataaccess.datasource.wizard.models.DataRow): 2
DatasourceDTO (org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO): 2
FileTransformStats (org.pentaho.platform.dataaccess.datasource.wizard.sources.csv.FileTransformStats): 2