
Example 1 with SQLMetaData

Use of org.pentaho.platform.plugin.services.connections.sql.SQLMetaData in project data-access by pentaho.

The class DatasourceInMemoryServiceHelper, method getSerializeableResultSet:

public static SerializedResultSet getSerializeableResultSet(String connectionName, String query, int rowLimit, IPentahoSession session) throws DatasourceServiceException {
    SerializedResultSet serializedResultSet = null;
    SQLConnection sqlConnection = null;
    try {
        sqlConnection = getConnection(connectionName);
        sqlConnection.setMaxRows(rowLimit);
        sqlConnection.setReadOnly(true);
        IPentahoResultSet resultSet = sqlConnection.executeQuery(query);
        MarshallableResultSet marshallableResultSet = new MarshallableResultSet();
        marshallableResultSet.setResultSet(resultSet);
        IPentahoMetaData ipmd = resultSet.getMetaData();
        if (ipmd instanceof SQLMetaData) {
            // Hack warning - get JDBC column types
            // TODO: Need to generalize this amongst all IPentahoResultSets
            SQLMetaData smd = (SQLMetaData) ipmd;
            int[] columnTypes = smd.getJDBCColumnTypes();
            List<List<String>> data = new ArrayList<List<String>>();
            for (MarshallableRow row : marshallableResultSet.getRows()) {
                String[] rowData = row.getCell();
                List<String> rowDataList = new ArrayList<String>(rowData.length);
                for (int j = 0; j < rowData.length; j++) {
                    rowDataList.add(rowData[j]);
                }
                data.add(rowDataList);
            }
            serializedResultSet = new SerializedResultSet(columnTypes, marshallableResultSet.getColumnNames().getColumnName(), data);
        }
    } catch (Exception e) {
        logger.error(Messages.getErrorString("DatasourceInMemoryServiceHelper.ERROR_0005_QUERY_VALIDATION_FAILED", e.getLocalizedMessage()), e); // $NON-NLS-1$
        throw new DatasourceServiceException(Messages.getErrorString("DatasourceInMemoryServiceHelper.ERROR_0005_QUERY_VALIDATION_FAILED", e.getLocalizedMessage()), e); // $NON-NLS-1$
    } finally {
        if (sqlConnection != null) {
            sqlConnection.close();
        }
    }
    return serializedResultSet;
}
Also used:
MarshallableResultSet (org.pentaho.commons.connection.marshal.MarshallableResultSet)
SQLConnection (org.pentaho.platform.plugin.services.connections.sql.SQLConnection)
ArrayList (java.util.ArrayList)
SerializedResultSet (org.pentaho.platform.dataaccess.datasource.beans.SerializedResultSet)
IPentahoMetaData (org.pentaho.commons.connection.IPentahoMetaData)
DatasourceServiceException (org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException)
SQLException (java.sql.SQLException)
DatabaseDialectException (org.pentaho.database.DatabaseDialectException)
ConnectionServiceException (org.pentaho.platform.dataaccess.datasource.wizard.service.ConnectionServiceException)
IOException (java.io.IOException)
FileNotFoundException (java.io.FileNotFoundException)
SQLMetaData (org.pentaho.platform.plugin.services.connections.sql.SQLMetaData)
IPentahoResultSet (org.pentaho.commons.connection.IPentahoResultSet)
MarshallableRow (org.pentaho.commons.connection.marshal.MarshallableRow)
List (java.util.List)
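
For context, here is a minimal usage sketch (not taken from the project) of calling the helper above. The 100-row limit is an illustrative value, the session comes from PentahoSessionHolder as in Example 2, and DatasourceInMemoryServiceHelper's package is not shown in this listing, so its import is omitted.

// Hypothetical caller; imports for PentahoSessionHolder and the helper class are assumed.
public SerializedResultSet fetchPreview(String connectionName, String query) throws DatasourceServiceException {
    IPentahoSession session = PentahoSessionHolder.getSession();
    // Limit the preview to 100 rows (illustrative value only).
    return DatasourceInMemoryServiceHelper.getSerializeableResultSet(connectionName, query, 100, session);
}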

Example 2 with SQLMetaData

Use of org.pentaho.platform.plugin.services.connections.sql.SQLMetaData in project data-access by pentaho.

The class DatasourceServiceHelper, method getSerializeableResultSet:

public static SerializedResultSet getSerializeableResultSet(String connectionName, String query, int rowLimit, IPentahoSession session) throws DatasourceServiceException {
    SerializedResultSet serializedResultSet = null;
    SQLConnection sqlConnection = null;
    try {
        sqlConnection = (SQLConnection) PentahoConnectionFactory.getConnection(IPentahoConnection.SQL_DATASOURCE, connectionName, PentahoSessionHolder.getSession(), null);
        sqlConnection.setMaxRows(rowLimit);
        sqlConnection.setReadOnly(true);
        IPentahoResultSet resultSet = sqlConnection.executeQuery(query);
        if (!resultSet.isScrollable()) {
            logger.debug("ResultSet is not scrollable. Copying into memory"); // $NON-NLS-1$
            resultSet = convertToMemoryResultSet(resultSet);
        }
        MarshallableResultSet marshallableResultSet = new MarshallableResultSet();
        marshallableResultSet.setResultSet(resultSet);
        IPentahoMetaData ipmd = resultSet.getMetaData();
        int[] columnTypes = null;
        if (ipmd instanceof SQLMetaData) {
            SQLMetaData smd = (SQLMetaData) ipmd;
            columnTypes = smd.getJDBCColumnTypes();
        } else if (ipmd instanceof MemoryMetaData) {
            MemoryMetaData mmd = (MemoryMetaData) ipmd;
            String[] columnTypesAsString = mmd.getColumnTypes();
            columnTypes = new int[columnTypesAsString.length];
            for (int i = 0; i < columnTypesAsString.length; i++) {
                columnTypes[i] = Integer.parseInt(columnTypesAsString[i]);
            }
        }
        if (columnTypes != null) {
            // Hack warning - get JDBC column types
            // TODO: Need to generalize this amongst all IPentahoResultSets
            List<List<String>> data = new ArrayList<List<String>>();
            for (MarshallableRow row : marshallableResultSet.getRows()) {
                String[] rowData = row.getCell();
                List<String> rowDataList = new ArrayList<String>(rowData.length);
                for (int j = 0; j < rowData.length; j++) {
                    rowDataList.add(rowData[j]);
                }
                data.add(rowDataList);
            }
            serializedResultSet = new SerializedResultSet(columnTypes, marshallableResultSet.getColumnNames().getColumnName(), data);
        }
    } catch (Exception e) {
        logger.error(Messages.getErrorString("DatasourceServiceHelper.ERROR_0001_QUERY_VALIDATION_FAILED", e.getLocalizedMessage()), e); // $NON-NLS-1$
        throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceHelper.ERROR_0001_QUERY_VALIDATION_FAILED", e.getLocalizedMessage()), e); // $NON-NLS-1$
    } finally {
        if (sqlConnection != null) {
            sqlConnection.close();
        }
    }
    return serializedResultSet;
}
Also used:
MarshallableResultSet (org.pentaho.commons.connection.marshal.MarshallableResultSet)
SQLConnection (org.pentaho.platform.plugin.services.connections.sql.SQLConnection)
ArrayList (java.util.ArrayList)
SerializedResultSet (org.pentaho.platform.dataaccess.datasource.beans.SerializedResultSet)
MemoryMetaData (org.pentaho.commons.connection.memory.MemoryMetaData)
IPentahoMetaData (org.pentaho.commons.connection.IPentahoMetaData)
DatasourceServiceException (org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException)
ModelerException (org.pentaho.agilebi.modeler.ModelerException)
SQLException (java.sql.SQLException)
SQLMetaData (org.pentaho.platform.plugin.services.connections.sql.SQLMetaData)
IPentahoResultSet (org.pentaho.commons.connection.IPentahoResultSet)
MarshallableRow (org.pentaho.commons.connection.marshal.MarshallableRow)
List (java.util.List)
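
The int[] returned by getJDBCColumnTypes() above appears to hold java.sql.Types constants (the MemoryMetaData branch parses the same values back out of strings with Integer.parseInt). A small, hypothetical sketch of turning such values into readable names for logging or debugging:

// Hypothetical helper, not part of the project: maps java.sql.Types constants to their
// names via java.sql.JDBCType, e.g. 12 -> "VARCHAR", 4 -> "INTEGER".
private static String[] describeColumnTypes(int[] jdbcColumnTypes) {
    String[] names = new String[jdbcColumnTypes.length];
    for (int i = 0; i < jdbcColumnTypes.length; i++) {
        names[i] = java.sql.JDBCType.valueOf(jdbcColumnTypes[i]).getName();
    }
    return names;
}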

Example 3 with SQLMetaData

Use of org.pentaho.platform.plugin.services.connections.sql.SQLMetaData in project data-access by pentaho.

The class DatasourceServiceHelper, method convertToMemoryResultSet:

/**
 * Converts a live result set into an in-memory result set.
 *
 * @param resultSet the live result set to copy; it is closed once the copy is complete
 * @return an in-memory copy of the result set
 */
private static IPentahoResultSet convertToMemoryResultSet(IPentahoResultSet resultSet) throws SQLException {
    MemoryResultSet cachedResultSet = null;
    try {
        IPentahoMetaData meta = resultSet.getMetaData();
        Object[][] columnHeaders = meta.getColumnHeaders();
        MemoryMetaData cachedMetaData = new MemoryMetaData(columnHeaders, null);
        String[] colTypesAsString;
        // If the IPentahoMetaData is an instance of SQLMetaData, get the JDBC column types from it
        if (meta instanceof SQLMetaData) {
            SQLMetaData sqlMeta = (SQLMetaData) meta;
            // Column types in SQLMetaData are ints, but MemoryMetaData stores column types as
            // strings, so convert them before storing.
            int[] colTypes = sqlMeta.getJDBCColumnTypes();
            colTypesAsString = new String[colTypes.length];
            for (int i = 0; i < colTypes.length; i++) {
                colTypesAsString[i] = Integer.toString(colTypes[i]);
            }
            cachedMetaData.setColumnTypes(colTypesAsString);
        }
        cachedResultSet = new MemoryResultSet(cachedMetaData);
        Object[] rowObjects = resultSet.next();
        while (rowObjects != null) {
            cachedResultSet.addRow(rowObjects);
            rowObjects = resultSet.next();
        }
    } finally {
        resultSet.close();
    }
    return cachedResultSet;
}
Also used:
SQLMetaData (org.pentaho.platform.plugin.services.connections.sql.SQLMetaData)
MemoryMetaData (org.pentaho.commons.connection.memory.MemoryMetaData)
IPentahoMetaData (org.pentaho.commons.connection.IPentahoMetaData)
MemoryResultSet (org.pentaho.commons.connection.memory.MemoryResultSet)
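
A brief sketch (illustrative only; it would have to live in the same class, since convertToMemoryResultSet is private) of reading the in-memory copy back, using only calls already shown above: next() returns one Object[] per row and null once the rows are exhausted.

private static void printMemoryCopy(IPentahoResultSet liveResultSet) throws SQLException {
    IPentahoResultSet cached = convertToMemoryResultSet(liveResultSet);
    Object[] row = cached.next();
    while (row != null) {
        // Each element of the row corresponds to one column of the original result set.
        System.out.println(java.util.Arrays.toString(row));
        row = cached.next();
    }
}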

Example 4 with SQLMetaData

Use of org.pentaho.platform.plugin.services.connections.sql.SQLMetaData in project data-access by pentaho.

The class DSWDatasourceServiceImplTest, method setUp:

@Before
public void setUp() throws Exception {
    SqlDataSource dataSource = new SqlDataSource();
    dataSource.setDatabaseName(CONNECTION_NAME);
    SqlPhysicalTable sqlTable = new SqlPhysicalTable();
    sqlTable.setTargetTable(VALID_QUERY);
    SqlPhysicalModel sqlModel = new SqlPhysicalModel();
    sqlModel.addPhysicalTable(sqlTable);
    sqlModel.setDatasource(dataSource);
    analysisModel = new LogicalModel();
    analysisModel.setId(LOGICAL_MODEL_ID_ANALYSIS);
    analysisModel.setProperty(DSWDatasourceServiceImpl.LM_PROP_VISIBLE, LOGICAL_MODEL_CONTEXTNAME);
    reportingModel = new LogicalModel();
    reportingModel.setId(LOGICAL_MODEL_ID_REPORTING);
    domain2Models = new Domain();
    domain2Models.setId(DOMAIN_ID_2MODELS);
    domain2Models.addLogicalModel(analysisModel);
    domain2Models.addLogicalModel(reportingModel);
    domain2Models.setLocales(Arrays.asList(new LocaleType("en_US", "Test locale")));
    domain2Models.addPhysicalModel(sqlModel);
    Set<String> domains = new TreeSet<String>();
    domains.add(DOMAIN_ID_2MODELS);
    doReturn(domain2Models).when(domainRepository).getDomain(DOMAIN_ID_2MODELS);
    doReturn(domains).when(domainRepository).getDomainIds();
    doAnswer(new Answer<Object>() {

        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            final String modelId = (String) invocation.getArguments()[1];
            final LogicalModel modelToRemove = domain2Models.findLogicalModel(modelId);
            domain2Models.getLogicalModels().remove(modelToRemove);
            return null;
        }
    }).when(domainRepository).removeModel(anyString(), anyString());
    workspace2Models = mock(ModelerWorkspace.class);
    when(workspace2Models.getLogicalModel(ModelerPerspective.ANALYSIS)).thenReturn(analysisModel);
    when(workspace2Models.getLogicalModel(ModelerPerspective.REPORTING)).thenReturn(reportingModel);
    dswService = spy(new DSWDatasourceServiceImpl(mock(ConnectionServiceImpl.class)));
    doNothing().when(dswService).checkSqlQueriesSupported(anyString());
    dswService.setMetadataDomainRepository(domainRepository);
    Object[][] columnHeaders = new Object[][] { columns };
    SQLMetaData metadata = mock(SQLMetaData.class);
    when(metadata.getColumnHeaders()).thenReturn(columnHeaders);
    when(metadata.getJDBCColumnTypes()).thenReturn(columnTypes);
    IPentahoResultSet resultSet = mock(IPentahoResultSet.class);
    when(resultSet.getMetaData()).thenReturn(metadata);
    doReturn(resultSet).when(sqlConnection).executeQuery(matches("(.*" + VALID_QUERY + ".*)"));
    when(sqlConnection.executeQuery(matches("(.*" + QUERY_COLUMN_ALREADY_EXIST + ".*)"))).thenThrow(new SQLException("Reason", "S0021", 21));
    doReturn(nativeConnection).when(sqlConnection).getNativeConnection();
    MondrianCatalog catalog = mock(MondrianCatalog.class);
    doReturn(catalog).when(mondrianService).getCatalog(anyString(), any(IPentahoSession.class));
    pentahoObjectFactory = mock(IPentahoObjectFactory.class);
    when(pentahoObjectFactory.objectDefined(anyString())).thenReturn(true);
    when(pentahoObjectFactory.get(this.anyClass(), anyString(), any(IPentahoSession.class))).thenAnswer(new Answer<Object>() {

        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
            if (invocation.getArguments()[0].equals(IMondrianCatalogService.class)) {
                return mondrianService;
            }
            if (invocation.getArguments()[0].equals(IPentahoConnection.class)) {
                return sqlConnection;
            }
            if (invocation.getArguments()[0].equals(IMetadataDomainRepository.class)) {
                return domainRepository;
            }
            return null;
        }
    });
    PentahoSystem.registerObjectFactory(pentahoObjectFactory);
    IPentahoSession pentahoSessionMock = mock(IPentahoSession.class);
    when(pentahoSessionMock.getName()).thenReturn("sessionName");
    PentahoSessionHolder.setSession(pentahoSessionMock);
}
Also used:
MondrianCatalog (org.pentaho.platform.plugin.action.mondrian.catalog.MondrianCatalog)
SQLException (java.sql.SQLException)
IPentahoObjectFactory (org.pentaho.platform.api.engine.IPentahoObjectFactory)
Mockito.anyString (org.mockito.Mockito.anyString)
SqlPhysicalModel (org.pentaho.metadata.model.SqlPhysicalModel)
SqlPhysicalTable (org.pentaho.metadata.model.SqlPhysicalTable)
IPentahoConnection (org.pentaho.commons.connection.IPentahoConnection)
LogicalModel (org.pentaho.metadata.model.LogicalModel)
TreeSet (java.util.TreeSet)
LocaleType (org.pentaho.metadata.model.concept.types.LocaleType)
IPentahoSession (org.pentaho.platform.api.engine.IPentahoSession)
IMetadataDomainRepository (org.pentaho.metadata.repository.IMetadataDomainRepository)
IMondrianCatalogService (org.pentaho.platform.plugin.action.mondrian.catalog.IMondrianCatalogService)
SQLMetaData (org.pentaho.platform.plugin.services.connections.sql.SQLMetaData)
IPentahoResultSet (org.pentaho.commons.connection.IPentahoResultSet)
InvocationOnMock (org.mockito.invocation.InvocationOnMock)
SqlDataSource (org.pentaho.metadata.model.SqlDataSource)
Domain (org.pentaho.metadata.model.Domain)
ModelerWorkspace (org.pentaho.agilebi.modeler.ModelerWorkspace)
Before (org.junit.Before)
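
To isolate the metadata-stubbing pattern used in this setup, here is a minimal, hypothetical sketch. The column names and java.sql.Types values are placeholders, and only the two SQLMetaData getters already exercised in the examples above are stubbed.

// Hypothetical test helper, not part of the project.
private SQLMetaData stubMetaData() {
    Object[][] columnHeaders = new Object[][] { { "ID", "NAME" } }; // placeholder column names
    int[] jdbcTypes = new int[] { java.sql.Types.INTEGER, java.sql.Types.VARCHAR }; // placeholder JDBC types
    SQLMetaData metadata = mock(SQLMetaData.class);
    when(metadata.getColumnHeaders()).thenReturn(columnHeaders);
    when(metadata.getJDBCColumnTypes()).thenReturn(jdbcTypes);
    return metadata;
}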

Aggregations

SQLMetaData (org.pentaho.platform.plugin.services.connections.sql.SQLMetaData) 4
SQLException (java.sql.SQLException) 3
IPentahoMetaData (org.pentaho.commons.connection.IPentahoMetaData) 3
IPentahoResultSet (org.pentaho.commons.connection.IPentahoResultSet) 3
ArrayList (java.util.ArrayList) 2
List (java.util.List) 2
MarshallableResultSet (org.pentaho.commons.connection.marshal.MarshallableResultSet) 2
MarshallableRow (org.pentaho.commons.connection.marshal.MarshallableRow) 2
MemoryMetaData (org.pentaho.commons.connection.memory.MemoryMetaData) 2
SerializedResultSet (org.pentaho.platform.dataaccess.datasource.beans.SerializedResultSet) 2
DatasourceServiceException (org.pentaho.platform.dataaccess.datasource.wizard.service.DatasourceServiceException) 2
SQLConnection (org.pentaho.platform.plugin.services.connections.sql.SQLConnection) 2
FileNotFoundException (java.io.FileNotFoundException) 1
IOException (java.io.IOException) 1
TreeSet (java.util.TreeSet) 1
Before (org.junit.Before) 1
Mockito.anyString (org.mockito.Mockito.anyString) 1
InvocationOnMock (org.mockito.invocation.InvocationOnMock) 1
ModelerException (org.pentaho.agilebi.modeler.ModelerException) 1
ModelerWorkspace (org.pentaho.agilebi.modeler.ModelerWorkspace) 1