Usage of org.pentaho.platform.plugin.services.connections.sql.SQLMetaData in the pentaho data-access project.
The following is the method getSerializeableResultSet of the class DatasourceInMemoryServiceHelper.
/**
 * Runs the given query against the named connection and packages the result into a
 * {@link SerializedResultSet} (JDBC column types, column names, and row data as strings).
 *
 * @param connectionName name of the connection to execute against
 * @param query          SQL to execute
 * @param rowLimit       maximum number of rows to fetch
 * @param session        current Pentaho session (not read by this implementation)
 * @return the serialized result set, or {@code null} when the result metadata is not
 *         {@link SQLMetaData} (JDBC column types are only available in that case)
 * @throws DatasourceServiceException wrapping any failure during connection or execution
 */
public static SerializedResultSet getSerializeableResultSet(String connectionName, String query, int rowLimit, IPentahoSession session) throws DatasourceServiceException {
SerializedResultSet result = null;
SQLConnection conn = null;
try {
conn = getConnection(connectionName);
// Cap the fetch size and mark the connection read-only before executing.
conn.setMaxRows(rowLimit);
conn.setReadOnly(true);
IPentahoResultSet pentahoResultSet = conn.executeQuery(query);
MarshallableResultSet marshallable = new MarshallableResultSet();
marshallable.setResultSet(pentahoResultSet);
IPentahoMetaData metaData = pentahoResultSet.getMetaData();
if (metaData instanceof SQLMetaData) {
// Hack warning - get JDBC column types
// TODO: Need to generalize this amongst all IPentahoResultSets
int[] jdbcTypes = ((SQLMetaData) metaData).getJDBCColumnTypes();
// Copy each marshalled row's cells into a string list.
List<List<String>> rows = new ArrayList<List<String>>();
for (MarshallableRow marshallableRow : marshallable.getRows()) {
String[] cells = marshallableRow.getCell();
List<String> cellList = new ArrayList<String>(cells.length);
for (String cell : cells) {
cellList.add(cell);
}
rows.add(cellList);
}
result = new SerializedResultSet(jdbcTypes, marshallable.getColumnNames().getColumnName(), rows);
}
} catch (Exception e) {
logger.error(Messages.getErrorString("DatasourceInMemoryServiceHelper.ERROR_0005_QUERY_VALIDATION_FAILED", e.getLocalizedMessage()), // $NON-NLS-1$
e);
throw new DatasourceServiceException(Messages.getErrorString("DatasourceInMemoryServiceHelper.ERROR_0005_QUERY_VALIDATION_FAILED", e.getLocalizedMessage()), // $NON-NLS-1$
e);
} finally {
// Always release the connection, even on failure.
if (conn != null) {
conn.close();
}
}
return result;
}
Usage of org.pentaho.platform.plugin.services.connections.sql.SQLMetaData in the pentaho data-access project.
The following is the method getSerializeableResultSet of the class DatasourceServiceHelper.
/**
 * Runs the given query against the named connection and packages the result into a
 * {@link SerializedResultSet} (JDBC column types, column names, and row data as strings).
 * Non-scrollable result sets are first copied into an in-memory result set.
 *
 * @param connectionName name of the connection to execute against
 * @param query          SQL to execute
 * @param rowLimit       maximum number of rows to fetch
 * @param session        current Pentaho session (not read by this implementation)
 * @return the serialized result set, or {@code null} when column types could not be
 *         determined (metadata is neither {@link SQLMetaData} nor {@link MemoryMetaData})
 * @throws DatasourceServiceException wrapping any failure during connection or execution
 */
public static SerializedResultSet getSerializeableResultSet(String connectionName, String query, int rowLimit, IPentahoSession session) throws DatasourceServiceException {
SerializedResultSet serializedResultSet = null;
SQLConnection sqlConnection = null;
try {
sqlConnection = (SQLConnection) PentahoConnectionFactory.getConnection(IPentahoConnection.SQL_DATASOURCE, connectionName, PentahoSessionHolder.getSession(), null);
sqlConnection.setMaxRows(rowLimit);
sqlConnection.setReadOnly(true);
IPentahoResultSet resultSet = sqlConnection.executeQuery(query);
if (!resultSet.isScrollable()) {
// BUG FIX: this message was previously logged unconditionally, before the
// isScrollable() check; it now only fires when a copy actually happens.
logger.debug("ResultSet is not scrollable. Copying into memory"); // $NON-NLS-1$
resultSet = convertToMemoryResultSet(resultSet);
}
MarshallableResultSet marshallableResultSet = new MarshallableResultSet();
marshallableResultSet.setResultSet(resultSet);
IPentahoMetaData ipmd = resultSet.getMetaData();
int[] columnTypes = null;
if (ipmd instanceof SQLMetaData) {
// JDBC column types are directly available from SQL metadata.
SQLMetaData smd = (SQLMetaData) ipmd;
columnTypes = smd.getJDBCColumnTypes();
} else if (ipmd instanceof MemoryMetaData) {
// MemoryMetaData stores column types as strings; parse them back to JDBC ints.
MemoryMetaData mmd = (MemoryMetaData) ipmd;
String[] columnTypesAsString = mmd.getColumnTypes();
columnTypes = new int[columnTypesAsString.length];
for (int i = 0; i < columnTypesAsString.length; i++) {
columnTypes[i] = Integer.parseInt(columnTypesAsString[i]);
}
}
if (columnTypes != null) {
// Hack warning - get JDBC column types
// TODO: Need to generalize this amongst all IPentahoResultSets
List<List<String>> data = new ArrayList<List<String>>();
for (MarshallableRow row : marshallableResultSet.getRows()) {
String[] rowData = row.getCell();
List<String> rowDataList = new ArrayList<String>(rowData.length);
for (int j = 0; j < rowData.length; j++) {
rowDataList.add(rowData[j]);
}
data.add(rowDataList);
}
serializedResultSet = new SerializedResultSet(columnTypes, marshallableResultSet.getColumnNames().getColumnName(), data);
}
} catch (Exception e) {
logger.error(Messages.getErrorString("DatasourceServiceHelper.ERROR_0001_QUERY_VALIDATION_FAILED", e.getLocalizedMessage()), // $NON-NLS-1$
e);
throw new DatasourceServiceException(Messages.getErrorString("DatasourceServiceHelper.ERROR_0001_QUERY_VALIDATION_FAILED", e.getLocalizedMessage()), // $NON-NLS-1$
e);
} finally {
// Always release the connection, even on failure.
if (sqlConnection != null) {
sqlConnection.close();
}
}
return serializedResultSet;
}
Usage of org.pentaho.platform.plugin.services.connections.sql.SQLMetaData in the pentaho data-access project.
The following is the method convertToMemoryResultSet of the class DatasourceServiceHelper.
/**
 * Copies a live (forward-only) result set into an in-memory, scrollable result set.
 * When the source metadata is {@link SQLMetaData}, the JDBC column types are carried
 * over into the {@link MemoryMetaData} as strings; otherwise column types are left unset.
 * The source result set is always closed, even if copying fails.
 *
 * @param resultSet the live result set to copy; closed by this method
 * @return the in-memory copy of {@code resultSet}
 * @throws SQLException if reading the source result set fails
 */
private static IPentahoResultSet convertToMemoryResultSet(IPentahoResultSet resultSet) throws SQLException {
MemoryResultSet cachedResultSet = null;
try {
IPentahoMetaData meta = resultSet.getMetaData();
Object[][] columnHeaders = meta.getColumnHeaders();
MemoryMetaData cachedMetaData = new MemoryMetaData(columnHeaders, null);
// If the IPentahoMetaData is an instanceof SQLMetaData then get the column types from the metadata
if (meta instanceof SQLMetaData) {
SQLMetaData sqlMeta = (SQLMetaData) meta;
// Column Types in SQLMetaData are int. MemoryMetaData stores column types as string. So we will store them
// as string in MemoryMetaData
int[] colTypes = sqlMeta.getJDBCColumnTypes();
// Declared inside the branch: it is only meaningful when SQLMetaData is present
// (previously declared at method scope and left unused on the other path).
String[] colTypesAsString = new String[colTypes.length];
for (int i = 0; i < colTypes.length; i++) {
colTypesAsString[i] = Integer.toString(colTypes[i]);
}
cachedMetaData.setColumnTypes(colTypesAsString);
}
cachedResultSet = new MemoryResultSet(cachedMetaData);
// Drain every row from the live result set into the memory copy.
Object[] rowObjects = resultSet.next();
while (rowObjects != null) {
cachedResultSet.addRow(rowObjects);
rowObjects = resultSet.next();
}
} finally {
// The live result set is consumed either way; release it.
resultSet.close();
}
return cachedResultSet;
}
Usage of org.pentaho.platform.plugin.services.connections.sql.SQLMetaData in the pentaho data-access project.
The following is the method setUp of the class DSWDatasourceServiceImplTest.
@Before
public void setUp() throws Exception {
// Build a minimal SQL physical model: one datasource + one table backed by VALID_QUERY.
SqlDataSource dataSource = new SqlDataSource();
dataSource.setDatabaseName(CONNECTION_NAME);
SqlPhysicalTable sqlTable = new SqlPhysicalTable();
sqlTable.setTargetTable(VALID_QUERY);
SqlPhysicalModel sqlModel = new SqlPhysicalModel();
sqlModel.addPhysicalTable(sqlTable);
sqlModel.setDatasource(dataSource);
// Domain under test holds two logical models (analysis + reporting) over the SQL model.
analysisModel = new LogicalModel();
analysisModel.setId(LOGICAL_MODEL_ID_ANALYSIS);
analysisModel.setProperty(DSWDatasourceServiceImpl.LM_PROP_VISIBLE, LOGICAL_MODEL_CONTEXTNAME);
reportingModel = new LogicalModel();
reportingModel.setId(LOGICAL_MODEL_ID_REPORTING);
domain2Models = new Domain();
domain2Models.setId(DOMAIN_ID_2MODELS);
domain2Models.addLogicalModel(analysisModel);
domain2Models.addLogicalModel(reportingModel);
domain2Models.setLocales(Arrays.asList(new LocaleType("en_US", "Test locale")));
domain2Models.addPhysicalModel(sqlModel);
Set<String> domains = new TreeSet<String>();
domains.add(DOMAIN_ID_2MODELS);
// Stub the domain repository to serve the test domain and to actually remove
// a logical model from domain2Models when removeModel is invoked.
doReturn(domain2Models).when(domainRepository).getDomain(DOMAIN_ID_2MODELS);
doReturn(domains).when(domainRepository).getDomainIds();
doAnswer(new Answer<Object>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
final String modelId = (String) invocation.getArguments()[1];
final LogicalModel modelToRemove = domain2Models.findLogicalModel(modelId);
domain2Models.getLogicalModels().remove(modelToRemove);
return null;
}
}).when(domainRepository).removeModel(anyString(), anyString());
// Workspace mock exposes both logical models by perspective.
workspace2Models = mock(ModelerWorkspace.class);
when(workspace2Models.getLogicalModel(ModelerPerspective.ANALYSIS)).thenReturn(analysisModel);
when(workspace2Models.getLogicalModel(ModelerPerspective.REPORTING)).thenReturn(reportingModel);
// Spy the service so SQL-support checks can be bypassed in tests.
dswService = spy(new DSWDatasourceServiceImpl(mock(ConnectionServiceImpl.class)));
doNothing().when(dswService).checkSqlQueriesSupported(anyString());
dswService.setMetadataDomainRepository(domainRepository);
// SQL connection mock: VALID_QUERY yields a result set with known headers/types;
// QUERY_COLUMN_ALREADY_EXIST fails with a SQLException.
Object[][] coumnHeaders = new Object[][] { columns };
SQLMetaData metadata = mock(SQLMetaData.class);
when(metadata.getColumnHeaders()).thenReturn(coumnHeaders);
when(metadata.getJDBCColumnTypes()).thenReturn(columnTypes);
IPentahoResultSet resultSet = mock(IPentahoResultSet.class);
when(resultSet.getMetaData()).thenReturn(metadata);
doReturn(resultSet).when(sqlConnection).executeQuery(matches("(.*" + VALID_QUERY + ".*)"));
when(sqlConnection.executeQuery(matches("(.*" + QUERY_COLUMN_ALREADY_EXIST + ".*)"))).thenThrow(new SQLException("Reason", "S0021", 21));
doReturn(nativeConnection).when(sqlConnection).getNativeConnection();
MondrianCatalog catalog = mock(MondrianCatalog.class);
doReturn(catalog).when(mondrianService).getCatalog(anyString(), any(IPentahoSession.class));
// Register a PentahoSystem object factory that routes lookups of the mocked
// services (catalog service, SQL connection, domain repository) to the mocks above.
pentahoObjectFactory = mock(IPentahoObjectFactory.class);
when(pentahoObjectFactory.objectDefined(anyString())).thenReturn(true);
when(pentahoObjectFactory.get(this.anyClass(), anyString(), any(IPentahoSession.class))).thenAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
if (invocation.getArguments()[0].equals(IMondrianCatalogService.class)) {
return mondrianService;
}
if (invocation.getArguments()[0].equals(IPentahoConnection.class)) {
return sqlConnection;
}
if (invocation.getArguments()[0].equals(IMetadataDomainRepository.class)) {
return domainRepository;
}
return null;
}
});
PentahoSystem.registerObjectFactory(pentahoObjectFactory);
// Install a named session so code under test that reads the session name works.
IPentahoSession pentahoSessionMock = mock(IPentahoSession.class);
when(pentahoSessionMock.getName()).thenReturn("sessionName");
PentahoSessionHolder.setSession(pentahoSessionMock);
}
End of aggregated usages.