Use of mondrian.spi.Dialect in project mondrian by pentaho.
From the class XmlaCognosTest, the method testCognosMDXSuiteHR_001:
public void testCognosMDXSuiteHR_001() throws Exception {
    Dialect dialect = TestContext.instance().getDialect();
    switch (dialect.getDatabaseProduct()) {
    case DERBY:
        // Derby gives right answer, but many cells have wrong xsi:type.
        return;
    }
    executeMDX();
}
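The switch above skips the test on Derby because of a known xsi:type mismatch. As a minimal, hypothetical sketch (the helper class and method name are invented; only the DERBY constant comes from the snippet above), the same skip-by-dialect pattern can be factored out:

import mondrian.spi.Dialect;

// Hypothetical helper illustrating the skip-by-dialect pattern used above.
public class DialectTestSupport {
    /** Returns false for database products whose answers are known to differ. */
    public static boolean isSupportedByTest(Dialect dialect) {
        switch (dialect.getDatabaseProduct()) {
        case DERBY:
            // Derby returns the right cell values but the wrong xsi:type,
            // so XMLA comparison tests skip it.
            return false;
        default:
            return true;
        }
    }
}

A test would then guard itself with if (!DialectTestSupport.isSupportedByTest(dialect)) { return; } before executing the MDX.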
Use of mondrian.spi.Dialect in project mondrian by pentaho.
From the class RolapStatisticsCache, the method getTableCardinality:
private long getTableCardinality(String catalog, String schema, String table) {
    final List<String> key = Arrays.asList(catalog, schema, table);
    long rowCount = -1;
    if (tableMap.containsKey(key)) {
        rowCount = tableMap.get(key);
    } else {
        final Dialect dialect = star.getSqlQueryDialect();
        final List<StatisticsProvider> statisticsProviders = dialect.getStatisticsProviders();
        final Execution execution = new Execution(star.getSchema().getInternalConnection().getInternalStatement(), 0);
        for (StatisticsProvider statisticsProvider : statisticsProviders) {
            rowCount = statisticsProvider.getTableCardinality(dialect, star.getDataSource(), catalog, schema, table, execution);
            if (rowCount >= 0) {
                break;
            }
        }
        // Note: If all providers fail, we put -1 into the cache, to ensure
        // that we won't try again.
        tableMap.put(key, rowCount);
    }
    return rowCount;
}
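The loop above consults each provider returned by Dialect.getStatisticsProviders() and keeps the first non-negative estimate; a provider that cannot answer returns -1. The sketch below is hypothetical (the class name and the plain COUNT(*) approach are not taken from mondrian); it only mirrors the getTableCardinality signature visible in the call above, assumes Execution is mondrian.server.Execution as in the mondrian code base, and a real provider would implement mondrian.spi.StatisticsProvider and quote identifiers through the Dialect:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import javax.sql.DataSource;
import mondrian.server.Execution;
import mondrian.spi.Dialect;

// Hypothetical provider sketch; a real one implements mondrian.spi.StatisticsProvider.
public class CountStarStatisticsProvider {
    /** Estimates table cardinality with a plain COUNT(*); returns -1 if the query fails. */
    public long getTableCardinality(Dialect dialect, DataSource dataSource, String catalog, String schema, String table, Execution execution) {
        // Naive identifier handling for illustration; production code would
        // quote catalog, schema and table through the Dialect.
        final String qualified = (schema == null ? "" : schema + ".") + table;
        try (Connection connection = dataSource.getConnection();
             Statement statement = connection.createStatement();
             ResultSet resultSet = statement.executeQuery("select count(*) from " + qualified)) {
            return resultSet.next() ? resultSet.getLong(1) : -1;
        } catch (SQLException e) {
            return -1; // unknown; the caller falls through to the next provider
        }
    }
}

Returning -1 rather than throwing matches the sentinel convention of the cache above, which stores -1 so the lookup is not retried.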
Use of mondrian.spi.Dialect in project mondrian by pentaho.
From the class RolapStatisticsCache, the method getQueryCardinality:
private long getQueryCardinality(String sql) {
    long rowCount = -1;
    if (queryMap.containsKey(sql)) {
        rowCount = queryMap.get(sql);
    } else {
        final Dialect dialect = star.getSqlQueryDialect();
        final List<StatisticsProvider> statisticsProviders = dialect.getStatisticsProviders();
        final Execution execution = new Execution(star.getSchema().getInternalConnection().getInternalStatement(), 0);
        for (StatisticsProvider statisticsProvider : statisticsProviders) {
            rowCount = statisticsProvider.getQueryCardinality(dialect, star.getDataSource(), sql, execution);
            if (rowCount >= 0) {
                break;
            }
        }
        // Note: If all providers fail, we put -1 into the cache, to ensure
        // that we won't try again.
        queryMap.put(sql, rowCount);
    }
    return rowCount;
}
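getQueryCardinality follows the same fall-through pattern, this time keyed by the SQL string. Continuing the hypothetical CountStarStatisticsProvider sketch above, a query-cardinality estimate could wrap the SQL in a COUNT(*) subquery, guarded by the Dialect's allowsFromQuery() capability check (the wrapping approach and the alias are assumptions for illustration, not mondrian's implementation):

// Companion to the sketch above: hypothetical query-cardinality estimate.
public long getQueryCardinality(Dialect dialect, DataSource dataSource, String sql, Execution execution) {
    if (!dialect.allowsFromQuery()) {
        // The dialect cannot nest a query in a FROM clause; report "unknown"
        // so the cache stores -1 and no further attempt is made.
        return -1;
    }
    // The alias is arbitrary; some databases require one on derived tables.
    final String countSql = "select count(*) from (" + sql + ") as init";
    try (Connection connection = dataSource.getConnection();
         Statement statement = connection.createStatement();
         ResultSet resultSet = statement.executeQuery(countSql)) {
        return resultSet.next() ? resultSet.getLong(1) : -1;
    } catch (SQLException e) {
        return -1;
    }
}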
Use of mondrian.spi.Dialect in project mondrian by pentaho.
From the class SqlStatement, the method guessTypes:
public List<Type> guessTypes() throws SQLException {
    final ResultSetMetaData metaData = resultSet.getMetaData();
    final int columnCount = metaData.getColumnCount();
    assert this.types == null || this.types.size() == columnCount;
    List<Type> types = new ArrayList<Type>();
    for (int i = 0; i < columnCount; i++) {
        final Type suggestedType = this.types == null ? null : this.types.get(i);
        // There might not be a schema constructed yet,
        // so watch out here for NPEs.
        RolapSchema schema = locus.execution.getMondrianStatement().getMondrianConnection().getSchema();
        Dialect dialect = getDialect(schema);
        if (suggestedType != null) {
            types.add(suggestedType);
        } else if (dialect != null) {
            types.add(dialect.getType(metaData, i));
        } else {
            types.add(Type.OBJECT);
        }
    }
    return types;
}
Use of mondrian.spi.Dialect in project mondrian by pentaho.
From the class AbstractQuerySpec, the method generateSqlQuery:
public Pair<String, List<SqlStatement.Type>> generateSqlQuery() {
    SqlQuery sqlQuery = newSqlQuery();
    int k = getDistinctMeasureCount();
    final Dialect dialect = sqlQuery.getDialect();
    final Map<String, String> groupingSetsAliases;
    if (!dialect.allowsCountDistinct() && k > 0
        || !dialect.allowsMultipleCountDistinct() && k > 1)
    {
        groupingSetsAliases = distinctGenerateSql(sqlQuery, countOnly);
    } else {
        groupingSetsAliases = nonDistinctGenerateSql(sqlQuery);
    }
    if (!countOnly) {
        addGroupingFunction(sqlQuery);
        addGroupingSets(sqlQuery, groupingSetsAliases);
    }
    return sqlQuery.toSqlAndTypes();
}
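The distinct branch is taken when the dialect either cannot express COUNT(DISTINCT ...) at all and at least one distinct measure is present, or cannot express more than one of them and several are present. Restated with explicit parentheses as a hypothetical helper (the method name is invented; the two capability checks are the Dialect calls from the snippet above):

import mondrian.spi.Dialect;

// Hypothetical helper restating the condition in generateSqlQuery.
private static boolean needsDistinctCountWorkaround(Dialect dialect, int distinctMeasureCount) {
    // Fall back to the special distinct-count SQL when the dialect cannot
    // express COUNT(DISTINCT ...) at all, or cannot express more than one
    // of them in a single query.
    return (!dialect.allowsCountDistinct() && distinctMeasureCount > 0)
        || (!dialect.allowsMultipleCountDistinct() && distinctMeasureCount > 1);
}

Reading it this way makes the asymmetry explicit: a single distinct measure is fine as long as allowsCountDistinct() returns true, but several distinct measures also require allowsMultipleCountDistinct().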