Usage of mondrian.spi.Dialect in the Pentaho mondrian project.
From class FastBatchingCellReaderTest, method testLoadDistinctSqlMeasure.
/**
 * Checks that in dialects that request it (e.g. LucidDB),
 * distinct aggregates based on SQL expressions,
 * e.g. <code>count(distinct "col1" + "col2"), count(distinct query)</code>,
 * are loaded individually, and separately from the other aggregates.
 */
public void testLoadDistinctSqlMeasure() {
// Some databases cannot handle scalar subqueries inside
// count(distinct). Skip the test for those dialects.
final Dialect dialect = getTestContext().getDialect();
switch(dialect.getDatabaseProduct()) {
case ORACLE:
// Oracle gives 'feature not supported' in Express 10.2
case ACCESS:
case TERADATA:
// Teradata gives a syntax error mentioning "the 'select' keyword." in 12.0.
// (comment truncated in this extract — TODO confirm exact message upstream)
case NEOVIEW:
// Neoview rejects a subquery inside an "aggregate function."
// (comment truncated in this extract — TODO confirm exact message upstream)
case NETEZZA:
// Netezza reports the construct as not "supported"
// (comment truncated in this extract — TODO confirm exact message upstream)
case GREENPLUM:
// Greenplum says 'Does not support yet that query'
case VERTICA:
// Vertica says "Aggregate function calls cannot contain subqueries"
return;
}
// Inline cube definition: measures use SQL expressions (correlated scalar
// subqueries and `store_id`+`warehouse_id` arithmetic) under both the
// 'count' and 'distinct count' aggregators. Back-ticks act as placeholders
// for the dialect's identifier quote string (substituted below).
String cube = "<Cube name=\"Warehouse2\">" + " <Table name=\"warehouse\"/>" + " <DimensionUsage name=\"Store Type\" source=\"Store Type\" foreignKey=\"stores_id\"/>" + " <Measure name=\"Count Distinct of Warehouses (Large Owned)\" aggregator=\"distinct count\" formatString=\"#,##0\">" + " <MeasureExpression>" + " <SQL dialect=\"generic\">(select `warehouse_class`.`warehouse_class_id` AS `warehouse_class_id` from `warehouse_class` AS `warehouse_class` where `warehouse_class`.`warehouse_class_id` = `warehouse`.`warehouse_class_id` and `warehouse_class`.`description` = 'Large Owned')</SQL>" + " </MeasureExpression>" + " </Measure>" + " <Measure name=\"Count Distinct of Warehouses (Large Independent)\" aggregator=\"distinct count\" formatString=\"#,##0\">" + " <MeasureExpression>" + " <SQL dialect=\"generic\">(select `warehouse_class`.`warehouse_class_id` AS `warehouse_class_id` from `warehouse_class` AS `warehouse_class` where `warehouse_class`.`warehouse_class_id` = `warehouse`.`warehouse_class_id` and `warehouse_class`.`description` = 'Large Independent')</SQL>" + " </MeasureExpression>" + " </Measure>" + " <Measure name=\"Count All of Warehouses (Large Independent)\" aggregator=\"count\" formatString=\"#,##0\">" + " <MeasureExpression>" + " <SQL dialect=\"generic\">(select `warehouse_class`.`warehouse_class_id` AS `warehouse_class_id` from `warehouse_class` AS `warehouse_class` where `warehouse_class`.`warehouse_class_id` = `warehouse`.`warehouse_class_id` and `warehouse_class`.`description` = 'Large Independent')</SQL>" + " </MeasureExpression>" + " </Measure>" + " <Measure name=\"Count Distinct Store+Warehouse\" aggregator=\"distinct count\" formatString=\"#,##0\">" + " <MeasureExpression><SQL dialect=\"generic\">`store_id`+`warehouse_id`</SQL></MeasureExpression>" + " </Measure>" + " <Measure name=\"Count All Store+Warehouse\" aggregator=\"count\" formatString=\"#,##0\">" + " <MeasureExpression><SQL 
dialect=\"generic\">`store_id`+`warehouse_id`</SQL></MeasureExpression>" + " </Measure>" + " <Measure name=\"Store Count\" column=\"stores_id\" aggregator=\"count\" formatString=\"#,###\"/>" + "</Cube>";
// Substitute the back-tick placeholders with the dialect's real quote string.
cube = cube.replaceAll("`", dialect.getQuoteIdentifierString());
if (dialect.getDatabaseProduct() == Dialect.DatabaseProduct.ORACLE) {
// Oracle does not accept the 'AS' keyword between a table and its alias.
cube = cube.replaceAll(" AS ", " ");
}
// MDX query that touches all six measures so the batch loader must decide
// how to group the distinct and non-distinct aggregates.
String query = "select " + " [Store Type].Children on rows, " + " {[Measures].[Count Distinct of Warehouses (Large Owned)]," + " [Measures].[Count Distinct of Warehouses (Large Independent)]," + " [Measures].[Count All of Warehouses (Large Independent)]," + " [Measures].[Count Distinct Store+Warehouse]," + " [Measures].[Count All Store+Warehouse]," + " [Measures].[Store Count]} on columns " + "from [Warehouse2]";
TestContext testContext = TestContext.instance().create(null, cube, null, null, null, null);
// First verify the cell values themselves are correct.
String desiredResult = "Axis #0:\n" + "{}\n" + "Axis #1:\n" + "{[Measures].[Count Distinct of Warehouses (Large Owned)]}\n" + "{[Measures].[Count Distinct of Warehouses (Large Independent)]}\n" + "{[Measures].[Count All of Warehouses (Large Independent)]}\n" + "{[Measures].[Count Distinct Store+Warehouse]}\n" + "{[Measures].[Count All Store+Warehouse]}\n" + "{[Measures].[Store Count]}\n" + "Axis #2:\n" + "{[Store Type].[Deluxe Supermarket]}\n" + "{[Store Type].[Gourmet Supermarket]}\n" + "{[Store Type].[HeadQuarters]}\n" + "{[Store Type].[Mid-Size Grocery]}\n" + "{[Store Type].[Small Grocery]}\n" + "{[Store Type].[Supermarket]}\n" + "Row #0: 1\n" + "Row #0: 0\n" + "Row #0: 0\n" + "Row #0: 6\n" + "Row #0: 6\n" + "Row #0: 6\n" + "Row #1: 1\n" + "Row #1: 0\n" + "Row #1: 0\n" + "Row #1: 2\n" + "Row #1: 2\n" + "Row #1: 2\n" + "Row #2: \n" + "Row #2: \n" + "Row #2: \n" + "Row #2: \n" + "Row #2: \n" + "Row #2: \n" + "Row #3: 0\n" + "Row #3: 1\n" + "Row #3: 1\n" + "Row #3: 4\n" + "Row #3: 4\n" + "Row #3: 4\n" + "Row #4: 0\n" + "Row #4: 1\n" + "Row #4: 1\n" + "Row #4: 4\n" + "Row #4: 4\n" + "Row #4: 4\n" + "Row #5: 0\n" + "Row #5: 1\n" + "Row #5: 3\n" + "Row #5: 8\n" + "Row #5: 8\n" + "Row #5: 8\n";
testContext.assertQueryReturns(query, desiredResult);
// LucidDB: each subquery-based distinct aggregate is loaded in its own
// statement; the remaining aggregates are batched into one statement.
String loadCountDistinct_luciddb1 = "select " + "\"store\".\"store_type\" as \"c0\", " + "count(distinct " + "(select \"warehouse_class\".\"warehouse_class_id\" AS \"warehouse_class_id\" " + "from \"warehouse_class\" AS \"warehouse_class\" " + "where \"warehouse_class\".\"warehouse_class_id\" = \"warehouse\".\"warehouse_class_id\" and \"warehouse_class\".\"description\" = 'Large Owned')) as \"m0\" " + "from \"store\" as \"store\", \"warehouse\" as \"warehouse\" " + "where \"warehouse\".\"stores_id\" = \"store\".\"store_id\" " + "group by \"store\".\"store_type\"";
String loadCountDistinct_luciddb2 = "select " + "\"store\".\"store_type\" as \"c0\", " + "count(distinct " + "(select \"warehouse_class\".\"warehouse_class_id\" AS \"warehouse_class_id\" " + "from \"warehouse_class\" AS \"warehouse_class\" " + "where \"warehouse_class\".\"warehouse_class_id\" = \"warehouse\".\"warehouse_class_id\" and \"warehouse_class\".\"description\" = 'Large Independent')) as \"m0\" " + "from \"store\" as \"store\", \"warehouse\" as \"warehouse\" " + "where \"warehouse\".\"stores_id\" = \"store\".\"store_id\" " + "group by \"store\".\"store_type\"";
String loadOtherAggs_luciddb = "select " + "\"store\".\"store_type\" as \"c0\", " + "count(" + "(select \"warehouse_class\".\"warehouse_class_id\" AS \"warehouse_class_id\" " + "from \"warehouse_class\" AS \"warehouse_class\" " + "where \"warehouse_class\".\"warehouse_class_id\" = \"warehouse\".\"warehouse_class_id\" and \"warehouse_class\".\"description\" = 'Large Independent')) as \"m0\", " + "count(distinct \"store_id\"+\"warehouse_id\") as \"m1\", " + "count(\"store_id\"+\"warehouse_id\") as \"m2\", " + "count(\"warehouse\".\"stores_id\") as \"m3\" " + "from \"store\" as \"store\", \"warehouse\" as \"warehouse\" " + "where \"warehouse\".\"stores_id\" = \"store\".\"store_id\" " + "group by \"store\".\"store_type\"";
// Derby splits into multiple statements.
String loadCountDistinct_derby1 = "select \"store\".\"store_type\" as \"c0\", count(distinct (select \"warehouse_class\".\"warehouse_class_id\" AS \"warehouse_class_id\" from \"warehouse_class\" AS \"warehouse_class\" where \"warehouse_class\".\"warehouse_class_id\" = \"warehouse\".\"warehouse_class_id\" and \"warehouse_class\".\"description\" = 'Large Owned')) as \"m0\" from \"store\" as \"store\", \"warehouse\" as \"warehouse\" where \"warehouse\".\"stores_id\" = \"store\".\"store_id\" group by \"store\".\"store_type\"";
String loadCountDistinct_derby2 = "select \"store\".\"store_type\" as \"c0\", count(distinct (select \"warehouse_class\".\"warehouse_class_id\" AS \"warehouse_class_id\" from \"warehouse_class\" AS \"warehouse_class\" where \"warehouse_class\".\"warehouse_class_id\" = \"warehouse\".\"warehouse_class_id\" and \"warehouse_class\".\"description\" = 'Large Independent')) as \"m0\" from \"store\" as \"store\", \"warehouse\" as \"warehouse\" where \"warehouse\".\"stores_id\" = \"store\".\"store_id\" group by \"store\".\"store_type\"";
String loadCountDistinct_derby3 = "select \"store\".\"store_type\" as \"c0\", count(distinct \"store_id\"+\"warehouse_id\") as \"m0\" from \"store\" as \"store\", \"warehouse\" as \"warehouse\" where \"warehouse\".\"stores_id\" = \"store\".\"store_id\" group by \"store\".\"store_type\"";
String loadOtherAggs_derby = "select \"store\".\"store_type\" as \"c0\", count((select \"warehouse_class\".\"warehouse_class_id\" AS \"warehouse_class_id\" from \"warehouse_class\" AS \"warehouse_class\" where \"warehouse_class\".\"warehouse_class_id\" = \"warehouse\".\"warehouse_class_id\" and \"warehouse_class\".\"description\" = 'Large Independent')) as \"m0\", count(\"store_id\"+\"warehouse_id\") as \"m1\", count(\"warehouse\".\"stores_id\") as \"m2\" from \"store\" as \"store\", \"warehouse\" as \"warehouse\" where \"warehouse\".\"stores_id\" = \"store\".\"store_id\" group by \"store\".\"store_type\"";
// MySQL does it in one statement.
String load_mysql = "select" + " `store`.`store_type` as `c0`," + " count(distinct (select `warehouse_class`.`warehouse_class_id` AS `warehouse_class_id` from `warehouse_class` AS `warehouse_class` where `warehouse_class`.`warehouse_class_id` = `warehouse`.`warehouse_class_id` and `warehouse_class`.`description` = 'Large Owned')) as `m0`," + " count(distinct (select `warehouse_class`.`warehouse_class_id` AS `warehouse_class_id` from `warehouse_class` AS `warehouse_class` where `warehouse_class`.`warehouse_class_id` = `warehouse`.`warehouse_class_id` and `warehouse_class`.`description` = 'Large Independent')) as `m1`," + " count((select `warehouse_class`.`warehouse_class_id` AS `warehouse_class_id` from `warehouse_class` AS `warehouse_class` where `warehouse_class`.`warehouse_class_id` = `warehouse`.`warehouse_class_id` and `warehouse_class`.`description` = 'Large Independent')) as `m2`," + " count(distinct `store_id`+`warehouse_id`) as `m3`," + " count(`store_id`+`warehouse_id`) as `m4`," + " count(`warehouse`.`stores_id`) as `m5` " + "from `store` as `store`," + " `warehouse` as `warehouse` " + "where `warehouse`.`stores_id` = `store`.`store_id` " + "group by `store`.`store_type`";
// Expect the dialect-appropriate set of statements to be generated.
SqlPattern[] patterns = { new SqlPattern(Dialect.DatabaseProduct.LUCIDDB, loadCountDistinct_luciddb1, loadCountDistinct_luciddb1), new SqlPattern(Dialect.DatabaseProduct.LUCIDDB, loadCountDistinct_luciddb2, loadCountDistinct_luciddb2), new SqlPattern(Dialect.DatabaseProduct.LUCIDDB, loadOtherAggs_luciddb, loadOtherAggs_luciddb), new SqlPattern(Dialect.DatabaseProduct.DERBY, loadCountDistinct_derby1, loadCountDistinct_derby1), new SqlPattern(Dialect.DatabaseProduct.DERBY, loadCountDistinct_derby2, loadCountDistinct_derby2), new SqlPattern(Dialect.DatabaseProduct.DERBY, loadCountDistinct_derby3, loadCountDistinct_derby3), new SqlPattern(Dialect.DatabaseProduct.DERBY, loadOtherAggs_derby, loadOtherAggs_derby), new SqlPattern(Dialect.DatabaseProduct.MYSQL, load_mysql, load_mysql) };
assertQuerySql(testContext, query, patterns);
}
Usage of mondrian.spi.Dialect in the Pentaho mondrian project.
From class SqlStatementTest, method testGetDialect.
/**
 * Verifies that {@code SqlStatement.getDialect(schema)} hands back the
 * very dialect instance exposed by the schema, and never null.
 */
public void testGetDialect() {
    // Stub a schema whose getDialect() yields a known mock dialect.
    RolapSchema mockSchema = mock(RolapSchema.class);
    Dialect expectedDialect = mock(Dialect.class);
    when(mockSchema.getDialect()).thenReturn(expectedDialect);
    // The statement under test must surface exactly that dialect.
    Dialect actualDialect = statement.getDialect(mockSchema);
    assertNotNull(actualDialect);
    assertEquals(expectedDialect, actualDialect);
}
Usage of mondrian.spi.Dialect in the Pentaho mondrian project.
From class TestAggregationManager, method testNonEmptyCrossJoinLoneAxis.
/**
 * Tests that a NonEmptyCrossJoin uses the measure referenced by the query
 * (Store Sales) instead of the default measure (Unit Sales) in the case
 * where the query only has one result axis. The setup here is necessarily
 * elaborate because the original bug was quite arbitrary.
 */
public void testNonEmptyCrossJoinLoneAxis() {
// Not sure what this test is checking.
// For now, only run it for derby.
final Dialect dialect = getTestContext().getDialect();
if (dialect.getDatabaseProduct() != Dialect.DatabaseProduct.DERBY) {
return;
}
// Single-axis query whose NECJ set aggregates over Store and Product;
// only [Store Sales] appears on the lone columns axis.
String mdxQuery = "With " + "Set [*NATIVE_CJ_SET] as " + "'NonEmptyCrossJoin([*BASE_MEMBERS_Store],[*BASE_MEMBERS_Product])' " + "Set [*BASE_MEMBERS_Store] as '{[Store].[All Stores].[USA]}' " + "Set [*GENERATED_MEMBERS_Store] as " + "'Generate([*NATIVE_CJ_SET], {[Store].CurrentMember})' " + "Set [*BASE_MEMBERS_Product] as " + "'{[Product].[All Products].[Food],[Product].[All Products].[Drink]}' " + "Set [*GENERATED_MEMBERS_Product] as " + "'Generate([*NATIVE_CJ_SET], {[Product].CurrentMember})' " + "Member [Store].[*FILTER_MEMBER] as 'Aggregate ([*GENERATED_MEMBERS_Store])' " + "Member [Product].[*FILTER_MEMBER] as 'Aggregate ([*GENERATED_MEMBERS_Product])' " + "Select {[Measures].[Store Sales]} on columns " + "From [Sales] " + "Where ([Store].[*FILTER_MEMBER], [Product].[*FILTER_MEMBER])";
// SQL that would load the WRONG measure (sum of unit_sales); the test
// asserts below that this statement is NOT issued.
String derbySql = "select " + "\"store\".\"store_country\" as \"c0\", " + "\"time_by_day\".\"the_year\" as \"c1\", " + "\"product_class\".\"product_family\" as \"c2\", " + "sum(\"sales_fact_1997\".\"unit_sales\") as \"m0\" " + "from " + "\"store\" as \"store\", " + "\"sales_fact_1997\" as \"sales_fact_1997\", " + "\"time_by_day\" as \"time_by_day\", " + "\"product_class\" as \"product_class\", " + "\"product\" as \"product\" " + "where " + "\"sales_fact_1997\".\"store_id\" = \"store\".\"store_id\" and " + "\"store\".\"store_country\" = 'USA' and " + "\"sales_fact_1997\".\"time_id\" = \"time_by_day\".\"time_id\" and " + "\"time_by_day\".\"the_year\" = 1997 and " + "\"sales_fact_1997\".\"product_id\" = \"product\".\"product_id\" and " + "\"product\".\"product_class_id\" = \"product_class\".\"product_class_id\" " + "group by " + "\"store\".\"store_country\", \"time_by_day\".\"the_year\", " + "\"product_class\".\"product_family\"";
SqlPattern[] patterns = { new SqlPattern(Dialect.DatabaseProduct.DERBY, derbySql, derbySql) };
// For derby, the TestAggregationManager.testNonEmptyCrossJoinLoneAxis
// test fails if the non-empty crossjoin optimizer is used.
// With it on one gets a recursive call coming through the
// RolapEvaluator.getCachedResult.
assertNoQuerySql(mdxQuery, patterns);
}
Usage of mondrian.spi.Dialect in the Pentaho mondrian project.
From class VirtualCubeTest, method testNativeSetCaching.
/**
 * Checks that native set caching considers base cubes in the cache key.
 * Native sets referencing different base cubes do not share the cached
 * result.
 */
public void testNativeSetCaching() {
// Only need to run this against one db to verify caching
// behavior is correct.
final Dialect dialect = getTestContext().getDialect();
if (dialect.getDatabaseProduct() != Dialect.DatabaseProduct.DERBY) {
return;
}
if (!MondrianProperties.instance().EnableNativeCrossJoin.get() && !MondrianProperties.instance().EnableNativeNonEmpty.get()) {
// Only run if at least one native-evaluation feature is enabled.
return;
}
// Two queries with an identical NECJ set but different measures, hence
// different base cubes (sales vs. inventory) within the virtual cube.
String query1 = "With " + "Set [*NATIVE_CJ_SET] as 'NonEmptyCrossJoin([Product].[Product Family].Members, [Store].[Store Country].Members)' " + "Select " + "{[Store Sales]} on columns, " + "Non Empty Generate([*NATIVE_CJ_SET], {([Product].CurrentMember,[Store].CurrentMember)}) on rows " + "From [Warehouse and Sales]";
String query2 = "With " + "Set [*NATIVE_CJ_SET] as 'NonEmptyCrossJoin([Product].[Product Family].Members, [Store].[Store Country].Members)' " + "Select " + "{[Warehouse Sales]} on columns, " + "Non Empty Generate([*NATIVE_CJ_SET], {([Product].CurrentMember,[Store].CurrentMember)}) on rows " + "From [Warehouse and Sales]";
String derbyNecjSql1, derbyNecjSql2;
if (MondrianProperties.instance().EnableNativeCrossJoin.get()) {
// Native NECJ: each query joins against its own fact table.
derbyNecjSql1 = "select " + "\"product_class\".\"product_family\", " + "\"store\".\"store_country\" " + "from " + "\"product\" as \"product\", " + "\"product_class\" as \"product_class\", " + "\"sales_fact_1997\" as \"sales_fact_1997\", " + "\"store\" as \"store\" " + "where " + "\"product\".\"product_class_id\" = \"product_class\".\"product_class_id\" " + "and \"sales_fact_1997\".\"product_id\" = \"product\".\"product_id\" " + "and \"sales_fact_1997\".\"store_id\" = \"store\".\"store_id\" " + "group by \"product_class\".\"product_family\", \"store\".\"store_country\" " + "order by 1 ASC, 2 ASC";
derbyNecjSql2 = "select " + "\"product_class\".\"product_family\", " + "\"store\".\"store_country\" " + "from " + "\"product\" as \"product\", " + "\"product_class\" as \"product_class\", " + "\"inventory_fact_1997\" as \"inventory_fact_1997\", " + "\"store\" as \"store\" " + "where " + "\"product\".\"product_class_id\" = \"product_class\".\"product_class_id\" " + "and \"inventory_fact_1997\".\"product_id\" = \"product\".\"product_id\" " + "and \"inventory_fact_1997\".\"store_id\" = \"store\".\"store_id\" " + "group by \"product_class\".\"product_family\", \"store\".\"store_country\" " + "order by 1 ASC, 2 ASC";
} else {
// NECJ is turned off so native NECJ SQL will not be generated;
// however, because the NECJ set should not find match in the cache,
// each NECJ input will still be joined with the correct
// fact table if NonEmpty condition is natively evaluated.
derbyNecjSql1 = "select " + "\"store\".\"store_country\" " + "from " + "\"store\" as \"store\", " + "\"sales_fact_1997\" as \"sales_fact_1997\" " + "where " + "\"sales_fact_1997\".\"store_id\" = \"store\".\"store_id\" " + "group by \"store\".\"store_country\" " + "order by 1 ASC";
derbyNecjSql2 = "select " + "\"store\".\"store_country\" " + "from " + "\"store\" as \"store\", " + "\"inventory_fact_1997\" as \"inventory_fact_1997\" " + "where " + "\"inventory_fact_1997\".\"store_id\" = \"store\".\"store_id\" " + "group by \"store\".\"store_country\" " + "order by 1 ASC";
}
SqlPattern[] patterns1 = { new SqlPattern(Dialect.DatabaseProduct.DERBY, derbyNecjSql1, derbyNecjSql1) };
SqlPattern[] patterns2 = { new SqlPattern(Dialect.DatabaseProduct.DERBY, derbyNecjSql2, derbyNecjSql2) };
// Run query 1 with cleared cache;
// Make sure NECJ 1 is evaluated natively.
assertQuerySql(query1, patterns1, true);
// Now run query 2 with warm cache;
// Make sure NECJ 2 does not reuse the cache result from NECJ 1, and
// NECJ 2 is evaluated natively.
assertQuerySql(query2, patterns2, false);
}
Usage of mondrian.spi.Dialect in the Pentaho mondrian project.
From class BatchTestCase, method assertRequestSql.
/**
 * Checks that a given sequence of cell requests results in a
 * particular SQL statement being generated.
 *
 * <p>Always clears the cache before running the requests.
 *
 * <p>Runs the requests once for each SQL pattern in the current
 * dialect. If there are multiple patterns, runs the MDX query multiple
 * times, and expects to see each SQL statement appear. If there are no
 * patterns in this dialect, the test trivially succeeds.
 *
 * @param requests Sequence of cell requests
 * @param patterns Set of patterns
 * @param negative Set to false in order to 'expect' a query or
 * true to 'forbid' a query.
 */
protected void assertRequestSql(
    CellRequest[] requests, SqlPattern[] patterns, boolean negative)
{
    final RolapStar star = requests[0].getMeasure().getStar();
    final String cubeName = requests[0].getMeasure().getCubeName();
    final RolapCube cube = lookupCube(cubeName);
    final Dialect sqlDialect = star.getSqlQueryDialect();
    Dialect.DatabaseProduct d = sqlDialect.getDatabaseProduct();
    SqlPattern sqlPattern = SqlPattern.getPattern(d, patterns);
    if (d == Dialect.DatabaseProduct.UNKNOWN) {
        // If the dialect is not recognized, do not run the
        // test. We do not print any warning message.
        return;
    }
    boolean patternFound = false;
    for (SqlPattern pattern : patterns) {
        if (!pattern.hasDatabaseProduct(d)) {
            continue;
        }
        patternFound = true;
        clearCache(cube);
        // NOTE(review): the loop iterates 'patterns' but always reads the
        // SQL from 'sqlPattern' (the first pattern matching this dialect).
        // This mirrors the original code, but confirm it is intentional.
        String sql = sqlPattern.getSql();
        String trigger = sqlPattern.getTriggerSql();
        // Expand the " =as= " alias placeholder per dialect: Oracle takes
        // no keyword between table and alias, Teradata requires 'as'.
        switch (d) {
        case ORACLE:
            sql = sql.replaceAll(" =as= ", " ");
            trigger = trigger.replaceAll(" =as= ", " ");
            break;
        case TERADATA:
            sql = sql.replaceAll(" =as= ", " as ");
            trigger = trigger.replaceAll(" =as= ", " as ");
            break;
        }
        // Create a dummy DataSource which will throw a 'bomb' if it is
        // asked to execute a particular SQL statement, but will otherwise
        // behave exactly the same as the current DataSource.
        RolapUtil.setHook(new TriggerHook(trigger));
        Bomb bomb;
        final Execution execution =
            new Execution(
                ((RolapConnection) getConnection()).getInternalStatement(),
                1000);
        final AggregationManager aggMgr =
            execution.getMondrianStatement()
                .getMondrianConnection()
                .getServer()
                .getAggregationManager();
        final Locus locus =
            new Locus(execution, "BatchTestCase", "BatchTestCase");
        try {
            FastBatchingCellReader fbcr =
                new FastBatchingCellReader(
                    execution, getCube(cubeName), aggMgr);
            for (CellRequest request : requests) {
                fbcr.recordCellRequest(request);
            }
            // The FBCR will presume there is a current Locus in the stack,
            // so let's create a mock one.
            Locus.push(locus);
            fbcr.loadAggregations();
            bomb = null;
        } catch (Bomb e) {
            bomb = e;
        } catch (RuntimeException e) {
            // Walk up the exception tree and see if the root cause
            // was a SQL bomb.
            bomb = Util.getMatchingCause(e, Bomb.class);
            if (bomb == null) {
                throw e;
            }
        } finally {
            // Always detach the hook and pop the mock Locus, even on failure.
            RolapUtil.setHook(null);
            Locus.pop(locus);
        }
        if (negative) {
            // Forbidden-query mode: the trigger must NOT have fired.
            if (bomb != null) {
                fail("forbidden query [" + sql + "] detected");
            }
        } else {
            // Expected-query mode: the trigger must have fired and the
            // captured SQL must match. Guarding the comparison here fixes
            // an NPE in the flattened original, where a successful
            // negative run reached 'bomb.sql' with bomb == null.
            if (bomb == null) {
                fail("expected query [" + sql + "] did not occur");
            }
            TestContext.assertEqualsVerbose(
                replaceQuotes(sql), replaceQuotes(bomb.sql));
        }
    }
    // Optionally warn when no expected SQL exists for the current dialect.
    if (!patternFound) {
        String warnDialect =
            MondrianProperties.instance().WarnIfNoPatternForDialect.get();
        if (warnDialect.equals(d.toString())) {
            System.out.println(
                "[No expected SQL statements found for dialect \""
                + sqlDialect.toString() + "\" and test not run]");
        }
    }
}
Aggregations