Example usage of mondrian.spi.Dialect in the project "mondrian" by Pentaho, taken from class SqlQueryTest, method testToStringForGroupingSetSqlWithEmptyGroup.
/**
 * Verifies the SQL generated for a query containing an empty grouping set
 * alongside a non-empty one, in both compact (single-line) and formatted
 * (multi-line) output modes.
 */
public void testToStringForGroupingSetSqlWithEmptyGroup() {
    if (!isGroupingSetsSupported()) {
        return;
    }
    final Dialect dialect = getTestContext().getDialect();
    for (boolean formatted : new boolean[] { false, true }) {
        // Reuse the dialect fetched above rather than calling
        // getTestContext().getDialect() a second time.
        SqlQuery sqlQuery = new SqlQuery(dialect, formatted);
        sqlQuery.addSelect("c1", null);
        sqlQuery.addSelect("c2", null);
        sqlQuery.addFromTable("s", "t1", "t1alias", null, null, true);
        sqlQuery.addWhere("a=b");
        sqlQuery.addGroupingFunction("g1");
        sqlQuery.addGroupingFunction("g2");
        ArrayList<String> groupingsetsList = new ArrayList<String>();
        groupingsetsList.add("gs1");
        groupingsetsList.add("gs2");
        groupingsetsList.add("gs3");
        // The first grouping set is deliberately empty: "()" in the SQL.
        sqlQuery.addGroupingSet(new ArrayList<String>());
        sqlQuery.addGroupingSet(groupingsetsList);
        String expected;
        if (formatted) {
            // NOTE(review): this branch hard-codes "\n" while the sibling
            // testToStringForSingleGroupingSetSql builds its expected string
            // with System.getProperty("line.separator") — confirm which
            // separator SqlQuery.toString() actually emits and align the two.
            expected = "select\n" + " c1 as \"c0\",\n" + " c2 as \"c1\",\n" + " grouping(g1) as \"g0\",\n" + " grouping(g2) as \"g1\"\n" + "from\n" + " \"s\".\"t1\" =as= \"t1alias\"\n" + "where\n" + " a=b\n" + "group by grouping sets (\n" + " (),\n" + " (gs1, gs2, gs3))";
        } else {
            expected = "select c1 as \"c0\", c2 as \"c1\", grouping(g1) as \"g0\", " + "grouping(g2) as \"g1\" from \"s\".\"t1\" =as= \"t1alias\" where a=b " + "group by grouping sets ((), (gs1, gs2, gs3))";
        }
        assertEquals(
            dialectize(dialect.getDatabaseProduct(), expected),
            dialectize(sqlQuery.getDialect().getDatabaseProduct(), sqlQuery.toString()));
    }
}
Example usage of mondrian.spi.Dialect in the project "mondrian" by Pentaho, taken from class SqlQueryTest, method testDoubleInList.
/**
 * Verifies that the correct SQL string is generated for literals of
 * SQL type "double".
 *
 * <p>Mondrian only generates SQL DOUBLE values in a special format for
 * LucidDB; therefore, this test is a no-op on other databases.
 */
public void testDoubleInList() {
final Dialect dialect = getTestContext().getDialect();
// Skip unless running against LucidDB: only that dialect gets the
// special "...E0" approximate-literal formatting exercised below.
if (dialect.getDatabaseProduct() != Dialect.DatabaseProduct.LUCIDDB) {
return;
}
// Some members are expected to be unresolvable (see the note above
// loadSqlLucidDB below), so tell Mondrian to tolerate them.
propSaver.set(prop.IgnoreInvalidMembers, true);
propSaver.set(prop.IgnoreInvalidMembersDuringQuery, true);
// Test when the double value itself contains "E"
// (e.g. the member [2.5612648230455093E7] in the query below).
String dimensionSqlExpression = "cast(cast(\"salary\" as double)*cast(1000.0 as double)/cast(3.1234567890123456 as double) as double)\n";
String cubeFirstPart = "<Cube name=\"Sales 3\">\n" + " <Table name=\"sales_fact_1997\"/>\n" + " <Dimension name=\"StoreEmpSalary\" foreignKey=\"store_id\">\n" + " <Hierarchy hasAll=\"true\" allMemberName=\"All Salary\" primaryKey=\"store_id\">\n" + " <Table name=\"employee\"/>\n" + " <Level name=\"Salary\" column=\"salary\" type=\"Numeric\" uniqueMembers=\"true\" approxRowCount=\"10000000\">\n" + " <KeyExpression>\n" + " <SQL dialect=\"luciddb\">\n";
String cubeSecondPart = " </SQL>\n" + " </KeyExpression>\n" + " </Level>\n" + " </Hierarchy>\n" + " </Dimension>" + " <Measure name=\"Store Cost\" column=\"store_cost\" aggregator=\"sum\"/>\n" + "</Cube>";
String cube = cubeFirstPart + dimensionSqlExpression + cubeSecondPart;
String query = "select " + "{[StoreEmpSalary].[All Salary].[6403.162057613773],[StoreEmpSalary].[All Salary].[1184584.980658548],[StoreEmpSalary].[All Salary].[1344664.0320988924], " + " [StoreEmpSalary].[All Salary].[1376679.8423869612],[StoreEmpSalary].[All Salary].[1408695.65267503],[StoreEmpSalary].[All Salary].[1440711.462963099], " + " [StoreEmpSalary].[All Salary].[1456719.3681071333],[StoreEmpSalary].[All Salary].[1472727.2732511677],[StoreEmpSalary].[All Salary].[1488735.1783952022], " + " [StoreEmpSalary].[All Salary].[1504743.0835392366],[StoreEmpSalary].[All Salary].[1536758.8938273056],[StoreEmpSalary].[All Salary].[1600790.5144034433], " + " [StoreEmpSalary].[All Salary].[1664822.134979581],[StoreEmpSalary].[All Salary].[1888932.806996063],[StoreEmpSalary].[All Salary].[1952964.4275722008], " + " [StoreEmpSalary].[All Salary].[1984980.2378602696],[StoreEmpSalary].[All Salary].[2049011.8584364073],[StoreEmpSalary].[All Salary].[2081027.6687244761], " + " [StoreEmpSalary].[All Salary].[2113043.479012545],[StoreEmpSalary].[All Salary].[2145059.289300614],[StoreEmpSalary].[All Salary].[2.5612648230455093E7]} " + " on rows, {[Measures].[Store Cost]} on columns from [Sales 3]";
// Notice there are a few members missing in this sql. This is a LucidDB
// bug with respect to comparisons involving "approximate number literals".
// Mondrian properties "IgnoreInvalidMembers" and
// "IgnoreInvalidMembersDuringQuery" are required for this MDX to
// finish, even though the generated sql (below) and the final result
// are both incorrect.
String loadSqlLucidDB = "select cast(cast(\"salary\" as double)*cast(1000.0 as double)/cast(3.1234567890123456 as double) as double) as \"c0\", " + "sum(\"sales_fact_1997\".\"store_cost\") as \"m0\" " + "from \"employee\" as \"employee\", \"sales_fact_1997\" as \"sales_fact_1997\" " + "where \"sales_fact_1997\".\"store_id\" = \"employee\".\"store_id\" and " + "cast(cast(\"salary\" as double)*cast(1000.0 as double)/cast(3.1234567890123456 as double) as double) in " + "(6403.162057613773E0, 1184584.980658548E0, 1344664.0320988924E0, " + "1376679.8423869612E0, 1408695.65267503E0, 1440711.462963099E0, " + "1456719.3681071333E0, 1488735.1783952022E0, " + "1504743.0835392366E0, 1536758.8938273056E0, " + "1664822.134979581E0, 1888932.806996063E0, 1952964.4275722008E0, " + "1984980.2378602696E0, 2049011.8584364073E0, " + "2113043.479012545E0, 2145059.289300614E0, 2.5612648230455093E7) " + "group by cast(cast(\"salary\" as double)*cast(1000.0 as double)/cast(3.1234567890123456 as double) as double)";
SqlPattern[] patterns = { new SqlPattern(Dialect.DatabaseProduct.LUCIDDB, loadSqlLucidDB, loadSqlLucidDB) };
TestContext testContext = TestContext.instance().create(null, cube, null, null, null, null);
assertQuerySql(testContext, query, patterns);
}
Example usage of mondrian.spi.Dialect in the project "mondrian" by Pentaho, taken from class SqlQueryTest, method testToStringForSingleGroupingSetSql.
/**
 * Checks the SQL text produced for a query with a single grouping set,
 * covering both the compact (single-line) and the pretty-printed
 * (multi-line) rendering modes.
 */
public void testToStringForSingleGroupingSetSql() {
    if (!isGroupingSetsSupported()) {
        return;
    }
    for (boolean pretty : new boolean[] { false, true }) {
        Dialect dialect = getTestContext().getDialect();
        SqlQuery query = new SqlQuery(dialect, pretty);
        query.addSelect("c1", null);
        query.addSelect("c2", null);
        query.addGroupingFunction("gf0");
        query.addFromTable("s", "t1", "t1alias", null, null, true);
        query.addWhere("a=b");
        ArrayList<String> groupingSets = new ArrayList<String>();
        groupingSets.add("gs1");
        groupingSets.add("gs2");
        groupingSets.add("gs3");
        query.addGroupingSet(groupingSets);
        String lineSep = System.getProperty("line.separator");
        final String expected;
        if (pretty) {
            // Pretty-printed mode: each clause on its own line.
            expected = "select" + lineSep
                + " c1 as \"c0\"," + lineSep
                + " c2 as \"c1\"," + lineSep
                + " grouping(gf0) as \"g0\"" + lineSep
                + "from" + lineSep
                + " \"s\".\"t1\" =as= \"t1alias\"" + lineSep
                + "where" + lineSep
                + " a=b" + lineSep
                + "group by grouping sets (" + lineSep
                + " (gs1, gs2, gs3))";
        } else {
            // Compact mode: the whole statement on one line.
            expected = "select c1 as \"c0\", c2 as \"c1\", grouping(gf0) as \"g0\" "
                + "from \"s\".\"t1\" =as= \"t1alias\" where a=b "
                + "group by grouping sets ((gs1, gs2, gs3))";
        }
        assertEquals(
            dialectize(dialect.getDatabaseProduct(), expected),
            dialectize(query.getDialect().getDatabaseProduct(), query.toString()));
    }
}
Example usage of mondrian.spi.Dialect in the project "mondrian" by Pentaho, taken from class CompatibilityTest, method testCaseInsensitiveNullMember.
/**
 * Tests that a #null member on a Hierarchy Level of type String can
 * still be looked up when case sensitive is off.
 */
public void testCaseInsensitiveNullMember() {
final Dialect dialect = getTestContext().getDialect();
if (dialect.getDatabaseProduct() == Dialect.DatabaseProduct.LUCIDDB) {
// NOTE(review): this comment appears truncated in the source ("types
// to apply a CAST.") — presumably LucidDB requires a CAST to give the
// inline-table columns their types, so the test is skipped there.
// Confirm the original rationale.
return;
}
// The test relies on "#null" being the representation of a null member.
if (!isDefaultNullMemberRepresentation()) {
return;
}
final String cubeName = "Sales_inline";
// Inline-table dimension where the second row has no promo_name value,
// producing a null member on the String-typed level.
final TestContext testContext = TestContext.instance().create(null, "<Cube name=\"" + cubeName + "\">\n" + " <Table name=\"sales_fact_1997\"/>\n" + " <DimensionUsage name=\"Time\" source=\"Time\" foreignKey=\"time_id\"/>\n" + " <Dimension name=\"Alternative Promotion\" foreignKey=\"promotion_id\">\n" + " <Hierarchy hasAll=\"true\" primaryKey=\"promo_id\">\n" + " <InlineTable alias=\"alt_promotion\">\n" + " <ColumnDefs>\n" + " <ColumnDef name=\"promo_id\" type=\"Numeric\"/>\n" + " <ColumnDef name=\"promo_name\" type=\"String\"/>\n" + " </ColumnDefs>\n" + " <Rows>\n" + " <Row>\n" + " <Value column=\"promo_id\">0</Value>\n" + " <Value column=\"promo_name\">Promo0</Value>\n" + " </Row>\n" + " <Row>\n" + " <Value column=\"promo_id\">1</Value>\n" + " </Row>\n" + " </Rows>\n" + " </InlineTable>\n" + " <Level name=\"Alternative Promotion\" column=\"promo_name\" uniqueMembers=\"true\"/> \n" + " </Hierarchy>\n" + " </Dimension>\n" + " <Measure name=\"Unit Sales\" column=\"unit_sales\" aggregator=\"sum\"\n" + " formatString=\"Standard\" visible=\"false\"/>\n" + " <Measure name=\"Store Sales\" column=\"store_sales\" aggregator=\"sum\"\n" + " formatString=\"#,###.00\"/>\n" + "</Cube>", null, null, null, null);
// This test should work irrespective of the case-sensitivity setting.
Util.discard(props.CaseSensitive.get());
testContext.assertQueryReturns("select {[Measures].[Unit Sales]} ON COLUMNS,\n" + " {[Alternative Promotion].[#null]} ON ROWS \n" + " from [Sales_inline]", "Axis #0:\n" + "{}\n" + "Axis #1:\n" + "{[Measures].[Unit Sales]}\n" + "Axis #2:\n" + "{[Alternative Promotion].[#null]}\n" + "Row #0: \n");
}
Example usage of mondrian.spi.Dialect in the project "mondrian" by Pentaho, taken from class RolapStatisticsCache, method getColumnCardinality.
/**
 * Returns the cardinality (number of distinct values) of a column,
 * consulting the per-star cache so that the statistics providers are
 * asked at most once per column.
 *
 * @param catalog catalog name, may be null
 * @param schema schema name, may be null
 * @param table table name
 * @param column column name
 * @return the column cardinality, or -1 if no statistics provider could
 *   determine it
 */
private long getColumnCardinality(String catalog, String schema, String table, String column) {
    final List<String> key = Arrays.asList(catalog, schema, table, column);
    // Single lookup instead of containsKey + get. Safe: this method only
    // ever stores unboxed long values, so a null result means "absent".
    final Long cached = columnMap.get(key);
    if (cached != null) {
        return cached;
    }
    long rowCount = -1;
    final Dialect dialect = star.getSqlQueryDialect();
    final List<StatisticsProvider> statisticsProviders = dialect.getStatisticsProviders();
    final Execution execution = new Execution(star.getSchema().getInternalConnection().getInternalStatement(), 0);
    // Ask each provider in turn; the first one with an answer wins.
    for (StatisticsProvider statisticsProvider : statisticsProviders) {
        rowCount = statisticsProvider.getColumnCardinality(dialect, star.getDataSource(), catalog, schema, table, column, execution);
        if (rowCount >= 0) {
            break;
        }
    }
    // Note: If all providers fail, we put -1 into the cache, to ensure
    // that we won't try again.
    columnMap.put(key, rowCount);
    return rowCount;
}
Aggregations