Use of mondrian.rolap.sql.SqlQuery in the project mondrian by Pentaho.
From the class SegmentBuilder, method toHeader:
/**
 * Builds a {@link SegmentHeader} that describes the given segment.
 *
 * @param segment A segment object for which we want to generate
 * a SegmentHeader.
 * @return A SegmentHeader describing the supplied Segment object.
 */
public static SegmentHeader toHeader(Segment segment) {
final List<SegmentColumn> constrainedColumns =
    SegmentBuilder.toConstrainedColumns(segment.predicates);
// Render each compound predicate to its SQL text; the buffer is
// cleared and reused across iterations.
final List<String> compoundPredicateSql = new ArrayList<String>();
final StringBuilder sqlBuf = new StringBuilder();
for (StarPredicate predicate : segment.compoundPredicateList) {
sqlBuf.setLength(0);
final SqlQuery sqlQuery =
    new SqlQuery(segment.star.getSqlQueryDialect());
predicate.toSql(sqlQuery, sqlBuf);
compoundPredicateSql.add(sqlBuf.toString());
}
final RolapSchema schema = segment.star.getSchema();
return new SegmentHeader(
    schema.getName(),
    schema.getChecksum(),
    segment.measure.getCubeName(),
    segment.measure.getName(),
    constrainedColumns,
    compoundPredicateSql,
    segment.star.getFactTable().getAlias(),
    segment.constrainedColumnsBitKey,
    Collections.<SegmentColumn>emptyList());
}
Use of mondrian.rolap.sql.SqlQuery in the project mondrian by Pentaho.
From the class NumberSqlCompilerTest, method setUp:
/**
 * Prepares a {@code NumberSqlCompiler} backed by a mocked SqlQuery
 * whose dialect reports MySQL as its database product.
 */
@Override
public void setUp() throws Exception {
super.setUp();
// Stub a dialect that identifies itself as MySQL.
final Dialect mockDialect = mock(Dialect.class);
when(mockDialect.getDatabaseProduct())
    .thenReturn(Dialect.DatabaseProduct.MYSQL);
// The compiler only needs a query that can hand back the dialect.
final SqlQuery mockQuery = mock(SqlQuery.class);
when(mockQuery.getDialect()).thenReturn(mockDialect);
final RolapNativeSql nativeSql =
    new RolapNativeSql(mockQuery, null, null, null);
compiler = nativeSql.new NumberSqlCompiler();
}
Use of mondrian.rolap.sql.SqlQuery in the project mondrian by Pentaho.
From the class AbstractQuerySpec, method distinctGenerateSql:
/**
 * Generates a SQL query to retrieve the values in this segment using
 * an algorithm which converts distinct-aggregates to non-distinct
 * aggregates over subqueries.
 *
 * @param outerSqlQuery Query to modify
 * @param countOnly If true, only generate a single row: no need to
 * generate a GROUP BY clause or put any constraining columns in the
 * SELECT clause
 * @return A map of aliases used in the inner query if grouping sets
 * were enabled.
 */
protected Map<String, String> distinctGenerateSql(final SqlQuery outerSqlQuery, boolean countOnly) {
final Dialect dialect = outerSqlQuery.getDialect();
final Dialect.DatabaseProduct databaseProduct = dialect.getDatabaseProduct();
// Maps each inner select expression to its quoted "dummyname.<alias>" form.
final Map<String, String> groupingSetsAliases = new HashMap<String, String>();
// Generate something like
//
// select d0, d1, count(m0)
// from (
// select distinct dim1.x as d0, dim2.y as d1, f.z as m0
// from f, dim1, dim2
// where dim1.k = f.k1
// and dim2.k = f.k2) as dummyname
// group by d0, d1
//
// or, if countOnly=true
//
// select count(m0)
// from (
// select distinct f.z as m0
// from f, dim1, dim2
// where dim1.k = f.k1
// and dim2.k = f.k2) as dummyname
final SqlQuery innerSqlQuery = newSqlQuery();
// Greenplum gets GROUP BY in the inner query (added per column below)
// instead of SELECT DISTINCT — presumably a dialect workaround;
// confirm against the Greenplum dialect's limitations.
if (databaseProduct == Dialect.DatabaseProduct.GREENPLUM) {
innerSqlQuery.setDistinct(false);
} else {
innerSqlQuery.setDistinct(true);
}
// add constraining dimensions
RolapStar.Column[] columns = getColumns();
int arity = columns.length;
for (int i = 0; i < arity; i++) {
RolapStar.Column column = columns[i];
RolapStar.Table table = column.getTable();
if (table.isFunky()) {
// this is a funky dimension -- ignore for now
continue;
}
table.addToFrom(innerSqlQuery, false, true);
String expr = column.generateExprString(innerSqlQuery);
StarColumnPredicate predicate = getColumnPredicate(i);
final String where = RolapStar.Column.createInExpr(expr, predicate, column.getDatatype(), innerSqlQuery);
// The literal string "true" means the predicate imposes no
// constraint, so no WHERE clause is needed for this column.
if (!where.equals("true")) {
innerSqlQuery.addWhere(where);
}
// In countOnly mode no constraining column appears in the SELECT list.
if (countOnly) {
continue;
}
// Constraining columns are aliased d0, d1, ... in the inner query and
// referenced by that alias from the outer query's SELECT/GROUP BY.
String alias = "d" + i;
alias = innerSqlQuery.addSelect(expr, null, alias);
if (databaseProduct == Dialect.DatabaseProduct.GREENPLUM) {
innerSqlQuery.addGroupBy(expr, alias);
}
final String quotedAlias = dialect.quoteIdentifier(alias);
outerSqlQuery.addSelectGroupBy(quotedAlias, null);
// Add this alias to the map of grouping sets aliases
groupingSetsAliases.put(expr, dialect.quoteIdentifier("dummyname." + alias));
}
// add predicates not associated with columns
extraPredicates(innerSqlQuery);
// add measures
for (int i = 0, count = getMeasureCount(); i < count; i++) {
RolapStar.Measure measure = getMeasure(i);
// Every measure of this spec must live on the star's fact table.
Util.assertTrue(measure.getTable() == getStar().getFactTable());
measure.getTable().addToFrom(innerSqlQuery, false, true);
String alias = getMeasureAlias(i);
// NOTE(review): the expression is generated against the OUTER query
// but added to the INNER query's select list — looks deliberate (the
// alias must resolve in the outer scope), but worth confirming.
String expr = measure.generateExprString(outerSqlQuery);
innerSqlQuery.addSelect(expr, measure.getInternalType(), alias);
if (databaseProduct == Dialect.DatabaseProduct.GREENPLUM) {
innerSqlQuery.addGroupBy(expr, alias);
}
// The outer query applies the non-distinct form of the measure's
// aggregator (e.g. distinct-count collapses to plain count) over the
// already-deduplicated inner rows.
outerSqlQuery.addSelect(measure.getAggregator().getNonDistinctAggregator().getExpression(dialect.quoteIdentifier(alias)), measure.getInternalType());
}
// Inline the inner query as subquery "dummyname" in the outer FROM clause.
outerSqlQuery.addFrom(innerSqlQuery, "dummyname", true);
return groupingSetsAliases;
}
Use of mondrian.rolap.sql.SqlQuery in the project mondrian by Pentaho.
From the class AggQuerySpec, method generateSqlQuery:
/**
 * Builds the SQL for this aggregate query spec and returns the
 * statement text together with the expected column types.
 */
public Pair<String, List<Type>> generateSqlQuery() {
final SqlQuery query = newSqlQuery();
generateSql(query);
return query.toSqlAndTypes();
}
Use of mondrian.rolap.sql.SqlQuery in the project mondrian by Pentaho.
From the class AggGen, method insertIntoLost:
/**
 * Return the sql code to populate a lost dimension table from the fact
 * table.
 *
 * <p>The generated statement has the shape:
 * {@code INSERT INTO <lost-agg-table> (<cols>, <measures>, <fact_count>)
 * SELECT <cols>, <aggregated measures>, COUNT(*) FROM <fact>
 * GROUP BY <cols>;}
 *
 * @return the INSERT ... SELECT statement as a single string
 */
public String insertIntoLost() {
StringWriter sw = new StringWriter(512);
PrintWriter pw = new PrintWriter(sw);
String prefix = " ";
String factTableName = getFactTableName();
SqlQuery sqlQuery = getSqlQuery();
// INSERT column list: constraining columns, then measures, then fact_count.
pw.print("INSERT INTO ");
pw.print(makeLostAggregateTableName(getFactTableName()));
pw.println(" (");
for (JdbcSchema.Table.Column.Usage usage : notLostColumnUsages) {
JdbcSchema.Table.Column c = usage.getColumn();
pw.print(prefix);
pw.print(c.getName());
pw.println(',');
}
for (JdbcSchema.Table.Column.Usage usage : measures) {
pw.print(prefix);
// Measures may carry a usage-specific name distinct from the column name.
pw.print(getUsageName(usage));
pw.println(',');
}
// do fact_count
pw.print(prefix);
pw.print(getFactCount());
pw.println(")");
// SELECT list: mirror the INSERT list, qualifying and quoting each
// column and wrapping each measure in its aggregator expression.
pw.println("SELECT");
for (JdbcSchema.Table.Column.Usage usage : notLostColumnUsages) {
JdbcSchema.Table.Column c = usage.getColumn();
pw.print(prefix);
pw.print(sqlQuery.getDialect().quoteIdentifier(factTableName, c.getName()));
pw.print(" AS ");
pw.print(sqlQuery.getDialect().quoteIdentifier(c.getName()));
pw.println(',');
}
for (JdbcSchema.Table.Column.Usage usage : measures) {
JdbcSchema.Table.Column c = usage.getColumn();
RolapAggregator agg = usage.getAggregator();
pw.print(prefix);
pw.print(agg.getExpression(sqlQuery.getDialect().quoteIdentifier(factTableName, c.getName())));
pw.print(" AS ");
pw.print(sqlQuery.getDialect().quoteIdentifier(c.getName()));
pw.println(',');
}
// do fact_count
pw.print(prefix);
pw.print("COUNT(*) AS ");
pw.println(sqlQuery.getDialect().quoteIdentifier(getFactCount()));
// FROM clause: the fact table, aliased to its own (quoted) name.
pw.println("FROM ");
pw.print(prefix);
pw.print(sqlQuery.getDialect().quoteIdentifier(factTableName));
pw.print(" ");
pw.println(sqlQuery.getDialect().quoteIdentifier(factTableName));
// GROUP BY every constraining (non-lost) column, comma-separated.
pw.println("GROUP BY ");
boolean first = true;
for (JdbcSchema.Table.Column.Usage usage : notLostColumnUsages) {
if (!first) {
pw.println(",");
}
first = false;
JdbcSchema.Table.Column c = usage.getColumn();
pw.print(prefix);
pw.print(sqlQuery.getDialect().quoteIdentifier(factTableName, c.getName()));
}
pw.println(';');
return sw.toString();
}
End of aggregated SqlQuery usages.