Use of io.questdb.griffin.engine.analytic.CachedAnalyticRecordCursorFactory in the questdb project by bluestreak01.
The following shows the generateSelectAnalytic method of the SqlCodeGenerator class.
/**
 * Generates a {@code RecordCursorFactory} for a SELECT model that contains analytic
 * (window) function columns, wrapping the sub-query factory in a
 * {@code CachedAnalyticRecordCursorFactory}.
 * <p>
 * The method builds two metadata views over the base cursor:
 * <ul>
 *   <li>{@code factoryMetadata} — only the columns the outer query selects
 *       (non-analytic columns plus one slot per analytic column);</li>
 *   <li>{@code chainMetadata} — the selected columns first, followed by every
 *       remaining base column, so the record chain can feed partition-by and
 *       order-by expressions that reference columns the query does not project.</li>
 * </ul>
 * NOTE(review): this method mutates several fields of the enclosing class
 * ({@code grouppedAnalytic}, {@code valueTypes}, {@code keyTypes},
 * {@code listColumnFilterA/B}, {@code entityColumnFilter}) — presumably it is
 * only ever called from the single-threaded SQL compilation path; confirm.
 *
 * @param model            query model whose column list may contain {@code AnalyticColumn}s
 * @param executionContext execution context; its analytic context is (re)configured
 *                         per analytic function before parsing it
 * @return factory producing the analytic result set over the generated sub-query
 * @throws SqlException if an analytic function AST has too many arguments, or if
 *                      parsing any partition-by / function expression fails
 */
private RecordCursorFactory generateSelectAnalytic(QueryModel model, SqlExecutionContext executionContext) throws SqlException {
final RecordCursorFactory base = generateSubQuery(model, executionContext);
final RecordMetadata baseMetadata = base.getMetadata();
final ObjList<QueryColumn> columns = model.getColumns();
final int columnCount = columns.size();
grouppedAnalytic.clear();
ObjList<AnalyticFunction> naturalOrderFunctions = null;
// chainTypes aliases the reusable valueTypes field; it accumulates the column
// types of the full record chain (selected columns + remaining base columns).
valueTypes.clear();
ArrayColumnTypes chainTypes = valueTypes;
GenericRecordMetadata chainMetadata = new GenericRecordMetadata();
GenericRecordMetadata factoryMetadata = new GenericRecordMetadata();
listColumnFilterA.clear();
listColumnFilterB.clear();
// we need two passes over columns because partitionBy and orderBy clauses of
// the analytical function must reference the metadata of "this" factory.
// pass #1 assembles metadata of non-analytic columns
// set of column indexes in the base metadata that has already been added to the main
// metadata instance
// todo: reuse this set
IntHashSet columnSet = new IntHashSet();
for (int i = 0; i < columnCount; i++) {
final QueryColumn qc = columns.getQuick(i);
if (!(qc instanceof AnalyticColumn)) {
final int columnIndex = baseMetadata.getColumnIndexQuiet(qc.getAst().token);
final TableColumnMetadata m = BaseRecordMetadata.copyOf(baseMetadata, columnIndex);
chainMetadata.add(i, m);
factoryMetadata.add(i, m);
chainTypes.add(i, m.getType());
// filter A uses 1-based positive indexes for pass-through columns; analytic
// slots are later stored as negative values (see pass #3)
listColumnFilterA.extendAndSet(i, i + 1);
listColumnFilterB.extendAndSet(i, columnIndex);
columnSet.add(columnIndex);
}
}
// pass #2 - add remaining base metadata columns that are not in columnSet already
// we need to pay attention to stepping over analytic column slots
// Chain metadata is assembled in such a way that all columns the factory
// needs to provide are at the beginning of the metadata, so the record the factory cursor
// returns can be the chain record, because the chain record is always longer than the record
// needed out of the cursor, and relevant columns are 0..n limited by factory metadata
int addAt = columnCount;
for (int i = 0, n = baseMetadata.getColumnCount(); i < n; i++) {
if (columnSet.excludes(i)) {
final TableColumnMetadata m = BaseRecordMetadata.copyOf(baseMetadata, i);
chainMetadata.add(addAt, m);
chainTypes.add(addAt, m.getType());
listColumnFilterA.extendAndSet(addAt, addAt + 1);
listColumnFilterB.extendAndSet(addAt, i);
addAt++;
}
}
// pass #3 assembles analytic column metadata into a list,
// not main metadata, to avoid partitionBy functions accidentally looking up
// analytic columns recursively
// todo: these are transient lists, we can cache and reuse
final ObjList<TableColumnMetadata> deferredAnalyticMetadata = new ObjList<>();
for (int i = 0; i < columnCount; i++) {
final QueryColumn qc = columns.getQuick(i);
if (qc instanceof AnalyticColumn) {
final AnalyticColumn ac = (AnalyticColumn) qc;
final ExpressionNode ast = qc.getAst();
if (ast.paramCount > 1) {
throw SqlException.$(ast.position, "Too many arguments");
}
// parse partition-by expressions against chainMetadata so they can reference
// base columns that the outer select does not project
ObjList<Function> partitionBy = null;
int psz = ac.getPartitionBy().size();
if (psz > 0) {
partitionBy = new ObjList<>(psz);
for (int j = 0; j < psz; j++) {
partitionBy.add(functionParser.parseFunction(ac.getPartitionBy().getQuick(j), chainMetadata, executionContext));
}
}
final VirtualRecord partitionByRecord;
final RecordSink partitionBySink;
if (partitionBy != null) {
partitionByRecord = new VirtualRecord(partitionBy);
keyTypes.clear();
final int partitionByCount = partitionBy.size();
for (int j = 0; j < partitionByCount; j++) {
keyTypes.add(partitionBy.getQuick(j).getType());
}
entityColumnFilter.of(partitionByCount);
// create sink
partitionBySink = RecordSinkFactory.getInstance(asm, keyTypes, entityColumnFilter, false);
} else {
partitionByRecord = null;
partitionBySink = null;
}
final int osz = ac.getOrderBy().size();
// the analytic context must be configured before parsing the function so the
// function factory can pick up partition/order information
executionContext.configureAnalyticContext(partitionByRecord, partitionBySink, keyTypes, osz > 0, base.recordCursorSupportsRandomAccess());
final Function f = functionParser.parseFunction(ac.getAst(), baseMetadata, executionContext);
// todo: throw an error when non-analytic function is called in analytic context
assert f instanceof AnalyticFunction;
AnalyticFunction analyticFunction = (AnalyticFunction) f;
// analyze order by clause on the current model and optimise out
// order by on analytic function if it matches the one on the model
final LowerCaseCharSequenceIntHashMap orderHash = model.getOrderHash();
boolean dismissOrder;
if (osz > 0 && orderHash.size() > 0) {
dismissOrder = true;
for (int j = 0; j < osz; j++) {
ExpressionNode node = ac.getOrderBy().getQuick(j);
int direction = ac.getOrderByDirection().getQuick(j);
if (orderHash.get(node.token) != direction) {
dismissOrder = false;
break;
}
}
} else {
dismissOrder = false;
}
if (osz > 0 && !dismissOrder) {
// group functions that share the same ordering so one sort pass can serve
// all of them (one comparator per distinct order-by key list)
IntList order = toOrderIndices(chainMetadata, ac.getOrderBy(), ac.getOrderByDirection());
ObjList<AnalyticFunction> funcs = grouppedAnalytic.get(order);
if (funcs == null) {
grouppedAnalytic.put(order, funcs = new ObjList<>());
}
funcs.add(analyticFunction);
} else {
// no explicit ordering required (or it matches the model's order):
// evaluate in the cursor's natural order
if (naturalOrderFunctions == null) {
naturalOrderFunctions = new ObjList<>();
}
naturalOrderFunctions.add(analyticFunction);
}
analyticFunction.setColumnIndex(i);
deferredAnalyticMetadata.extendAndSet(i, new TableColumnMetadata(Chars.toString(qc.getAlias()), // transient column hash is 0
0, analyticFunction.getType(), false, 0, false, null));
// negative value marks an analytic slot in the record sink column filter
listColumnFilterA.extendAndSet(i, -i - 1);
}
}
// after all columns are processed we can re-insert deferred metadata
for (int i = 0, n = deferredAnalyticMetadata.size(); i < n; i++) {
TableColumnMetadata m = deferredAnalyticMetadata.getQuick(i);
if (m != null) {
chainTypes.add(i, m.getType());
factoryMetadata.add(i, m);
}
}
// compile one record comparator per distinct order-by key list
final ObjList<RecordComparator> analyticComparators = new ObjList<>(grouppedAnalytic.size());
final ObjList<ObjList<AnalyticFunction>> functionGroups = new ObjList<>(grouppedAnalytic.size());
for (ObjObjHashMap.Entry<IntList, ObjList<AnalyticFunction>> e : grouppedAnalytic) {
analyticComparators.add(recordComparatorCompiler.compile(chainTypes, e.key));
functionGroups.add(e.value);
}
final RecordSink recordSink = RecordSinkFactory.getInstance(asm, chainTypes, listColumnFilterA, false, listColumnFilterB);
return new CachedAnalyticRecordCursorFactory(configuration, base, recordSink, factoryMetadata, chainTypes, analyticComparators, functionGroups, naturalOrderFunctions);
}
Aggregations