Use of io.druid.query.ordering.StringComparator in project druid by druid-io.
The class GroupByRules, method toLimitSpec.
public static DefaultLimitSpec toLimitSpec(final List<String> rowOrder, final Sort sort) {
  final Integer limit = sort.fetch != null ? RexLiteral.intValue(sort.fetch) : null;
  final List<OrderByColumnSpec> orderBys = Lists.newArrayListWithCapacity(sort.getChildExps().size());

  if (sort.offset != null) {
    // LimitSpecs don't accept offsets.
    return null;
  }

  // Extract orderBy column specs.
  for (int sortKey = 0; sortKey < sort.getChildExps().size(); sortKey++) {
    final RexNode sortExpression = sort.getChildExps().get(sortKey);
    final RelFieldCollation collation = sort.getCollation().getFieldCollations().get(sortKey);
    final OrderByColumnSpec.Direction direction;
    final StringComparator comparator;

    if (collation.getDirection() == RelFieldCollation.Direction.ASCENDING) {
      direction = OrderByColumnSpec.Direction.ASCENDING;
    } else if (collation.getDirection() == RelFieldCollation.Direction.DESCENDING) {
      direction = OrderByColumnSpec.Direction.DESCENDING;
    } else {
      throw new ISE("WTF?! Don't know what to do with direction[%s]", collation.getDirection());
    }

    final SqlTypeName sortExpressionType = sortExpression.getType().getSqlTypeName();
    if (SqlTypeName.NUMERIC_TYPES.contains(sortExpressionType)
        || SqlTypeName.TIMESTAMP == sortExpressionType
        || SqlTypeName.DATE == sortExpressionType) {
      comparator = StringComparators.NUMERIC;
    } else {
      comparator = StringComparators.LEXICOGRAPHIC;
    }

    if (sortExpression.isA(SqlKind.INPUT_REF)) {
      final RexInputRef ref = (RexInputRef) sortExpression;
      final String fieldName = rowOrder.get(ref.getIndex());
      orderBys.add(new OrderByColumnSpec(fieldName, direction, comparator));
    } else {
      // We don't support sorting by anything other than refs which actually appear in the query result.
      return null;
    }
  }

  return new DefaultLimitSpec(orderBys, limit);
}
Use of io.druid.query.ordering.StringComparator in project druid by druid-io.
The class Expressions, method toLeafFilter.
/**
 * Translates "expression" to a Druid filter, assuming it does not contain any boolean expressions. Returns null
 * if we cannot translate the expression.
 *
 * @param operatorTable  Druid operator table
 * @param plannerContext planner context
 * @param rowSignature   row signature of the dataSource to be filtered
 * @param expression     Calcite row expression
 */
private static DimFilter toLeafFilter(
    final DruidOperatorTable operatorTable,
    final PlannerContext plannerContext,
    final RowSignature rowSignature,
    final RexNode expression
) {
  if (expression.isAlwaysTrue()) {
    return Filtration.matchEverything();
  } else if (expression.isAlwaysFalse()) {
    return Filtration.matchNothing();
  }

  final SqlKind kind = expression.getKind();

  if (kind == SqlKind.LIKE) {
    final List<RexNode> operands = ((RexCall) expression).getOperands();
    final RowExtraction rex = toRowExtraction(operatorTable, plannerContext, rowSignature.getRowOrder(), operands.get(0));
    if (rex == null || !rex.isFilterable(rowSignature)) {
      return null;
    }
    return new LikeDimFilter(
        rex.getColumn(),
        RexLiteral.stringValue(operands.get(1)),
        operands.size() > 2 ? RexLiteral.stringValue(operands.get(2)) : null,
        rex.getExtractionFn()
    );
  } else if (kind == SqlKind.EQUALS
             || kind == SqlKind.NOT_EQUALS
             || kind == SqlKind.GREATER_THAN
             || kind == SqlKind.GREATER_THAN_OR_EQUAL
             || kind == SqlKind.LESS_THAN
             || kind == SqlKind.LESS_THAN_OR_EQUAL) {
    final List<RexNode> operands = ((RexCall) expression).getOperands();
    Preconditions.checkState(operands.size() == 2, "WTF?! Expected 2 operands, got[%,d]", operands.size());

    boolean flip = false;
    RexNode lhs = operands.get(0);
    RexNode rhs = operands.get(1);

    if (lhs.getKind() == SqlKind.LITERAL && rhs.getKind() != SqlKind.LITERAL) {
      // swap lhs, rhs
      RexNode x = lhs;
      lhs = rhs;
      rhs = x;
      flip = true;
    }

    // rhs must be a literal
    if (rhs.getKind() != SqlKind.LITERAL) {
      return null;
    }

    // lhs must be translatable to a RowExtraction to be filterable
    final RowExtraction rex = toRowExtraction(operatorTable, plannerContext, rowSignature.getRowOrder(), lhs);
    if (rex == null || !rex.isFilterable(rowSignature)) {
      return null;
    }

    final String column = rex.getColumn();
    final ExtractionFn extractionFn = rex.getExtractionFn();

    if (column.equals(Column.TIME_COLUMN_NAME) && extractionFn instanceof TimeFormatExtractionFn) {
      // Check if we can strip the extractionFn and convert the filter to a direct filter on __time.
      // This allows potential conversion to query-level "intervals" later on, which is ideal for Druid queries.
      final Granularity granularity = ExtractionFns.toQueryGranularity(extractionFn);
      if (granularity != null) {
        // lhs is FLOOR(__time TO granularity); rhs must be a timestamp
        final long rhsMillis = toMillisLiteral(rhs, plannerContext.getTimeZone());
        final Interval rhsInterval = granularity.bucket(new DateTime(rhsMillis));

        // Is rhs aligned on granularity boundaries?
        final boolean rhsAligned = rhsInterval.getStartMillis() == rhsMillis;

        // Create a BoundRefKey that strips the extractionFn and compares __time as a number.
        final BoundRefKey boundRefKey = new BoundRefKey(column, null, StringComparators.NUMERIC);

        if (kind == SqlKind.EQUALS) {
          return rhsAligned ? Bounds.interval(boundRefKey, rhsInterval) : Filtration.matchNothing();
        } else if (kind == SqlKind.NOT_EQUALS) {
          return rhsAligned ? new NotDimFilter(Bounds.interval(boundRefKey, rhsInterval)) : Filtration.matchEverything();
        } else if ((!flip && kind == SqlKind.GREATER_THAN) || (flip && kind == SqlKind.LESS_THAN)) {
          return Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
        } else if ((!flip && kind == SqlKind.GREATER_THAN_OR_EQUAL) || (flip && kind == SqlKind.LESS_THAN_OR_EQUAL)) {
          return rhsAligned
                 ? Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getStartMillis()))
                 : Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
        } else if ((!flip && kind == SqlKind.LESS_THAN) || (flip && kind == SqlKind.GREATER_THAN)) {
          return rhsAligned
                 ? Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getStartMillis()))
                 : Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
        } else if ((!flip && kind == SqlKind.LESS_THAN_OR_EQUAL) || (flip && kind == SqlKind.GREATER_THAN_OR_EQUAL)) {
          return Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
        } else {
          throw new IllegalStateException("WTF?! Shouldn't have got here...");
        }
      }
    }

    final String val;
    final RexLiteral rhsLiteral = (RexLiteral) rhs;
    if (SqlTypeName.NUMERIC_TYPES.contains(rhsLiteral.getTypeName())) {
      val = String.valueOf(RexLiteral.value(rhsLiteral));
    } else if (SqlTypeName.CHAR_TYPES.contains(rhsLiteral.getTypeName())) {
      val = String.valueOf(RexLiteral.stringValue(rhsLiteral));
    } else if (SqlTypeName.TIMESTAMP == rhsLiteral.getTypeName() || SqlTypeName.DATE == rhsLiteral.getTypeName()) {
      val = String.valueOf(toMillisLiteral(rhsLiteral, plannerContext.getTimeZone()));
    } else {
      // Don't know how to filter on this kind of literal.
      return null;
    }

    // Numeric lhs needs a numeric comparison.
    final boolean lhsIsNumeric = SqlTypeName.NUMERIC_TYPES.contains(lhs.getType().getSqlTypeName())
                                 || SqlTypeName.TIMESTAMP == lhs.getType().getSqlTypeName()
                                 || SqlTypeName.DATE == lhs.getType().getSqlTypeName();
    final StringComparator comparator = lhsIsNumeric ? StringComparators.NUMERIC : StringComparators.LEXICOGRAPHIC;

    final BoundRefKey boundRefKey = new BoundRefKey(column, extractionFn, comparator);
    final DimFilter filter;

    // Always use BoundDimFilters, to simplify filter optimization later (it helps to remember the comparator).
    if (kind == SqlKind.EQUALS) {
      filter = Bounds.equalTo(boundRefKey, val);
    } else if (kind == SqlKind.NOT_EQUALS) {
      filter = new NotDimFilter(Bounds.equalTo(boundRefKey, val));
    } else if ((!flip && kind == SqlKind.GREATER_THAN) || (flip && kind == SqlKind.LESS_THAN)) {
      filter = Bounds.greaterThan(boundRefKey, val);
    } else if ((!flip && kind == SqlKind.GREATER_THAN_OR_EQUAL) || (flip && kind == SqlKind.LESS_THAN_OR_EQUAL)) {
      filter = Bounds.greaterThanOrEqualTo(boundRefKey, val);
    } else if ((!flip && kind == SqlKind.LESS_THAN) || (flip && kind == SqlKind.GREATER_THAN)) {
      filter = Bounds.lessThan(boundRefKey, val);
    } else if ((!flip && kind == SqlKind.LESS_THAN_OR_EQUAL) || (flip && kind == SqlKind.GREATER_THAN_OR_EQUAL)) {
      filter = Bounds.lessThanOrEqualTo(boundRefKey, val);
    } else {
      throw new IllegalStateException("WTF?! Shouldn't have got here...");
    }

    return filter;
  } else {
    return null;
  }
}
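The comparator choice above matters because Druid bound filters compare string-encoded values. A minimal standalone sketch (not part of the Druid source) of the difference between the two comparators used here:

// NUMERIC compares by numeric value, LEXICOGRAPHIC by plain string order.
StringComparators.NUMERIC.compare("9", "10");        // negative: 9 < 10 numerically
StringComparators.LEXICOGRAPHIC.compare("9", "10");  // positive: "9" sorts after "10" as a string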
Use of io.druid.query.ordering.StringComparator in project druid by druid-io.
The class ConvertBoundsToSelectors, method process.
@Override
public DimFilter process(DimFilter filter) {
  if (filter instanceof BoundDimFilter) {
    final BoundDimFilter bound = (BoundDimFilter) filter;
    final StringComparator naturalStringComparator = sourceRowSignature.naturalStringComparator(
        RowExtraction.of(bound.getDimension(), bound.getExtractionFn())
    );

    if (bound.hasUpperBound()
        && bound.hasLowerBound()
        && bound.getUpper().equals(bound.getLower())
        && !bound.isUpperStrict()
        && !bound.isLowerStrict()
        && bound.getOrdering().equals(naturalStringComparator)) {
      return new SelectorDimFilter(bound.getDimension(), bound.getUpper(), bound.getExtractionFn());
    } else {
      return filter;
    }
  } else {
    return filter;
  }
}
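As a rough sketch of the rewrite (hypothetical dimension name and value, using the Bounds and BoundRefKey helpers seen in toLeafFilter above): an equality bound whose ordering matches the column's natural comparator is expected to come back as the simpler selector filter, while anything else is returned unchanged.

// Hypothetical: a bound produced by Bounds.equalTo on the string dimension "countryName".
final BoundRefKey key = new BoundRefKey("countryName", null, StringComparators.LEXICOGRAPHIC);
final DimFilter bound = Bounds.equalTo(key, "France");

// Assuming "countryName" is a string column, its natural comparator is LEXICOGRAPHIC, so
// process(bound) should return the equivalent selector:
final DimFilter selector = new SelectorDimFilter("countryName", "France", null);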
Use of io.druid.query.ordering.StringComparator in project druid by druid-io.
The class DefaultLimitSpec, method build.
@Override
public Function<Sequence<Row>, Sequence<Row>> build(
    List<DimensionSpec> dimensions,
    List<AggregatorFactory> aggs,
    List<PostAggregator> postAggs
) {
  // Can avoid re-sorting if the natural ordering is good enough.
  boolean sortingNeeded = false;

  if (dimensions.size() < columns.size()) {
    sortingNeeded = true;
  }

  final Set<String> aggAndPostAggNames = Sets.newHashSet();
  for (AggregatorFactory agg : aggs) {
    aggAndPostAggNames.add(agg.getName());
  }
  for (PostAggregator postAgg : postAggs) {
    aggAndPostAggNames.add(postAgg.getName());
  }

  if (!sortingNeeded) {
    for (int i = 0; i < columns.size(); i++) {
      final OrderByColumnSpec columnSpec = columns.get(i);

      if (aggAndPostAggNames.contains(columnSpec.getDimension())) {
        sortingNeeded = true;
        break;
      }

      final ValueType columnType = getOrderByType(columnSpec, dimensions);
      final StringComparator naturalComparator;
      if (columnType == ValueType.STRING) {
        naturalComparator = StringComparators.LEXICOGRAPHIC;
      } else if (columnType == ValueType.LONG || columnType == ValueType.FLOAT) {
        naturalComparator = StringComparators.NUMERIC;
      } else {
        sortingNeeded = true;
        break;
      }

      if (columnSpec.getDirection() != OrderByColumnSpec.Direction.ASCENDING
          || !columnSpec.getDimensionComparator().equals(naturalComparator)
          || !columnSpec.getDimension().equals(dimensions.get(i).getOutputName())) {
        sortingNeeded = true;
        break;
      }
    }
  }

  if (!sortingNeeded) {
    return limit == Integer.MAX_VALUE ? Functions.<Sequence<Row>>identity() : new LimitingFn(limit);
  }

  // Materialize the Comparator first for fast-fail error checking.
  final Ordering<Row> ordering = makeComparator(dimensions, aggs, postAggs);

  if (limit == Integer.MAX_VALUE) {
    return new SortingFn(ordering);
  } else {
    return new TopNFunction(ordering, limit);
  }
}
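A rough sketch of the two cases (hypothetical dimension and aggregator names, using only constructors shown or commonly available in this code base): ordering by a dimension in its natural direction and comparator lets build() skip the re-sort and only apply the limit, while ordering by an aggregator output forces a sort.

// Hypothetical: GROUP BY dim ... LIMIT 10, with a single string dimension "d0" and a count "cnt".
final List<DimensionSpec> dims = ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("dim", "d0"));
final List<AggregatorFactory> aggs = ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("cnt"));

// ORDER BY d0 ASC with the natural (lexicographic) comparator: no re-sort needed, only a limit.
final DefaultLimitSpec byDim = new DefaultLimitSpec(
    ImmutableList.of(new OrderByColumnSpec("d0", OrderByColumnSpec.Direction.ASCENDING, StringComparators.LEXICOGRAPHIC)),
    10
);

// ORDER BY cnt DESC: the column names an aggregator, so sortingNeeded becomes true and build() sorts.
final DefaultLimitSpec byAgg = new DefaultLimitSpec(
    ImmutableList.of(new OrderByColumnSpec("cnt", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC)),
    10
);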