Use of org.apache.druid.query.dimension.DimensionSpec in project druid by druid-io.
The class FilteredAggregatorTest, method makeColumnSelector.
private ColumnSelectorFactory makeColumnSelector(final TestFloatColumnSelector selector)
{
  return new ColumnSelectorFactory()
  {
    @Override
    public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec)
    {
      final String dimensionName = dimensionSpec.getDimension();

      if ("dim".equals(dimensionName)) {
        return dimensionSpec.decorate(
            new AbstractDimensionSelector()
            {
              @Override
              public IndexedInts getRow()
              {
                SingleIndexedInt row = new SingleIndexedInt();
                // Every third row maps to dictionary id 1 ("b"); the rest map to id 0 ("a").
                if (selector.getIndex() % 3 == 2) {
                  row.setValue(1);
                } else {
                  row.setValue(0);
                }
                return row;
              }

              @Override
              public ValueMatcher makeValueMatcher(String value)
              {
                return DimensionSelectorUtils.makeValueMatcherGeneric(this, value);
              }

              @Override
              public ValueMatcher makeValueMatcher(Predicate<String> predicate)
              {
                return DimensionSelectorUtils.makeValueMatcherGeneric(this, predicate);
              }

              @Override
              public int getValueCardinality()
              {
                return 2;
              }

              @Override
              public String lookupName(int id)
              {
                switch (id) {
                  case 0:
                    return "a";
                  case 1:
                    return "b";
                  default:
                    throw new IllegalArgumentException();
                }
              }

              @Override
              public boolean nameLookupPossibleInAdvance()
              {
                return true;
              }

              @Nullable
              @Override
              public IdLookup idLookup()
              {
                return new IdLookup()
                {
                  @Override
                  public int lookupId(String name)
                  {
                    switch (name) {
                      case "a":
                        return 0;
                      case "b":
                        return 1;
                      default:
                        throw new IllegalArgumentException();
                    }
                  }
                };
              }

              @Override
              public Class classOfObject()
              {
                return Object.class;
              }

              @Override
              public void inspectRuntimeShape(RuntimeShapeInspector inspector)
              {
                // Don't care about runtime shape in tests
              }
            }
        );
      } else {
        throw new UnsupportedOperationException();
      }
    }

    @Override
    public ColumnValueSelector<?> makeColumnValueSelector(String columnName)
    {
      if ("value".equals(columnName)) {
        return selector;
      } else {
        throw new UnsupportedOperationException();
      }
    }

    @Override
    public ColumnCapabilities getColumnCapabilities(String columnName)
    {
      ColumnCapabilitiesImpl caps;
      if ("value".equals(columnName)) {
        caps = new ColumnCapabilitiesImpl();
        caps.setType(ColumnType.FLOAT);
        caps.setDictionaryEncoded(false);
        caps.setHasBitmapIndexes(false);
      } else {
        caps = new ColumnCapabilitiesImpl();
        caps.setType(ColumnType.STRING);
        caps.setDictionaryEncoded(true);
        caps.setHasBitmapIndexes(true);
      }
      return caps;
    }
  };
}
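For context, a sketch of how a test like this might exercise the factory: a FilteredAggregatorFactory wraps a sum of "value" behind a filter on "dim", then aggregates through the fake selectors above. The input values and aggregator name below are illustrative assumptions, not copied from the test source.

// Hypothetical usage sketch; names and values are illustrative.
final TestFloatColumnSelector selector = new TestFloatColumnSelector(new float[]{0.15f, 0.27f});
final FilteredAggregatorFactory factory = new FilteredAggregatorFactory(
    new DoubleSumAggregatorFactory("billy", "value"),
    new SelectorDimFilter("dim", "a", null)
);
final Aggregator agg = factory.factorize(makeColumnSelector(selector));
agg.aggregate(); // reads the current row through the fake dimension and value selectors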
Use of org.apache.druid.query.dimension.DimensionSpec in project druid by druid-io.
The class Queries, method computeRequiredColumns.
/**
 * Helper for implementations of {@link Query#getRequiredColumns()}. Returns the set of columns that will be read
 * out of a datasource by a query that uses the provided objects in the usual way.
 *
 * The returned set always contains {@code __time}, no matter what.
 *
 * If the virtual columns, filter, dimensions, aggregators, or additional columns refer to a virtual column, then the
 * inputs of the virtual column will be returned instead of the name of the virtual column itself. Therefore, the
 * returned set will never contain the names of any virtual columns.
 *
 * @param virtualColumns    virtual columns whose inputs should be included.
 * @param filter            optional filter whose inputs should be included.
 * @param dimensions        dimension specs whose inputs should be included.
 * @param aggregators       aggregators whose inputs should be included.
 * @param additionalColumns additional columns to include. Each of these will be added to the returned set, unless it
 *                          refers to a virtual column, in which case the virtual column inputs will be added instead.
 */
public static Set<String> computeRequiredColumns(
    final VirtualColumns virtualColumns,
    @Nullable final DimFilter filter,
    final List<DimensionSpec> dimensions,
    final List<AggregatorFactory> aggregators,
    final List<String> additionalColumns
)
{
  final Set<String> requiredColumns = new HashSet<>();

  // Everyone needs __time (it's used by intervals filters).
  requiredColumns.add(ColumnHolder.TIME_COLUMN_NAME);

  for (VirtualColumn virtualColumn : virtualColumns.getVirtualColumns()) {
    for (String column : virtualColumn.requiredColumns()) {
      if (!virtualColumns.exists(column)) {
        requiredColumns.add(column);
      }
    }
  }

  if (filter != null) {
    for (String column : filter.getRequiredColumns()) {
      if (!virtualColumns.exists(column)) {
        requiredColumns.add(column);
      }
    }
  }

  for (DimensionSpec dimensionSpec : dimensions) {
    if (!virtualColumns.exists(dimensionSpec.getDimension())) {
      requiredColumns.add(dimensionSpec.getDimension());
    }
  }

  for (AggregatorFactory aggregator : aggregators) {
    for (String column : aggregator.requiredFields()) {
      if (!virtualColumns.exists(column)) {
        requiredColumns.add(column);
      }
    }
  }

  for (String column : additionalColumns) {
    if (!virtualColumns.exists(column)) {
      requiredColumns.add(column);
    }
  }

  return requiredColumns;
}
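A minimal illustration of the contract; the column and metric names here are invented for the example, not taken from Druid itself.

// Hypothetical inputs: a filter on "country", one grouping dimension, one sum.
final Set<String> columns = Queries.computeRequiredColumns(
    VirtualColumns.EMPTY,
    new SelectorDimFilter("country", "US", null),
    ImmutableList.of(DefaultDimensionSpec.of("city")),
    ImmutableList.of(new LongSumAggregatorFactory("total", "revenue")),
    ImmutableList.of()
);
// columns == {"__time", "country", "city", "revenue"}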
Use of org.apache.druid.query.dimension.DimensionSpec in project druid by druid-io.
The class GroupByQuery, method compareDims.
private int compareDims(List<DimensionSpec> dimensions, ResultRow lhs, ResultRow rhs)
{
  final int dimensionStart = getResultRowDimensionStart();

  for (int i = 0; i < dimensions.size(); i++) {
    DimensionSpec dimension = dimensions.get(i);
    final int dimCompare = DimensionHandlerUtils.compareObjectsAsType(
        lhs.get(dimensionStart + i),
        rhs.get(dimensionStart + i),
        dimension.getOutputType()
    );
    if (dimCompare != 0) {
      return dimCompare;
    }
  }

  return 0;
}
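The heavy lifting happens in DimensionHandlerUtils.compareObjectsAsType, which compares both values under the dimension's declared output type rather than their runtime types. A small sketch of the distinction this makes (assuming the usual numeric vs. lexicographic semantics):

// Compared as LONGs, 10 > 9; compared as STRINGs, "10" < "9" lexicographically.
DimensionHandlerUtils.compareObjectsAsType(10L, 9L, ColumnType.LONG);      // > 0
DimensionHandlerUtils.compareObjectsAsType("10", "9", ColumnType.STRING);  // < 0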
Use of org.apache.druid.query.dimension.DimensionSpec in project druid by druid-io.
The class GroupByQuery, method getRowOrderingForPushDown.
/**
 * When limit push down is applied, the partial results are sorted by the ordering specified by the
 * limit/order spec (unlike the non-push-down case, where the results always use the default natural ascending
 * order), so when merging these partial result streams, the merge must use the same ordering to get correct
 * results.
 */
private Ordering<ResultRow> getRowOrderingForPushDown(final boolean granular, final DefaultLimitSpec limitSpec)
{
  final boolean sortByDimsFirst = getContextSortByDimsFirst();

  final IntList orderedFieldNumbers = new IntArrayList();
  final Set<Integer> dimsInOrderBy = new HashSet<>();
  final List<Boolean> needsReverseList = new ArrayList<>();
  final List<ColumnType> dimensionTypes = new ArrayList<>();
  final List<StringComparator> comparators = new ArrayList<>();

  // First, compare on the fields named in the order-by spec, in the order given there.
  for (OrderByColumnSpec orderSpec : limitSpec.getColumns()) {
    boolean needsReverse = orderSpec.getDirection() != OrderByColumnSpec.Direction.ASCENDING;
    int dimIndex = OrderByColumnSpec.getDimIndexForOrderBy(orderSpec, dimensions);
    if (dimIndex >= 0) {
      DimensionSpec dim = dimensions.get(dimIndex);
      orderedFieldNumbers.add(resultRowSignature.indexOf(dim.getOutputName()));
      dimsInOrderBy.add(dimIndex);
      needsReverseList.add(needsReverse);
      final ColumnType type = dimensions.get(dimIndex).getOutputType();
      dimensionTypes.add(type);
      comparators.add(orderSpec.getDimensionComparator());
    }
  }

  // Then, compare on any remaining dimensions in their natural ascending, lexicographic order.
  for (int i = 0; i < dimensions.size(); i++) {
    if (!dimsInOrderBy.contains(i)) {
      orderedFieldNumbers.add(resultRowSignature.indexOf(dimensions.get(i).getOutputName()));
      needsReverseList.add(false);
      final ColumnType type = dimensions.get(i).getOutputType();
      dimensionTypes.add(type);
      comparators.add(StringComparators.LEXICOGRAPHIC);
    }
  }

  final Comparator<ResultRow> timeComparator = getTimeComparator(granular);

  if (timeComparator == null) {
    return Ordering.from(
        (lhs, rhs) ->
            compareDimsForLimitPushDown(orderedFieldNumbers, needsReverseList, dimensionTypes, comparators, lhs, rhs)
    );
  } else if (sortByDimsFirst) {
    return Ordering.from(
        (lhs, rhs) -> {
          final int cmp =
              compareDimsForLimitPushDown(orderedFieldNumbers, needsReverseList, dimensionTypes, comparators, lhs, rhs);
          if (cmp != 0) {
            return cmp;
          }
          return timeComparator.compare(lhs, rhs);
        }
    );
  } else {
    return Ordering.from(
        (lhs, rhs) -> {
          final int timeCompare = timeComparator.compare(lhs, rhs);
          if (timeCompare != 0) {
            return timeCompare;
          }
          return compareDimsForLimitPushDown(orderedFieldNumbers, needsReverseList, dimensionTypes, comparators, lhs, rhs);
        }
    );
  }
}
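To make the field ordering concrete, a hypothetical worked example (dimension names invented for illustration):

// Suppose dimensions = [d1, d2, d3] and the limitSpec orders by d2 DESC.
// After the two loops above:
//   orderedFieldNumbers = [idx(d2), idx(d1), idx(d3)]  // order-by fields first, the rest appended
//   needsReverseList    = [true,    false,   false]    // only d2 is descending
//   comparators         = [d2's comparator, LEXICOGRAPHIC, LEXICOGRAPHIC]
// The merge then compares rows field by field in exactly that sequence.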
Use of org.apache.druid.query.dimension.DimensionSpec in project druid by druid-io.
The class GroupByQueryQueryToolChest, method makePostComputeManipulatorFn.
@Override
public Function<ResultRow, ResultRow> makePostComputeManipulatorFn(final GroupByQuery query, final MetricManipulationFn fn)
{
  final BitSet optimizedDims = extractionsToRewrite(query);
  final Function<ResultRow, ResultRow> preCompute = makePreComputeManipulatorFn(query, fn);

  if (optimizedDims.isEmpty()) {
    return preCompute;
  }

  // If we have optimizations that can be done at this level, we apply them here
  final List<DimensionSpec> dimensions = query.getDimensions();
  final List<ExtractionFn> extractionFns = new ArrayList<>(dimensions.size());
  for (int i = 0; i < dimensions.size(); i++) {
    final DimensionSpec dimensionSpec = dimensions.get(i);
    final ExtractionFn extractionFnToAdd;

    if (optimizedDims.get(i)) {
      extractionFnToAdd = dimensionSpec.getExtractionFn();
    } else {
      extractionFnToAdd = null;
    }

    extractionFns.add(extractionFnToAdd);
  }

  final int dimensionStart = query.getResultRowDimensionStart();
  return row -> {
    // preCompute.apply(row) will either return the original row, or create a copy.
    ResultRow newRow = preCompute.apply(row);

    //noinspection ObjectEquality (if preCompute made a copy, no need to make another copy)
    if (newRow == row) {
      newRow = row.copy();
    }

    for (int i = optimizedDims.nextSetBit(0); i >= 0; i = optimizedDims.nextSetBit(i + 1)) {
      newRow.set(dimensionStart + i, extractionFns.get(i).apply(newRow.get(dimensionStart + i)));
    }

    return newRow;
  };
}
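The point of the rewrite is that a deferrable extraction function need not run on every per-segment row; it is applied once to each merged result row instead. As a hedged illustration of what the lambda above does per optimized dimension (the specific ExtractionFn is an arbitrary choice for the example):

// Illustrative only: SubstringDimExtractionFn(index, length) keeps the
// characters in [index, index + length), applied post-merge rather than per-segment.
final ExtractionFn fn = new SubstringDimExtractionFn(0, 3);
fn.apply("chicago"); // "chi"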