Use of io.druid.segment.column.ColumnCapabilities in project druid by druid-io.
In the class IncrementalIndexAdapter, the method getBitmapIndex:
@Override
public IndexedInts getBitmapIndex(String dimension, int index) {
  DimensionAccessor accessor = accessors.get(dimension);
  if (accessor == null) {
    return EmptyIndexedInts.EMPTY_INDEXED_INTS;
  }
  ColumnCapabilities capabilities = accessor.dimensionDesc.getCapabilities();
  DimensionIndexer indexer = accessor.dimensionDesc.getIndexer();
  if (!capabilities.hasBitmapIndexes()) {
    return EmptyIndexedInts.EMPTY_INDEXED_INTS;
  }
  final int id = (Integer) indexer.getUnsortedEncodedValueFromSorted(index);
  if (id < 0 || id >= indexer.getCardinality()) {
    return EmptyIndexedInts.EMPTY_INDEXED_INTS;
  }
  MutableBitmap bitmapIndex = accessor.invertedIndexes[id];
  if (bitmapIndex == null) {
    return EmptyIndexedInts.EMPTY_INDEXED_INTS;
  }
  return new BitmapIndexedInts(bitmapIndex);
}
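The method degrades to an empty result at every failure point (unknown dimension, a column without bitmap indexes, an out-of-range dictionary id, or a missing bitmap) rather than returning null. Below is a minimal, self-contained sketch of that lookup pattern; it uses java.util.BitSet in place of Druid's MutableBitmap and IndexedInts, and the class and field names (InvertedIndexSketch, invertedIndexes) are hypothetical, not part of Druid.

// Simplified model of the fallback-to-empty bitmap lookup shown above (not Druid's API).
import java.util.BitSet;
import java.util.HashMap;
import java.util.Map;

public class InvertedIndexSketch {
  private static final BitSet EMPTY = new BitSet();

  // dimension -> (dictionary id -> bitmap of row ids containing that value)
  private final Map<String, BitSet[]> invertedIndexes = new HashMap<>();

  public BitSet getBitmapIndex(String dimension, int valueId) {
    BitSet[] bitmaps = invertedIndexes.get(dimension);
    if (bitmaps == null) {
      return EMPTY;                          // unknown dimension
    }
    if (valueId < 0 || valueId >= bitmaps.length) {
      return EMPTY;                          // id outside the dictionary's range
    }
    BitSet bitmap = bitmaps[valueId];
    return bitmap == null ? EMPTY : bitmap;  // value was never indexed
  }
}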
Use of io.druid.segment.column.ColumnCapabilities in project druid by druid-io.
In the class IndexMerger, the method makeDimensionHandlers:
protected DimensionHandler[] makeDimensionHandlers(final List<String> mergedDimensions, final List<ColumnCapabilitiesImpl> dimCapabilities) {
  final DimensionHandler[] handlers = new DimensionHandler[mergedDimensions.size()];
  for (int i = 0; i < mergedDimensions.size(); i++) {
    ColumnCapabilities capabilities = dimCapabilities.get(i);
    String dimName = mergedDimensions.get(i);
    handlers[i] = DimensionHandlerUtils.getHandlerFromCapabilities(dimName, capabilities, null);
  }
  return handlers;
}
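For illustration, here is a hedged sketch of how a caller might describe a dictionary-encoded string dimension and obtain its handler through the same DimensionHandlerUtils.getHandlerFromCapabilities overload called above. It assumes the fluent ColumnCapabilitiesImpl setters (setType, setDictionaryEncoded, setHasBitmapIndexes, setHasMultipleValues) and the package locations shown in the imports; the class DimensionHandlerSketch itself is hypothetical.

// Sketch under the assumptions stated above; not verbatim Druid code.
import io.druid.segment.DimensionHandler;
import io.druid.segment.DimensionHandlerUtils;
import io.druid.segment.column.ColumnCapabilitiesImpl;
import io.druid.segment.column.ValueType;

public class DimensionHandlerSketch {
  public static DimensionHandler stringDimensionHandler(String dimName) {
    // Describe a single-valued, dictionary-encoded string dimension with bitmap indexes.
    ColumnCapabilitiesImpl capabilities = new ColumnCapabilitiesImpl()
        .setType(ValueType.STRING)
        .setDictionaryEncoded(true)
        .setHasBitmapIndexes(true)
        .setHasMultipleValues(false);
    // Passing null for multi-value handling mirrors the call in makeDimensionHandlers,
    // leaving that behavior at its default.
    return DimensionHandlerUtils.getHandlerFromCapabilities(dimName, capabilities, null);
  }
}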
Use of io.druid.segment.column.ColumnCapabilities in project druid by druid-io.
In the class TopNQueryEngine, the method getMapFn:
private Function<Cursor, Result<TopNResultValue>> getMapFn(TopNQuery query, final StorageAdapter adapter) {
  final Capabilities capabilities = adapter.getCapabilities();
  final String dimension = query.getDimensionSpec().getDimension();
  final int cardinality = adapter.getDimensionCardinality(dimension);
  int numBytesPerRecord = 0;
  for (AggregatorFactory aggregatorFactory : query.getAggregatorSpecs()) {
    numBytesPerRecord += aggregatorFactory.getMaxIntermediateSize();
  }
  final TopNAlgorithmSelector selector = new TopNAlgorithmSelector(cardinality, numBytesPerRecord);
  query.initTopNAlgorithmSelector(selector);
  final ColumnCapabilities columnCapabilities = query.getVirtualColumns().getColumnCapabilitiesWithFallback(adapter, dimension);
  final TopNAlgorithm topNAlgorithm;
  if (selector.isHasExtractionFn() &&
      // Ideally this would check that the column is of type long and single-value;
      // for now the __time column is matched by name.
      dimension.equals(Column.TIME_COLUMN_NAME)) {
    // A special TimeExtractionTopNAlgorithm is required, since DimExtractionTopNAlgorithm
    // currently relies on the dimension cardinality to support lexicographic sorting.
    topNAlgorithm = new TimeExtractionTopNAlgorithm(capabilities, query);
  } else if (selector.isHasExtractionFn()) {
    topNAlgorithm = new DimExtractionTopNAlgorithm(capabilities, query);
  } else if (columnCapabilities != null && columnCapabilities.getType() != ValueType.STRING) {
    // force non-Strings to use DimExtraction for now, do a typed PooledTopN later
    topNAlgorithm = new DimExtractionTopNAlgorithm(capabilities, query);
  } else if (selector.isAggregateAllMetrics()) {
    topNAlgorithm = new PooledTopNAlgorithm(capabilities, query, bufferPool);
  } else if (selector.isAggregateTopNMetricFirst() || query.getContextBoolean("doAggregateTopNMetricFirst", false)) {
    topNAlgorithm = new AggregateTopNMetricFirstAlgorithm(capabilities, query, bufferPool);
  } else {
    topNAlgorithm = new PooledTopNAlgorithm(capabilities, query, bufferPool);
  }
  return new TopNMapFn(query, topNAlgorithm);
}
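The ColumnCapabilities check in the chain above is what routes non-STRING columns away from the pooled, dictionary-id based algorithms. The small helper below restates that condition on its own; it is a hypothetical sketch, not part of Druid, and relies only on ColumnCapabilities.getType() and ValueType.STRING as used in the snippet.

import io.druid.segment.column.ColumnCapabilities;
import io.druid.segment.column.ValueType;

public class TopNAlgorithmChecks {
  // True when the pooled algorithms remain candidates: capabilities are unknown
  // (the method above falls through to the pooled branches in that case) or the
  // dimension is a STRING column.
  public static boolean isPooledEligible(ColumnCapabilities columnCapabilities) {
    return columnCapabilities == null || columnCapabilities.getType() == ValueType.STRING;
  }
}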