Use of io.druid.segment.ColumnSelectorFactory in project druid by druid-io: the class DoubleFirstAggregatorFactory, method getCombiningFactory.
@Override
public AggregatorFactory getCombiningFactory() {
  return new DoubleFirstAggregatorFactory(name, name) {
    @Override
    public Aggregator factorize(ColumnSelectorFactory metricFactory) {
      // At combining time the input column already holds SerializablePair<Long, Double>
      // values produced by the first aggregation pass, not raw doubles.
      final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name);
      return new DoubleFirstAggregator(name, null, null) {
        @Override
        public void aggregate() {
          SerializablePair<Long, Double> pair = (SerializablePair<Long, Double>) selector.get();
          if (pair.lhs < firstTime) {
            // Keep the value with the earliest timestamp seen so far.
            firstTime = pair.lhs;
            firstValue = pair.rhs;
          }
        }
      };
    }

    @Override
    public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) {
      final ObjectColumnSelector selector = metricFactory.makeObjectColumnSelector(name);
      return new DoubleFirstBufferAggregator(null, null) {
        @Override
        public void aggregate(ByteBuffer buf, int position) {
          SerializablePair<Long, Double> pair = (SerializablePair<Long, Double>) selector.get();
          long firstTime = buf.getLong(position);
          if (pair.lhs < firstTime) {
            // Buffer layout per slot: [long timestamp][double value].
            buf.putLong(position, pair.lhs);
            buf.putDouble(position + Longs.BYTES, pair.rhs);
          }
        }

        @Override
        public void inspectRuntimeShape(RuntimeShapeInspector inspector) {
          inspector.visit("selector", selector);
        }
      };
    }
  };
}
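For illustration, here is a minimal sketch of how this combining factorize could be driven in a test. It assumes the five-method ColumnSelectorFactory interface shown in the IncrementalIndex example further below; the stub only implements makeObjectColumnSelector meaningfully, and the column name and canned pairs are made up for the example.

// Hypothetical sketch, not Druid source: feed the combining aggregator with
// pre-aggregated SerializablePair values the way a merging pass would.
final AtomicReference<SerializablePair<Long, Double>> currentRow = new AtomicReference<>();

final ColumnSelectorFactory stub = new ColumnSelectorFactory() {
  @Override
  public ObjectColumnSelector makeObjectColumnSelector(String columnName) {
    return new ObjectColumnSelector() {
      @Override
      public Class classOfObject() {
        return SerializablePair.class;
      }

      @Override
      public Object get() {
        return currentRow.get();
      }
    };
  }

  // The remaining selector methods are not exercised by this sketch.
  @Override
  public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec) {
    throw new UnsupportedOperationException();
  }

  @Override
  public FloatColumnSelector makeFloatColumnSelector(String columnName) {
    throw new UnsupportedOperationException();
  }

  @Override
  public LongColumnSelector makeLongColumnSelector(String columnName) {
    throw new UnsupportedOperationException();
  }

  @Override
  public ColumnCapabilities getColumnCapabilities(String columnName) {
    return null;
  }
};

AggregatorFactory combining = new DoubleFirstAggregatorFactory("firstVal", "firstVal").getCombiningFactory();
Aggregator agg = combining.factorize(stub);
for (SerializablePair<Long, Double> pair : Arrays.asList(
    new SerializablePair<>(30L, 3.0),
    new SerializablePair<>(10L, 1.0))) {
  currentRow.set(pair);
  agg.aggregate();  // retains the pair with the smallest timestamp: (10L, 1.0)
}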
Use of io.druid.segment.ColumnSelectorFactory in project druid by druid-io: the class BaseFilterTest, method selectColumnValuesMatchingFilterUsingPostFiltering.
private List<String> selectColumnValuesMatchingFilterUsingPostFiltering(final DimFilter filter, final String selectColumn) {
  final Filter theFilter = makeFilter(filter);
  // Wrap the filter so it reports no bitmap index support, forcing the cursor to
  // apply it row by row through makeMatcher() (the post-filtering path under test).
  final Filter postFilteringFilter = new Filter() {
    @Override
    public ImmutableBitmap getBitmapIndex(BitmapIndexSelector selector) {
      throw new UnsupportedOperationException();
    }

    @Override
    public ValueMatcher makeMatcher(ColumnSelectorFactory factory) {
      return theFilter.makeMatcher(factory);
    }

    @Override
    public boolean supportsBitmapIndex(BitmapIndexSelector selector) {
      return false;
    }

    @Override
    public boolean supportsSelectivityEstimation(ColumnSelector columnSelector, BitmapIndexSelector indexSelector) {
      return false;
    }

    @Override
    public double estimateSelectivity(BitmapIndexSelector indexSelector) {
      return 1.0;
    }
  };
  final Sequence<Cursor> cursors = makeCursorSequence(postFilteringFilter);
  Sequence<List<String>> seq = Sequences.map(cursors, new Function<Cursor, List<String>>() {
    @Override
    public List<String> apply(Cursor input) {
      final DimensionSelector selector = input.makeDimensionSelector(new DefaultDimensionSpec(selectColumn, selectColumn));
      final List<String> values = Lists.newArrayList();
      while (!input.isDone()) {
        IndexedInts row = selector.getRow();
        Preconditions.checkState(row.size() == 1);
        values.add(selector.lookupName(row.get(0)));
        input.advance();
      }
      return values;
    }
  });
  return Sequences.toList(seq, new ArrayList<List<String>>()).get(0);
}
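A minimal usage sketch for this helper, assuming a test fixture whose dim0 column contains a row with value "a"; the filter, column name, and expected result are illustrative, not taken from BaseFilterTest.

// Hypothetical usage inside a BaseFilterTest subclass.
DimFilter filter = new SelectorDimFilter("dim0", "a", null);
List<String> matched = selectColumnValuesMatchingFilterUsingPostFiltering(filter, "dim0");
Assert.assertEquals(ImmutableList.of("a"), matched);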
Use of io.druid.segment.ColumnSelectorFactory in project druid by druid-io: the class IncrementalIndex, method makeColumnSelectorFactory.
/**
 * Column selector used at ingestion time for inputs to aggregators.
 *
 * @param virtualColumns virtual columns layered on top of the row-based selectors
 * @param agg the aggregator
 * @param in ingestion-time input row supplier
 * @param deserializeComplexMetrics whether complex objects should be deserialized by a {@link ComplexMetricExtractor}
 *
 * @return column selector factory
 */
public static ColumnSelectorFactory makeColumnSelectorFactory(
    final VirtualColumns virtualColumns,
    final AggregatorFactory agg,
    final Supplier<InputRow> in,
    final boolean deserializeComplexMetrics
) {
  final RowBasedColumnSelectorFactory baseSelectorFactory = RowBasedColumnSelectorFactory.create(in, null);

  class IncrementalIndexInputRowColumnSelectorFactory implements ColumnSelectorFactory {
    @Override
    public LongColumnSelector makeLongColumnSelector(final String columnName) {
      return baseSelectorFactory.makeLongColumnSelector(columnName);
    }

    @Override
    public FloatColumnSelector makeFloatColumnSelector(final String columnName) {
      return baseSelectorFactory.makeFloatColumnSelector(columnName);
    }

    @Override
    public ObjectColumnSelector makeObjectColumnSelector(final String column) {
      final String typeName = agg.getTypeName();
      final ObjectColumnSelector rawColumnSelector = baseSelectorFactory.makeObjectColumnSelector(column);
      // Primitive value types (or callers that opt out of deserialization) use the
      // row-based selector directly; complex types go through their ComplexMetricExtractor.
      if ((Enums.getIfPresent(ValueType.class, typeName.toUpperCase()).isPresent() && !typeName.equalsIgnoreCase(ValueType.COMPLEX.name())) || !deserializeComplexMetrics) {
        return rawColumnSelector;
      } else {
        final ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName);
        if (serde == null) {
          throw new ISE("Don't know how to handle type[%s]", typeName);
        }
        final ComplexMetricExtractor extractor = serde.getExtractor();
        return new ObjectColumnSelector() {
          @Override
          public Class classOfObject() {
            return extractor.extractedClass();
          }

          @Override
          public Object get() {
            return extractor.extractValue(in.get(), column);
          }
        };
      }
    }

    @Override
    public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec) {
      return baseSelectorFactory.makeDimensionSelector(dimensionSpec);
    }

    @Nullable
    @Override
    public ColumnCapabilities getColumnCapabilities(String columnName) {
      return baseSelectorFactory.getColumnCapabilities(columnName);
    }
  }

  return virtualColumns.wrap(new IncrementalIndexInputRowColumnSelectorFactory());
}
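As a sketch of how this factory could be exercised during ingestion: the example below builds one for a simple double-sum metric, points the row supplier at a single MapBasedInputRow, and reads the metric back through an object selector. The "val" and "dim" column names and the aggregator name are made up for illustration.

// Hypothetical sketch, assuming input rows carry a "val" metric.
final AtomicReference<InputRow> rowHolder = new AtomicReference<>();
Supplier<InputRow> rowSupplier = new Supplier<InputRow>() {
  @Override
  public InputRow get() {
    return rowHolder.get();
  }
};

AggregatorFactory agg = new DoubleSumAggregatorFactory("valSum", "val");
ColumnSelectorFactory factory = IncrementalIndex.makeColumnSelectorFactory(
    VirtualColumns.EMPTY, agg, rowSupplier, true);

rowHolder.set(new MapBasedInputRow(
    System.currentTimeMillis(),
    ImmutableList.of("dim"),
    ImmutableMap.<String, Object>of("dim", "a", "val", 5.0)));

// A primitive metric type takes the first branch above, so this delegates
// straight to the row-based selector rather than a ComplexMetricExtractor.
Object rawValue = factory.makeObjectColumnSelector("val").get();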
Use of io.druid.segment.ColumnSelectorFactory in project druid by druid-io: the class OffheapIncrementalIndex, method initAggs.
@Override
protected BufferAggregator[] initAggs(AggregatorFactory[] metrics, Supplier<InputRow> rowSupplier, boolean deserializeComplexMetrics) {
  selectors = Maps.newHashMap();
  aggOffsetInBuffer = new int[metrics.length];
  for (int i = 0; i < metrics.length; i++) {
    AggregatorFactory agg = metrics[i];
    ColumnSelectorFactory columnSelectorFactory = makeColumnSelectorFactory(agg, rowSupplier, deserializeComplexMetrics);
    selectors.put(agg.getName(), new OnheapIncrementalIndex.ObjectCachingColumnSelectorFactory(columnSelectorFactory));
    // Each aggregator gets a slice of the per-row buffer, starting where the previous one ends.
    if (i == 0) {
      aggOffsetInBuffer[i] = 0;
    } else {
      aggOffsetInBuffer[i] = aggOffsetInBuffer[i - 1] + metrics[i - 1].getMaxIntermediateSize();
    }
  }
  aggsTotalSize = aggOffsetInBuffer[metrics.length - 1] + metrics[metrics.length - 1].getMaxIntermediateSize();
  return new BufferAggregator[metrics.length];
}
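To make the offset arithmetic concrete, a small worked example with made-up intermediate sizes:

// Illustrative only: three metrics with hypothetical getMaxIntermediateSize() values.
int[] sizes = {16, 8, 4};
int[] offsets = new int[sizes.length];
for (int i = 0; i < sizes.length; i++) {
  offsets[i] = (i == 0) ? 0 : offsets[i - 1] + sizes[i - 1];
}
int totalSize = offsets[sizes.length - 1] + sizes[sizes.length - 1];
// offsets == {0, 16, 24}, totalSize == 28 bytes reserved per row in the off-heap buffer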