Use of org.apache.druid.segment.vector.NoFilterVectorOffset in project druid by druid-io.
From the class CardinalityVectorAggregatorTest, the method testAggregateDouble:
@Test
public void testAggregateDouble() {
  final double[] values = {1, 2, 2, 3, 3, 3, 0};
  final boolean[] nulls = NullHandling.replaceWithDefault()
      ? null
      : new boolean[]{false, false, false, false, false, false, true};

  final CardinalityVectorAggregator aggregator = new CardinalityVectorAggregator(
      Collections.singletonList(
          new DoubleCardinalityVectorProcessor(
              new BaseDoubleVectorValueSelector(new NoFilterVectorOffset(values.length, 0, values.length)) {
                @Override
                public double[] getDoubleVector() {
                  return values;
                }

                @Nullable
                @Override
                public boolean[] getNullVector() {
                  return nulls;
                }
              }
          )
      )
  );

  testAggregate(aggregator, values.length, NullHandling.replaceWithDefault() ? 4 : 3);
}
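Here NoFilterVectorOffset(values.length, 0, values.length) exposes the entire values array as a single contiguous vector, and the expected count is 4 (distinct values 1, 2, 3, 0) when nulls are replaced with defaults versus 3 (1, 2, 3) when the final slot is genuinely null. Below is a minimal standalone sketch, not Druid code, of the read loop that a contiguous offset like this one supports, assuming the VectorOffset contract (isDone/advance/getStartOffset/getCurrentVectorSize):

import org.apache.druid.segment.vector.NoFilterVectorOffset;
import org.apache.druid.segment.vector.VectorOffset;

public class VectorOffsetLoopSketch {
  public static void main(String[] args) {
    final double[] values = {1, 2, 2, 3, 3, 3, 0};

    // One contiguous window over all rows: [0, values.length).
    final VectorOffset offset = new NoFilterVectorOffset(values.length, 0, values.length);

    for (; !offset.isDone(); offset.advance()) {
      // For a contiguous offset, the current batch is the half-open range
      // [getStartOffset(), getStartOffset() + getCurrentVectorSize()).
      final int start = offset.getStartOffset();
      for (int i = start; i < start + offset.getCurrentVectorSize(); i++) {
        System.out.println("row " + i + " -> " + values[i]);
      }
    }
  }
}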
Use of org.apache.druid.segment.vector.NoFilterVectorOffset in project druid by druid-io.
From the class QueryableIndexCursorSequenceBuilder, the method buildVectorized:
public VectorCursor buildVectorized(final int vectorSize) {
  // Sanity check - matches QueryableIndexStorageAdapter.canVectorize
  Preconditions.checkState(!descending, "!descending");

  final Map<String, BaseColumn> columnCache = new HashMap<>();
  final Closer closer = Closer.create();

  NumericColumn timestamps = null;

  final int startOffset;
  final int endOffset;

  if (interval.getStartMillis() > minDataTimestamp) {
    timestamps = (NumericColumn) index.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME).getColumn();
    closer.register(timestamps);
    startOffset = timeSearch(timestamps, interval.getStartMillis(), 0, index.getNumRows());
  } else {
    startOffset = 0;
  }

  if (interval.getEndMillis() <= maxDataTimestamp) {
    if (timestamps == null) {
      timestamps = (NumericColumn) index.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME).getColumn();
      closer.register(timestamps);
    }
    endOffset = timeSearch(timestamps, interval.getEndMillis(), startOffset, index.getNumRows());
  } else {
    endOffset = index.getNumRows();
  }

  final VectorOffset baseOffset =
      filterBitmap == null
      ? new NoFilterVectorOffset(vectorSize, startOffset, endOffset)
      : new BitmapVectorOffset(vectorSize, filterBitmap, startOffset, endOffset);

  // baseColumnSelectorFactory using baseOffset is the column selector for filtering.
  final VectorColumnSelectorFactory baseColumnSelectorFactory =
      makeVectorColumnSelectorFactoryForOffset(columnCache, baseOffset, closer);

  if (postFilter == null) {
    return new QueryableIndexVectorCursor(baseColumnSelectorFactory, baseOffset, vectorSize, closer);
  } else {
    final VectorOffset filteredOffset = FilteredVectorOffset.create(baseOffset, baseColumnSelectorFactory, postFilter);

    // Now create the cursor and column selector that will be returned to the caller.
    //
    // There is an inefficiency with how we do things here: this cursor (the one that will be provided to the
    // caller) does share a columnCache with "baseColumnSelectorFactory", but it *doesn't* share vector data. This
    // means that if the caller wants to read from a column that is also used for filtering, the underlying column
    // object will get hit twice for some of the values (anything that matched the filter). This is probably most
    // noticeable if it causes thrashing of decompression buffers due to out-of-order reads. I haven't observed
    // this directly but it seems possible in principle.
    final VectorColumnSelectorFactory filteredColumnSelectorFactory =
        makeVectorColumnSelectorFactoryForOffset(columnCache, filteredOffset, closer);
    return new QueryableIndexVectorCursor(filteredColumnSelectorFactory, filteredOffset, vectorSize, closer);
  }
}
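Whether the cursor wraps a NoFilterVectorOffset (no filter bitmap) or a BitmapVectorOffset, callers consume it the same way. The following is a sketch of such a consumer, assuming the VectorCursor and VectorValueSelector contracts (isDone/advance, getDoubleVector/getNullVector, getCurrentVectorSize); the column name "x" and the method sumColumn are placeholders, not part of QueryableIndexCursorSequenceBuilder:

// Sums a hypothetical double column "x" over all vectors produced by the cursor.
static double sumColumn(final VectorCursor cursor) {
  final VectorValueSelector selector = cursor.getColumnSelectorFactory().makeValueSelector("x");
  double sum = 0;
  for (; !cursor.isDone(); cursor.advance()) {
    final double[] vector = selector.getDoubleVector();
    final boolean[] nulls = selector.getNullVector(); // null when no row in this vector is null
    for (int i = 0; i < cursor.getCurrentVectorSize(); i++) {
      if (nulls == null || !nulls[i]) {
        sum += vector[i];
      }
    }
  }
  return sum;
}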
Use of org.apache.druid.segment.vector.NoFilterVectorOffset in project druid by druid-io.
From the class BaseColumnarLongsBenchmark, the method setupFilters:
void setupFilters(int rows, double filteredRowCountPercentage, String filterDistribution) {
  final int filteredRowCount = (int) Math.floor(rows * filteredRowCountPercentage);
  if (filteredRowCount < rows) {
    switch (filterDistribution) {
      case "random":
        setupRandomFilter(rows, filteredRowCount);
        break;
      case "contiguous-start":
        offset = new SimpleAscendingOffset(rows);
        vectorOffset = new NoFilterVectorOffset(VECTOR_SIZE, 0, filteredRowCount);
        break;
      case "contiguous-end":
        offset = new SimpleAscendingOffset(rows);
        vectorOffset = new NoFilterVectorOffset(VECTOR_SIZE, rows - filteredRowCount, rows);
        break;
      case "contiguous-bitmap-start":
        setupContiguousBitmapFilter(rows, filteredRowCount, 0);
        break;
      case "contiguous-bitmap-end":
        setupContiguousBitmapFilter(rows, filteredRowCount, rows - filteredRowCount);
        break;
      case "chunky-1000":
        setupChunkyFilter(rows, filteredRowCount, 1000);
        break;
      case "chunky-10000":
        setupChunkyFilter(rows, filteredRowCount, 10000);
        break;
      default:
        throw new IllegalArgumentException("unknown filter distribution");
    }
  } else {
    offset = new SimpleAscendingOffset(rows);
    vectorOffset = new NoFilterVectorOffset(VECTOR_SIZE, 0, rows);
  }
}
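The two contiguous cases above translate the filtered row count directly into a NoFilterVectorOffset window: "contiguous-start" covers the first filteredRowCount rows and "contiguous-end" the last. A small illustration of that arithmetic, assuming rows = 1_000, a 25% filter, and a hypothetical vector size of 512 standing in for the benchmark's VECTOR_SIZE:

final int rows = 1_000;
final int filteredRowCount = (int) Math.floor(rows * 0.25); // 250

// "contiguous-start" selects rows [0, 250).
final VectorOffset start = new NoFilterVectorOffset(512, 0, filteredRowCount);

// "contiguous-end" selects rows [750, 1000).
final VectorOffset end = new NoFilterVectorOffset(512, rows - filteredRowCount, rows);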
Use of org.apache.druid.segment.vector.NoFilterVectorOffset in project druid by druid-io.
From the class CardinalityVectorAggregatorTest, the method testAggregateFloat:
@Test
public void testAggregateFloat() {
  final float[] values = {1, 2, 2, 3, 3, 3, 0};
  final boolean[] nulls = NullHandling.replaceWithDefault()
      ? null
      : new boolean[]{false, false, false, false, false, false, true};

  final CardinalityVectorAggregator aggregator = new CardinalityVectorAggregator(
      Collections.singletonList(
          new FloatCardinalityVectorProcessor(
              new BaseFloatVectorValueSelector(new NoFilterVectorOffset(values.length, 0, values.length)) {
                @Override
                public float[] getFloatVector() {
                  return values;
                }

                @Nullable
                @Override
                public boolean[] getNullVector() {
                  return nulls;
                }
              }
          )
      )
  );

  testAggregate(aggregator, values.length, NullHandling.replaceWithDefault() ? 4 : 3);
}
Use of org.apache.druid.segment.vector.NoFilterVectorOffset in project druid by druid-io.
From the class CardinalityVectorAggregatorTest, the method testAggregateLong:
@Test
public void testAggregateLong() {
  final long[] values = {1, 2, 2, 3, 3, 3, 0};
  final boolean[] nulls = NullHandling.replaceWithDefault()
      ? null
      : new boolean[]{false, false, false, false, false, false, true};

  final CardinalityVectorAggregator aggregator = new CardinalityVectorAggregator(
      Collections.singletonList(
          new LongCardinalityVectorProcessor(
              new BaseLongVectorValueSelector(new NoFilterVectorOffset(values.length, 0, values.length)) {
                @Override
                public long[] getLongVector() {
                  return values;
                }

                @Nullable
                @Override
                public boolean[] getNullVector() {
                  return nulls;
                }
              }
          )
      )
  );

  testAggregate(aggregator, values.length, NullHandling.replaceWithDefault() ? 4 : 3);
}
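All three cardinality tests size the offset so the whole array fits in a single vector. When the covered range exceeds maxVectorSize, a contiguous offset is expected to emit full batches followed by a shorter final one; the standalone sketch below (an assumption about NoFilterVectorOffset's batching behavior, not code from the tests) makes that visible:

import org.apache.druid.segment.vector.NoFilterVectorOffset;
import org.apache.druid.segment.vector.VectorOffset;

public class PartialVectorSketch {
  public static void main(String[] args) {
    // Cover rows [0, 7) in batches of at most 3.
    final VectorOffset offset = new NoFilterVectorOffset(3, 0, 7);
    for (; !offset.isDone(); offset.advance()) {
      System.out.println("vector @ " + offset.getStartOffset() + ", size " + offset.getCurrentVectorSize());
    }
    // Expected (under the batching assumption above): sizes 3, 3, then 1.
  }
}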