Example usage of org.apache.druid.query.filter.vector.ReadableVectorMatch in the Apache Druid project (druid-io): the method advanceWhileVectorIsEmptyAndPopulateOffsets of the class FilteredVectorOffset.
// Skips ahead over base-offset vectors in which the filter matches no rows, then populates
// "offsets" with the filtered row ids of the first non-empty vector. On return, exactly one of:
//   - currentVectorSize == 0 (the base offset is exhausted),
//   - allTrue == true and currentVectorSize equals the base vector size (no remapping needed), or
//   - offsets[0..currentVectorSize) holds the absolute row ids that passed the filter.
private void advanceWhileVectorIsEmptyAndPopulateOffsets() {
  allTrue = false;
  // j = number of matching row ids written into "offsets" for the current base vector.
  int j = 0;
  while (j == 0) {
    if (baseOffset.isDone()) {
      // Base offset exhausted: expose an empty vector.
      currentVectorSize = 0;
      return;
    }
    // Run the filter against every row of the current base vector.
    final ReadableVectorMatch match = filterMatcher.match(VectorMatch.allTrue(baseOffset.getCurrentVectorSize()));
    if (match.isAllTrue(baseOffset.getCurrentVectorSize())) {
      // Entire vector matched: pass the base offset's rows through without remapping.
      currentVectorSize = baseOffset.getCurrentVectorSize();
      allTrue = true;
      return;
    } else if (match.isAllFalse()) {
      // Nothing matched in this vector; move on to the next one.
      baseOffset.advance();
    } else {
      // Partial match: translate the match's selection (vector-relative row numbers) into
      // absolute row ids, using whichever representation the base offset provides.
      final int[] selection = match.getSelection();
      final int selectionSize = match.getSelectionSize();
      if (baseOffset.isContiguous()) {
        final int startOffset = baseOffset.getStartOffset();
        for (int i = 0; i < selectionSize; i++) {
          offsets[j++] = startOffset + selection[i];
        }
      } else {
        final int[] baseOffsets = baseOffset.getOffsets();
        for (int i = 0; i < selectionSize; i++) {
          offsets[j++] = baseOffsets[selection[i]];
        }
      }
      if (j == 0) {
        // NOTE(review): reachable only if the partial-match branch produced an empty
        // selection (selectionSize == 0); advance and keep searching.
        baseOffset.advance();
      }
    }
  }
  currentVectorSize = j;
}
Example usage of org.apache.druid.query.filter.vector.ReadableVectorMatch in the Apache Druid project (druid-io): the method makeVectorMatcher of the class OrFilter.
// Combines several vector matchers with OR semantics: a row matches if ANY base matcher
// matches it. Requires at least one base matcher; a single matcher is returned as-is.
private static VectorValueMatcher makeVectorMatcher(final VectorValueMatcher[] baseMatchers) {
  Preconditions.checkState(baseMatchers.length > 0);
  if (baseMatchers.length == 1) {
    return baseMatchers[0];
  }
  return new BaseVectorValueMatcher(baseMatchers[0]) {
    // Rows not yet known to match; shrinks as matchers claim rows (enables per-row short-circuiting).
    final VectorMatch currentMask = VectorMatch.wrap(new int[getMaxVectorSize()]);
    // Temporary buffer used by retVal.addAll for the set union.
    final VectorMatch scratch = VectorMatch.wrap(new int[getMaxVectorSize()]);
    // Accumulated union of matches from all matchers examined so far.
    final VectorMatch retVal = VectorMatch.wrap(new int[getMaxVectorSize()]);

    @Override
    public ReadableVectorMatch match(final ReadableVectorMatch mask) {
      ReadableVectorMatch currentMatch = baseMatchers[0].match(mask);
      // Initialize currentMask = mask, then progressively remove rows from the mask as we find matches for them.
      // This isn't necessary for correctness (we could use the original "mask" on every call to "match") but it
      // allows for short-circuiting on a row-by-row basis.
      currentMask.copyFrom(mask);
      // Initialize retVal = currentMatch, the rows matched by the first matcher. We'll add more as we loop over
      // the rest of the matchers.
      retVal.copyFrom(currentMatch);
      for (int i = 1; i < baseMatchers.length; i++) {
        if (retVal.isAllTrue(getCurrentVectorSize())) {
          // Short-circuit if the entire vector is true.
          break;
        }
        // Only ask the next matcher about rows that nothing has matched yet.
        currentMask.removeAll(currentMatch);
        currentMatch = baseMatchers[i].match(currentMask);
        retVal.addAll(currentMatch, scratch);
        // NOTE(review): identity comparison is intentional — matchers may return the mask
        // object itself to signal "everything in the mask matched".
        if (currentMatch == currentMask) {
          // baseMatchers[i] matched every remaining row. Short-circuit out.
          break;
        }
      }
      assert retVal.isValid(mask);
      return retVal;
    }
  };
}
Example usage of org.apache.druid.query.filter.vector.ReadableVectorMatch in the Apache Druid project (druid-io): the method makeVectorMatcher of the class NotFilter.
@Override
public VectorValueMatcher makeVectorMatcher(final VectorColumnSelectorFactory factory) {
  // Negates the wrapped filter's matcher: a row matches NOT(f) exactly when it is present
  // in the incoming mask but absent from f's match.
  final VectorValueMatcher underlying = baseFilter.makeVectorMatcher(factory);
  return new BaseVectorValueMatcher(underlying) {
    // Reusable output buffer, sized for the largest possible vector.
    final VectorMatch complement = VectorMatch.wrap(new int[factory.getMaxVectorSize()]);

    @Override
    public ReadableVectorMatch match(final ReadableVectorMatch mask) {
      final ReadableVectorMatch positiveMatch = underlying.match(mask);
      // complement = mask \ positiveMatch
      complement.copyFrom(mask);
      complement.removeAll(positiveMatch);
      assert complement.isValid(mask);
      return complement;
    }
  };
}
Example usage of org.apache.druid.query.filter.vector.ReadableVectorMatch in the Apache Druid project (druid-io): the contiguous-range overload of the method aggregate of the class FilteredVectorAggregator.
@Override
public void aggregate(final ByteBuffer buf, final int position, final int startRow, final int endRow) {
  // Build a mask covering rows [startRow, endRow), then aggregate only the rows the
  // filter matcher accepts, forwarding them to the delegate aggregator.
  final ReadableVectorMatch rowMask;
  if (startRow != 0) {
    // Lazily allocate the reusable scratch mask on first use.
    if (maskScratch == null) {
      maskScratch = VectorMatch.wrap(new int[matcher.getMaxVectorSize()]);
    }
    final int numRows = endRow - startRow;
    final int[] ids = maskScratch.getSelection();
    for (int row = 0; row < numRows; row++) {
      ids[row] = startRow + row;
    }
    maskScratch.setSelectionSize(numRows);
    rowMask = maskScratch;
  } else {
    // Starting at row zero: the cheap "all true" mask covers [0, endRow) exactly.
    rowMask = VectorMatch.allTrue(endRow);
  }

  final ReadableVectorMatch matched = matcher.match(rowMask);
  if (matched.isAllTrue(matcher.getCurrentVectorSize())) {
    // Every row passed the filter: delegate over the whole contiguous range.
    delegate.aggregate(buf, position, startRow, endRow);
    return;
  }
  if (matched.isAllFalse()) {
    // No rows passed; nothing to aggregate.
    return;
  }
  // Partial match: every surviving row aggregates into the same output position.
  final int matchedCount = matched.getSelectionSize();
  Arrays.fill(delegatePositions, 0, matchedCount, position);
  delegate.aggregate(buf, matchedCount, delegatePositions, matched.getSelection(), 0);
}
Example usage of org.apache.druid.query.filter.vector.ReadableVectorMatch in the Apache Druid project (druid-io): the position-array overload of the method aggregate of the class FilteredVectorAggregator.
// Aggregates a filtered subset of the caller-selected rows. The filter matcher is applied on
// top of the "rows" selection (or all of [0, numRows) when rows is null), and each surviving
// row is forwarded to the delegate together with its corresponding output position.
@Override
public void aggregate(final ByteBuffer buf, final int numRows, final int[] positions, @Nullable final int[] rows, final int positionOffset) {
  // Base match fed to the matcher: all rows, or exactly the caller-selected rows.
  final ReadableVectorMatch match0;
  if (rows == null) {
    match0 = VectorMatch.allTrue(numRows);
  } else {
    // NOTE(review): wraps the caller's array directly rather than copying — assumes the
    // matcher treats its input mask as read-only.
    match0 = VectorMatch.wrap(rows).setSelectionSize(numRows);
  }
  final ReadableVectorMatch match = matcher.match(match0);
  final int[] selection = match.getSelection();
  if (rows == null) {
    // With no explicit row selection, a row id is also its index into "positions".
    for (int i = 0; i < match.getSelectionSize(); i++) {
      delegatePositions[i] = positions[selection[i]];
    }
  } else {
    // i iterates over the match; j iterates over the "rows" array
    // Both arrays are traversed in ascending order, so each matched row id is located in
    // "rows" with a single forward scan (two-pointer merge).
    for (int i = 0, j = 0; i < match.getSelectionSize(); i++) {
      for (; rows[j] < selection[i]; j++) {
        // Do nothing; the for loop is doing the work of incrementing j.
      }
      if (rows[j] != selection[i]) {
        // The matcher produced a row id that the caller never selected — a contract violation.
        throw new ISE("Selection contained phantom row[%d]", selection[i]);
      }
      delegatePositions[i] = positions[j];
    }
  }
  delegate.aggregate(buf, match.getSelectionSize(), delegatePositions, selection, positionOffset);
}
Aggregations