Use of org.apache.druid.segment.QueryableIndexStorageAdapter in project druid by druid-io.
From class FilterPartitionTest, method testAnalyze:
@Test
public void testAnalyze()
{
  if (!(adapter instanceof QueryableIndexStorageAdapter)) {
    return;
  }
  QueryableIndexStorageAdapter storageAdapter = (QueryableIndexStorageAdapter) adapter;
  final ColumnSelectorBitmapIndexSelector bitmapIndexSelector =
      storageAdapter.makeBitmapIndexSelector(BaseFilterTest.VIRTUAL_COLUMNS);

  // has bitmap index, will use it by default
  Filter normalFilter = new SelectorFilter("dim1", "HELLO");
  QueryableIndexStorageAdapter.FilterAnalysis filterAnalysisNormal =
      storageAdapter.analyzeFilter(normalFilter, bitmapIndexSelector, null);
  Assert.assertNotNull(filterAnalysisNormal.getPreFilterBitmap());
  Assert.assertNull(filterAnalysisNormal.getPostFilter());

  // no bitmap index, should be a post filter
  Filter noBitmapFilter = new NoBitmapSelectorFilter("dim1", "HELLO");
  QueryableIndexStorageAdapter.FilterAnalysis noBitmapFilterAnalysis =
      storageAdapter.analyzeFilter(noBitmapFilter, bitmapIndexSelector, null);
  Assert.assertNull(noBitmapFilterAnalysis.getPreFilterBitmap());
  Assert.assertNotNull(noBitmapFilterAnalysis.getPostFilter());

  // this column has a bitmap index, but is forced to not use it
  Filter bitmapFilterWithForceNoIndexTuning =
      new SelectorFilter("dim1", "HELLO", new FilterTuning(false, null, null));
  QueryableIndexStorageAdapter.FilterAnalysis bitmapFilterWithForceNoIndexTuningAnalysis =
      storageAdapter.analyzeFilter(bitmapFilterWithForceNoIndexTuning, bitmapIndexSelector, null);
  Assert.assertNull(bitmapFilterWithForceNoIndexTuningAnalysis.getPreFilterBitmap());
  Assert.assertNotNull(bitmapFilterWithForceNoIndexTuningAnalysis.getPostFilter());

  // this max cardinality is too low to use bitmap index
  Filter bitmapFilterWithCardinalityMax =
      new SelectorFilter("dim1", "HELLO", new FilterTuning(true, 0, 3));
  QueryableIndexStorageAdapter.FilterAnalysis bitmapFilterWithCardinalityMaxAnalysis =
      storageAdapter.analyzeFilter(bitmapFilterWithCardinalityMax, bitmapIndexSelector, null);
  Assert.assertNull(bitmapFilterWithCardinalityMaxAnalysis.getPreFilterBitmap());
  Assert.assertNotNull(bitmapFilterWithCardinalityMaxAnalysis.getPostFilter());

  // this max cardinality is high enough that we can still use bitmap index
  Filter bitmapFilterWithCardinalityMax2 =
      new SelectorFilter("dim1", "HELLO", new FilterTuning(true, 0, 1000));
  QueryableIndexStorageAdapter.FilterAnalysis bitmapFilterWithCardinalityMax2Analysis =
      storageAdapter.analyzeFilter(bitmapFilterWithCardinalityMax2, bitmapIndexSelector, null);
  Assert.assertNotNull(bitmapFilterWithCardinalityMax2Analysis.getPreFilterBitmap());
  Assert.assertNull(bitmapFilterWithCardinalityMax2Analysis.getPostFilter());

  // this min cardinality is too high, will not use bitmap index
  Filter bitmapFilterWithCardinalityMin =
      new SelectorFilter("dim1", "HELLO", new FilterTuning(true, 1000, null));
  QueryableIndexStorageAdapter.FilterAnalysis bitmapFilterWithCardinalityMinAnalysis =
      storageAdapter.analyzeFilter(bitmapFilterWithCardinalityMin, bitmapIndexSelector, null);
  Assert.assertNull(bitmapFilterWithCardinalityMinAnalysis.getPreFilterBitmap());
  Assert.assertNotNull(bitmapFilterWithCardinalityMinAnalysis.getPostFilter());

  // cannot force using bitmap if there are no bitmaps
  Filter noBitmapFilterWithForceUse =
      new NoBitmapSelectorFilter("dim1", "HELLO", new FilterTuning(true, null, null));
  QueryableIndexStorageAdapter.FilterAnalysis noBitmapFilterWithForceUseAnalysis =
      storageAdapter.analyzeFilter(noBitmapFilterWithForceUse, bitmapIndexSelector, null);
  Assert.assertNull(noBitmapFilterWithForceUseAnalysis.getPreFilterBitmap());
  Assert.assertNotNull(noBitmapFilterWithForceUseAnalysis.getPostFilter());
}
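The test exercises one pattern worth isolating: build a bitmap index selector from the adapter, hand a Filter to analyzeFilter, and check which side of the pre/post split it lands on. Below is a minimal sketch of that check using only the calls exercised above; the helper name usesBitmapIndex and the use of VirtualColumns.EMPTY are illustrative, and the trailing null mirrors the query-metrics argument the test also passes as null.

// Hypothetical helper: does analyzeFilter route this filter to the
// bitmap-backed pre-filter path rather than a row-by-row post filter?
static boolean usesBitmapIndex(QueryableIndexStorageAdapter storageAdapter, Filter filter)
{
  final ColumnSelectorBitmapIndexSelector bitmapIndexSelector =
      storageAdapter.makeBitmapIndexSelector(VirtualColumns.EMPTY);
  final QueryableIndexStorageAdapter.FilterAnalysis analysis =
      storageAdapter.analyzeFilter(filter, bitmapIndexSelector, null);
  // true only when the filter is fully satisfiable from bitmaps,
  // matching the assertNotNull/assertNull pairs in the test above
  return analysis.getPreFilterBitmap() != null && analysis.getPostFilter() == null;
}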
Use of org.apache.druid.segment.QueryableIndexStorageAdapter in project druid by druid-io.
From class DumpSegment, method runDump:
private void runDump(final Injector injector, final QueryableIndex index) throws IOException
{
  final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class));
  final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(index);
  final List<String> columnNames = getColumnsToInclude(index);
  final DimFilter filter = filterJson != null ? objectMapper.readValue(filterJson, DimFilter.class) : null;

  final Sequence<Cursor> cursors = adapter.makeCursors(
      Filters.toFilter(filter),
      index.getDataInterval().withChronology(ISOChronology.getInstanceUTC()),
      VirtualColumns.EMPTY,
      Granularities.ALL,
      false,
      null
  );

  withOutputStream(
      new Function<OutputStream, Object>()
      {
        @Override
        public Object apply(final OutputStream out)
        {
          final Sequence<Object> sequence = Sequences.map(
              cursors,
              new Function<Cursor, Object>()
              {
                @Override
                public Object apply(Cursor cursor)
                {
                  ColumnSelectorFactory columnSelectorFactory = cursor.getColumnSelectorFactory();
                  final List<BaseObjectColumnValueSelector> selectors =
                      columnNames.stream()
                                 .map(columnSelectorFactory::makeColumnValueSelector)
                                 .collect(Collectors.toList());

                  while (!cursor.isDone()) {
                    final Map<String, Object> row = Maps.newLinkedHashMap();
                    for (int i = 0; i < columnNames.size(); i++) {
                      final String columnName = columnNames.get(i);
                      final Object value = selectors.get(i).getObject();
                      if (timeISO8601 && columnNames.get(i).equals(ColumnHolder.TIME_COLUMN_NAME)) {
                        row.put(columnName, new DateTime(value, DateTimeZone.UTC).toString());
                      } else {
                        row.put(columnName, value);
                      }
                    }
                    try {
                      out.write(objectMapper.writeValueAsBytes(row));
                      out.write('\n');
                    }
                    catch (IOException e) {
                      throw new RuntimeException(e);
                    }
                    cursor.advance();
                  }
                  return null;
                }
              }
          );
          evaluateSequenceForSideEffects(sequence);
          return null;
        }
      }
  );
}
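The shape of runDump is the canonical non-vectorized read loop: makeCursors yields a Sequence<Cursor>, each cursor exposes a ColumnSelectorFactory, and the caller pulls values while advancing until isDone. Here is a condensed sketch of that loop that collects rows into memory instead of streaming JSON; the readRows helper is hypothetical and reuses only the adapter calls shown above.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.segment.BaseObjectColumnValueSelector;
import org.apache.druid.segment.Cursor;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.QueryableIndexStorageAdapter;
import org.apache.druid.segment.VirtualColumns;

// Hypothetical helper: materialize every row of the given columns.
static List<Map<String, Object>> readRows(QueryableIndex index, List<String> columnNames)
{
  final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(index);
  final Sequence<Cursor> cursors = adapter.makeCursors(
      null,                     // no filter: read everything
      index.getDataInterval(),  // full interval of the segment
      VirtualColumns.EMPTY,
      Granularities.ALL,
      false,                    // ascending time order
      null                      // no query metrics
  );
  final List<Map<String, Object>> out = new ArrayList<>();
  return cursors.map(cursor -> {
    // one object-value selector per requested column
    final List<BaseObjectColumnValueSelector> selectors = columnNames
        .stream()
        .map(cursor.getColumnSelectorFactory()::makeColumnValueSelector)
        .collect(Collectors.toList());
    final List<Map<String, Object>> rows = new ArrayList<>();
    while (!cursor.isDone()) {
      final Map<String, Object> row = new LinkedHashMap<>();
      for (int i = 0; i < columnNames.size(); i++) {
        row.put(columnNames.get(i), selectors.get(i).getObject());
      }
      rows.add(row);
      cursor.advance();
    }
    return rows;
  }).accumulate(out, (acc, in) -> {
    acc.addAll(in);
    return acc;
  });
}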
Use of org.apache.druid.segment.QueryableIndexStorageAdapter in project druid by druid-io.
From class VectorGroupByEngineIteratorTest, method testCreateOneGrouperAndCloseItWhenClose:
@Test
public void testCreateOneGrouperAndCloseItWhenClose() throws IOException
{
  final Interval interval = TestIndex.DATA_INTERVAL;
  final AggregatorFactory factory = new DoubleSumAggregatorFactory("index", "index");
  final GroupByQuery query = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .setInterval(interval)
      .setDimensions(new DefaultDimensionSpec("market", null, null))
      .setAggregatorSpecs(factory)
      .build();
  final StorageAdapter storageAdapter = new QueryableIndexStorageAdapter(TestIndex.getMMappedTestIndex());
  final ByteBuffer byteBuffer = ByteBuffer.wrap(new byte[4096]);
  final VectorCursor cursor = storageAdapter.makeVectorCursor(
      Filters.toFilter(query.getDimFilter()), interval, query.getVirtualColumns(), false,
      QueryContexts.getVectorSize(query), null
  );
  final List<GroupByVectorColumnSelector> dimensions = query.getDimensions().stream()
      .map(dimensionSpec -> ColumnProcessors.makeVectorProcessor(
          dimensionSpec, GroupByVectorColumnProcessorFactory.instance(), cursor.getColumnSelectorFactory()))
      .collect(Collectors.toList());
  final MutableObject<VectorGrouper> grouperCaptor = new MutableObject<>();
  final VectorGroupByEngineIterator iterator = new VectorGroupByEngineIterator(
      query, new GroupByQueryConfig(), storageAdapter, cursor, interval, dimensions, byteBuffer, null
  )
  {
    @Override
    VectorGrouper makeGrouper()
    {
      grouperCaptor.setValue(Mockito.spy(super.makeGrouper()));
      return grouperCaptor.getValue();
    }
  };
  iterator.close();
  Mockito.verify(grouperCaptor.getValue()).close();
}
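The interesting move in this test is the factory-method override: makeGrouper() is intercepted so the grouper can be wrapped in a Mockito spy before the iterator ever uses it, which is what makes the later close() verification possible. The same capture pattern in miniature, with hypothetical Resource and ResourceOwner types standing in for VectorGrouper and the iterator (simplified to an explicit open() step to keep the sketch unambiguous):

import org.apache.commons.lang3.mutable.MutableObject;
import org.junit.Test;
import org.mockito.Mockito;

// Hypothetical stand-in for VectorGrouper.
class Resource
{
  public void close()
  {
    // release buffers, etc.
  }
}

// Hypothetical stand-in for the iterator: creates its resource through an
// overridable factory method and closes it on close().
class ResourceOwner implements AutoCloseable
{
  private Resource resource;

  Resource makeResource()
  {
    return new Resource();
  }

  void open()
  {
    resource = makeResource();
  }

  @Override
  public void close()
  {
    if (resource != null) {
      resource.close();
    }
  }
}

@Test
public void capturesAndClosesResource()
{
  final MutableObject<Resource> captor = new MutableObject<>();
  final ResourceOwner owner = new ResourceOwner()
  {
    @Override
    Resource makeResource()
    {
      // wrap the real product in a spy before the owner sees it
      captor.setValue(Mockito.spy(super.makeResource()));
      return captor.getValue();
    }
  };
  owner.open();
  owner.close();
  Mockito.verify(captor.getValue()).close();
}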
Use of org.apache.druid.segment.QueryableIndexStorageAdapter in project druid by druid-io.
From class StringFirstTimeseriesQueryTest, method testTimeseriesQuery:
@Test
public void testTimeseriesQuery()
{
  TimeseriesQueryEngine engine = new TimeseriesQueryEngine();
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .aggregators(ImmutableList.of(
          new StringFirstAggregatorFactory("nonfolding", CLIENT_TYPE, null, 1024),
          new StringFirstAggregatorFactory("folding", FIRST_CLIENT_TYPE, null, 1024),
          new StringFirstAggregatorFactory("nonexistent", "nonexistent", null, 1024),
          new StringFirstAggregatorFactory("numeric", "cnt", null, 1024)))
      .build();
  List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(new Result<>(
      TIME1,
      new TimeseriesResultValue(ImmutableMap.<String, Object>builder()
          .put("nonfolding", new SerializablePairLongString(TIME1.getMillis(), "iphone"))
          .put("folding", new SerializablePairLongString(TIME1.getMillis(), "iphone"))
          .put("nonexistent", new SerializablePairLongString(DateTimes.MAX.getMillis(), null))
          .put("numeric", new SerializablePairLongString(DateTimes.MAX.getMillis(), null))
          .build())));
  final Iterable<Result<TimeseriesResultValue>> iiResults =
      engine.process(query, new IncrementalIndexStorageAdapter(incrementalIndex)).toList();
  final Iterable<Result<TimeseriesResultValue>> qiResults =
      engine.process(query, new QueryableIndexStorageAdapter(queryableIndex)).toList();
  TestHelper.assertExpectedResults(expectedResults, iiResults, "incremental index");
  TestHelper.assertExpectedResults(expectedResults, qiResults, "queryable index");
}
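A recurring idiom in Druid's aggregator tests appears here: the same query is pushed through an IncrementalIndexStorageAdapter (the in-memory, still-mutable form of the data) and a QueryableIndexStorageAdapter (the persisted, mmapped form), and both must produce the expected results. Boiled down to a reusable check, using only the calls shown above (the helper name is illustrative, and it assumes Result implements value equality, which the assertions above rely on):

// Hypothetical helper: the engine must produce identical timeseries results
// from the incremental (in-memory) and queryable (mmapped) forms of the data.
static void assertAdaptersAgree(
    TimeseriesQueryEngine engine,
    TimeseriesQuery query,
    IncrementalIndex incrementalIndex,
    QueryableIndex queryableIndex
)
{
  final List<Result<TimeseriesResultValue>> fromIncremental =
      engine.process(query, new IncrementalIndexStorageAdapter(incrementalIndex)).toList();
  final List<Result<TimeseriesResultValue>> fromQueryable =
      engine.process(query, new QueryableIndexStorageAdapter(queryableIndex)).toList();
  Assert.assertEquals(fromIncremental, fromQueryable);
}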
Use of org.apache.druid.segment.QueryableIndexStorageAdapter in project druid by druid-io.
From class ExpressionVectorSelectorsTest, method sanityTestVectorizedExpressionSelectors:
public static void sanityTestVectorizedExpressionSelectors(
    String expression,
    @Nullable ExpressionType outputType,
    QueryableIndex index,
    Closer closer,
    int rowsPerSegment
)
{
  final List<Object> results = new ArrayList<>(rowsPerSegment);
  final VirtualColumns virtualColumns = VirtualColumns.create(
      ImmutableList.of(
          new ExpressionVirtualColumn(
              "v",
              expression,
              ExpressionType.toColumnType(outputType),
              TestExprMacroTable.INSTANCE
          )
      )
  );
  final QueryableIndexStorageAdapter storageAdapter = new QueryableIndexStorageAdapter(index);
  VectorCursor cursor = storageAdapter.makeVectorCursor(
      null, index.getDataInterval(), virtualColumns, false, 512, null
  );
  ColumnCapabilities capabilities = virtualColumns.getColumnCapabilities(storageAdapter, "v");

  int rowCount = 0;
  if (capabilities.isDictionaryEncoded().isTrue()) {
    SingleValueDimensionVectorSelector selector =
        cursor.getColumnSelectorFactory().makeSingleValueDimensionSelector(DefaultDimensionSpec.of("v"));
    while (!cursor.isDone()) {
      int[] row = selector.getRowVector();
      for (int i = 0; i < selector.getCurrentVectorSize(); i++, rowCount++) {
        results.add(selector.lookupName(row[i]));
      }
      cursor.advance();
    }
  } else {
    VectorValueSelector selector = null;
    VectorObjectSelector objectSelector = null;
    if (outputType != null && outputType.isNumeric()) {
      selector = cursor.getColumnSelectorFactory().makeValueSelector("v");
    } else {
      objectSelector = cursor.getColumnSelectorFactory().makeObjectSelector("v");
    }
    while (!cursor.isDone()) {
      boolean[] nulls;
      switch (outputType.getType()) {
        case LONG:
          nulls = selector.getNullVector();
          long[] longs = selector.getLongVector();
          for (int i = 0; i < selector.getCurrentVectorSize(); i++, rowCount++) {
            results.add(nulls != null && nulls[i] ? null : longs[i]);
          }
          break;
        case DOUBLE:
          // special case to test floats just to get coverage on getFloatVector
          if ("float2".equals(expression)) {
            nulls = selector.getNullVector();
            float[] floats = selector.getFloatVector();
            for (int i = 0; i < selector.getCurrentVectorSize(); i++, rowCount++) {
              results.add(nulls != null && nulls[i] ? null : (double) floats[i]);
            }
          } else {
            nulls = selector.getNullVector();
            double[] doubles = selector.getDoubleVector();
            for (int i = 0; i < selector.getCurrentVectorSize(); i++, rowCount++) {
              results.add(nulls != null && nulls[i] ? null : doubles[i]);
            }
          }
          break;
        case STRING:
          Object[] objects = objectSelector.getObjectVector();
          for (int i = 0; i < objectSelector.getCurrentVectorSize(); i++, rowCount++) {
            results.add(objects[i]);
          }
          break;
      }
      cursor.advance();
    }
  }
  closer.register(cursor);

  Sequence<Cursor> cursors = new QueryableIndexStorageAdapter(index).makeCursors(
      null, index.getDataInterval(), virtualColumns, Granularities.ALL, false, null
  );
  int rowCountCursor = cursors.map(nonVectorized -> {
    final ColumnValueSelector nonSelector = nonVectorized.getColumnSelectorFactory().makeColumnValueSelector("v");
    int rows = 0;
    while (!nonVectorized.isDone()) {
      Assert.assertEquals(StringUtils.format("Failed at row %s", rows), nonSelector.getObject(), results.get(rows));
      rows++;
      nonVectorized.advance();
    }
    return rows;
  }).accumulate(0, (acc, in) -> acc + in);

  Assert.assertTrue(rowCountCursor > 0);
  Assert.assertEquals(rowCountCursor, rowCount);
}
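The closing passage of this method is the sanity check in its purest form: whatever the vectorized selectors produced must match, row for row, what a plain cursor reads through makeCursors. Reduced to just the row-count agreement, a sketch could look like the following; the helper is hypothetical, and it additionally assumes VectorCursor exposes getCurrentVectorSize() directly, the way its selectors do above.

import java.io.IOException;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.QueryableIndexStorageAdapter;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.vector.VectorCursor;
import org.junit.Assert;

// Hypothetical helper: both read paths over the same index must see the
// same number of rows.
static void assertVectorAndRowCursorsAgree(QueryableIndex index) throws IOException
{
  final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(index);

  int vectorRows = 0;
  try (VectorCursor vectorCursor = adapter.makeVectorCursor(
      null, index.getDataInterval(), VirtualColumns.EMPTY, false, 512, null)) {
    while (!vectorCursor.isDone()) {
      vectorRows += vectorCursor.getCurrentVectorSize();  // assumed accessor on the cursor
      vectorCursor.advance();
    }
  }

  // non-vectorized count via the same map/accumulate idiom used above
  final int cursorRows = adapter
      .makeCursors(null, index.getDataInterval(), VirtualColumns.EMPTY, Granularities.ALL, false, null)
      .map(cursor -> {
        int rows = 0;
        while (!cursor.isDone()) {
          rows++;
          cursor.advance();
        }
        return rows;
      })
      .accumulate(0, (acc, in) -> acc + in);

  Assert.assertEquals(cursorRows, vectorRows);
}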