Use of org.apache.druid.segment.DimensionSelector in the druid-io/druid project.
From the class DictionaryBuildingStringGroupByColumnSelectorStrategy, method getOnlyValue:
/**
 * Returns the dictionary ID for the single value in the selector's current row,
 * assigning and recording a fresh ID the first time a value is seen.
 *
 * @param selector must be a {@link DimensionSelector} producing at most one value per row
 * @return the existing or newly assigned dictionary ID, or GROUP_BY_MISSING_VALUE for an empty row
 * @throws IllegalStateException if the current row is multi-valued
 */
@Override
public Object getOnlyValue(ColumnValueSelector selector)
{
  final DimensionSelector dimSelector = (DimensionSelector) selector;
  final IndexedInts row = dimSelector.getRow();

  // Single-value dimensions only; multi-value rows are rejected outright.
  Preconditions.checkState(row.size() < 2, "Not supported for multi-value dimensions");

  if (row.size() == 0) {
    return GROUP_BY_MISSING_VALUE;
  }

  final String value = dimSelector.lookupName(row.get(0));
  final int existingId = reverseDictionary.getInt(value);
  if (existingId >= 0) {
    return existingId;
  }

  // First occurrence of this value: append it and hand out the next ID.
  dictionary.add(value);
  reverseDictionary.put(value, nextId);
  return nextId++;
}
Use of org.apache.druid.segment.DimensionSelector in the druid-io/druid project.
From the class TopNMetricSpecOptimizationsTest, method testShouldNotOptimizeLexicographic:
/**
 * Verifies that a lexicographic topN whose query interval only partially covers the
 * segment interval is NOT optimized: even after ignoreAfterThreshold(), the computed
 * scan range must span the full dimension cardinality [0, cardinality).
 */
@Test
public void testShouldNotOptimizeLexicographic()
{
  // query interval is smaller than segment interval, no filters, can ignoreAfterThreshold
  int cardinality = 1234;
  int threshold = 4;
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
      .metric(QueryRunnerTestHelper.INDEX_METRIC)
      .threshold(threshold)
      .intervals("2018-05-30T00:00:00Z/2018-05-30T01:00:00Z")
      .aggregators(AGGS)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .build();

  // Segment covers a full day; the query above asks for only the first hour of it.
  StorageAdapter adapter = makeFakeStorageAdapter("2018-05-30T00:00:00Z", "2018-05-31T00:00:00Z", cardinality);
  DimensionSelector dimSelector = makeFakeDimSelector(cardinality);

  BaseTopNAlgorithm.AggregatorArrayProvider arrayProviderToTest =
      new BaseTopNAlgorithm.AggregatorArrayProvider(dimSelector, query, cardinality, adapter);
  arrayProviderToTest.ignoreAfterThreshold();

  Pair<Integer, Integer> thePair = arrayProviderToTest.computeStartEnd(cardinality);

  // Integer.valueOf replaces the deprecated new Integer(...) boxing constructor.
  Assert.assertEquals(Integer.valueOf(0), thePair.lhs);
  Assert.assertEquals(Integer.valueOf(cardinality), thePair.rhs);
}
Use of org.apache.druid.segment.DimensionSelector in the druid-io/druid project.
From the class TopNMetricSpecOptimizationsTest, method testAgainShouldNotOptimizeLexicographic:
/**
 * Verifies that a lexicographic topN whose query interval is LARGER than the segment
 * interval is not optimized: without a call to ignoreAfterThreshold(), the computed
 * scan range must span the full dimension cardinality [0, cardinality).
 */
@Test
public void testAgainShouldNotOptimizeLexicographic()
{
  // query interval is larger than segment interval, no filters, can NOT ignoreAfterThreshold
  int cardinality = 1234;
  int threshold = 4;
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
      .metric(QueryRunnerTestHelper.INDEX_METRIC)
      .threshold(threshold)
      .intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z")
      .aggregators(AGGS)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .build();

  // Segment covers only the first hour of the full-day query interval above.
  StorageAdapter adapter = makeFakeStorageAdapter("2018-05-30T00:00:00Z", "2018-05-30T01:00:00Z", cardinality);
  DimensionSelector dimSelector = makeFakeDimSelector(cardinality);

  BaseTopNAlgorithm.AggregatorArrayProvider arrayProviderToTest =
      new BaseTopNAlgorithm.AggregatorArrayProvider(dimSelector, query, cardinality, adapter);

  Pair<Integer, Integer> thePair = arrayProviderToTest.computeStartEnd(cardinality);

  // Integer.valueOf replaces the deprecated new Integer(...) boxing constructor.
  Assert.assertEquals(Integer.valueOf(0), thePair.lhs);
  Assert.assertEquals(Integer.valueOf(cardinality), thePair.rhs);
}
Use of org.apache.druid.segment.DimensionSelector in the druid-io/druid project.
From the class IncrementalIndexStorageAdapterTest, method testResetSanity:
/**
 * Verifies that resetting a cursor after additional (out-of-order) rows are added to
 * the underlying incremental index still yields the originally matched value, for
 * both ascending and descending cursors.
 */
@Test
public void testResetSanity() throws IOException
{
  final IncrementalIndex index = indexCreator.createIndex();
  final DateTime now = DateTimes.nowUtc();
  final Interval queryInterval = new Interval(now.minusMinutes(1), now.plusMinutes(1));

  // Two rows on different dimensions; only the "sally"="bo" row matches the filter below.
  index.add(new MapBasedInputRow(now.minus(1).getMillis(), Collections.singletonList("billy"), ImmutableMap.of("billy", "hi")));
  index.add(new MapBasedInputRow(now.minus(1).getMillis(), Collections.singletonList("sally"), ImmutableMap.of("sally", "bo")));

  final IncrementalIndexStorageAdapter adapter = new IncrementalIndexStorageAdapter(index);

  for (final boolean descending : new boolean[]{false, true}) {
    final Sequence<Cursor> cursorSequence = adapter.makeCursors(
        new SelectorFilter("sally", "bo"),
        queryInterval,
        VirtualColumns.EMPTY,
        Granularities.NONE,
        descending,
        null
    );
    final Cursor cursor = cursorSequence.limit(1).toList().get(0);

    DimensionSelector sallySelector =
        cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("sally", "sally"));
    Assert.assertEquals("bo", sallySelector.lookupName(sallySelector.getRow().get(0)));

    // Add another "sally" row, then reset; the cursor should still land on "bo".
    index.add(new MapBasedInputRow(now.minus(1).getMillis(), Collections.singletonList("sally"), ImmutableMap.of("sally", "ah")));

    // Cursor reset should not be affected by out of order values
    cursor.reset();

    sallySelector = cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("sally", "sally"));
    Assert.assertEquals("bo", sallySelector.lookupName(sallySelector.getRow().get(0)));
  }
}
Use of org.apache.druid.segment.DimensionSelector in the druid-io/druid project.
From the class IncrementalIndexStorageAdapterTest, method testCursorDictionaryRaceConditionFix:
/**
 * Regression test for the dictionary-ID race described in
 * https://github.com/apache/druid/pull/6340: while a cursor is iterating, dictionary
 * IDs returned by the selector must stay below the cardinality observed when the
 * selector was created, even if the dictionary grows concurrently.
 */
@Test
public void testCursorDictionaryRaceConditionFix() throws Exception {
// Tests the dictionary ID race condition bug described at https://github.com/apache/druid/pull/6340
final IncrementalIndex index = indexCreator.createIndex();
final long timestamp = System.currentTimeMillis();
// Seed five rows at the same timestamp with distinct "billy" values ("v10".."v14").
for (int i = 0; i < 5; i++) {
index.add(new MapBasedInputRow(timestamp, Collections.singletonList("billy"), ImmutableMap.of("billy", "v1" + i)));
}
final StorageAdapter sa = new IncrementalIndexStorageAdapter(index);
// NOTE(review): DictionaryRaceTestFilter presumably grows the dictionary while the
// cursor iterates, provoking the race — confirm against its definition elsewhere in this file.
Sequence<Cursor> cursors = sa.makeCursors(new DictionaryRaceTestFilter(index, timestamp), Intervals.utc(timestamp - 60_000, timestamp + 60_000), VirtualColumns.EMPTY, Granularities.ALL, false, null);
final AtomicInteger assertCursorsNotEmpty = new AtomicInteger(0);
cursors.map(cursor -> {
DimensionSelector dimSelector = cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("billy", "billy"));
// Capture cardinality once, before iterating; IDs seen during the scan must stay below it.
int cardinality = dimSelector.getValueCardinality();
int rowNumInCursor = 0;
while (!cursor.isDone()) {
IndexedInts row = dimSelector.getRow();
// Every dictionary ID in the row must be within the snapshotted cardinality.
row.forEach(i -> Assert.assertTrue(i < cardinality));
cursor.advance();
rowNumInCursor++;
}
// All five seeded rows must be visible through the cursor.
Assert.assertEquals(5, rowNumInCursor);
// Count visited cursors so an accidentally empty sequence cannot pass vacuously.
assertCursorsNotEmpty.incrementAndGet();
return null;
}).toList();
Assert.assertEquals(1, assertCursorsNotEmpty.get());
}
Aggregations