Use of io.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
The class IncrementalIndexStorageAdapterTest, method testCursoringAndIndexUpdationInterleaving.
@Test
public void testCursoringAndIndexUpdationInterleaving() throws Exception {
  final IncrementalIndex index = indexCreator.createIndex();
  final long timestamp = System.currentTimeMillis();
  for (int i = 0; i < 2; i++) {
    index.add(new MapBasedInputRow(timestamp, Lists.newArrayList("billy"), ImmutableMap.<String, Object>of("billy", "v1" + i)));
  }
  final StorageAdapter sa = new IncrementalIndexStorageAdapter(index);
  Sequence<Cursor> cursors = sa.makeCursors(null, new Interval(timestamp - 60_000, timestamp + 60_000), VirtualColumns.EMPTY, Granularities.ALL, false);
  Sequences.toList(Sequences.map(cursors, new Function<Cursor, Object>() {

    @Nullable
    @Override
    public Object apply(Cursor cursor) {
      DimensionSelector dimSelector = cursor.makeDimensionSelector(new DefaultDimensionSpec("billy", "billy"));
      int cardinality = dimSelector.getValueCardinality();
      // The index gets more rows at this point, while the other thread is iterating over the cursor.
      try {
        for (int i = 0; i < 1; i++) {
          index.add(new MapBasedInputRow(timestamp, Lists.newArrayList("billy"), ImmutableMap.<String, Object>of("billy", "v2" + i)));
        }
      } catch (Exception ex) {
        throw new RuntimeException(ex);
      }
      // ... and then cursoring continues in the other thread: every row id handed out
      // by the selector must stay below the cardinality observed before the insert.
      while (!cursor.isDone()) {
        IndexedInts row = dimSelector.getRow();
        for (int i : row) {
          Assert.assertTrue(i < cardinality);
        }
        cursor.advance();
      }
      return null;
    }
  }), new ArrayList<>());
}
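In every snippet on this page, the two constructor arguments to DefaultDimensionSpec are the stored dimension name and the output name under which selectors expose it; the spec itself applies no value transformation. A minimal standalone sketch of that contract, assuming only the DimensionSpec accessors these tests rely on implicitly (the class name DefaultDimensionSpecSketch and the output name "billy_out" are ours):

import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.DimensionSpec;

public class DefaultDimensionSpecSketch {
  public static void main(String[] args) {
    // First argument: the column to read ("billy" in the test above).
    // Second argument: the name the selector's output is exposed under.
    DimensionSpec spec = new DefaultDimensionSpec("billy", "billy_out");
    System.out.println(spec.getDimension());    // billy
    System.out.println(spec.getOutputName());   // billy_out
    // A "default" spec performs no extraction, so no ExtractionFn is attached.
    System.out.println(spec.getExtractionFn()); // null
  }
}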
Use of io.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
The class VirtualColumnsTest, method testMakeSelectors.
@Test
public void testMakeSelectors() {
  final VirtualColumns virtualColumns = makeVirtualColumns();
  final ObjectColumnSelector objectSelector = virtualColumns.makeObjectColumnSelector("expr", null);
  final DimensionSelector dimensionSelector = virtualColumns.makeDimensionSelector(new DefaultDimensionSpec("expr", "x"), null);
  final DimensionSelector extractionDimensionSelector = virtualColumns.makeDimensionSelector(
      new ExtractionDimensionSpec("expr", "x", new BucketExtractionFn(1.0, 0.5)),
      null
  );
  final FloatColumnSelector floatSelector = virtualColumns.makeFloatColumnSelector("expr", null);
  final LongColumnSelector longSelector = virtualColumns.makeLongColumnSelector("expr", null);
  Assert.assertEquals(1L, objectSelector.get());
  Assert.assertEquals("1", dimensionSelector.lookupName(dimensionSelector.getRow().get(0)));
  Assert.assertEquals("0.5", extractionDimensionSelector.lookupName(extractionDimensionSelector.getRow().get(0)));
  Assert.assertEquals(1.0f, floatSelector.get(), 0.0f);
  Assert.assertEquals(1L, longSelector.get());
}
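The only difference between dimensionSelector and extractionDimensionSelector above is the BucketExtractionFn, which maps the raw value 1 into the bucket starting at 0.5. A hedged standalone sketch of that mapping, assuming ExtractionFn.apply(String) buckets numeric strings by size and offset the way the assertion implies (the class name BucketExtractionSketch is ours):

import io.druid.query.extraction.BucketExtractionFn;
import io.druid.query.extraction.ExtractionFn;

public class BucketExtractionSketch {
  public static void main(String[] args) {
    // Bucket size 1.0 with offset 0.5: a value v lands in the bucket
    // starting at offset + floor((v - offset) / size) * size.
    ExtractionFn fn = new BucketExtractionFn(1.0, 0.5);
    // 1 falls into the bucket starting at 0.5, matching the "0.5"
    // asserted for extractionDimensionSelector above.
    System.out.println(fn.apply("1")); // 0.5
  }
}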
Use of io.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
The class VirtualColumnsTest, method testMakeSelectorsWithDotSupport.
@Test
public void testMakeSelectorsWithDotSupport() {
  final VirtualColumns virtualColumns = makeVirtualColumns();
  final ObjectColumnSelector objectSelector = virtualColumns.makeObjectColumnSelector("foo.5", null);
  final DimensionSelector dimensionSelector = virtualColumns.makeDimensionSelector(new DefaultDimensionSpec("foo.5", "x"), null);
  final FloatColumnSelector floatSelector = virtualColumns.makeFloatColumnSelector("foo.5", null);
  final LongColumnSelector longSelector = virtualColumns.makeLongColumnSelector("foo.5", null);
  Assert.assertEquals(5L, objectSelector.get());
  Assert.assertEquals("5", dimensionSelector.lookupName(dimensionSelector.getRow().get(0)));
  Assert.assertEquals(5.0f, floatSelector.get(), 0.0f);
  Assert.assertEquals(5L, longSelector.get());
}
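What makes this variant interesting is that no column named foo.5 is registered directly; the name resolves to a virtual column foo whose dotted sub-spec 5 supplies the value. The helper below is purely hypothetical (not druid API) and only illustrates the dotted-name convention the test relies on:

public class DottedColumnNameSketch {
  // Hypothetical helpers mirroring the convention the test exercises:
  // "foo.5" resolves to the virtual column "foo" with sub-spec "5".
  static String baseName(String column) {
    int dot = column.indexOf('.');
    return dot < 0 ? column : column.substring(0, dot);
  }

  static String subSpec(String column) {
    int dot = column.indexOf('.');
    return dot < 0 ? null : column.substring(dot + 1);
  }

  public static void main(String[] args) {
    System.out.println(baseName("foo.5")); // foo
    System.out.println(subSpec("foo.5"));  // 5, the value every selector returns above
  }
}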
Use of io.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
The class CachingClusteredClientTest, method testGroupByCaching.
@Test
public void testGroupByCaching() throws Exception {
  List<AggregatorFactory> aggsWithUniques = ImmutableList.<AggregatorFactory>builder()
      .addAll(AGGS)
      .add(new HyperUniquesAggregatorFactory("uniques", "uniques"))
      .build();
  final HashFunction hashFn = Hashing.murmur3_128();
  GroupByQuery.Builder builder = new GroupByQuery.Builder()
      .setDataSource(DATA_SOURCE)
      .setQuerySegmentSpec(SEG_SPEC)
      .setDimFilter(DIM_FILTER)
      .setGranularity(GRANULARITY)
      .setDimensions(Arrays.<DimensionSpec>asList(new DefaultDimensionSpec("a", "a")))
      .setAggregatorSpecs(aggsWithUniques)
      .setPostAggregatorSpecs(POST_AGGS)
      .setContext(CONTEXT);
  final HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
  collector.add(hashFn.hashString("abc123", Charsets.UTF_8).asBytes());
  collector.add(hashFn.hashString("123abc", Charsets.UTF_8).asBytes());
  testQueryCaching(
      client,
      builder.build(),
      new Interval("2011-01-01/2011-01-02"),
      makeGroupByResults(new DateTime("2011-01-01"), ImmutableMap.of("a", "a", "rows", 1, "imps", 1, "impers", 1, "uniques", collector)),
      new Interval("2011-01-02/2011-01-03"),
      makeGroupByResults(new DateTime("2011-01-02"), ImmutableMap.of("a", "b", "rows", 2, "imps", 2, "impers", 2, "uniques", collector)),
      new Interval("2011-01-05/2011-01-10"),
      makeGroupByResults(
          new DateTime("2011-01-05"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector),
          new DateTime("2011-01-06"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector),
          new DateTime("2011-01-07"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector),
          new DateTime("2011-01-08"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector),
          new DateTime("2011-01-09"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector)
      ),
      new Interval("2011-01-05/2011-01-10"),
      makeGroupByResults(
          new DateTime("2011-01-05T01"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector),
          new DateTime("2011-01-06T01"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector),
          new DateTime("2011-01-07T01"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector),
          new DateTime("2011-01-08T01"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector),
          new DateTime("2011-01-09T01"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector)
      )
  );
  QueryRunner runner = new FinalizeResultsQueryRunner(
      client,
      GroupByQueryRunnerTest.makeQueryRunnerFactory(new GroupByQueryConfig()).getToolchest()
  );
  HashMap<String, Object> context = new HashMap<String, Object>();
  TestHelper.assertExpectedObjects(
      makeGroupByResults(
          new DateTime("2011-01-05T"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector),
          new DateTime("2011-01-05T01"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector),
          new DateTime("2011-01-06T"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector),
          new DateTime("2011-01-06T01"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector),
          new DateTime("2011-01-07T"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector),
          new DateTime("2011-01-07T01"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector),
          new DateTime("2011-01-08T"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector),
          new DateTime("2011-01-08T01"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector),
          new DateTime("2011-01-09T"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector),
          new DateTime("2011-01-09T01"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector)
      ),
      runner.run(builder.setInterval("2011-01-05/2011-01-10").build(), context),
      ""
  );
}
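The collector in this test is fed pre-hashed values, the same form HyperUniquesAggregatorFactory stores for the uniques column. A small standalone sketch of that pattern; note the io.druid.hll import path is an assumption here, since the class has moved between druid versions:

import com.google.common.base.Charsets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import io.druid.hll.HyperLogLogCollector;

public class HllSketch {
  public static void main(String[] args) {
    HashFunction hashFn = Hashing.murmur3_128();
    HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
    // The HLL sketch stores hashed inputs, mirroring what the test above
    // registers as the expected "uniques" value.
    collector.add(hashFn.hashString("abc123", Charsets.UTF_8).asBytes());
    collector.add(hashFn.hashString("123abc", Charsets.UTF_8).asBytes());
    // Two distinct inputs should estimate to roughly 2.
    System.out.println(collector.estimateCardinality());
  }
}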
Use of io.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
The class RealtimeManagerTest, method testQueryWithSegmentSpec.
@Test(timeout = 10_000L)
public void testQueryWithSegmentSpec() throws IOException, InterruptedException {
  List<Row> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "automotive", "rows", 1L, "idx", 135L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "business", "rows", 1L, "idx", 118L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 1L, "idx", 158L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "health", "rows", 1L, "idx", 120L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 3L, "idx", 2870L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 121L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 3L, "idx", 2900L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "technology", "rows", 1L, "idx", 78L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "travel", "rows", 1L, "idx", 119L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "automotive", "rows", 1L, "idx", 147L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "business", "rows", 1L, "idx", 112L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "entertainment", "rows", 1L, "idx", 166L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "health", "rows", 1L, "idx", 113L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "mezzanine", "rows", 3L, "idx", 2447L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "news", "rows", 1L, "idx", 114L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "premium", "rows", 3L, "idx", 2505L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "technology", "rows", 1L, "idx", 97L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "travel", "rows", 1L, "idx", 126L)
  );
  chiefStartedLatch.await();
  for (QueryRunner runner : QueryRunnerTestHelper.makeQueryRunners((GroupByQueryRunnerFactory) factory)) {
    GroupByQuery query = GroupByQuery.builder()
        .setDataSource(QueryRunnerTestHelper.dataSource)
        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
        .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
        .setGranularity(QueryRunnerTestHelper.dayGran)
        .build();
    plumber.setRunners(ImmutableMap.of(query.getIntervals().get(0), runner));
    plumber2.setRunners(ImmutableMap.of(query.getIntervals().get(0), runner));
    Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(
        factory,
        realtimeManager3.getQueryRunnerForSegments(
            query,
            ImmutableList.<SegmentDescriptor>of(new SegmentDescriptor(new Interval("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z"), "ver", 0))
        ),
        query
    );
    TestHelper.assertExpectedObjects(expectedResults, results, "");
    results = GroupByQueryRunnerTestHelper.runQuery(
        factory,
        realtimeManager3.getQueryRunnerForSegments(
            query,
            ImmutableList.<SegmentDescriptor>of(new SegmentDescriptor(new Interval("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z"), "ver", 1))
        ),
        query
    );
    TestHelper.assertExpectedObjects(expectedResults, results, "");
  }
}
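The two runs above differ only in the partition number of the SegmentDescriptor handed to getQueryRunnerForSegments. A minimal sketch of that descriptor as constructed here, with its three arguments (interval, version, partition number); the class name SegmentDescriptorSketch is ours:

import io.druid.query.SegmentDescriptor;
import org.joda.time.Interval;

public class SegmentDescriptorSketch {
  public static void main(String[] args) {
    Interval interval = new Interval("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z");
    // Same interval and version "ver"; only the partition number differs,
    // which is why the test issues the query twice.
    SegmentDescriptor partition0 = new SegmentDescriptor(interval, "ver", 0);
    SegmentDescriptor partition1 = new SegmentDescriptor(interval, "ver", 1);
    System.out.println(partition0.getPartitionNumber()); // 0
    System.out.println(partition1.getPartitionNumber()); // 1
  }
}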