Search in sources:

Example 1 with TopNQueryRunnerFactory

Use of io.druid.query.topn.TopNQueryRunnerFactory in project druid by druid-io.

From the class MultiValuedDimensionTest, the method testTopNWithDimFilterAndWithFilteredDimSpec:

@Test
public void testTopNWithDimFilterAndWithFilteredDimSpec() throws Exception {
    // TopN over the "tags" dimension, restricted to the value "t3" both by a
    // ListFilteredDimensionSpec and by a SelectorDimFilter on the same column.
    TopNQuery query = new TopNQueryBuilder()
        .dataSource("xx")
        .granularity(Granularities.ALL)
        .dimension(new ListFilteredDimensionSpec(new DefaultDimensionSpec("tags", "tags"), ImmutableSet.of("t3"), null))
        .metric("count")
        .intervals(QueryRunnerTestHelper.fullOnInterval)
        .aggregators(Arrays.asList(new AggregatorFactory[] { new CountAggregatorFactory("count") }))
        .threshold(5)
        .filters(new SelectorDimFilter("tags", "t3", null))
        .build();
    QueryRunnerFactory factory = new TopNQueryRunnerFactory(
        TestQueryRunners.getPool(),
        new TopNQueryQueryToolChest(new TopNQueryConfig(), QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
        QueryRunnerTestHelper.NOOP_QUERYWATCHER
    );
    QueryRunner<Result<TopNResultValue>> runner = QueryRunnerTestHelper.makeQueryRunner(
        factory,
        new QueryableIndexSegment("sid1", queryableIndex),
        null
    );
    Map<String, Object> context = Maps.newHashMap();
    Sequence<Result<TopNResultValue>> result = runner.run(query, context);
    List<Result<TopNResultValue>> expectedResults = Arrays.asList(
        new Result<TopNResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TopNResultValue(Arrays.<Map<String, Object>>asList(ImmutableMap.<String, Object>of("tags", "t3", "count", 2L)))
        )
    );
    TestHelper.assertExpectedObjects(expectedResults, Sequences.toList(result, new ArrayList<Result<TopNResultValue>>()), "");
}
Also used: TopNQueryBuilder (io.druid.query.topn.TopNQueryBuilder), QueryableIndexSegment (io.druid.segment.QueryableIndexSegment), TopNResultValue (io.druid.query.topn.TopNResultValue), ArrayList (java.util.ArrayList), DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec), DateTime (org.joda.time.DateTime), ListFilteredDimensionSpec (io.druid.query.dimension.ListFilteredDimensionSpec), CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory), TopNQueryRunnerFactory (io.druid.query.topn.TopNQueryRunnerFactory), TopNQueryConfig (io.druid.query.topn.TopNQueryConfig), SelectorDimFilter (io.druid.query.filter.SelectorDimFilter), TopNQuery (io.druid.query.topn.TopNQuery), TopNQueryQueryToolChest (io.druid.query.topn.TopNQueryQueryToolChest), GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest), Test (org.junit.Test)

Example 2 with TopNQueryRunnerFactory

Use of io.druid.query.topn.TopNQueryRunnerFactory in project druid by druid-io.

From the class AggregationTestHelper, the method createTopNQueryAggregationTestHelper:

public static final AggregationTestHelper createTopNQueryAggregationTestHelper(List<? extends Module> jsonModulesToRegister, TemporaryFolder tempFolder) {
    ObjectMapper mapper = new DefaultObjectMapper();
    TopNQueryQueryToolChest toolchest = new TopNQueryQueryToolChest(new TopNQueryConfig(), QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator());
    TopNQueryRunnerFactory factory = new TopNQueryRunnerFactory(new StupidPool<>("TopNQueryRunnerFactory-bufferPool", new Supplier<ByteBuffer>() {

        @Override
        public ByteBuffer get() {
            return ByteBuffer.allocate(10 * 1024 * 1024);
        }
    }), toolchest, QueryRunnerTestHelper.NOOP_QUERYWATCHER);
    IndexIO indexIO = new IndexIO(mapper, new ColumnConfig() {

        @Override
        public int columnCacheSizeBytes() {
            return 0;
        }
    });
    return new AggregationTestHelper(mapper, new IndexMerger(mapper, indexIO), indexIO, toolchest, factory, tempFolder, jsonModulesToRegister);
}
Also used: IndexMerger (io.druid.segment.IndexMerger), TopNQueryConfig (io.druid.query.topn.TopNQueryConfig), IndexIO (io.druid.segment.IndexIO), ColumnConfig (io.druid.segment.column.ColumnConfig), TopNQueryRunnerFactory (io.druid.query.topn.TopNQueryRunnerFactory), TopNQueryQueryToolChest (io.druid.query.topn.TopNQueryQueryToolChest), Supplier (com.google.common.base.Supplier), DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)
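Because com.google.common.base.Supplier has a single abstract method, the anonymous class above can also be written as a lambda on Java 8+. This is only a readability sketch of the same 10 MB buffer supplier, using the same constructors shown in the example, not a change to the helper itself:

// Equivalent buffer supplier written as a lambda (Java 8+); behavior is unchanged.
TopNQueryRunnerFactory factory = new TopNQueryRunnerFactory(
    new StupidPool<ByteBuffer>("TopNQueryRunnerFactory-bufferPool", () -> ByteBuffer.allocate(10 * 1024 * 1024)),
    toolchest,
    QueryRunnerTestHelper.NOOP_QUERYWATCHER
);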

Example 3 with TopNQueryRunnerFactory

Use of io.druid.query.topn.TopNQueryRunnerFactory in project druid by druid-io.

From the class TopNTypeInterfaceBenchmark, the method setup:

@Setup
public void setup() throws IOException {
    log.info("SETUP CALLED AT " + System.currentTimeMillis());
    if (ComplexMetrics.getSerdeForType("hyperUnique") == null) {
        ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(HyperLogLogHash.getDefault()));
    }
    executorService = Execs.multiThreaded(numSegments, "TopNThreadPool");
    setupQueries();
    schemaInfo = BenchmarkSchemas.SCHEMA_MAP.get("basic");
    queryBuilder = SCHEMA_QUERY_MAP.get("basic").get("string");
    queryBuilder.threshold(threshold);
    stringQuery = queryBuilder.build();
    TopNQueryBuilder longBuilder = SCHEMA_QUERY_MAP.get("basic").get("long");
    longBuilder.threshold(threshold);
    longQuery = longBuilder.build();
    TopNQueryBuilder floatBuilder = SCHEMA_QUERY_MAP.get("basic").get("float");
    floatBuilder.threshold(threshold);
    floatQuery = floatBuilder.build();
    incIndexes = new ArrayList<>();
    for (int i = 0; i < numSegments; i++) {
        log.info("Generating rows for segment " + i);
        BenchmarkDataGenerator gen = new BenchmarkDataGenerator(schemaInfo.getColumnSchemas(), RNG_SEED + i, schemaInfo.getDataInterval(), rowsPerSegment);
        IncrementalIndex incIndex = makeIncIndex();
        for (int j = 0; j < rowsPerSegment; j++) {
            InputRow row = gen.nextRow();
            if (j % 10000 == 0) {
                log.info(j + " rows generated.");
            }
            incIndex.add(row);
        }
        incIndexes.add(incIndex);
    }
    File tmpFile = Files.createTempDir();
    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
    tmpFile.deleteOnExit();
    qIndexes = new ArrayList<>();
    for (int i = 0; i < numSegments; i++) {
        File indexFile = INDEX_MERGER_V9.persist(incIndexes.get(i), tmpFile, new IndexSpec());
        QueryableIndex qIndex = INDEX_IO.loadIndex(indexFile);
        qIndexes.add(qIndex);
    }
    factory = new TopNQueryRunnerFactory(
        new StupidPool<>("TopNBenchmark-compute-bufferPool", new OffheapBufferGenerator("compute", 250000000), 0, Integer.MAX_VALUE),
        new TopNQueryQueryToolChest(new TopNQueryConfig(), QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()),
        QueryBenchmarkUtil.NOOP_QUERYWATCHER
    );
}
Also used: TopNQueryBuilder (io.druid.query.topn.TopNQueryBuilder), IndexSpec (io.druid.segment.IndexSpec), IncrementalIndex (io.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), BenchmarkDataGenerator (io.druid.benchmark.datagen.BenchmarkDataGenerator), HyperUniquesSerde (io.druid.query.aggregation.hyperloglog.HyperUniquesSerde), OffheapBufferGenerator (io.druid.offheap.OffheapBufferGenerator), TopNQueryConfig (io.druid.query.topn.TopNQueryConfig), QueryableIndex (io.druid.segment.QueryableIndex), InputRow (io.druid.data.input.InputRow), TopNQueryRunnerFactory (io.druid.query.topn.TopNQueryRunnerFactory), StupidPool (io.druid.collections.StupidPool), TopNQueryQueryToolChest (io.druid.query.topn.TopNQueryQueryToolChest), File (java.io.File), Setup (org.openjdk.jmh.annotations.Setup)
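The setup above builds the factory and the string, long, and float queries but stops before any query is executed. Below is a minimal sketch of running stringQuery against the first generated segment, borrowing the runner pattern from Example 1; the benchmark itself goes through QueryBenchmarkUtil helpers, and the segment id "qIndex0" is an arbitrary placeholder:

// Illustrative only: reuses the QueryRunnerTestHelper pattern from Example 1,
// not the benchmark's own measurement path. "qIndex0" is a placeholder segment id.
QueryRunner<Result<TopNResultValue>> runner = QueryRunnerTestHelper.makeQueryRunner(
    factory,
    new QueryableIndexSegment("qIndex0", qIndexes.get(0)),
    null
);
Map<String, Object> context = Maps.newHashMap();
List<Result<TopNResultValue>> results =
    Sequences.toList(runner.run(stringQuery, context), new ArrayList<Result<TopNResultValue>>());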

Example 4 with TopNQueryRunnerFactory

Use of io.druid.query.topn.TopNQueryRunnerFactory in project druid by druid-io.

From the class TopNBenchmark, the method setup:

@Setup
public void setup() throws IOException {
    log.info("SETUP CALLED AT " + System.currentTimeMillis());
    if (ComplexMetrics.getSerdeForType("hyperUnique") == null) {
        ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(HyperLogLogHash.getDefault()));
    }
    executorService = Execs.multiThreaded(numSegments, "TopNThreadPool");
    setupQueries();
    String[] schemaQuery = schemaAndQuery.split("\\.");
    String schemaName = schemaQuery[0];
    String queryName = schemaQuery[1];
    schemaInfo = BenchmarkSchemas.SCHEMA_MAP.get(schemaName);
    queryBuilder = SCHEMA_QUERY_MAP.get(schemaName).get(queryName);
    queryBuilder.threshold(threshold);
    query = queryBuilder.build();
    incIndexes = new ArrayList<>();
    for (int i = 0; i < numSegments; i++) {
        log.info("Generating rows for segment " + i);
        BenchmarkDataGenerator gen = new BenchmarkDataGenerator(schemaInfo.getColumnSchemas(), RNG_SEED + i, schemaInfo.getDataInterval(), rowsPerSegment);
        IncrementalIndex incIndex = makeIncIndex();
        for (int j = 0; j < rowsPerSegment; j++) {
            InputRow row = gen.nextRow();
            if (j % 10000 == 0) {
                log.info(j + " rows generated.");
            }
            incIndex.add(row);
        }
        incIndexes.add(incIndex);
    }
    tmpDir = Files.createTempDir();
    log.info("Using temp dir: " + tmpDir.getAbsolutePath());
    qIndexes = new ArrayList<>();
    for (int i = 0; i < numSegments; i++) {
        File indexFile = INDEX_MERGER_V9.persist(incIndexes.get(i), tmpDir, new IndexSpec());
        QueryableIndex qIndex = INDEX_IO.loadIndex(indexFile);
        qIndexes.add(qIndex);
    }
    factory = new TopNQueryRunnerFactory(
        new StupidPool<>("TopNBenchmark-compute-bufferPool", new OffheapBufferGenerator("compute", 250000000), 0, Integer.MAX_VALUE),
        new TopNQueryQueryToolChest(new TopNQueryConfig(), QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()),
        QueryBenchmarkUtil.NOOP_QUERYWATCHER
    );
}
Also used: IndexSpec (io.druid.segment.IndexSpec), IncrementalIndex (io.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), BenchmarkDataGenerator (io.druid.benchmark.datagen.BenchmarkDataGenerator), HyperUniquesSerde (io.druid.query.aggregation.hyperloglog.HyperUniquesSerde), OffheapBufferGenerator (io.druid.offheap.OffheapBufferGenerator), TopNQueryConfig (io.druid.query.topn.TopNQueryConfig), QueryableIndex (io.druid.segment.QueryableIndex), InputRow (io.druid.data.input.InputRow), TopNQueryRunnerFactory (io.druid.query.topn.TopNQueryRunnerFactory), StupidPool (io.druid.collections.StupidPool), TopNQueryQueryToolChest (io.druid.query.topn.TopNQueryQueryToolChest), File (java.io.File), Setup (org.openjdk.jmh.annotations.Setup)
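The compute pool above relies on the benchmark-only OffheapBufferGenerator. Where that class is not available, a roughly equivalent pool can be sketched with a plain Guava Supplier allocating direct buffers of the same size; this is an assumed substitute for illustration, not what the benchmark itself does:

// Hypothetical substitute for OffheapBufferGenerator: a plain supplier of
// 250000000-byte direct buffers, passed to the same 4-argument StupidPool
// constructor used above.
StupidPool<ByteBuffer> computePool = new StupidPool<>(
    "TopNBenchmark-compute-bufferPool",
    new Supplier<ByteBuffer>() {
        @Override
        public ByteBuffer get() {
            return ByteBuffer.allocateDirect(250000000);
        }
    },
    0,
    Integer.MAX_VALUE
);
factory = new TopNQueryRunnerFactory(
    computePool,
    new TopNQueryQueryToolChest(new TopNQueryConfig(), QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()),
    QueryBenchmarkUtil.NOOP_QUERYWATCHER
);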

Aggregations (usage counts across the examples above)

TopNQueryConfig (io.druid.query.topn.TopNQueryConfig): 4
TopNQueryQueryToolChest (io.druid.query.topn.TopNQueryQueryToolChest): 4
TopNQueryRunnerFactory (io.druid.query.topn.TopNQueryRunnerFactory): 4
BenchmarkDataGenerator (io.druid.benchmark.datagen.BenchmarkDataGenerator): 2
StupidPool (io.druid.collections.StupidPool): 2
InputRow (io.druid.data.input.InputRow): 2
OffheapBufferGenerator (io.druid.offheap.OffheapBufferGenerator): 2
HyperUniquesSerde (io.druid.query.aggregation.hyperloglog.HyperUniquesSerde): 2
TopNQueryBuilder (io.druid.query.topn.TopNQueryBuilder): 2
IndexSpec (io.druid.segment.IndexSpec): 2
QueryableIndex (io.druid.segment.QueryableIndex): 2
IncrementalIndex (io.druid.segment.incremental.IncrementalIndex): 2
OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex): 2
File (java.io.File): 2
Setup (org.openjdk.jmh.annotations.Setup): 2
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 1
Supplier (com.google.common.base.Supplier): 1
DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper): 1
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 1
DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec): 1