Use of io.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.
From class TopNBenchmark, method setupQueries:
private void setupQueries() {
  // queries for the basic schema
  Map<String, TopNQueryBuilder> basicQueries = new LinkedHashMap<>();
  BenchmarkSchemaInfo basicSchema = BenchmarkSchemas.SCHEMA_MAP.get("basic");
  {
    // basic.A
    QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Arrays.asList(basicSchema.getDataInterval()));
    List<AggregatorFactory> queryAggs = new ArrayList<>();
    queryAggs.add(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"));
    queryAggs.add(new LongMaxAggregatorFactory("maxLongUniform", "maxLongUniform"));
    queryAggs.add(new DoubleSumAggregatorFactory("sumFloatNormal", "sumFloatNormal"));
    queryAggs.add(new DoubleMinAggregatorFactory("minFloatZipf", "minFloatZipf"));
    queryAggs.add(new HyperUniquesAggregatorFactory("hyperUniquesMet", "hyper"));
    TopNQueryBuilder queryBuilderA = new TopNQueryBuilder()
        .dataSource("blah")
        .granularity(Granularities.ALL)
        .dimension("dimSequential")
        .metric("sumFloatNormal")
        .intervals(intervalSpec)
        .aggregators(queryAggs);
    basicQueries.put("A", queryBuilderA);
  }
  {
    // basic.numericSort
    QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Arrays.asList(basicSchema.getDataInterval()));
    List<AggregatorFactory> queryAggs = new ArrayList<>();
    queryAggs.add(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"));
    TopNQueryBuilder queryBuilderA = new TopNQueryBuilder()
        .dataSource("blah")
        .granularity(Granularities.ALL)
        .dimension("dimUniform")
        .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC))
        .intervals(intervalSpec)
        .aggregators(queryAggs);
    basicQueries.put("numericSort", queryBuilderA);
  }
  {
    // basic.alphanumericSort
    QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Arrays.asList(basicSchema.getDataInterval()));
    List<AggregatorFactory> queryAggs = new ArrayList<>();
    queryAggs.add(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"));
    TopNQueryBuilder queryBuilderA = new TopNQueryBuilder()
        .dataSource("blah")
        .granularity(Granularities.ALL)
        .dimension("dimUniform")
        .metric(new DimensionTopNMetricSpec(null, StringComparators.ALPHANUMERIC))
        .intervals(intervalSpec)
        .aggregators(queryAggs);
    basicQueries.put("alphanumericSort", queryBuilderA);
  }
  SCHEMA_QUERY_MAP.put("basic", basicQueries);
}
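Note that a builder stored in this map only becomes a runnable query once a threshold is set and build() is called (the benchmark does this later in its setup). A minimal sketch, assuming a queryBuilderA configured as in the basic.A block above; the threshold value of 10 and the use of Druid's DefaultObjectMapper to print the query are illustrative assumptions, not part of the benchmark:

// Hypothetical usage sketch, not part of TopNBenchmark.
TopNQuery query = queryBuilderA
    .threshold(10) // assumed value; the benchmark parameterizes this
    .build();
// Druid's Jackson mapper renders the query as JSON (throws JsonProcessingException).
System.out.println(new io.druid.jackson.DefaultObjectMapper().writeValueAsString(query));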
Use of io.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.
From class MetadataTest, method testMerge:
@Test
public void testMerge() {
  Assert.assertNull(Metadata.merge(null, null));
  Assert.assertNull(Metadata.merge(ImmutableList.<Metadata>of(), null));

  List<Metadata> metadataToBeMerged = new ArrayList<>();
  metadataToBeMerged.add(null);
  Assert.assertNull(Metadata.merge(metadataToBeMerged, null));

  // sanity merge check
  AggregatorFactory[] aggs = new AggregatorFactory[] { new LongMaxAggregatorFactory("n", "f") };
  Metadata m1 = new Metadata();
  m1.put("k", "v");
  m1.setAggregators(aggs);
  m1.setTimestampSpec(new TimestampSpec("ds", "auto", null));
  m1.setQueryGranularity(Granularities.ALL);
  m1.setRollup(Boolean.FALSE);

  Metadata m2 = new Metadata();
  m2.put("k", "v");
  m2.setAggregators(aggs);
  m2.setTimestampSpec(new TimestampSpec("ds", "auto", null));
  m2.setQueryGranularity(Granularities.ALL);
  m2.setRollup(Boolean.FALSE);

  Metadata merged = new Metadata();
  merged.put("k", "v");
  merged.setAggregators(new AggregatorFactory[] { new LongMaxAggregatorFactory("n", "n") });
  merged.setTimestampSpec(new TimestampSpec("ds", "auto", null));
  merged.setRollup(Boolean.FALSE);
  merged.setQueryGranularity(Granularities.ALL);
  Assert.assertEquals(merged, Metadata.merge(ImmutableList.of(m1, m2), null));

  // merge check with one metadata being null
  metadataToBeMerged.clear();
  metadataToBeMerged.add(m1);
  metadataToBeMerged.add(m2);
  metadataToBeMerged.add(null);
  merged.setAggregators(null);
  merged.setTimestampSpec(null);
  merged.setQueryGranularity(null);
  merged.setRollup(null);
  Assert.assertEquals(merged, Metadata.merge(metadataToBeMerged, null));

  // merge check with client explicitly providing merged aggregators
  AggregatorFactory[] explicitAggs = new AggregatorFactory[] { new DoubleMaxAggregatorFactory("x", "y") };
  merged.setAggregators(explicitAggs);
  Assert.assertEquals(merged, Metadata.merge(metadataToBeMerged, explicitAggs));

  merged.setTimestampSpec(new TimestampSpec("ds", "auto", null));
  merged.setQueryGranularity(Granularities.ALL);
  m1.setRollup(Boolean.TRUE);
  Assert.assertEquals(merged, Metadata.merge(ImmutableList.of(m1, m2), explicitAggs));
}
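The expected merged value new LongMaxAggregatorFactory("n", "n") above is not arbitrary: merging reduces to each aggregator's getCombiningFactory(), which returns a factory that reads the aggregator's own output column rather than the raw input field. A minimal sketch of that relationship, using the same names as the test:

AggregatorFactory original = new LongMaxAggregatorFactory("n", "f");
// The combining factory folds already-aggregated values, so it reads
// column "n" (the output of the original) instead of the raw field "f".
AggregatorFactory combining = original.getCombiningFactory();
// combining equals new LongMaxAggregatorFactory("n", "n")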
Use of io.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.
From class TopNTypeInterfaceBenchmark, method setupQueries:
private void setupQueries() {
  // queries for the basic schema
  Map<String, TopNQueryBuilder> basicQueries = new LinkedHashMap<>();
  BenchmarkSchemaInfo basicSchema = BenchmarkSchemas.SCHEMA_MAP.get("basic");
  {
    // basic.A
    QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Arrays.asList(basicSchema.getDataInterval()));
    List<AggregatorFactory> queryAggs = new ArrayList<>();
    queryAggs.add(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"));
    queryAggs.add(new LongMaxAggregatorFactory("maxLongUniform", "maxLongUniform"));
    queryAggs.add(new DoubleSumAggregatorFactory("sumFloatNormal", "sumFloatNormal"));
    queryAggs.add(new DoubleMinAggregatorFactory("minFloatZipf", "minFloatZipf"));
    queryAggs.add(new HyperUniquesAggregatorFactory("hyperUniquesMet", "hyper"));
    // Use an IdentityExtractionFn to force usage of DimExtractionTopNAlgorithm
    TopNQueryBuilder queryBuilderString = new TopNQueryBuilder()
        .dataSource("blah")
        .granularity(Granularities.ALL)
        .dimension(new ExtractionDimensionSpec("dimSequential", "dimSequential", IdentityExtractionFn.getInstance()))
        .metric("sumFloatNormal")
        .intervals(intervalSpec)
        .aggregators(queryAggs);
    // DimExtractionTopNAlgorithm is always used for numeric columns
    TopNQueryBuilder queryBuilderLong = new TopNQueryBuilder()
        .dataSource("blah")
        .granularity(Granularities.ALL)
        .dimension("metLongUniform")
        .metric("sumFloatNormal")
        .intervals(intervalSpec)
        .aggregators(queryAggs);
    TopNQueryBuilder queryBuilderFloat = new TopNQueryBuilder()
        .dataSource("blah")
        .granularity(Granularities.ALL)
        .dimension("metFloatNormal")
        .metric("sumFloatNormal")
        .intervals(intervalSpec)
        .aggregators(queryAggs);
    basicQueries.put("string", queryBuilderString);
    basicQueries.put("long", queryBuilderLong);
    basicQueries.put("float", queryBuilderFloat);
  }
  {
    // basic.numericSort
    QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Arrays.asList(basicSchema.getDataInterval()));
    List<AggregatorFactory> queryAggs = new ArrayList<>();
    queryAggs.add(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"));
    TopNQueryBuilder queryBuilderA = new TopNQueryBuilder()
        .dataSource("blah")
        .granularity(Granularities.ALL)
        .dimension("dimUniform")
        .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC))
        .intervals(intervalSpec)
        .aggregators(queryAggs);
    basicQueries.put("numericSort", queryBuilderA);
  }
  {
    // basic.alphanumericSort
    QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Arrays.asList(basicSchema.getDataInterval()));
    List<AggregatorFactory> queryAggs = new ArrayList<>();
    queryAggs.add(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"));
    TopNQueryBuilder queryBuilderA = new TopNQueryBuilder()
        .dataSource("blah")
        .granularity(Granularities.ALL)
        .dimension("dimUniform")
        .metric(new DimensionTopNMetricSpec(null, StringComparators.ALPHANUMERIC))
        .intervals(intervalSpec)
        .aggregators(queryAggs);
    basicQueries.put("alphanumericSort", queryBuilderA);
  }
  SCHEMA_QUERY_MAP.put("basic", basicQueries);
}
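The numericSort and alphanumericSort variants differ only in the StringComparator passed to DimensionTopNMetricSpec. A small sketch of how the two orderings from io.druid.query.ordering.StringComparators diverge (the example values are illustrative, not from the benchmark):

// NUMERIC interprets the whole string as a number where possible.
int a = StringComparators.NUMERIC.compare("10", "9");        // > 0: 10 sorts after 9
// ALPHANUMERIC compares embedded digit runs numerically within the string.
int b = StringComparators.ALPHANUMERIC.compare("a10", "a9"); // > 0: "a10" sorts after "a9"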
Use of io.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.
From class OnheapIncrementalIndexTest, method testOnHeapIncrementalIndexClose:
@Test
public void testOnHeapIncrementalIndexClose() throws Exception {
  // Prepare the mock & set the close() call count expectation to 1
  Aggregator mockedAggregator = EasyMock.createMock(LongMaxAggregator.class);
  mockedAggregator.close();
  EasyMock.expectLastCall().times(1);

  final OnheapIncrementalIndex index = new OnheapIncrementalIndex(
      0,
      Granularities.MINUTE,
      new AggregatorFactory[] { new LongMaxAggregatorFactory("max", "max") },
      MAX_ROWS
  );
  index.add(new MapBasedInputRow(0, Lists.newArrayList("billy"), ImmutableMap.<String, Object>of("billy", 1, "max", 1)));

  // override the aggregator with the mock
  index.concurrentGet(0)[0] = mockedAggregator;

  // close the index and validate the expectations
  EasyMock.replay(mockedAggregator);
  index.close();
  EasyMock.verify(mockedAggregator);
}
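The mock-based test above only verifies the close() lifecycle. As a complementary sketch, the same index type can be exercised end to end with the LongMax aggregator; the test name, row values, and 1000-row limit below are illustrative assumptions, not code from the project:

@Test
public void testLongMaxRollupSketch() throws Exception {
  OnheapIncrementalIndex index = new OnheapIncrementalIndex(
      0,
      Granularities.MINUTE,
      new AggregatorFactory[] { new LongMaxAggregatorFactory("max", "max") },
      1000 // assumed max row count
  );
  // Two rows in the same minute bucket with the same dimension value roll up.
  index.add(new MapBasedInputRow(0, Lists.newArrayList("billy"), ImmutableMap.<String, Object>of("billy", 1, "max", 3)));
  index.add(new MapBasedInputRow(0, Lists.newArrayList("billy"), ImmutableMap.<String, Object>of("billy", 1, "max", 7)));
  for (Row row : index) {
    // LongMax keeps the larger value across the rolled-up rows.
    Assert.assertEquals(7, row.getLongMetric("max"));
  }
  index.close();
}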