use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
the class FixedBucketsHistogramQuantileSqlAggregatorTest method createQuerySegmentWalker.
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker() throws IOException {
ApproximateHistogramDruidModule.registerSerde();
final QueryableIndex index = IndexBuilder
    .create(CalciteTests.getJsonMapper())
    .tmpDir(temporaryFolder.newFolder())
    .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
    .schema(
        new IncrementalIndexSchema.Builder()
            .withMetrics(
                new CountAggregatorFactory("cnt"),
                new DoubleSumAggregatorFactory("m1", "m1"),
                new FixedBucketsHistogramAggregatorFactory(
                    "fbhist_m1",
                    "m1",
                    20,
                    0,
                    10,
                    FixedBucketsHistogram.OutlierHandlingMode.IGNORE,
                    false
                )
            )
            .withRollup(false)
            .build()
    )
    .rows(CalciteTests.ROWS1)
    .buildMMappedIndex();
return new SpecificSegmentsQuerySegmentWalker(conglomerate).add(
    DataSegment.builder()
               .dataSource(CalciteTests.DATASOURCE1)
               .interval(index.getDataInterval())
               .version("1")
               .shardSpec(new LinearShardSpec(0))
               .size(0)
               .build(),
    index
);
}
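For context, the two-argument DoubleSumAggregatorFactory constructor used for the "m1" metric above takes the output metric name first and the input column name second. A minimal standalone sketch (not part of this test) of the same factory, and of the combining factory Druid derives from it:
// Minimal sketch: same constructor as the "m1" metric above.
// First argument is the output metric name, second the input column.
DoubleSumAggregatorFactory doubleSum = new DoubleSumAggregatorFactory("m1", "m1");

// getCombiningFactory() yields the factory used when merging partial
// aggregates; for a sum it is another doubleSum reading the output column.
AggregatorFactory combining = doubleSum.getCombiningFactory();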
use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
the class SchemaEvolutionTest method setUp.
@Before
public void setUp() throws IOException {
NullHandling.initializeForTests();
// Index1: c1 is a string, c2 nonexistent, "uniques" nonexistent
index1 = IndexBuilder
    .create()
    .tmpDir(temporaryFolder.newFolder())
    .schema(
        new IncrementalIndexSchema.Builder()
            .withMetrics(new CountAggregatorFactory("cnt"))
            .withRollup(false)
            .build()
    )
    .rows(inputRowsWithDimensions(ImmutableList.of("c1")))
    .buildMMappedIndex();
// Index2: c1 is a long, c2 is a string, "uniques" is uniques on c2, "longmin" is min on c1
index2 = IndexBuilder
    .create()
    .tmpDir(temporaryFolder.newFolder())
    .schema(
        new IncrementalIndexSchema.Builder()
            .withMetrics(
                new CountAggregatorFactory("cnt"),
                new LongSumAggregatorFactory("c1", "c1"),
                new HyperUniquesAggregatorFactory("uniques", "c2"),
                new LongMinAggregatorFactory("longmin", "c1")
            )
            .withRollup(false)
            .build()
    )
    .rows(inputRowsWithDimensions(ImmutableList.of("c2")))
    .buildMMappedIndex();
// Index3: c1 is a float, c2 is a string, "uniques" is uniques on c2
index3 = IndexBuilder
    .create()
    .tmpDir(temporaryFolder.newFolder())
    .schema(
        new IncrementalIndexSchema.Builder()
            .withMetrics(
                new CountAggregatorFactory("cnt"),
                new DoubleSumAggregatorFactory("c1", "c1"),
                new HyperUniquesAggregatorFactory("uniques", "c2")
            )
            .withRollup(false)
            .build()
    )
    .rows(inputRowsWithDimensions(ImmutableList.of("c2")))
    .buildMMappedIndex();
// Index4: c1 is nonexistent, c2 is uniques on c2
index4 = IndexBuilder
    .create()
    .tmpDir(temporaryFolder.newFolder())
    .schema(
        new IncrementalIndexSchema.Builder()
            .withMetrics(new HyperUniquesAggregatorFactory("c2", "c2"))
            .withRollup(false)
            .build()
    )
    .rows(inputRowsWithDimensions(ImmutableList.of()))
    .buildMMappedIndex();
if (index4.getAvailableDimensions().size() != 0) {
// Just double-checking that the exclusions are working properly
throw new ISE("Expected no dimensions in index4");
}
}
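The inputRowsWithDimensions(...) helper is private to SchemaEvolutionTest and not shown in this excerpt. A hypothetical reconstruction of its shape, assuming org.apache.druid.data.input.MapBasedInputRow and DateTimes from Druid core (the real helper's row count and values differ):
// Hypothetical sketch only: the dimension list decides which of c1/c2 are
// indexed as dimensions; columns left out remain available as metric inputs.
private static List<InputRow> inputRowsWithDimensions(final List<String> dimensions)
{
  return ImmutableList.of(
      new MapBasedInputRow(
          DateTimes.of("2000-01-01"),
          dimensions,
          ImmutableMap.of("c1", "9", "c2", "a")
      )
  );
}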
use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
the class SegmentMetadataQueryQueryToolChestTest method testMergeAggregatorsConflict.
@Test
public void testMergeAggregatorsConflict() {
final SegmentAnalysis analysis1 = new SegmentAnalysis(
    "id", null, new HashMap<>(), 0, 0,
    ImmutableMap.of(
        "foo", new LongSumAggregatorFactory("foo", "foo"),
        "bar", new DoubleSumAggregatorFactory("bar", "bar")
    ),
    null, null, null
);
final SegmentAnalysis analysis2 = new SegmentAnalysis(
    "id", null, new HashMap<>(), 0, 0,
    ImmutableMap.of(
        "foo", new LongSumAggregatorFactory("foo", "foo"),
        "bar", new DoubleMaxAggregatorFactory("bar", "bar"),
        "baz", new LongMaxAggregatorFactory("baz", "baz")
    ),
    null, null, null
);
final Map<String, AggregatorFactory> expectedLenient = new HashMap<>();
expectedLenient.put("foo", new LongSumAggregatorFactory("foo", "foo"));
expectedLenient.put("bar", null);
expectedLenient.put("baz", new LongMaxAggregatorFactory("baz", "baz"));
Assert.assertNull(mergeStrict(analysis1, analysis2).getAggregators());
Assert.assertEquals(expectedLenient, mergeLenient(analysis1, analysis2).getAggregators());
// Simulate multi-level merge
Assert.assertEquals(
    expectedLenient,
    mergeLenient(mergeLenient(analysis1, analysis2), mergeLenient(analysis1, analysis2)).getAggregators()
);
}
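The contract this test pins down: a strict merge returns null aggregators as soon as any name carries conflicting factories, while a lenient merge keeps agreeing entries and records each conflicting name as an explicit null. A plain-Java sketch of the lenient rule (illustrative only, not the tool chest's actual implementation):
// Illustrative sketch, not Druid's merge code.
static Map<String, AggregatorFactory> lenientMergeSketch(
    Map<String, AggregatorFactory> a,
    Map<String, AggregatorFactory> b
)
{
  final Map<String, AggregatorFactory> merged = new HashMap<>(a);
  for (Map.Entry<String, AggregatorFactory> entry : b.entrySet()) {
    if (!merged.containsKey(entry.getKey())) {
      merged.put(entry.getKey(), entry.getValue());
    } else if (!entry.getValue().equals(merged.get(entry.getKey()))) {
      // Conflict (e.g. "bar" as doubleSum vs. doubleMax): mapped to null.
      // A null stays null in later merges, matching the multi-level
      // merge assertion above.
      merged.put(entry.getKey(), null);
    }
  }
  return merged;
}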
use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
the class SegmentMetadataQueryQueryToolChestTest method testMergeAggregators.
@Test
public void testMergeAggregators() {
final SegmentAnalysis analysis1 = new SegmentAnalysis(
    "id", null, new HashMap<>(), 0, 0,
    ImmutableMap.of(
        "foo", new LongSumAggregatorFactory("foo", "foo"),
        "baz", new DoubleSumAggregatorFactory("baz", "baz")
    ),
    null, null, null
);
final SegmentAnalysis analysis2 = new SegmentAnalysis(
    "id", null, new HashMap<>(), 0, 0,
    ImmutableMap.of(
        "foo", new LongSumAggregatorFactory("foo", "foo"),
        "bar", new DoubleSumAggregatorFactory("bar", "bar")
    ),
    null, null, null
);
final Map<String, AggregatorFactory> expectedMerged = ImmutableMap.of(
    "foo", new LongSumAggregatorFactory("foo", "foo"),
    "bar", new DoubleSumAggregatorFactory("bar", "bar"),
    "baz", new DoubleSumAggregatorFactory("baz", "baz")
);
Assert.assertEquals(expectedMerged, mergeStrict(analysis1, analysis2).getAggregators());
Assert.assertEquals(expectedMerged, mergeLenient(analysis1, analysis2).getAggregators());
}
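When the factories registered under a shared name are identical, both merge modes keep them, and names present on only one side are carried over unchanged. The per-factory hook behind compatible merges is AggregatorFactory.getMergingFactory, which throws a checked AggregatorFactoryNotMergeableException for incompatible pairs; a short sketch, assuming the method behaves as in recent Druid versions:
// Sketch: merging two identical longSum factories yields a combining
// factory; a longSum/doubleSum pair over the same name would throw
// AggregatorFactoryNotMergeableException instead.
AggregatorFactory fooA = new LongSumAggregatorFactory("foo", "foo");
AggregatorFactory fooB = new LongSumAggregatorFactory("foo", "foo");
try {
  AggregatorFactory mergedFoo = fooA.getMergingFactory(fooB);
} catch (AggregatorFactoryNotMergeableException e) {
  // not reached for identical factories
}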
use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
the class SegmentMetadataQueryQueryToolChestTest method testMergeAggregatorsOneNull.
@Test
public void testMergeAggregatorsOneNull() {
final SegmentAnalysis analysis1 = new SegmentAnalysis("id", null, new HashMap<>(), 0, 0, null, null, null, null);
final SegmentAnalysis analysis2 = new SegmentAnalysis(
    "id", null, new HashMap<>(), 0, 0,
    ImmutableMap.of(
        "foo", new LongSumAggregatorFactory("foo", "foo"),
        "bar", new DoubleSumAggregatorFactory("bar", "bar")
    ),
    null, null, null
);
Assert.assertNull(mergeStrict(analysis1, analysis2).getAggregators());
Assert.assertEquals(
    ImmutableMap.of(
        "foo", new LongSumAggregatorFactory("foo", "foo"),
        "bar", new DoubleSumAggregatorFactory("bar", "bar")
    ),
    mergeLenient(analysis1, analysis2).getAggregators()
);
}
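Taken together with the conflict test above, this fixes the null-handling contract: an analysis whose aggregators are unknown (null) makes a strict merge return null outright, whereas a lenient merge simply adopts the aggregators of the side that has them.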