Use of io.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.
From class IndexMergerTest, method testMismatchedMetrics.
@Test
public void testMismatchedMetrics() throws IOException {
  // Five indexes with overlapping but unequal metric sets (A; A,C; B; C,A,B; C,B).
  IncrementalIndex index1 = IncrementalIndexTest.createIndex(
      new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A")}
  );
  closer.closeLater(index1);
  IncrementalIndex index2 = IncrementalIndexTest.createIndex(
      new AggregatorFactory[]{
          new LongSumAggregatorFactory("A", "A"),
          new LongSumAggregatorFactory("C", "C")
      }
  );
  closer.closeLater(index2);
  IncrementalIndex index3 = IncrementalIndexTest.createIndex(
      new AggregatorFactory[]{new LongSumAggregatorFactory("B", "B")}
  );
  closer.closeLater(index3);
  IncrementalIndex index4 = IncrementalIndexTest.createIndex(
      new AggregatorFactory[]{
          new LongSumAggregatorFactory("C", "C"),
          new LongSumAggregatorFactory("A", "A"),
          new LongSumAggregatorFactory("B", "B")
      }
  );
  closer.closeLater(index4);
  IncrementalIndex index5 = IncrementalIndexTest.createIndex(
      new AggregatorFactory[]{
          new LongSumAggregatorFactory("C", "C"),
          new LongSumAggregatorFactory("B", "B")
      }
  );
  closer.closeLater(index5);

  Interval interval = new Interval(0, new DateTime().getMillis());
  RoaringBitmapFactory factory = new RoaringBitmapFactory();
  ArrayList<IndexableAdapter> toMerge = Lists.<IndexableAdapter>newArrayList(
      new IncrementalIndexAdapter(interval, index1, factory),
      new IncrementalIndexAdapter(interval, index2, factory),
      new IncrementalIndexAdapter(interval, index3, factory),
      new IncrementalIndexAdapter(interval, index4, factory),
      new IncrementalIndexAdapter(interval, index5, factory)
  );

  final File tmpDirMerged = temporaryFolder.newFolder();
  File merged = INDEX_MERGER.merge(
      toMerge,
      true,
      new AggregatorFactory[]{
          new LongSumAggregatorFactory("A", "A"),
          new LongSumAggregatorFactory("B", "B"),
          new LongSumAggregatorFactory("C", "C"),
          new LongSumAggregatorFactory("D", "D")
      },
      tmpDirMerged,
      indexSpec
  );

  // Since D was not present in any of the indices, it is not present in the output
  final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(
      closer.closeLater(INDEX_IO.loadIndex(merged))
  );
  Assert.assertEquals(ImmutableSet.of("A", "B", "C"), ImmutableSet.copyOf(adapter.getAvailableMetrics()));
}
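For reference, the LongSumAggregatorFactory constructor used throughout this test takes the output metric name first and the input column to sum second. A minimal sketch of the per-metric combine semantics that merge() relies on (the class name here is illustrative):

import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;

public class LongSumCombineSketch {
  public static void main(String[] args) {
    // Output metric "A", summing input column "A", as in the factories above.
    AggregatorFactory a = new LongSumAggregatorFactory("A", "A");
    // combine() folds two partial sums into one; this is what a merge does
    // for each metric. "D" never appears in any input index, so there is
    // nothing to combine and it is dropped from the merged segment.
    System.out.println(a.combine(3L, 4L)); // prints 7
  }
}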
Use of io.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.
From class IndexMergerTest, method testMismatchedMetricsVarying.
@Test(expected = IAE.class)
public void testMismatchedMetricsVarying() throws IOException {
  IncrementalIndex index2 = IncrementalIndexTest.createIndex(
      new AggregatorFactory[]{
          new LongSumAggregatorFactory("A", "A"),
          new LongSumAggregatorFactory("C", "C")
      }
  );
  closer.closeLater(index2);
  IncrementalIndex index5 = IncrementalIndexTest.createIndex(
      new AggregatorFactory[]{
          new LongSumAggregatorFactory("C", "C"),
          new LongSumAggregatorFactory("B", "B")
      }
  );
  closer.closeLater(index5);

  Interval interval = new Interval(0, new DateTime().getMillis());
  RoaringBitmapFactory factory = new RoaringBitmapFactory();
  ArrayList<IndexableAdapter> toMerge = Lists.<IndexableAdapter>newArrayList(
      new IncrementalIndexAdapter(interval, index2, factory)
  );

  final File tmpDirMerged = temporaryFolder.newFolder();
  // merge() is expected to throw IAE here: index2 carries metric C, but the
  // requested output aggregators (B, A, D) include no aggregator for it.
  final File merged = INDEX_MERGER.merge(
      toMerge,
      true,
      new AggregatorFactory[]{
          new LongSumAggregatorFactory("B", "B"),
          new LongSumAggregatorFactory("A", "A"),
          new LongSumAggregatorFactory("D", "D")
      },
      tmpDirMerged,
      indexSpec
  );

  // Never reached; the lines below only document the intent.
  final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(
      closer.closeLater(INDEX_IO.loadIndex(merged))
  );
  Assert.assertEquals(ImmutableSet.of("A", "B", "C"), ImmutableSet.copyOf(adapter.getAvailableMetrics()));
}
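The IAE arises because merge() cannot silently drop a metric that exists in an input index: C is present in index2 but absent from the passed aggregators. At merge time each output factory is replaced by its combining form, which reads the already-aggregated column rather than the raw input. A small sketch of that relationship (the raw field name "C_raw" is illustrative):

import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;

public class CombiningFactorySketch {
  public static void main(String[] args) {
    // Aggregates raw column "C_raw" into metric "C" at ingest time.
    AggregatorFactory c = new LongSumAggregatorFactory("C", "C_raw");
    // The combining factory reads the stored metric column "C" instead.
    AggregatorFactory combining = c.getCombiningFactory();
    System.out.println(combining.requiredFields()); // prints [C]
  }
}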
Use of io.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.
From class IndexMergerV9WithSpatialIndexTest, method testSpatialQueryWithOtherSpatialDim.
@Test
public void testSpatialQueryWithOtherSpatialDim() {
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("test")
      .granularity(Granularities.ALL)
      .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
      .filters(new SpatialDimFilter("spatialIsRad", new RadiusBound(new float[]{0.0f, 0.0f}, 5)))
      .aggregators(
          Arrays.<AggregatorFactory>asList(
              new CountAggregatorFactory("rows"),
              new LongSumAggregatorFactory("val", "val")
          )
      )
      .build();
  List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          new DateTime("2013-01-01T00:00:00.000Z"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 13L).build()
          )
      )
  );
  try {
    TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
        new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
        new TimeseriesQueryEngine(),
        QueryRunnerTestHelper.NOOP_QUERYWATCHER
    );
    QueryRunner runner = new FinalizeResultsQueryRunner(factory.createRunner(segment), factory.getToolchest());
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, Maps.newHashMap()));
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
}
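These builder calls mirror Druid's JSON query API, and the aggregator names ("rows", "val") become the keys of the TimeseriesResultValue map asserted above. Serializing the aggregator list with Druid's own mapper shows the JSON the query would carry; a self-contained sketch (the output shape in the comment is approximate):

import java.util.Arrays;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;

public class AggregatorJsonSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new DefaultObjectMapper();
    // Roughly: [{"type":"count","name":"rows"},
    //           {"type":"longSum","name":"val","fieldName":"val"}]
    System.out.println(mapper.writeValueAsString(Arrays.<AggregatorFactory>asList(
        new CountAggregatorFactory("rows"),
        new LongSumAggregatorFactory("val", "val")
    )));
  }
}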
Use of io.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.
From class MetadataTest, method testSerde.
@Test
public void testSerde() throws Exception {
ObjectMapper jsonMapper = new DefaultObjectMapper();
Metadata metadata = new Metadata();
metadata.put("k", "v");
AggregatorFactory[] aggregators = new AggregatorFactory[] { new LongSumAggregatorFactory("out", "in") };
metadata.setAggregators(aggregators);
metadata.setQueryGranularity(Granularities.ALL);
metadata.setRollup(Boolean.FALSE);
Metadata other = jsonMapper.readValue(jsonMapper.writeValueAsString(metadata), Metadata.class);
Assert.assertEquals(metadata, other);
}
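The round trip works because AggregatorFactory itself carries the Jackson type information ("type": "longSum" for this factory), so a bare aggregator can also be read back through the base type, not just as part of a Metadata object. A minimal sketch:

import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;

public class AggregatorSerdeSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper jsonMapper = new DefaultObjectMapper();
    AggregatorFactory original = new LongSumAggregatorFactory("out", "in");
    String json = jsonMapper.writeValueAsString(original);
    AggregatorFactory other = jsonMapper.readValue(json, AggregatorFactory.class);
    // LongSumAggregatorFactory implements equals(), so the trip is lossless.
    System.out.println(original.equals(other)); // prints true
  }
}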
Use of io.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.
From class IndexGeneratorCombinerTest, method setUp.
@Before
public void setUp() throws Exception {
  HadoopDruidIndexerConfig config = new HadoopDruidIndexerConfig(
      new HadoopIngestionSpec(
          new DataSchema(
              "website",
              HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
                  new StringInputRowParser(
                      new TimeAndDimsParseSpec(
                          new TimestampSpec("timestamp", "yyyyMMddHH", null),
                          new DimensionsSpec(
                              DimensionsSpec.getDefaultSchemas(ImmutableList.of("host", "keywords")),
                              null,
                              null
                          )
                      ),
                      null
                  ),
                  Map.class
              ),
              new AggregatorFactory[]{
                  new LongSumAggregatorFactory("visited_sum", "visited"),
                  new HyperUniquesAggregatorFactory("unique_hosts", "host")
              },
              new UniformGranularitySpec(
                  Granularities.DAY,
                  Granularities.NONE,
                  ImmutableList.of(Interval.parse("2010/2011"))
              ),
              HadoopDruidIndexerConfig.JSON_MAPPER
          ),
          new HadoopIOConfig(
              ImmutableMap.<String, Object>of("paths", "/tmp/dummy", "type", "static"),
              null,
              "/tmp/dummy"
          ),
          HadoopTuningConfig.makeDefaultTuningConfig().withWorkingPath("/tmp/work").withVersion("ver")
      )
  );
  Configuration hadoopConfig = new Configuration();
  hadoopConfig.set(
      HadoopDruidIndexerConfig.CONFIG_PROPERTY,
      HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsString(config)
  );
  Reducer.Context context = EasyMock.createMock(Reducer.Context.class);
  EasyMock.expect(context.getConfiguration()).andReturn(hadoopConfig);
  EasyMock.replay(context);
  aggregators = config.getSchema().getDataSchema().getAggregators();
  combiner = new IndexGeneratorJob.IndexGeneratorCombiner();
  combiner.setup(context);
}
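setup() succeeds here because the combiner re-reads the ingestion spec from the Hadoop Configuration, which is why the mock only needs to answer getConfiguration(). A sketch of that lookup (the helper method and its parameter are illustrative, not the combiner's exact code):

import org.apache.hadoop.conf.Configuration;
import io.druid.indexer.HadoopDruidIndexerConfig;
import io.druid.query.aggregation.AggregatorFactory;

public class CombinerConfigSketch {
  // 'configJson' stands in for the serialized HadoopDruidIndexerConfig that
  // setUp() above writes into the Configuration.
  public static AggregatorFactory[] readAggregators(String configJson) {
    Configuration hadoopConfig = new Configuration();
    hadoopConfig.set(HadoopDruidIndexerConfig.CONFIG_PROPERTY, configJson);
    HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromConfiguration(hadoopConfig);
    // For the spec above this yields the longSum "visited_sum" and the
    // hyperUnique "unique_hosts".
    return config.getSchema().getDataSchema().getAggregators();
  }
}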