Use of org.apache.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class FinalizingFieldAccessPostAggregatorTest, method testComputedInArithmeticPostAggregator.
@Test
public void testComputedInArithmeticPostAggregator()
{
  String aggName = "billy";
  AggregatorFactory aggFactory = EasyMock.createMock(AggregatorFactory.class);
  EasyMock.expect(aggFactory.getComparator()).andReturn(Comparators.naturalNullsFirst()).once();
  EasyMock.expect(aggFactory.finalizeComputation("test")).andReturn(3L).once();
  EasyMock.expect(aggFactory.getResultType()).andReturn(ColumnType.LONG).once();
  EasyMock.replay(aggFactory);
  FinalizingFieldAccessPostAggregator postAgg =
      buildDecorated("final_billy", aggName, ImmutableMap.of(aggName, aggFactory));
  Map<String, Object> metricValues = new HashMap<>();
  metricValues.put(aggName, "test");
  List<PostAggregator> postAggsList = Lists.newArrayList(new ConstantPostAggregator("roku", 6), postAgg);
  ArithmeticPostAggregator arithmeticPostAggregator = new ArithmeticPostAggregator("add", "+", postAggsList);
  // 6 (the constant "roku") + 3 (the finalized "billy" value) = 9
  Assert.assertEquals(Double.valueOf(9.0), arithmeticPostAggregator.compute(metricValues));
  EasyMock.verify(aggFactory);
}
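The buildDecorated helper is outside this excerpt. A plausible reconstruction, assuming only the PostAggregator.decorate(Map&lt;String, AggregatorFactory&gt;) contract; the helper name and the cast are assumptions of this sketch, not confirmed Druid code:

// Hypothetical reconstruction of the helper used above: create the
// post-aggregator, then hand it the aggregator map so decorate() can
// capture each factory's finalizer for later use in compute().
private static FinalizingFieldAccessPostAggregator buildDecorated(
    String name,
    String fieldName,
    Map<String, AggregatorFactory> aggFactories
)
{
  FinalizingFieldAccessPostAggregator base = new FinalizingFieldAccessPostAggregator(name, fieldName);
  return (FinalizingFieldAccessPostAggregator) base.decorate(aggFactories);
}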
Use of org.apache.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class DataGeneratorTest, method testToIndex.
@Test
public void testToIndex()
{
  List<GeneratorColumnSchema> schemas = new ArrayList<>();
  schemas.add(GeneratorColumnSchema.makeSequential("dimA", ValueType.STRING, false, 1, null, 10, 20));
  schemas.add(GeneratorColumnSchema.makeEnumeratedSequential("dimB", ValueType.STRING, false, 1, null, Arrays.asList("Hello", "World", "Foo", "Bar")));
  schemas.add(GeneratorColumnSchema.makeSequential("dimC", ValueType.STRING, false, 1, 0.50, 30, 40));
  DataGenerator dataGenerator = new DataGenerator(schemas, 9999, 0, 0, 1000.0);
  DimensionsSpec dimensions = new DimensionsSpec(
      Arrays.asList(
          new StringDimensionSchema("dimA"),
          new StringDimensionSchema("dimB"),
          new StringDimensionSchema("dimC")
      )
  );
  AggregatorFactory[] metrics = {new CountAggregatorFactory("cnt")};
  final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withQueryGranularity(Granularities.MINUTE)
      .withDimensionsSpec(dimensions)
      .withMetrics(metrics)
      .withRollup(false)
      .build();
  IncrementalIndex index = new OnheapIncrementalIndex.Builder()
      .setIndexSchema(schema)
      .setSortFacts(false)
      .setMaxRowCount(1_000_000)
      .build();
  dataGenerator.addToIndex(index, 100);
  Assert.assertEquals(100, index.size());
}
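If only the rows are needed rather than a populated IncrementalIndex, the generator can be drained one row at a time. A minimal sketch, assuming DataGenerator.nextRow() from Druid's generator utilities and reusing the schemas list above:

// A minimal sketch: pull rows straight from the generator instead of
// loading them into an index. Assumes DataGenerator.nextRow().
DataGenerator rowGenerator = new DataGenerator(schemas, 9999, 0, 0, 1000.0);
for (int i = 0; i < 100; i++) {
  InputRow row = rowGenerator.nextRow();
  // Each generated row carries the three configured dimensions.
  System.out.println(row.getDimension("dimA") + " " + row.getDimension("dimB") + " " + row.getDimension("dimC"));
}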
Use of org.apache.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class IngestSegmentFirehoseTest, method createTestIndex.
private void createTestIndex(File segmentDir) throws Exception
{
  final List<String> rows = Lists.newArrayList(
      "2014102200\thost1\t10\t0\t1",
      "2014102200\thost2\t20\t1\t0",
      "2014102200\thost3\t30\t1\t1",
      "2014102201\thost1\t10\t1\t1",
      "2014102201\thost2\t20\t1\t1",
      "2014102201\thost3\t30\t1\t1",
      "2014102202\thost1\t10\t1\t1",
      "2014102202\thost2\t20\t1\t1",
      "2014102202\thost3\t30\t1\t1"
  );
  final StringInputRowParser parser = new StringInputRowParser(
      new DelimitedParseSpec(
          new TimestampSpec("timestamp", "yyyyMMddHH", null),
          DIMENSIONS_SPEC,
          "\t",
          null,
          ImmutableList.of("timestamp", "host", "visited", "x", "y", "spatial"),
          false,
          0
      ),
      StandardCharsets.UTF_8.toString()
  );
  try (final IncrementalIndex index = new OnheapIncrementalIndex.Builder()
      .setIndexSchema(
          new IncrementalIndexSchema.Builder()
              .withDimensionsSpec(parser.getParseSpec().getDimensionsSpec())
              .withMetrics(AGGREGATORS.toArray(new AggregatorFactory[0]))
              .build()
      )
      .setMaxRowCount(5000)
      .build()) {
    for (String line : rows) {
      index.add(parser.parse(line));
    }
    indexMerger.persist(index, segmentDir, new IndexSpec(), null);
  }
}
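The tests that call this helper read the persisted segment back from segmentDir. A minimal sketch of that read path, assuming an IndexIO instance paired with the indexMerger above (e.g. from TestHelper.getTestIndexIO()):

// A minimal sketch of reading the persisted segment back.
// Assumes an IndexIO instance matching the indexMerger used above.
QueryableIndex queryableIndex = indexIO.loadIndex(segmentDir);
try {
  // Nine rows were added above, one per input line.
  Assert.assertEquals(9, queryableIndex.getNumRows());
} finally {
  queryableIndex.close();
}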
Use of org.apache.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class FireDepartmentTest, method testSerde.
@Test
public void testSerde() throws Exception
{
  ObjectMapper jsonMapper = new DefaultObjectMapper();
  jsonMapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, jsonMapper));
  FireDepartment schema = new FireDepartment(
      new DataSchema(
          "foo",
          jsonMapper.convertValue(
              new StringInputRowParser(
                  new JSONParseSpec(
                      new TimestampSpec("timestamp", "auto", null),
                      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("dim1", "dim2"))),
                      null,
                      null,
                      null
                  ),
                  null
              ),
              Map.class
          ),
          new AggregatorFactory[]{new CountAggregatorFactory("count")},
          new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null),
          null,
          jsonMapper
      ),
      new RealtimeIOConfig(
          null,
          new RealtimePlumberSchool(
              null,
              null,
              null,
              null,
              null,
              null,
              null,
              NoopJoinableFactory.INSTANCE,
              TestHelper.getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance()),
              TestHelper.getTestIndexIO(),
              MapCache.create(0),
              NO_CACHE_CONFIG,
              new CachePopulatorStats(),
              TestHelper.makeJsonMapper()
          )
      ),
      RealtimeTuningConfig.makeDefaultTuningConfig(new File("/tmp/nonexistent"))
  );
  String json = jsonMapper.writeValueAsString(schema);
  FireDepartment newSchema = jsonMapper.readValue(json, FireDepartment.class);
  Assert.assertEquals(schema.getDataSchema().getDataSource(), newSchema.getDataSchema().getDataSource());
  Assert.assertEquals("/tmp/nonexistent", schema.getTuningConfig().getBasePersistDirectory().toString());
}
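The setInjectableValues call is what makes the readValue round trip work: any constructor parameter marked @JacksonInject is filled from that registry rather than from the JSON. A minimal sketch of the pattern; the NeedsMapper class is hypothetical, for illustration only:

// Hypothetical class showing why the registration above matters:
// on readValue(), Jackson fills @JacksonInject parameters from the
// InjectableValues registered on the mapper, not from the JSON payload.
public class NeedsMapper
{
  private final ObjectMapper mapper;

  @JsonCreator
  public NeedsMapper(@JacksonInject ObjectMapper mapper)
  {
    this.mapper = mapper;
  }
}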
Use of org.apache.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class QueriesTest, method testVerifyAggregationsMultiLevel.
@Test
public void testVerifyAggregationsMultiLevel()
{
  List<AggregatorFactory> aggFactories = Arrays.asList(
      new CountAggregatorFactory("count"),
      new DoubleSumAggregatorFactory("idx", "index"),
      new DoubleSumAggregatorFactory("rev", "revenue")
  );
  List<PostAggregator> postAggs = Arrays.asList(
      new ArithmeticPostAggregator(
          "divideStuff",
          "/",
          Arrays.asList(
              new ArithmeticPostAggregator(
                  "addStuff",
                  "+",
                  Arrays.asList(new FieldAccessPostAggregator("idx", "idx"), new ConstantPostAggregator("const", 1))
              ),
              new ArithmeticPostAggregator(
                  "subtractStuff",
                  "-",
                  Arrays.asList(new FieldAccessPostAggregator("rev", "rev"), new ConstantPostAggregator("const", 1))
              )
          )
      ),
      new ArithmeticPostAggregator(
          "addStuff",
          "+",
          Arrays.asList(new FieldAccessPostAggregator("divideStuff", "divideStuff"), new FieldAccessPostAggregator("count", "count"))
      )
  );
  boolean exceptionOccurred = false;
  try {
    Queries.prepareAggregations(ImmutableList.of(), aggFactories, postAggs);
  } catch (IllegalArgumentException e) {
    exceptionOccurred = true;
  }
  Assert.assertFalse(exceptionOccurred);
}
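For contrast, prepareAggregations rejects a post-aggregator that references a field no aggregator (or earlier post-aggregator) produces. A minimal sketch of that failure case using the same API and the aggFactories list above; the field name "missing" is this sketch's invention:

// Hypothetical negative case: "missing" is produced by no aggregator,
// so validation should fail with IllegalArgumentException.
List<PostAggregator> badPostAggs = Collections.singletonList(
    new ArithmeticPostAggregator(
        "broken",
        "+",
        Arrays.asList(new FieldAccessPostAggregator("missing", "missing"), new ConstantPostAggregator("const", 1))
    )
);
try {
  Queries.prepareAggregations(ImmutableList.of(), aggFactories, badPostAggs);
  Assert.fail("expected IllegalArgumentException for unknown field 'missing'");
} catch (IllegalArgumentException expected) {
  // expected: the post-aggregator references a field that is never aggregated
}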