Use of io.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class RealtimeManagerTest, method setUp.
@Before
public void setUp() throws Exception {
final List<TestInputRowHolder> rows = Arrays.asList(
    makeRow(new DateTime("9000-01-01").getMillis()), // row timestamped far in the future
    makeRow(new ParseException("parse error")),      // row that throws on parse
    null,                                            // null row
    makeRow(new DateTime().getMillis())              // row timestamped now
);
ObjectMapper jsonMapper = new DefaultObjectMapper();
schema = new DataSchema(
    "test",
    null,
    new AggregatorFactory[]{ new CountAggregatorFactory("rows") },
    new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null),
    jsonMapper
);
schema2 = new DataSchema(
    "testV2",
    null,
    new AggregatorFactory[]{ new CountAggregatorFactory("rows") },
    new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null),
    jsonMapper
);
RealtimeIOConfig ioConfig = new RealtimeIOConfig(new FirehoseFactory() {
@Override
public Firehose connect(InputRowParser parser) throws IOException {
return new TestFirehose(rows.iterator());
}
}, new PlumberSchool() {
@Override
public Plumber findPlumber(DataSchema schema, RealtimeTuningConfig config, FireDepartmentMetrics metrics) {
return plumber;
}
}, null);
RealtimeIOConfig ioConfig2 = new RealtimeIOConfig(null, new PlumberSchool() {
@Override
public Plumber findPlumber(DataSchema schema, RealtimeTuningConfig config, FireDepartmentMetrics metrics) {
return plumber2;
}
}, new FirehoseFactoryV2() {
@Override
public FirehoseV2 connect(InputRowParser parser, Object arg1) throws IOException, ParseException {
return new TestFirehoseV2(rows.iterator());
}
});
RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
    1, // maxRowsInMemory
    new Period("P1Y"), // intermediatePersistPeriod
    null, null, null, null, null, null, null, null, 0, 0, null, null
);
plumber = new TestPlumber(new Sink(
    new Interval("0/P5000Y"),
    schema,
    tuningConfig.getShardSpec(),
    new DateTime().toString(),
    tuningConfig.getMaxRowsInMemory(),
    tuningConfig.isReportParseExceptions()
));
realtimeManager = new RealtimeManager(
    Arrays.<FireDepartment>asList(new FireDepartment(schema, ioConfig, tuningConfig)),
    null
);
plumber2 = new TestPlumber(new Sink(
    new Interval("0/P5000Y"),
    schema2,
    tuningConfig.getShardSpec(),
    new DateTime().toString(),
    tuningConfig.getMaxRowsInMemory(),
    tuningConfig.isReportParseExceptions()
));
realtimeManager2 = new RealtimeManager(
    Arrays.<FireDepartment>asList(new FireDepartment(schema2, ioConfig2, tuningConfig)),
    null
);
// Two configs that differ only in their LinearShardSpec partition numbers.
tuningConfig_0 = new RealtimeTuningConfig(
    1, new Period("P1Y"), null, null, null, null, null,
    new LinearShardSpec(0), null, null, 0, 0, null, null
);
tuningConfig_1 = new RealtimeTuningConfig(
    1, new Period("P1Y"), null, null, null, null, null,
    new LinearShardSpec(1), null, null, 0, 0, null, null
);
schema3 = new DataSchema(
    "testing",
    null,
    new AggregatorFactory[]{ new CountAggregatorFactory("ignore") },
    new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null),
    jsonMapper
);
FireDepartment department_0 = new FireDepartment(schema3, ioConfig, tuningConfig_0);
FireDepartment department_1 = new FireDepartment(schema3, ioConfig2, tuningConfig_1);
QueryRunnerFactoryConglomerate conglomerate = new QueryRunnerFactoryConglomerate() {
@Override
public <T, QueryType extends Query<T>> QueryRunnerFactory<T, QueryType> findFactory(QueryType query) {
return factory;
}
};
chiefStartedLatch = new CountDownLatch(2);
RealtimeManager.FireChief fireChief_0 = new RealtimeManager.FireChief(department_0, conglomerate) {
@Override
public void run() {
super.initPlumber();
chiefStartedLatch.countDown();
}
};
RealtimeManager.FireChief fireChief_1 = new RealtimeManager.FireChief(department_1, conglomerate) {
@Override
public void run() {
super.initPlumber();
chiefStartedLatch.countDown();
}
};
realtimeManager3 = new RealtimeManager(
    Arrays.asList(department_0, department_1),
    conglomerate,
    ImmutableMap.<String, Map<Integer, RealtimeManager.FireChief>>of(
        "testing",
        ImmutableMap.of(0, fireChief_0, 1, fireChief_1)
    )
);
startFireChiefWithPartitionNum(fireChief_0, 0);
startFireChiefWithPartitionNum(fireChief_1, 1);
}
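For background on what the CountAggregatorFactory above contributes: besides naming an output column, an AggregatorFactory defines how partial aggregates are merged. A minimal sketch, assuming the combine(Object, Object) method on io.druid's AggregatorFactory; this is illustration only, not part of the test above:

import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;

public class CombineSketch {
    public static void main(String[] args) {
        // "rows" is the output name the factory writes its count under.
        AggregatorFactory rows = new CountAggregatorFactory("rows");
        // Combining two partial counts sums them.
        Object merged = rows.combine(3L, 4L);
        System.out.println(merged); // expected: 7
    }
}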
Use of io.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class AppenderatorTest, method testQueryBySegments.
@Test
public void testQueryBySegments() throws Exception {
try (final AppenderatorTester tester = new AppenderatorTester(2)) {
final Appenderator appenderator = tester.getAppenderator();
appenderator.startJob();
appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), Suppliers.ofInstance(Committers.nil()));
appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 2), Suppliers.ofInstance(Committers.nil()));
appenderator.add(IDENTIFIERS.get(1), IR("2000", "foo", 4), Suppliers.ofInstance(Committers.nil()));
appenderator.add(IDENTIFIERS.get(2), IR("2001", "foo", 8), Suppliers.ofInstance(Committers.nil()));
appenderator.add(IDENTIFIERS.get(2), IR("2001T01", "foo", 16), Suppliers.ofInstance(Committers.nil()));
appenderator.add(IDENTIFIERS.get(2), IR("2001T02", "foo", 32), Suppliers.ofInstance(Committers.nil()));
appenderator.add(IDENTIFIERS.get(2), IR("2001T03", "foo", 64), Suppliers.ofInstance(Committers.nil()));
// Query1: segment #2
final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
    .dataSource(AppenderatorTester.DATASOURCE)
    .aggregators(Arrays.<AggregatorFactory>asList(
        new LongSumAggregatorFactory("count", "count"),
        new LongSumAggregatorFactory("met", "met")
    ))
    .granularity(Granularities.DAY)
    .intervals(new MultipleSpecificSegmentSpec(ImmutableList.of(
        new SegmentDescriptor(
            IDENTIFIERS.get(2).getInterval(),
            IDENTIFIERS.get(2).getVersion(),
            IDENTIFIERS.get(2).getShardSpec().getPartitionNum()
        )
    )))
    .build();
final List<Result<TimeseriesResultValue>> results1 = Lists.newArrayList();
Sequences.toList(query1.run(appenderator, ImmutableMap.<String, Object>of()), results1);
Assert.assertEquals("query1", ImmutableList.of(new Result<>(new DateTime("2001"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("count", 4L, "met", 120L)))), results1);
// Query2: segment #2, partial
final TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
    .dataSource(AppenderatorTester.DATASOURCE)
    .aggregators(Arrays.<AggregatorFactory>asList(
        new LongSumAggregatorFactory("count", "count"),
        new LongSumAggregatorFactory("met", "met")
    ))
    .granularity(Granularities.DAY)
    .intervals(new MultipleSpecificSegmentSpec(ImmutableList.of(
        new SegmentDescriptor(
            new Interval("2001/PT1H"),
            IDENTIFIERS.get(2).getVersion(),
            IDENTIFIERS.get(2).getShardSpec().getPartitionNum()
        )
    )))
    .build();
final List<Result<TimeseriesResultValue>> results2 = Lists.newArrayList();
Sequences.toList(query2.run(appenderator, ImmutableMap.<String, Object>of()), results2);
Assert.assertEquals("query2", ImmutableList.of(new Result<>(new DateTime("2001"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("count", 1L, "met", 8L)))), results2);
// Query3: segment #2, two disjoint intervals
final TimeseriesQuery query3 = Druids.newTimeseriesQueryBuilder()
    .dataSource(AppenderatorTester.DATASOURCE)
    .aggregators(Arrays.<AggregatorFactory>asList(
        new LongSumAggregatorFactory("count", "count"),
        new LongSumAggregatorFactory("met", "met")
    ))
    .granularity(Granularities.DAY)
    .intervals(new MultipleSpecificSegmentSpec(ImmutableList.of(
        new SegmentDescriptor(
            new Interval("2001/PT1H"),
            IDENTIFIERS.get(2).getVersion(),
            IDENTIFIERS.get(2).getShardSpec().getPartitionNum()
        ),
        new SegmentDescriptor(
            new Interval("2001T03/PT1H"),
            IDENTIFIERS.get(2).getVersion(),
            IDENTIFIERS.get(2).getShardSpec().getPartitionNum()
        )
    )))
    .build();
final List<Result<TimeseriesResultValue>> results3 = Lists.newArrayList();
Sequences.toList(query3.run(appenderator, ImmutableMap.<String, Object>of()), results3);
Assert.assertEquals("query2", ImmutableList.of(new Result<>(new DateTime("2001"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("count", 2L, "met", 72L)))), results3);
}
}
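Each SegmentDescriptor above pins the query to one segment by three coordinates: the interval the segment covers, its version string, and its partition number within the interval. A standalone sketch with invented values (the interval and version below are illustrative only):

SegmentDescriptor descriptor = new SegmentDescriptor(
    new Interval("2001-01-01/2001-01-02"), // time chunk the segment covers
    "2001-01-01T00:00:00.000Z",            // segment version (illustrative value)
    0                                      // partition number within the interval
);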
Use of io.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class RealtimePlumberSchoolTest, method setUp.
@Before
public void setUp() throws Exception {
tmpDir = Files.createTempDir();
ObjectMapper jsonMapper = new DefaultObjectMapper();
schema = new DataSchema("test", jsonMapper.convertValue(new StringInputRowParser(new JSONParseSpec(new TimestampSpec("timestamp", "auto", null), new DimensionsSpec(null, null, null), null, null), null), Map.class), new AggregatorFactory[] { new CountAggregatorFactory("rows") }, new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper);
schema2 = new DataSchema("test", jsonMapper.convertValue(new StringInputRowParser(new JSONParseSpec(new TimestampSpec("timestamp", "auto", null), new DimensionsSpec(null, null, null), null, null), null), Map.class), new AggregatorFactory[] { new CountAggregatorFactory("rows") }, new UniformGranularitySpec(Granularities.YEAR, Granularities.NONE, null), jsonMapper);
announcer = EasyMock.createMock(DataSegmentAnnouncer.class);
announcer.announceSegment(EasyMock.<DataSegment>anyObject());
EasyMock.expectLastCall().anyTimes();
segmentPublisher = EasyMock.createNiceMock(SegmentPublisher.class);
dataSegmentPusher = EasyMock.createNiceMock(DataSegmentPusher.class);
handoffNotifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
handoffNotifier = EasyMock.createNiceMock(SegmentHandoffNotifier.class);
EasyMock.expect(handoffNotifierFactory.createSegmentHandoffNotifier(EasyMock.anyString())).andReturn(handoffNotifier).anyTimes();
EasyMock.expect(handoffNotifier.registerSegmentHandoffCallback(EasyMock.<SegmentDescriptor>anyObject(), EasyMock.<Executor>anyObject(), EasyMock.<Runnable>anyObject())).andReturn(true).anyTimes();
emitter = EasyMock.createMock(ServiceEmitter.class);
EasyMock.replay(announcer, segmentPublisher, dataSegmentPusher, handoffNotifierFactory, handoffNotifier, emitter);
tuningConfig = new RealtimeTuningConfig(
    1, null, null, null,
    new IntervalStartVersioningPolicy(),
    rejectionPolicy,
    null, null, null,
    buildV9Directly,
    0, 0, false, null
);
realtimePlumberSchool = new RealtimePlumberSchool(
    emitter,
    new DefaultQueryRunnerFactoryConglomerate(Maps.<Class<? extends Query>, QueryRunnerFactory>newHashMap()),
    dataSegmentPusher,
    announcer,
    segmentPublisher,
    handoffNotifierFactory,
    MoreExecutors.sameThreadExecutor(),
    TestHelper.getTestIndexMerger(),
    TestHelper.getTestIndexMergerV9(),
    TestHelper.getTestIndexIO(),
    MapCache.create(0),
    FireDepartmentTest.NO_CACHE_CONFIG,
    TestHelper.getObjectMapper()
);
metrics = new FireDepartmentMetrics();
plumber = (RealtimePlumber) realtimePlumberSchool.findPlumber(schema, tuningConfig, metrics);
}
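The mock wiring above follows EasyMock's record/replay model: expectations are declared first, then replay() switches every mock into playback before the code under test runs. A minimal standalone sketch of the same pattern, reusing only calls that appear in the setup above:

// Record phase: declare the interaction the test expects.
SegmentHandoffNotifier notifier = EasyMock.createNiceMock(SegmentHandoffNotifier.class);
EasyMock.expect(notifier.registerSegmentHandoffCallback(
    EasyMock.<SegmentDescriptor>anyObject(),
    EasyMock.<Executor>anyObject(),
    EasyMock.<Runnable>anyObject()
)).andReturn(true).anyTimes();
// Playback phase: the mock now answers calls as recorded.
EasyMock.replay(notifier);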
Use of io.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class TieredBrokerHostSelectorTest, method testSelectMatchesNothing.
@Test
public void testSelectMatchesNothing() throws Exception {
String brokerName = (String) brokerSelector.select(
    Druids.newTimeseriesQueryBuilder()
        .dataSource("test")
        .granularity("all")
        .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("rows")))
        .intervals(Arrays.<Interval>asList(new Interval("2010-08-31/2010-09-01")))
        .build()
).lhs;
Assert.assertEquals("hotBroker", brokerName);
}
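As an aside, the builder here takes granularity as a string name, while the Appenderator test above passes a Granularity object; assuming both overloads of the Druids builder resolve to the same granularity, the query could equally be written as:

// Sketch only: the Granularity-object form of the same query (overload assumed).
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource("test")
    .granularity(Granularities.ALL)
    .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("rows")))
    .intervals(Arrays.<Interval>asList(new Interval("2010-08-31/2010-09-01")))
    .build();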
Use of io.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class TieredBrokerHostSelectorTest, method testSelectMultiInterval.
@Test
public void testSelectMultiInterval() throws Exception {
String brokerName = (String) brokerSelector.select(
    Druids.newTimeseriesQueryBuilder()
        .dataSource("test")
        .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("count")))
        .intervals(new MultipleIntervalSegmentSpec(Arrays.<Interval>asList(
            new Interval("2013-08-31/2013-09-01"),
            new Interval("2012-08-31/2012-09-01"),
            new Interval("2011-08-31/2011-09-01")
        )))
        .build()
).lhs;
Assert.assertEquals("coldBroker", brokerName);
}