Use of org.joda.time.DateTime in project druid by druid-io.
From class StatsDEmitterTest, method testConvertRange.
@Test
public void testConvertRange() {
    StatsDClient client = createMock(StatsDClient.class);
    StatsDEmitter emitter = new StatsDEmitter(
        new StatsDEmitterConfig("localhost", 8888, null, null, null, null), new ObjectMapper(), client);
    client.gauge("broker.query.cache.total.hitRate", 54);
    replay(client);
    emitter.emit(new ServiceMetricEvent.Builder()
        .setDimension("dataSource", "data-source")
        .build(new DateTime(), "query/cache/total/hitRate", 0.54)
        .build("broker", "brokerHost1"));
    verify(client);
}
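The expected gauge value hints at a range conversion: the emitted hit rate of 0.54 arrives at the StatsD client as the whole number 54, which is consistent with scaling a [0, 1] rate by 100 before reporting, since StatsD gauges are integral. Below is a minimal sketch of that scaling under the assumption that the conversion is a straight multiply-by-100; the convertRange helper is hypothetical and only illustrates the arithmetic, it is not the emitter's actual code.

// Sketch only: models how 0.54 could become the gauge value 54 expected above.
public class ConvertRangeSketch {
    // Assumption: rate metrics flagged for range conversion are scaled from [0, 1] to [0, 100].
    static long convertRange(double value) {
        return Math.round(value * 100);
    }

    public static void main(String[] args) {
        System.out.println(convertRange(0.54)); // prints 54, the value expected by client.gauge(...)
    }
}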
Use of org.joda.time.DateTime in project druid by druid-io.
From class StatsDEmitterTest, method testConfigOptions.
@Test
public void testConfigOptions() {
    StatsDClient client = createMock(StatsDClient.class);
    StatsDEmitter emitter = new StatsDEmitter(
        new StatsDEmitterConfig("localhost", 8888, null, "#", true, null), new ObjectMapper(), client);
    client.time("brokerHost1#broker#query#time#data-source#groupBy", 10);
    replay(client);
    emitter.emit(new ServiceMetricEvent.Builder()
        .setDimension("dataSource", "data-source").setDimension("type", "groupBy")
        .setDimension("interval", "2013/2015").setDimension("some_random_dim1", "random_dim_value1")
        .setDimension("some_random_dim2", "random_dim_value2").setDimension("hasFilters", "no")
        .setDimension("duration", "P1D").setDimension("remoteAddress", "194.0.90.2")
        .setDimension("id", "ID").setDimension("context", "{context}")
        .build(new DateTime(), "query/time", 10)
        .build("broker", "brokerHost1"));
    verify(client);
}
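The expected metric name shows the effect of the two non-default config options: "#" replaces the default separator, and the boolean argument set to true (assumed here to control host inclusion) prepends the reporting host to the name. The sketch below simply reassembles the pieces in the order they appear in the expected string; the ordering is read off the test expectation, not taken from the emitter's source.

// Sketch only: rebuilds the metric name expected by client.time(...) above.
public class MetricNameSketch {
    public static void main(String[] args) {
        String separator = "#"; // the custom separator passed to StatsDEmitterConfig
        String metric = "query/time".replace("/", separator);
        // Host first, then service, metric path, and dimension values.
        String name = String.join(separator, "brokerHost1", "broker", metric, "data-source", "groupBy");
        System.out.println(name); // brokerHost1#broker#query#time#data-source#groupBy
    }
}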
Use of org.joda.time.DateTime in project druid by druid-io.
From class IncrementalIndexStorageAdapterTest, method testFilterByNull.
@Test
public void testFilterByNull() throws Exception {
    IncrementalIndex index = indexCreator.createIndex();
    index.add(new MapBasedInputRow(new DateTime().minus(1).getMillis(), Lists.newArrayList("billy"),
        ImmutableMap.<String, Object>of("billy", "hi")));
    index.add(new MapBasedInputRow(new DateTime().minus(1).getMillis(), Lists.newArrayList("sally"),
        ImmutableMap.<String, Object>of("sally", "bo")));
    GroupByQueryEngine engine = makeGroupByQueryEngine();
    final Sequence<Row> rows = engine.process(
        GroupByQuery.builder()
            .setDataSource("test").setGranularity(Granularities.ALL)
            .setInterval(new Interval(0, new DateTime().getMillis()))
            .addDimension("billy").addDimension("sally")
            .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
            .setDimFilter(DimFilters.dimEquals("sally", (String) null))
            .build(),
        new IncrementalIndexStorageAdapter(index));
    final ArrayList<Row> results = Sequences.toList(rows, Lists.<Row>newArrayList());
    Assert.assertEquals(1, results.size());
    MapBasedRow row = (MapBasedRow) results.get(0);
    Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L), row.getEvent());
}
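The assertion keeps only the "billy" row: dimEquals("sally", null) matches rows in which the "sally" dimension is absent, so the row that actually carries a "sally" value is dropped. Below is a minimal sketch of that selection rule, under the simplifying assumption that a null-valued filter matches exactly when the dimension is missing from the event; this is a model of the behavior the test asserts, not Druid's filter implementation.

import com.google.common.collect.ImmutableMap;
import java.util.Map;

// Sketch only: a simplified model of matching a null-valued dimension filter.
public class NullFilterSketch {
    static boolean matchesNullFilter(Map<String, Object> event, String dimension) {
        return event.get(dimension) == null; // an absent dimension counts as null
    }

    public static void main(String[] args) {
        Map<String, Object> billyRow = ImmutableMap.of("billy", "hi");
        Map<String, Object> sallyRow = ImmutableMap.of("sally", "bo");
        System.out.println(matchesNullFilter(billyRow, "sally")); // true  -> the row the test keeps
        System.out.println(matchesNullFilter(sallyRow, "sally")); // false -> the row the filter drops
    }
}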
Use of org.joda.time.DateTime in project druid by druid-io.
From class IncrementalIndexStorageAdapterTest, method testSingleValueTopN.
@Test
public void testSingleValueTopN() throws IOException {
    IncrementalIndex index = indexCreator.createIndex();
    DateTime t = DateTime.now();
    index.add(new MapBasedInputRow(t.minus(1).getMillis(), Lists.newArrayList("sally"),
        ImmutableMap.<String, Object>of("sally", "bo")));
    TopNQueryEngine engine = new TopNQueryEngine(
        new StupidPool<ByteBuffer>("TopNQueryEngine-bufferPool", new Supplier<ByteBuffer>() {
            @Override
            public ByteBuffer get() {
                return ByteBuffer.allocate(50000);
            }
        }));
    final Iterable<Result<TopNResultValue>> results = Sequences.toList(
        engine.query(
            new TopNQueryBuilder()
                .dataSource("test").granularity(Granularities.ALL)
                .intervals(Lists.newArrayList(new Interval(0, new DateTime().getMillis())))
                .dimension("sally").metric("cnt").threshold(10)
                .aggregators(Lists.<AggregatorFactory>newArrayList(new LongSumAggregatorFactory("cnt", "cnt")))
                .build(),
            new IncrementalIndexStorageAdapter(index)),
        Lists.<Result<TopNResultValue>>newLinkedList());
    Assert.assertEquals(1, Iterables.size(results));
    Assert.assertEquals(1, results.iterator().next().getValue().getValue().size());
}
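Both query tests build their interval as new Interval(0, new DateTime().getMillis()), a Joda-Time interval from the epoch up to "now", which is why the row ingested one millisecond earlier falls inside it. A minimal standalone sketch of that containment check follows; note that Joda intervals are half-open, [start, end).

import org.joda.time.DateTime;
import org.joda.time.Interval;

// Sketch only: the epoch-to-now interval pattern used by the query tests above.
public class IntervalSketch {
    public static void main(String[] args) {
        DateTime now = new DateTime();
        Interval all = new Interval(0, now.getMillis());
        // t.minus(1) is one millisecond before "now", so it lies inside [0, now).
        System.out.println(all.contains(now.minus(1))); // true
        System.out.println(all.contains(now));          // false: the end instant is exclusive
    }
}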
Use of org.joda.time.DateTime in project druid by druid-io.
From class IncrementalIndexTest, method controlTest.
@Test
public void controlTest() throws IndexSizeExceededException {
    IncrementalIndex index = closer.closeLater(indexCreator.createIndex());
    index.add(new MapBasedInputRow(new DateTime().minus(1).getMillis(), Lists.newArrayList("billy", "joe"),
        ImmutableMap.<String, Object>of("billy", "A", "joe", "B")));
    index.add(new MapBasedInputRow(new DateTime().minus(1).getMillis(), Lists.newArrayList("billy", "joe"),
        ImmutableMap.<String, Object>of("billy", "C", "joe", "B")));
    index.add(new MapBasedInputRow(new DateTime().minus(1).getMillis(), Lists.newArrayList("billy", "joe"),
        ImmutableMap.<String, Object>of("billy", "A", "joe", "B")));
}
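All of these rows are timestamped with new DateTime().minus(1): in Joda-Time, DateTime.minus(long) subtracts a duration given in milliseconds, so each row is stamped one millisecond before the current instant. A minimal sketch confirming that reading:

import org.joda.time.DateTime;
import org.joda.time.Duration;

// Sketch only: DateTime.minus(long) takes milliseconds, so minus(1) is one millisecond earlier.
public class MinusMillisSketch {
    public static void main(String[] args) {
        DateTime now = new DateTime();
        DateTime oneMillisecondEarlier = now.minus(1);
        System.out.println(new Duration(oneMillisecondEarlier, now).getMillis()); // 1
    }
}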