Use of io.druid.query.Query in project druid by druid-io.
From the class SearchQueryTest, method testSerDe: builds a search query programmatically, then checks that two hand-written JSON forms deserialize to an equal query.
@Test
public void testSerDe() throws IOException {
  Query query = Druids.newSearchQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .dimensions(new LegacyDimensionSpec(QueryRunnerTestHelper.qualityDimension))
      .query("a")
      .build();
  // Hand-written JSON for the same query; the search dimension is spliced in
  // from the test helper constant.
  final String json = "{\"queryType\":\"search\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},\"filter\":null,\"granularity\":{\"type\":\"all\"},\"limit\":1000,"
      + "\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z\"]},"
      + "\"searchDimensions\":[\"" + QueryRunnerTestHelper.qualityDimension + "\"],"
      + "\"query\":{\"type\":\"insensitive_contains\",\"value\":\"a\"},\"sort\":{\"type\":\"lexicographic\"},\"context\":null}";
  final Query serdeQuery = jsonMapper.readValue(json, Query.class);
  Assert.assertEquals(query.toString(), serdeQuery.toString());
  Assert.assertEquals(query, serdeQuery);
  // The same JSON with the dimension name written out literally.
  final String json2 = "{\"queryType\":\"search\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},\"filter\":null,\"granularity\":{\"type\":\"all\"},\"limit\":1000,"
      + "\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z\"]},"
      + "\"searchDimensions\":[\"quality\"],"
      + "\"query\":{\"type\":\"insensitive_contains\",\"value\":\"a\"},\"sort\":{\"type\":\"lexicographic\"},\"context\":null}";
  final Query serdeQuery2 = jsonMapper.readValue(json2, Query.class);
  Assert.assertEquals(query.toString(), serdeQuery2.toString());
  Assert.assertEquals(query, serdeQuery2);
}
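Neither jsonMapper nor its configuration appears in this excerpt; Druid tests conventionally use DefaultObjectMapper, which registers the serde modules that resolve "queryType":"search" to the right Query subclass. The round trip can also be packaged as a small reusable check; a minimal sketch under that assumption (assertSerdeRoundTrip is a hypothetical helper, not part of SearchQueryTest):

private final ObjectMapper jsonMapper = new DefaultObjectMapper(); // assumed test fixture

// Hypothetical helper: serialize, deserialize, and compare by equals().
private <T> void assertSerdeRoundTrip(T original, Class<T> clazz) throws IOException
{
  final String json = jsonMapper.writeValueAsString(original);
  final T deserialized = jsonMapper.readValue(json, clazz);
  Assert.assertEquals(original, deserialized);
}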
Use of io.druid.query.Query in project druid by druid-io.
From the class SearchQueryTest, method testEquals: verifies that two identically configured builders produce value-equal queries.
@Test
public void testEquals() {
  Query query1 = Druids.newSearchQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .dimensions(new DefaultDimensionSpec(QueryRunnerTestHelper.qualityDimension, QueryRunnerTestHelper.qualityDimension))
      .query("a")
      .build();
  // A second, identically configured builder must produce a value-equal query.
  Query query2 = Druids.newSearchQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .dimensions(new DefaultDimensionSpec(QueryRunnerTestHelper.qualityDimension, QueryRunnerTestHelper.qualityDimension))
      .query("a")
      .build();
  Assert.assertEquals(query1, query2);
}
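Value equality (together with a consistent hashCode) is what lets queries serve as map keys, for example in a result cache; a minimal illustration of why the property matters (hypothetical usage assuming java.util.HashMap, not from the test):

Map<Query, String> cache = new HashMap<Query, String>();
cache.put(query1, "cached results");
Assert.assertTrue(cache.containsKey(query2)); // query2 is value-equal to query1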
Use of io.druid.query.Query in project druid by druid-io.
From the class SpecificSegmentQueryRunnerTest, method testRetry: the inner runner's Sequence always throws SegmentMissingException, and the test verifies that SpecificSegmentQueryRunner reports the missing segment in the response context for both consumption paths.
@Test
public void testRetry() throws Exception {
  final ObjectMapper mapper = new DefaultObjectMapper();
  SegmentDescriptor descriptor = new SegmentDescriptor(new Interval("2012-01-01T00:00:00Z/P1D"), "version", 0);
  // A runner whose underlying Sequence fails with SegmentMissingException no
  // matter how it is consumed.
  final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(
      new QueryRunner()
      {
        @Override
        public Sequence run(Query query, Map responseContext)
        {
          return new Sequence()
          {
            @Override
            public Object accumulate(Object initValue, Accumulator accumulator)
            {
              throw new SegmentMissingException("FAILSAUCE");
            }

            @Override
            public Yielder<Object> toYielder(Object initValue, YieldingAccumulator accumulator)
            {
              throw new SegmentMissingException("FAILSAUCE");
            }
          };
        }
      },
      new SpecificSegmentSpec(descriptor)
  );
  // First consumption path: the exception is thrown from accumulate.
  Map<String, Object> responseContext = Maps.newHashMap();
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("foo")
      .granularity(Granularities.ALL)
      .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D")))
      .aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("rows")))
      .build();
  Sequence results = queryRunner.run(query, responseContext);
  Sequences.toList(results, Lists.newArrayList());
  validate(mapper, descriptor, responseContext);
  // Second consumption path: the exception is thrown from toYielder.
  responseContext = Maps.newHashMap();
  results = queryRunner.run(query, responseContext);
  results.toYielder(null, new YieldingAccumulator()
  {
    final List lists = Lists.newArrayList();

    @Override
    public Object accumulate(Object accumulated, Object in)
    {
      lists.add(in);
      return in;
    }
  });
  validate(mapper, descriptor, responseContext);
}
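The validate helper is not shown in this excerpt. A plausible sketch, assuming SpecificSegmentQueryRunner records the missed descriptor in the response context under Result.MISSING_SEGMENTS_KEY (the key name and the round-trip check are assumptions, not taken from the excerpt):

// Assumed shape of the validate helper: it checks that the missing segment was
// reported in the response context and that the descriptor survives JSON serde.
private void validate(ObjectMapper mapper, SegmentDescriptor descriptor, Map<String, Object> responseContext)
    throws IOException
{
  Object missingSegments = responseContext.get(Result.MISSING_SEGMENTS_KEY); // assumed key
  Assert.assertTrue(missingSegments instanceof List);
  Object segmentDesc = ((List) missingSegments).get(0);
  Assert.assertEquals(descriptor, segmentDesc);
  // The descriptor should round-trip through the mapper unchanged.
  SegmentDescriptor newDesc = mapper.readValue(mapper.writeValueAsString(segmentDesc), SegmentDescriptor.class);
  Assert.assertEquals(descriptor, newDesc);
}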
Use of io.druid.query.Query in project druid by druid-io.
From the class SpecificSegmentQueryRunnerTest, method testRetry2: unlike testRetry, the failure here is raised as a side effect after one result has already been yielded, so the runner returns the partial result and still reports the missing segment.
@SuppressWarnings("unchecked")
@Test
public void testRetry2() throws Exception {
  final ObjectMapper mapper = new DefaultObjectMapper();
  SegmentDescriptor descriptor = new SegmentDescriptor(new Interval("2012-01-01T00:00:00Z/P1D"), "version", 0);
  // Build a single timeseries result with rows = 1.
  TimeseriesResultBuilder builder = new TimeseriesResultBuilder(new DateTime("2012-01-01T00:00:00Z"));
  CountAggregator rows = new CountAggregator();
  rows.aggregate();
  builder.addMetric("rows", rows);
  final Result<TimeseriesResultValue> value = builder.build();
  // The inner runner yields that one result, then fails as a side effect once
  // the sequence has been consumed.
  final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(
      new QueryRunner()
      {
        @Override
        public Sequence run(Query query, Map responseContext)
        {
          return Sequences.withEffect(
              Sequences.simple(Arrays.asList(value)),
              new Runnable()
              {
                @Override
                public void run()
                {
                  throw new SegmentMissingException("FAILSAUCE");
                }
              },
              MoreExecutors.sameThreadExecutor()
          );
        }
      },
      new SpecificSegmentSpec(descriptor)
  );
  final Map<String, Object> responseContext = Maps.newHashMap();
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("foo")
      .granularity(Granularities.ALL)
      .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D")))
      .aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("rows")))
      .build();
  Sequence results = queryRunner.run(query, responseContext);
  List<Result<TimeseriesResultValue>> res = Sequences.toList(results, Lists.<Result<TimeseriesResultValue>>newArrayList());
  // The partial result is still returned...
  Assert.assertEquals(1, res.size());
  Result<TimeseriesResultValue> theVal = res.get(0);
  Assert.assertTrue(1L == theVal.getValue().getLongMetric("rows"));
  // ...and the missing segment is recorded in the response context.
  validate(mapper, descriptor, responseContext);
}
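The behavior exercised here hinges on Sequences.withEffect: the Runnable fires only after the wrapped sequence has been fully consumed, which is why one complete result escapes before the SegmentMissingException does. A minimal standalone illustration of that ordering (hypothetical values, same API as used in the test above):

// Hypothetical: the Runnable fires only after "a" and "b" have been consumed.
Sequence<String> seq = Sequences.withEffect(
    Sequences.simple(Arrays.asList("a", "b")),
    new Runnable()
    {
      @Override
      public void run()
      {
        System.out.println("sequence fully consumed");
      }
    },
    MoreExecutors.sameThreadExecutor()
);
Sequences.toList(seq, Lists.<String>newArrayList()); // prints only after both elements are yielded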
Use of io.druid.query.Query in project druid by druid-io.
From the class TimeBoundaryQueryTest, method testQuerySerialization: a full write-then-read round trip through the JSON mapper.
@Test
public void testQuerySerialization() throws IOException {
  Query query = Druids.newTimeBoundaryQueryBuilder().dataSource("testing").build();
  String json = jsonMapper.writeValueAsString(query);
  Query serdeQuery = jsonMapper.readValue(json, Query.class);
  Assert.assertEquals(query, serdeQuery);
}
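This is exactly the write-then-read pattern captured by the hypothetical assertSerdeRoundTrip helper sketched after the first example; with such a helper in scope, the test body collapses to a single call (illustrative only):

assertSerdeRoundTrip(Druids.newTimeBoundaryQueryBuilder().dataSource("testing").build(), Query.class);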