Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
The class DirectDruidClientTest, method testRun.
@Test
public void testRun() throws Exception {
  HttpClient httpClient = EasyMock.createMock(HttpClient.class);
  final URL url = new URL("http://foo/druid/v2/");
  SettableFuture<InputStream> futureResult = SettableFuture.create();
  Capture<Request> capturedRequest = EasyMock.newCapture();
  EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject()))
          .andReturn(futureResult)
          .times(1);
  SettableFuture futureException = SettableFuture.create();
  EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject()))
          .andReturn(futureException)
          .times(1);
  EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject()))
          .andReturn(SettableFuture.create())
          .atLeastOnce();
  EasyMock.replay(httpClient);

  final ServerSelector serverSelector = new ServerSelector(
      new DataSegment(
          "test", new Interval("2013-01-01/2013-01-02"), new DateTime("2013-01-01").toString(),
          Maps.<String, Object>newHashMap(), Lists.<String>newArrayList(), Lists.<String>newArrayList(),
          NoneShardSpec.instance(), 0, 0L
      ),
      new HighestPriorityTierSelectorStrategy(new ConnectionCountServerSelectorStrategy())
  );
  DirectDruidClient client1 = new DirectDruidClient(
      new ReflectionQueryToolChestWarehouse(), QueryRunnerTestHelper.NOOP_QUERYWATCHER,
      new DefaultObjectMapper(), httpClient, "foo", new NoopServiceEmitter()
  );
  DirectDruidClient client2 = new DirectDruidClient(
      new ReflectionQueryToolChestWarehouse(), QueryRunnerTestHelper.NOOP_QUERYWATCHER,
      new DefaultObjectMapper(), httpClient, "foo2", new NoopServiceEmitter()
  );
  QueryableDruidServer queryableDruidServer1 = new QueryableDruidServer(
      new DruidServer("test1", "localhost", 0, "historical", DruidServer.DEFAULT_TIER, 0), client1
  );
  serverSelector.addServerAndUpdateSegment(queryableDruidServer1, serverSelector.getSegment());
  QueryableDruidServer queryableDruidServer2 = new QueryableDruidServer(
      new DruidServer("test1", "localhost", 0, "historical", DruidServer.DEFAULT_TIER, 0), client2
  );
  serverSelector.addServerAndUpdateSegment(queryableDruidServer2, serverSelector.getSegment());

  TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
  HashMap<String, List> context = Maps.newHashMap();
  Sequence s1 = client1.run(query, context);
  Assert.assertTrue(capturedRequest.hasCaptured());
  Assert.assertEquals(url, capturedRequest.getValue().getUrl());
  Assert.assertEquals(HttpMethod.POST, capturedRequest.getValue().getMethod());
  Assert.assertEquals(1, client1.getNumOpenConnections());

  // simulate a read timeout on the second connection
  Sequence s2 = client1.run(query, context);
  Assert.assertEquals(2, client1.getNumOpenConnections());
  futureException.setException(new ReadTimeoutException());
  Assert.assertEquals(1, client1.getNumOpenConnections());

  // subsequent connections should work
  Sequence s3 = client1.run(query, context);
  Sequence s4 = client1.run(query, context);
  Sequence s5 = client1.run(query, context);
  Assert.assertEquals(4, client1.getNumOpenConnections());

  // produce a result for the first connection; draining s1 closes it
  futureResult.set(new ByteArrayInputStream("[{\"timestamp\":\"2014-01-01T01:02:03Z\", \"result\": 42.0}]".getBytes()));
  List<Result> results = Sequences.toList(s1, Lists.<Result>newArrayList());
  Assert.assertEquals(1, results.size());
  Assert.assertEquals(new DateTime("2014-01-01T01:02:03Z"), results.get(0).getTimestamp());
  Assert.assertEquals(3, client1.getNumOpenConnections());

  client2.run(query, context);
  client2.run(query, context);
  Assert.assertEquals(2, client2.getNumOpenConnections());

  // ConnectionCountServerSelectorStrategy prefers the server with fewer open connections
  Assert.assertSame(queryableDruidServer2, serverSelector.pick());
  EasyMock.verify(httpClient);
}
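Note what the ordering above relies on: the Sequence returned by run(...) is lazy, so futureResult can be set after the fact and results only materialize when the sequence is drained with Sequences.toList. A minimal standalone sketch of the consumption primitives, assuming the io.druid.java.util.common.guava API of this era (Sequences.simple/toList and the Accumulator left fold):

import io.druid.java.util.common.guava.Accumulator;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class SequenceBasicsSketch {
  public static void main(String[] args) {
    // Sequences.simple wraps an Iterable; no element is touched until consumption.
    Sequence<Integer> seq = Sequences.simple(Arrays.asList(1, 2, 3));

    // toList drains the sequence into the supplied list.
    List<Integer> drained = Sequences.toList(seq, new ArrayList<Integer>());

    // accumulate is the primitive underneath toList: a left fold over the elements.
    Integer sum = seq.accumulate(0, new Accumulator<Integer, Integer>() {
      @Override
      public Integer accumulate(Integer accumulated, Integer in) {
        return accumulated + in;
      }
    });
    System.out.println(drained + " sum=" + sum); // [1, 2, 3] sum=6
  }
}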
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
The class RetryQueryRunnerTest, method testRunWithMissingSegments.
@Test
public void testRunWithMissingSegments() throws Exception {
  Map<String, Object> context = new MapMaker().makeMap();
  context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
  RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(
      new QueryRunner<Result<TimeseriesResultValue>>() {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> context) {
          // report a missing segment through the shared response context
          ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
          return Sequences.empty();
        }
      },
      (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      new RetryQueryRunnerConfig() {
        @Override
        public int getNumTries() {
          return 0; // no retries
        }

        @Override
        public boolean isReturnPartialResults() {
          return true; // tolerate missing segments instead of throwing
        }
      },
      jsonMapper
  );
  Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TimeseriesResultValue>>newArrayList());
  Assert.assertEquals("Should have one entry in the list of missing segments", 1, ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size());
  Assert.assertEquals("Should return an empty sequence as a result", 0, ((List) actualResults).size());
}
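The mechanism under test is just a shared list in the response context: the sub-runner appends a SegmentDescriptor for each segment it could not serve, and the retry layer inspects that list after the attempt. A simplified, self-contained sketch of the handshake (plain strings stand in for SegmentDescriptor, and the key name mirrors Result.MISSING_SEGMENTS_KEY; this is not the actual RetryQueryRunner code):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MissingSegmentsSketch {
  static final String MISSING_SEGMENTS_KEY = "missingSegments"; // stands in for Result.MISSING_SEGMENTS_KEY

  public static void main(String[] args) {
    Map<String, Object> context = new HashMap<>();
    context.put(MISSING_SEGMENTS_KEY, new ArrayList<String>());

    // a sub-runner appends whatever it could not serve
    ((List<String>) context.get(MISSING_SEGMENTS_KEY)).add("test_2013-01-01/2013-01-02_v1_1");

    // the retry layer inspects the list after the attempt
    List<String> missing = (List<String>) context.get(MISSING_SEGMENTS_KEY);
    boolean returnPartialResults = true; // as in the test above
    if (!missing.isEmpty() && !returnPartialResults) {
      throw new IllegalStateException("No results found for segments " + missing);
    }
    System.out.println("tolerating missing segments: " + missing);
  }
}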
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
The class RetryQueryRunnerTest, method testException.
@Test(expected = SegmentMissingException.class)
public void testException() throws Exception {
  Map<String, Object> context = new MapMaker().makeMap();
  context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
  RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(
      new QueryRunner<Result<TimeseriesResultValue>>() {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> context) {
          // every attempt reports the same missing segment and returns nothing
          ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
          return Sequences.empty();
        }
      },
      (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      new RetryQueryRunnerConfig() {
        private int numTries = 1;
        private boolean returnPartialResults = false;

        @Override
        public int getNumTries() {
          return numTries;
        }

        // note: this does not override isReturnPartialResults(), so the
        // config default (false) is what the runner actually sees
        public boolean returnPartialResults() {
          return returnPartialResults;
        }
      },
      jsonMapper
  );
  Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TimeseriesResultValue>>newArrayList());
  Assert.assertEquals("Should have one entry in the list of missing segments", 1, ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size());
}
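One subtlety worth calling out: returnPartialResults() above does not override anything on RetryQueryRunnerConfig, whose getter is isReturnPartialResults(), so the runner sees the class default (false), which is exactly what this test needs in order to throw. Spelled out explicitly, as a fragment sharing the test's imports and assuming that getter name from the Druid source of this era:

RetryQueryRunnerConfig config = new RetryQueryRunnerConfig() {
  @Override
  public int getNumTries() {
    return 1;
  }

  @Override
  public boolean isReturnPartialResults() {
    return false; // exhausted retries with segments still missing -> SegmentMissingException
  }
};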
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
The class RetryQueryRunnerTest, method testRetry.
@Test
public void testRetry() throws Exception {
  Map<String, Object> context = new MapMaker().makeMap();
  context.put("count", 0);
  context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
  RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(
      new QueryRunner<Result<TimeseriesResultValue>>() {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> context) {
          if ((int) context.get("count") == 0) {
            // first attempt: report a missing segment and return nothing
            ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
            context.put("count", 1);
            return Sequences.empty();
          } else {
            // retry: succeed with a single result
            return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
          }
        }
      },
      (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      new RetryQueryRunnerConfig() {
        private int numTries = 1;
        private boolean returnPartialResults = true;

        @Override
        public int getNumTries() {
          return numTries;
        }

        // (not an override of isReturnPartialResults(); see the note in testException above)
        public boolean returnPartialResults() {
          return returnPartialResults;
        }
      },
      jsonMapper
  );
  Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TimeseriesResultValue>>newArrayList());
  Assert.assertEquals("Should return a list with one element", 1, ((List) actualResults).size());
  Assert.assertEquals("Should have nothing in missingSegment list", 0, ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size());
}
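What RetryQueryRunner effectively does here, written as an eager loop for clarity (an illustrative sketch only; the real implementation builds the retries into a lazy Sequence and re-queries only the missing segments):

import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class RetryLoopSketch {
  public static void main(String[] args) {
    List<String> missing = new ArrayList<>(Arrays.asList("segment_1")); // reported by the first attempt
    int numTries = 1;

    Sequence<String> results = Sequences.empty(); // first attempt returned nothing
    for (int i = 0; i < numTries && !missing.isEmpty(); i++) {
      missing.clear();                                  // reset the report before retrying
      results = Sequences.simple(Arrays.asList("row")); // the retry succeeds
    }
    System.out.println(Sequences.toList(results, new ArrayList<String>())); // [row]
  }
}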
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
The class FinalizingFieldAccessPostAggregatorTest, method testIngestAndQueryWithArithmeticPostAggregator.
@Test
public void testIngestAndQueryWithArithmeticPostAggregator() throws Exception {
  AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
      Lists.newArrayList(new AggregatorsModule()),
      GroupByQueryRunnerTest.testConfigs().get(0),
      tempFoler // the test class's TemporaryFolder @Rule (field name as in the source)
  );
  String metricSpec = "[{\"type\": \"hyperUnique\", \"name\": \"hll_market\", \"fieldName\": \"market\"},"
      + "{\"type\": \"hyperUnique\", \"name\": \"hll_quality\", \"fieldName\": \"quality\"}]";
  String parseSpec = "{"
      + "\"type\" : \"string\","
      + "\"parseSpec\" : {"
      + "  \"format\" : \"tsv\","
      + "  \"timestampSpec\" : { \"column\" : \"timestamp\", \"format\" : \"auto\" },"
      + "  \"dimensionsSpec\" : {"
      + "    \"dimensions\": [],"
      + "    \"dimensionExclusions\" : [],"
      + "    \"spatialDimensions\" : []"
      + "  },"
      + "  \"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]"
      + "}"
      + "}";
  String query = "{"
      + "\"queryType\": \"groupBy\","
      + "\"dataSource\": \"test_datasource\","
      + "\"granularity\": \"ALL\","
      + "\"dimensions\": [],"
      + "\"aggregations\": ["
      + "  { \"type\": \"hyperUnique\", \"name\": \"hll_market\", \"fieldName\": \"hll_market\" },"
      + "  { \"type\": \"hyperUnique\", \"name\": \"hll_quality\", \"fieldName\": \"hll_quality\" }"
      + "],"
      + "\"postAggregations\": ["
      + "  { \"type\": \"arithmetic\", \"name\": \"uniq_add\", \"fn\": \"+\", \"fields\": ["
      + "    { \"type\": \"finalizingFieldAccess\", \"name\": \"uniq_market\", \"fieldName\": \"hll_market\" },"
      + "    { \"type\": \"finalizingFieldAccess\", \"name\": \"uniq_quality\", \"fieldName\": \"hll_quality\" }]"
      + "  }"
      + "],"
      + "\"intervals\": [ \"1970/2050\" ]"
      + "}";
  Sequence seq = helper.createIndexAndRunQueryOnSegment(
      new File(this.getClass().getClassLoader().getResource("druid.sample.tsv").getFile()),
      parseSpec, metricSpec, 0, Granularities.NONE, 50000, query
  );
  MapBasedRow row = (MapBasedRow) Sequences.toList(seq, Lists.newArrayList()).get(0);
  Assert.assertEquals(3.0, row.getFloatMetric("hll_market"), 0.1);
  Assert.assertEquals(9.0, row.getFloatMetric("hll_quality"), 0.1);
  Assert.assertEquals(12.0, row.getFloatMetric("uniq_add"), 0.1);
}
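Since the helper hands back a Sequence rather than a materialized list, it can also be transformed lazily before draining. A small standalone sketch pulling one metric out of each row (assuming Sequences.map with a Guava Function, as used throughout this codebase; the row value here is made up for illustration):

import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import io.druid.data.input.MapBasedRow;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import org.joda.time.DateTime;
import java.util.ArrayList;
import java.util.Arrays;

public class RowMetricSketch {
  public static void main(String[] args) {
    Sequence<MapBasedRow> rows = Sequences.simple(Arrays.asList(
        new MapBasedRow(new DateTime("2014-01-01"), ImmutableMap.<String, Object>of("uniq_add", 12.0))
    ));
    // map is lazy: the function runs only when the sequence is drained
    Sequence<Float> metrics = Sequences.map(rows, new Function<MapBasedRow, Float>() {
      @Override
      public Float apply(MapBasedRow row) {
        return row.getFloatMetric("uniq_add");
      }
    });
    System.out.println(Sequences.toList(metrics, new ArrayList<Float>())); // [12.0]
  }
}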