
Example 81 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io, in class CachingQueryRunnerTest, method testUseCache.

private void testUseCache(List<Result> expectedResults, Query query, QueryToolChest toolchest) throws Exception {
    DefaultObjectMapper objectMapper = new DefaultObjectMapper();
    String segmentIdentifier = "segment";
    SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
    CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
    Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(segmentIdentifier, segmentDescriptor, cacheStrategy.computeCacheKey(query));
    Cache cache = MapCache.create(1024 * 1024);
    CacheUtil.populate(cache, objectMapper, cacheKey, Iterables.transform(expectedResults, cacheStrategy.prepareForCache()));
    // return an empty sequence since results should get pulled from cache
    CachingQueryRunner runner = new CachingQueryRunner(segmentIdentifier, segmentDescriptor, objectMapper, cache, toolchest, new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            return Sequences.empty();
        }
    }, backgroundExecutorService, new CacheConfig() {

        @Override
        public boolean isPopulateCache() {
            return true;
        }

        @Override
        public boolean isUseCache() {
            return true;
        }
    });
    HashMap<String, Object> context = new HashMap<String, Object>();
    List<Result> results = Sequences.toList(runner.run(query, context), new ArrayList());
    Assert.assertEquals(expectedResults.toString(), results.toString());
}
Also used : TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) Query(io.druid.query.Query) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Sequence(io.druid.java.util.common.guava.Sequence) QueryRunner(io.druid.query.QueryRunner) Result(io.druid.query.Result) SegmentDescriptor(io.druid.query.SegmentDescriptor) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) CacheConfig(io.druid.client.cache.CacheConfig) CacheStrategy(io.druid.query.CacheStrategy) Interval(org.joda.time.Interval) Cache(io.druid.client.cache.Cache) MapCache(io.druid.client.cache.MapCache)
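
What DefaultObjectMapper contributes here is the byte-level format of the cache: CacheUtil.populate writes each prepared result through the mapper, and CachingQueryRunner later reads those bytes back instead of hitting the underlying runner. Below is a minimal, self-contained sketch of that serialize/deserialize round trip; CacheRoundTripSketch and the plain Map value are illustrative stand-ins, not Druid's actual cache value types.

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import io.druid.jackson.DefaultObjectMapper;
import java.util.Map;

public class CacheRoundTripSketch {
    public static void main(String[] args) throws Exception {
        // DefaultObjectMapper is Jackson's ObjectMapper preconfigured with Druid's serde modules.
        ObjectMapper mapper = new DefaultObjectMapper();
        Map<String, Object> cachedValue = ImmutableMap.<String, Object>of("timestamp", "2011-01-01T00:00:00.000Z", "rows", 42);
        // Roughly what cache population stores under the computed segment cache key: JSON bytes.
        byte[] bytes = mapper.writeValueAsBytes(cachedValue);
        // Roughly what a cache hit does: deserialize the stored bytes back into result values.
        Map<String, Object> restored = mapper.readValue(bytes, new TypeReference<Map<String, Object>>() {});
        System.out.println(restored); // {timestamp=2011-01-01T00:00:00.000Z, rows=42}
    }
}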

Example 82 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io, in class DirectDruidClientTest, method testQueryInterruptionExceptionLogMessage.

@Test
public void testQueryInterruptionExceptionLogMessage() throws JsonProcessingException {
    HttpClient httpClient = EasyMock.createMock(HttpClient.class);
    SettableFuture<Object> interruptionFuture = SettableFuture.create();
    Capture<Request> capturedRequest = EasyMock.newCapture();
    String hostName = "localhost:8080";
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject())).andReturn(interruptionFuture).anyTimes();
    EasyMock.replay(httpClient);
    DataSegment dataSegment = new DataSegment("test", new Interval("2013-01-01/2013-01-02"), new DateTime("2013-01-01").toString(), Maps.<String, Object>newHashMap(), Lists.<String>newArrayList(), Lists.<String>newArrayList(), NoneShardSpec.instance(), 0, 0L);
    final ServerSelector serverSelector = new ServerSelector(dataSegment, new HighestPriorityTierSelectorStrategy(new ConnectionCountServerSelectorStrategy()));
    DirectDruidClient client1 = new DirectDruidClient(new ReflectionQueryToolChestWarehouse(), QueryRunnerTestHelper.NOOP_QUERYWATCHER, new DefaultObjectMapper(), httpClient, hostName, new NoopServiceEmitter());
    QueryableDruidServer queryableDruidServer = new QueryableDruidServer(new DruidServer("test1", hostName, 0, "historical", DruidServer.DEFAULT_TIER, 0), client1);
    serverSelector.addServerAndUpdateSegment(queryableDruidServer, dataSegment);
    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
    HashMap<String, List> context = Maps.newHashMap();
    interruptionFuture.set(new ByteArrayInputStream("{\"error\":\"testing1\",\"errorMessage\":\"testing2\"}".getBytes()));
    Sequence results = client1.run(query, context);
    QueryInterruptedException actualException = null;
    try {
        Sequences.toList(results, Lists.newArrayList());
    } catch (QueryInterruptedException e) {
        actualException = e;
    }
    Assert.assertNotNull(actualException);
    Assert.assertEquals("testing1", actualException.getErrorCode());
    Assert.assertEquals("testing2", actualException.getMessage());
    Assert.assertEquals(hostName, actualException.getHost());
    EasyMock.verify(httpClient);
}
Also used : TimeBoundaryQuery(io.druid.query.timeboundary.TimeBoundaryQuery) DataSegment(io.druid.timeline.DataSegment) DateTime(org.joda.time.DateTime) QueryableDruidServer(io.druid.client.selector.QueryableDruidServer) ServerSelector(io.druid.client.selector.ServerSelector) HighestPriorityTierSelectorStrategy(io.druid.client.selector.HighestPriorityTierSelectorStrategy) List(java.util.List) QueryInterruptedException(io.druid.query.QueryInterruptedException) ConnectionCountServerSelectorStrategy(io.druid.client.selector.ConnectionCountServerSelectorStrategy) Request(com.metamx.http.client.Request) QueryableDruidServer(io.druid.client.selector.QueryableDruidServer) NoopServiceEmitter(io.druid.server.metrics.NoopServiceEmitter) Sequence(io.druid.java.util.common.guava.Sequence) ByteArrayInputStream(java.io.ByteArrayInputStream) HttpClient(com.metamx.http.client.HttpClient) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) HttpResponseHandler(com.metamx.http.client.response.HttpResponseHandler) ReflectionQueryToolChestWarehouse(io.druid.query.ReflectionQueryToolChestWarehouse) Interval(org.joda.time.Interval) Test(org.junit.Test)
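
The DefaultObjectMapper detail this test exercises is error handling: the client parses the JSON error body ({"error":"testing1","errorMessage":"testing2"}) with the mapper and surfaces it as a QueryInterruptedException carrying the error code, message, and host. A rough sketch of just the JSON-to-fields step, reading into a plain Map rather than the exception class; ErrorBodySketch and the variable names are illustrative.

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.jackson.DefaultObjectMapper;
import java.util.Map;

public class ErrorBodySketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new DefaultObjectMapper();
        String body = "{\"error\":\"testing1\",\"errorMessage\":\"testing2\"}";
        // Parse the error payload into the fields the test asserts on.
        Map<String, String> error = mapper.readValue(body, new TypeReference<Map<String, String>>() {});
        System.out.println(error.get("error"));        // testing1
        System.out.println(error.get("errorMessage")); // testing2
    }
}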

Example 83 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io, in class DirectDruidClientTest, method testRun.

@Test
public void testRun() throws Exception {
    HttpClient httpClient = EasyMock.createMock(HttpClient.class);
    final URL url = new URL("http://foo/druid/v2/");
    SettableFuture<InputStream> futureResult = SettableFuture.create();
    Capture<Request> capturedRequest = EasyMock.newCapture();
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject())).andReturn(futureResult).times(1);
    SettableFuture futureException = SettableFuture.create();
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject())).andReturn(futureException).times(1);
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject())).andReturn(SettableFuture.create()).atLeastOnce();
    EasyMock.replay(httpClient);
    final ServerSelector serverSelector = new ServerSelector(new DataSegment("test", new Interval("2013-01-01/2013-01-02"), new DateTime("2013-01-01").toString(), Maps.<String, Object>newHashMap(), Lists.<String>newArrayList(), Lists.<String>newArrayList(), NoneShardSpec.instance(), 0, 0L), new HighestPriorityTierSelectorStrategy(new ConnectionCountServerSelectorStrategy()));
    DirectDruidClient client1 = new DirectDruidClient(new ReflectionQueryToolChestWarehouse(), QueryRunnerTestHelper.NOOP_QUERYWATCHER, new DefaultObjectMapper(), httpClient, "foo", new NoopServiceEmitter());
    DirectDruidClient client2 = new DirectDruidClient(new ReflectionQueryToolChestWarehouse(), QueryRunnerTestHelper.NOOP_QUERYWATCHER, new DefaultObjectMapper(), httpClient, "foo2", new NoopServiceEmitter());
    QueryableDruidServer queryableDruidServer1 = new QueryableDruidServer(new DruidServer("test1", "localhost", 0, "historical", DruidServer.DEFAULT_TIER, 0), client1);
    serverSelector.addServerAndUpdateSegment(queryableDruidServer1, serverSelector.getSegment());
    QueryableDruidServer queryableDruidServer2 = new QueryableDruidServer(new DruidServer("test1", "localhost", 0, "historical", DruidServer.DEFAULT_TIER, 0), client2);
    serverSelector.addServerAndUpdateSegment(queryableDruidServer2, serverSelector.getSegment());
    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
    HashMap<String, List> context = Maps.newHashMap();
    Sequence s1 = client1.run(query, context);
    Assert.assertTrue(capturedRequest.hasCaptured());
    Assert.assertEquals(url, capturedRequest.getValue().getUrl());
    Assert.assertEquals(HttpMethod.POST, capturedRequest.getValue().getMethod());
    Assert.assertEquals(1, client1.getNumOpenConnections());
    // simulate read timeout
    Sequence s2 = client1.run(query, context);
    Assert.assertEquals(2, client1.getNumOpenConnections());
    futureException.setException(new ReadTimeoutException());
    Assert.assertEquals(1, client1.getNumOpenConnections());
    // subsequent connections should work
    Sequence s3 = client1.run(query, context);
    Sequence s4 = client1.run(query, context);
    Sequence s5 = client1.run(query, context);
    Assert.assertTrue(client1.getNumOpenConnections() == 4);
    // produce result for first connection
    futureResult.set(new ByteArrayInputStream("[{\"timestamp\":\"2014-01-01T01:02:03Z\", \"result\": 42.0}]".getBytes()));
    List<Result> results = Sequences.toList(s1, Lists.<Result>newArrayList());
    Assert.assertEquals(1, results.size());
    Assert.assertEquals(new DateTime("2014-01-01T01:02:03Z"), results.get(0).getTimestamp());
    Assert.assertEquals(3, client1.getNumOpenConnections());
    client2.run(query, context);
    client2.run(query, context);
    Assert.assertTrue(client2.getNumOpenConnections() == 2);
    Assert.assertTrue(serverSelector.pick() == queryableDruidServer2);
    EasyMock.verify(httpClient);
}
Also used : SettableFuture(com.google.common.util.concurrent.SettableFuture) ReadTimeoutException(org.jboss.netty.handler.timeout.ReadTimeoutException) TimeBoundaryQuery(io.druid.query.timeboundary.TimeBoundaryQuery) DataSegment(io.druid.timeline.DataSegment) URL(java.net.URL) DateTime(org.joda.time.DateTime) QueryableDruidServer(io.druid.client.selector.QueryableDruidServer) Result(io.druid.query.Result) ServerSelector(io.druid.client.selector.ServerSelector) HighestPriorityTierSelectorStrategy(io.druid.client.selector.HighestPriorityTierSelectorStrategy) List(java.util.List) ByteArrayInputStream(java.io.ByteArrayInputStream) InputStream(java.io.InputStream) ConnectionCountServerSelectorStrategy(io.druid.client.selector.ConnectionCountServerSelectorStrategy) Request(com.metamx.http.client.Request) QueryableDruidServer(io.druid.client.selector.QueryableDruidServer) NoopServiceEmitter(io.druid.server.metrics.NoopServiceEmitter) Sequence(io.druid.java.util.common.guava.Sequence) ByteArrayInputStream(java.io.ByteArrayInputStream) HttpClient(com.metamx.http.client.HttpClient) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) HttpResponseHandler(com.metamx.http.client.response.HttpResponseHandler) ReflectionQueryToolChestWarehouse(io.druid.query.ReflectionQueryToolChestWarehouse) Interval(org.joda.time.Interval) Test(org.junit.Test)
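
The bytes set on futureResult are a JSON array of result rows, and DirectDruidClient deserializes that stream with the DefaultObjectMapper it was constructed with. A minimal sketch of that deserialization step; ResultRowSketch and the List of Map target type are illustrative, since the real client binds rows to query-specific result types via the toolchest.

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.jackson.DefaultObjectMapper;
import java.util.List;
import java.util.Map;
import org.joda.time.DateTime;

public class ResultRowSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new DefaultObjectMapper();
        byte[] body = "[{\"timestamp\":\"2014-01-01T01:02:03Z\", \"result\": 42.0}]".getBytes();
        List<Map<String, Object>> rows = mapper.readValue(body, new TypeReference<List<Map<String, Object>>>() {});
        System.out.println(rows.size()); // 1, matching the assertion in testRun
        // Same instant the test asserts on, rendered in the JVM's default time zone.
        System.out.println(new DateTime(rows.get(0).get("timestamp")));
    }
}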

Example 84 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io, in class LookupDimensionSpecTest, method testSerDesr.

@Parameters
@Test
public void testSerDesr(DimensionSpec lookupDimSpec) throws IOException {
    ObjectMapper mapper = new DefaultObjectMapper();
    InjectableValues injectableValues = new InjectableValues.Std().addValue(LookupReferencesManager.class, LOOKUP_REF_MANAGER);
    String serLookup = mapper.writeValueAsString(lookupDimSpec);
    Assert.assertEquals(lookupDimSpec, mapper.reader(DimensionSpec.class).with(injectableValues).readValue(serLookup));
}
Also used : DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) InjectableValues(com.fasterxml.jackson.databind.InjectableValues) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) Parameters(junitparams.Parameters) Test(org.junit.Test)
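
The mechanism here is standard Jackson value injection: the lookup DimensionSpec declares a LookupReferencesManager that must come from the runtime rather than the JSON, so the test supplies it through InjectableValues before deserializing. A generic sketch of the same reader(...).with(injectables) pattern with a made-up bean; InjectSketch, ServiceRegistry, and the field names are hypothetical, not Druid classes.

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.jackson.DefaultObjectMapper;

public class InjectSketch {
    // Hypothetical runtime dependency that never appears in the JSON.
    static class ServiceRegistry {}

    static class Spec {
        final String name;
        final ServiceRegistry registry;

        @JsonCreator
        Spec(@JsonProperty("name") String name, @JacksonInject ServiceRegistry registry) {
            this.name = name;
            this.registry = registry;
        }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new DefaultObjectMapper();
        InjectableValues injectables = new InjectableValues.Std().addValue(ServiceRegistry.class, new ServiceRegistry());
        // Same shape as the test: bind injected values on the reader, then deserialize.
        Spec spec = mapper.reader(Spec.class).with(injectables).readValue("{\"name\":\"foo\"}");
        System.out.println(spec.name + " " + (spec.registry != null)); // foo true
    }
}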

Example 85 with DefaultObjectMapper

Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io, in class NamespacedExtractorModuleTest, method testNewUpdate.

@Test
public void testNewUpdate() throws Exception {
    final File tmpFile = temporaryFolder.newFile();
    try (OutputStreamWriter out = new FileWriter(tmpFile)) {
        out.write(mapper.writeValueAsString(ImmutableMap.<String, String>of("foo", "bar")));
    }
    final URIExtractionNamespace namespace = new URIExtractionNamespace(tmpFile.toURI(), null, null, new URIExtractionNamespace.ObjectMapperFlatDataParser(URIExtractionNamespaceTest.registerTypes(new DefaultObjectMapper())), new Period(0), null);
    Assert.assertEquals(0, scheduler.getActiveEntries());
    try (CacheScheduler.Entry entry = scheduler.scheduleAndWait(namespace, 10_000)) {
        Assert.assertNotNull(entry);
        entry.awaitTotalUpdates(1);
        Assert.assertEquals(1, scheduler.getActiveEntries());
    }
}
Also used : URIExtractionNamespace(io.druid.query.lookup.namespace.URIExtractionNamespace) FileWriter(java.io.FileWriter) Period(org.joda.time.Period) OutputStreamWriter(java.io.OutputStreamWriter) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) File(java.io.File) CacheScheduler(io.druid.server.lookup.namespace.cache.CacheScheduler) URIExtractionNamespaceTest(io.druid.query.lookup.namespace.URIExtractionNamespaceTest) Test(org.junit.Test)
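
The temporary file the namespace points at contains nothing more than the mapper's JSON rendering of the map, which the ObjectMapperFlatDataParser configured above reads back as lookup key/value pairs. A tiny sketch of what gets written; FlatDataSketch is an illustrative name.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import io.druid.jackson.DefaultObjectMapper;

public class FlatDataSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new DefaultObjectMapper();
        // The single JSON line written to tmpFile in testNewUpdate.
        System.out.println(mapper.writeValueAsString(ImmutableMap.<String, String>of("foo", "bar"))); // {"foo":"bar"}
    }
}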

Aggregations

DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper): 164 usages
Test (org.junit.Test): 133 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 112 usages
Interval (org.joda.time.Interval): 24 usages
DateTime (org.joda.time.DateTime): 17 usages
DataSegment (io.druid.timeline.DataSegment): 16 usages
File (java.io.File): 16 usages
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 14 usages
Before (org.junit.Before): 13 usages
Map (java.util.Map): 11 usages
Period (org.joda.time.Period): 11 usages
Query (io.druid.query.Query): 9 usages
Result (io.druid.query.Result): 9 usages
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 9 usages
IOException (java.io.IOException): 9 usages
DataSchema (io.druid.segment.indexing.DataSchema): 8 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 7 usages
Sequence (io.druid.java.util.common.guava.Sequence): 7 usages
UniformGranularitySpec (io.druid.segment.indexing.granularity.UniformGranularitySpec): 7 usages
List (java.util.List): 7 usages
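
Judging by the Test and ObjectMapper counts above, the most common usage pattern across these sources is a serde smoke test: build an object, write it with a DefaultObjectMapper, read it back, and assert equality. A minimal sketch of that pattern using a plain Map payload; SerdeSmokeTestSketch and the map contents are illustrative.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import io.druid.jackson.DefaultObjectMapper;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;

public class SerdeSmokeTestSketch {
    private final ObjectMapper mapper = new DefaultObjectMapper();

    @Test
    public void testRoundTrip() throws Exception {
        // The recurring pattern: serialize, deserialize, compare with the original.
        Map<String, Object> original = ImmutableMap.<String, Object>of("type", "example", "value", 1);
        String json = mapper.writeValueAsString(original);
        Assert.assertEquals(original, mapper.readValue(json, Map.class));
    }
}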