use of com.fasterxml.jackson.databind.ObjectMapper in project druid by druid-io.
the class SpecificSegmentQueryRunnerTest method testRetry.
@Test
public void testRetry() throws Exception {
  final ObjectMapper mapper = new DefaultObjectMapper();
  SegmentDescriptor descriptor = new SegmentDescriptor(new Interval("2012-01-01T00:00:00Z/P1D"), "version", 0);
  // The wrapped runner throws SegmentMissingException from both consumption paths,
  // so the test can verify that SpecificSegmentQueryRunner handles the failure either way.
  final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(new QueryRunner() {

    @Override
    public Sequence run(Query query, Map responseContext) {
      return new Sequence() {

        @Override
        public Object accumulate(Object initValue, Accumulator accumulator) {
          throw new SegmentMissingException("FAILSAUCE");
        }

        @Override
        public Yielder<Object> toYielder(Object initValue, YieldingAccumulator accumulator) {
          throw new SegmentMissingException("FAILSAUCE");
        }
      };
    }
  }, new SpecificSegmentSpec(descriptor));
  // from accumulate
  Map<String, Object> responseContext = Maps.newHashMap();
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("foo")
      .granularity(Granularities.ALL)
      .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D")))
      .aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("rows")))
      .build();
  Sequence results = queryRunner.run(query, responseContext);
  Sequences.toList(results, Lists.newArrayList());
  validate(mapper, descriptor, responseContext);
  // from toYielder
  responseContext = Maps.newHashMap();
  results = queryRunner.run(query, responseContext);
  results.toYielder(null, new YieldingAccumulator() {

    final List lists = Lists.newArrayList();

    @Override
    public Object accumulate(Object accumulated, Object in) {
      lists.add(in);
      return in;
    }
  });
  validate(mapper, descriptor, responseContext);
}
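The validate helper is not included in the snippet. A plausible reconstruction, assuming the runner records the failed segment in the response context under the missingSegments key (Result.MISSING_SEGMENTS_KEY in Druid of this era); treat the key and the helper body as a sketch, not the test's verbatim code:
@SuppressWarnings("unchecked")
private void validate(ObjectMapper mapper, SegmentDescriptor descriptor, Map<String, Object> responseContext) throws IOException {
  // Assumed context key: the runner is expected to have recorded the missing segment here.
  List<SegmentDescriptor> missingSegments = (List<SegmentDescriptor>) responseContext.get(Result.MISSING_SEGMENTS_KEY);
  Assert.assertNotNull(missingSegments);
  Assert.assertEquals(1, missingSegments.size());
  // The descriptor should also survive a JSON round trip, since brokers ship it back to callers.
  SegmentDescriptor serdeDescriptor = mapper.readValue(mapper.writeValueAsString(missingSegments.get(0)), SegmentDescriptor.class);
  Assert.assertEquals(descriptor, serdeDescriptor);
}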
use of com.fasterxml.jackson.databind.ObjectMapper in project druid by druid-io.
the class SpecificSegmentQueryRunnerTest method testRetry2.
@SuppressWarnings("unchecked")
@Test
public void testRetry2() throws Exception {
  final ObjectMapper mapper = new DefaultObjectMapper();
  SegmentDescriptor descriptor = new SegmentDescriptor(new Interval("2012-01-01T00:00:00Z/P1D"), "version", 0);
  TimeseriesResultBuilder builder = new TimeseriesResultBuilder(new DateTime("2012-01-01T00:00:00Z"));
  CountAggregator rows = new CountAggregator();
  rows.aggregate();
  builder.addMetric("rows", rows);
  final Result<TimeseriesResultValue> value = builder.build();
  // Unlike testRetry, the sequence yields one full result before the after-effect
  // throws, simulating a segment that goes missing partway through a query.
  final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(new QueryRunner() {

    @Override
    public Sequence run(Query query, Map responseContext) {
      return Sequences.withEffect(Sequences.simple(Arrays.asList(value)), new Runnable() {

        @Override
        public void run() {
          throw new SegmentMissingException("FAILSAUCE");
        }
      }, MoreExecutors.sameThreadExecutor());
    }
  }, new SpecificSegmentSpec(descriptor));
  final Map<String, Object> responseContext = Maps.newHashMap();
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("foo")
      .granularity(Granularities.ALL)
      .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D")))
      .aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("rows")))
      .build();
  Sequence results = queryRunner.run(query, responseContext);
  List<Result<TimeseriesResultValue>> res = Sequences.toList(results, Lists.<Result<TimeseriesResultValue>>newArrayList());
  // The partial result still comes back, and the missing segment is recorded in the context.
  Assert.assertEquals(1, res.size());
  Result<TimeseriesResultValue> theVal = res.get(0);
  Assert.assertTrue(1L == theVal.getValue().getLongMetric("rows"));
  validate(mapper, descriptor, responseContext);
}
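The recorded context is what makes a retry possible downstream: a wrapping runner can inspect it after the first pass and re-issue the query for only the segments that went missing. A minimal sketch of that idea, assuming the missingSegments key and a MultipleSpecificSegmentSpec re-spec; this is illustrative, not Druid's exact RetryQueryRunner:
// Illustrative retry pass, not Druid's RetryQueryRunner verbatim.
@SuppressWarnings("unchecked")
private Sequence retryMissingSegments(QueryRunner baseRunner, Query query, Sequence firstPass, Map<String, Object> responseContext) {
  List<SegmentDescriptor> missing = (List<SegmentDescriptor>) responseContext.get("missingSegments"); // assumed key
  if (missing == null || missing.isEmpty()) {
    return firstPass;
  }
  // Reset the context, narrow the query to just the missing segments, and append the retry results.
  responseContext.put("missingSegments", Lists.newArrayList());
  Query retryQuery = query.withQuerySegmentSpec(new MultipleSpecificSegmentSpec(missing));
  return Sequences.concat(firstPass, baseRunner.run(retryQuery, responseContext));
}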
use of com.fasterxml.jackson.databind.ObjectMapper in project druid by druid-io.
the class TimeBoundaryQueryQueryToolChestTest method testCacheStrategy.
@Test
public void testCacheStrategy() throws Exception {
  CacheStrategy<Result<TimeBoundaryResultValue>, Object, TimeBoundaryQuery> strategy = new TimeBoundaryQueryQueryToolChest().getCacheStrategy(
      new TimeBoundaryQuery(new TableDataSource("dummy"), new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("2015-01-01/2015-01-02"))), null, null, null)
  );
  final Result<TimeBoundaryResultValue> result = new Result<>(
      new DateTime(123L),
      new TimeBoundaryResultValue(ImmutableMap.of(TimeBoundaryQuery.MIN_TIME, new DateTime(0L).toString(), TimeBoundaryQuery.MAX_TIME, new DateTime("2015-01-01").toString()))
  );
  Object preparedValue = strategy.prepareForCache().apply(result);
  ObjectMapper objectMapper = new DefaultObjectMapper();
  Object fromCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
  Result<TimeBoundaryResultValue> fromCacheResult = strategy.pullFromCache().apply(fromCacheValue);
  Assert.assertEquals(result, fromCacheResult);
}
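The three steps here mirror how a strategy is exercised against a real cache: prepare the result, serialize it to bytes, then deserialize with the strategy's declared cache type before pulling it back out. A compact helper capturing that pattern; the generic signature is illustrative, not a Druid API:
// Illustrative helper, not a Druid API: round-trip one result through a CacheStrategy via JSON bytes.
private static <T, CacheType, QueryType extends Query<T>> T cacheRoundTrip(CacheStrategy<T, CacheType, QueryType> strategy, ObjectMapper mapper, T result) throws IOException {
  byte[] cached = mapper.writeValueAsBytes(strategy.prepareForCache().apply(result));
  CacheType fromCache = mapper.readValue(cached, strategy.getCacheObjectClazz());
  return strategy.pullFromCache().apply(fromCache);
}
With it, the last four lines of the test collapse to Assert.assertEquals(result, cacheRoundTrip(strategy, objectMapper, result)).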
use of com.fasterxml.jackson.databind.ObjectMapper in project druid by druid-io.
the class TimeBoundaryQueryTest method testContextSerde2.
@Test
public void testContextSerde2() throws Exception {
  final TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder()
      .dataSource("foo")
      .intervals("2013/2014")
      .context(ImmutableMap.<String, Object>of("priority", "1", "useCache", "true", "populateCache", "true", "finalize", "true"))
      .build();
  final ObjectMapper mapper = new DefaultObjectMapper();
  final TimeBoundaryQuery serdeQuery = mapper.readValue(
      mapper.writeValueAsBytes(mapper.readValue(mapper.writeValueAsString(query), TimeBoundaryQuery.class)),
      TimeBoundaryQuery.class
  );
  Assert.assertEquals("1", serdeQuery.getContextValue("priority"));
  Assert.assertEquals("true", serdeQuery.getContextValue("useCache"));
  Assert.assertEquals("true", serdeQuery.getContextValue("populateCache"));
  Assert.assertEquals("true", serdeQuery.getContextValue("finalize"));
}
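Every context value goes in as a String here, so the double round trip (string, then bytes) is really asserting that Jackson leaves them as strings rather than coercing "1" or "true" into numbers and booleans. When a caller wants a value with a fallback, queries of this era expose a two-argument getContextValue; a hedged usage sketch (treat the overload as an assumption for your Druid version):
// Assumed two-argument overload with a default value; check your Druid version.
String priority = serdeQuery.getContextValue("priority", "0");
boolean useCache = Boolean.parseBoolean(serdeQuery.getContextValue("useCache", "false"));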
use of com.fasterxml.jackson.databind.ObjectMapper in project druid by druid-io.
the class AlphaNumericTopNMetricSpecTest method testSerdeAlphaNumericTopNMetricSpec.
@Test
public void testSerdeAlphaNumericTopNMetricSpec() throws IOException {
  AlphaNumericTopNMetricSpec expectedMetricSpec = new AlphaNumericTopNMetricSpec(null);
  AlphaNumericTopNMetricSpec expectedMetricSpec1 = new AlphaNumericTopNMetricSpec("test");
  String jsonSpec = "{\n" + " \"type\": \"alphaNumeric\"\n" + "}";
  String jsonSpec1 = "{\n" + " \"type\": \"alphaNumeric\",\n" + " \"previousStop\": \"test\"\n" + "}";
  ObjectMapper jsonMapper = new DefaultObjectMapper();
  TopNMetricSpec actualMetricSpec = jsonMapper.readValue(jsonMapper.writeValueAsString(jsonMapper.readValue(jsonSpec, TopNMetricSpec.class)), AlphaNumericTopNMetricSpec.class);
  TopNMetricSpec actualMetricSpec1 = jsonMapper.readValue(jsonMapper.writeValueAsString(jsonMapper.readValue(jsonSpec1, TopNMetricSpec.class)), AlphaNumericTopNMetricSpec.class);
  Assert.assertEquals(expectedMetricSpec, actualMetricSpec);
  Assert.assertEquals(expectedMetricSpec1, actualMetricSpec1);
}
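What the alphaNumeric spec changes is the ordering used when paging topN results past previousStop: digit runs compare numerically, so "file2" precedes "file10". A quick illustration, assuming the comparator is exposed as StringComparators.ALPHANUMERIC (its package moved between Druid versions):
// Assumed comparator location; it moved across Druid versions.
Comparator<String> alphaNumeric = StringComparators.ALPHANUMERIC;
Assert.assertTrue(alphaNumeric.compare("file2", "file10") < 0); // numeric-aware ordering
Assert.assertTrue("file2".compareTo("file10") > 0); // plain lexicographic order disagrees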