Use of io.druid.query.SegmentDescriptor in project druid by druid-io: class QuerySegmentSpecTest, method testSerializationSegments.
@Test
public void testSerializationSegments() throws Exception
{
  QuerySegmentSpec spec = jsonMapper.convertValue(
      ImmutableMap.<String, Object>of(
          "type", "segments",
          "segments", ImmutableList.<Map<String, Object>>of(
              ImmutableMap.<String, Object>of("itvl", "2011-07-01/2011-10-10", "ver", "1", "part", 0),
              ImmutableMap.<String, Object>of("itvl", "2011-07-01/2011-10-10", "ver", "1", "part", 1),
              ImmutableMap.<String, Object>of("itvl", "2011-11-01/2011-11-10", "ver", "2", "part", 10)
          )
      ),
      QuerySegmentSpec.class
  );
  Assert.assertTrue(spec instanceof MultipleSpecificSegmentSpec);
  Assert.assertEquals(
      ImmutableList.of(new Interval("2011-07-01/2011-10-10"), new Interval("2011-11-01/2011-11-10")),
      spec.getIntervals()
  );
  Assert.assertEquals(
      ImmutableList.of(
          new SegmentDescriptor(new Interval("2011-07-01/2011-10-10"), "1", 0),
          new SegmentDescriptor(new Interval("2011-07-01/2011-10-10"), "1", 1),
          new SegmentDescriptor(new Interval("2011-11-01/2011-11-10"), "2", 10)
      ),
      ((MultipleSpecificSegmentSpec) spec).getDescriptors()
  );
}
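The test above pins down the wire format: a "segments"-type spec is a list of objects whose "itvl", "ver", and "part" fields map onto a SegmentDescriptor's interval, version, and partition number. As a minimal sketch (assuming jsonMapper is the same Jackson mapper used by the test), the reverse direction can be checked by serializing a descriptor and reading it back:

SegmentDescriptor desc = new SegmentDescriptor(new Interval("2011-07-01/2011-10-10"), "1", 0);
// The JSON uses the short field names exercised above: {"itvl": <interval>, "ver": "1", "part": 0}
String json = jsonMapper.writeValueAsString(desc);
SegmentDescriptor roundTripped = jsonMapper.readValue(json, SegmentDescriptor.class);
Assert.assertEquals(desc, roundTripped);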
Use of io.druid.query.SegmentDescriptor in project druid by druid-io: class SpecificSegmentQueryRunnerTest, method testRetry.
@Test
public void testRetry() throws Exception
{
  final ObjectMapper mapper = new DefaultObjectMapper();
  SegmentDescriptor descriptor = new SegmentDescriptor(new Interval("2012-01-01T00:00:00Z/P1D"), "version", 0);
  final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(
      new QueryRunner()
      {
        @Override
        public Sequence run(Query query, Map responseContext)
        {
          return new Sequence()
          {
            @Override
            public Object accumulate(Object initValue, Accumulator accumulator)
            {
              throw new SegmentMissingException("FAILSAUCE");
            }

            @Override
            public Yielder<Object> toYielder(Object initValue, YieldingAccumulator accumulator)
            {
              throw new SegmentMissingException("FAILSAUCE");
            }
          };
        }
      },
      new SpecificSegmentSpec(descriptor)
  );

  // from accumulate
  Map<String, Object> responseContext = Maps.newHashMap();
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource("foo")
                                .granularity(Granularities.ALL)
                                .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D")))
                                .aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("rows")))
                                .build();
  Sequence results = queryRunner.run(query, responseContext);
  Sequences.toList(results, Lists.newArrayList());
  validate(mapper, descriptor, responseContext);

  // from toYielder
  responseContext = Maps.newHashMap();
  results = queryRunner.run(query, responseContext);
  results.toYielder(
      null,
      new YieldingAccumulator()
      {
        final List lists = Lists.newArrayList();

        @Override
        public Object accumulate(Object accumulated, Object in)
        {
          lists.add(in);
          return in;
        }
      }
  );
  validate(mapper, descriptor, responseContext);
}
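Both failure paths land in the same place: SpecificSegmentQueryRunner catches the SegmentMissingException and records the missing segment's descriptor in the response context instead of propagating the error. A minimal sketch of how a caller could inspect that report, using only names exercised by the validate method below:

Object missing = responseContext.get(Result.MISSING_SEGMENTS_KEY);
if (missing instanceof List) {
  for (Object entry : (List) missing) {
    SegmentDescriptor missed = (SegmentDescriptor) entry;
    // e.g. re-issue the query for missed.getInterval() against another replica
  }
}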
Use of io.druid.query.SegmentDescriptor in project druid by druid-io: class SpecificSegmentQueryRunnerTest, method testRetry2.
@SuppressWarnings("unchecked")
@Test
public void testRetry2() throws Exception
{
  final ObjectMapper mapper = new DefaultObjectMapper();
  SegmentDescriptor descriptor = new SegmentDescriptor(new Interval("2012-01-01T00:00:00Z/P1D"), "version", 0);
  TimeseriesResultBuilder builder = new TimeseriesResultBuilder(new DateTime("2012-01-01T00:00:00Z"));
  CountAggregator rows = new CountAggregator();
  rows.aggregate();
  builder.addMetric("rows", rows);
  final Result<TimeseriesResultValue> value = builder.build();

  final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(
      new QueryRunner()
      {
        @Override
        public Sequence run(Query query, Map responseContext)
        {
          return Sequences.withEffect(
              Sequences.simple(Arrays.asList(value)),
              new Runnable()
              {
                @Override
                public void run()
                {
                  throw new SegmentMissingException("FAILSAUCE");
                }
              },
              MoreExecutors.sameThreadExecutor()
          );
        }
      },
      new SpecificSegmentSpec(descriptor)
  );

  final Map<String, Object> responseContext = Maps.newHashMap();
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource("foo")
                                .granularity(Granularities.ALL)
                                .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D")))
                                .aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("rows")))
                                .build();
  Sequence results = queryRunner.run(query, responseContext);
  List<Result<TimeseriesResultValue>> res = Sequences.toList(results, Lists.<Result<TimeseriesResultValue>>newArrayList());
  Assert.assertEquals(1, res.size());

  Result<TimeseriesResultValue> theVal = res.get(0);
  Assert.assertTrue(1L == theVal.getValue().getLongMetric("rows"));
  validate(mapper, descriptor, responseContext);
}
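testRetry2 relies on Sequences.withEffect running its side effect only after the wrapped sequence has been fully consumed, which is why the single result row still comes back before the SegmentMissingException is reported. A minimal sketch of that ordering, using only calls already seen above (plus java.util.concurrent.atomic.AtomicBoolean):

final AtomicBoolean effectRan = new AtomicBoolean(false);
Sequence<Integer> seq = Sequences.withEffect(
    Sequences.simple(Arrays.asList(1, 2, 3)),
    new Runnable()
    {
      @Override
      public void run()
      {
        effectRan.set(true); // fires only once the sequence is exhausted
      }
    },
    MoreExecutors.sameThreadExecutor()
);
Assert.assertFalse(effectRan.get());                  // nothing consumed yet
Sequences.toList(seq, Lists.<Integer>newArrayList()); // full consumption...
Assert.assertTrue(effectRan.get());                   // ...triggers the effect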
Use of io.druid.query.SegmentDescriptor in project druid by druid-io: class SpecificSegmentQueryRunnerTest, method validate.
private void validate(ObjectMapper mapper, SegmentDescriptor descriptor, Map<String, Object> responseContext)
    throws java.io.IOException
{
  Object missingSegments = responseContext.get(Result.MISSING_SEGMENTS_KEY);
  Assert.assertTrue(missingSegments != null);
  Assert.assertTrue(missingSegments instanceof List);

  Object segmentDesc = ((List) missingSegments).get(0);
  Assert.assertTrue(segmentDesc instanceof SegmentDescriptor);

  // round-trip through JSON to confirm the descriptor survives serialization intact
  SegmentDescriptor newDesc = mapper.readValue(mapper.writeValueAsString(segmentDesc), SegmentDescriptor.class);
  Assert.assertEquals(descriptor, newDesc);
}
Use of io.druid.query.SegmentDescriptor in project druid by druid-io: class CoordinatorBasedSegmentHandoffNotifier, method checkForSegmentHandoffs.
void checkForSegmentHandoffs()
{
  try {
    Iterator<Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>>> itr = handOffCallbacks.entrySet().iterator();
    while (itr.hasNext()) {
      Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry = itr.next();
      SegmentDescriptor descriptor = entry.getKey();
      try {
        List<ImmutableSegmentLoadInfo> loadedSegments =
            coordinatorClient.fetchServerView(dataSource, descriptor.getInterval(), true);
        if (isHandOffComplete(loadedSegments, entry.getKey())) {
          log.info("Segment Handoff complete for dataSource[%s] Segment[%s]", dataSource, descriptor);
          // run the registered callback on its executor, then stop tracking this segment
          entry.getValue().lhs.execute(entry.getValue().rhs);
          itr.remove();
        }
      }
      catch (Exception e) {
        log.error(
            e,
            "Exception while checking handoff for dataSource[%s] Segment[%s], will try again after [%d]secs",
            dataSource,
            descriptor,
            pollDurationMillis
        );
      }
    }
    if (!handOffCallbacks.isEmpty()) {
      log.info("Still waiting for Handoff for Segments : [%s]", handOffCallbacks.keySet());
    }
  }
  catch (Throwable t) {
    // no single segment is in scope here, so the message takes only the dataSource and poll interval
    log.error(
        t,
        "Exception while checking handoffs for dataSource[%s], will try again after [%d]secs",
        dataSource,
        pollDurationMillis
    );
  }
}
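For context, the handOffCallbacks map polled above is filled by callers registering interest in a segment. A hedged sketch of that registration (registerSegmentHandoffCallback is assumed from the SegmentHandoffNotifier interface, which is not shown in this listing):

// Assumed API: SegmentHandoffNotifier#registerSegmentHandoffCallback(descriptor, executor, callback)
SegmentDescriptor descriptor = new SegmentDescriptor(new Interval("2012-01-01T00:00:00Z/P1D"), "version", 0);
notifier.registerSegmentHandoffCallback(
    descriptor,
    MoreExecutors.sameThreadExecutor(),
    new Runnable()
    {
      @Override
      public void run()
      {
        // the segment is now served elsewhere; safe to release the local copy
      }
    }
);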