Use of org.apache.druid.java.util.common.Intervals in project druid by druid-io.
The class SqlSegmentsMetadataQuery, method retrieveSegments.
private CloseableIterator<DataSegment> retrieveSegments(
    final String dataSource,
    final Collection<Interval> intervals,
    final IntervalMode matchMode,
    final boolean used
)
{
  // Check if the intervals all support comparing as strings. If so, bake them into the SQL.
  final boolean compareAsString = intervals.stream().allMatch(Intervals::canCompareEndpointsAsStrings);

  final StringBuilder sb = new StringBuilder();
  sb.append("SELECT payload FROM %s WHERE used = :used AND dataSource = :dataSource");

  if (compareAsString && !intervals.isEmpty()) {
    sb.append(" AND (");
    for (int i = 0; i < intervals.size(); i++) {
      sb.append(
          matchMode.makeSqlCondition(
              connector.getQuoteString(),
              StringUtils.format(":start%d", i),
              StringUtils.format(":end%d", i)
          )
      );
      if (i == intervals.size() - 1) {
        sb.append(")");
      } else {
        sb.append(" OR ");
      }
    }
  }

  final Query<Map<String, Object>> sql = handle
      .createQuery(StringUtils.format(sb.toString(), dbTables.getSegmentsTable()))
      .setFetchSize(connector.getStreamingFetchSize())
      .bind("used", used)
      .bind("dataSource", dataSource);

  if (compareAsString) {
    final Iterator<Interval> iterator = intervals.iterator();
    for (int i = 0; iterator.hasNext(); i++) {
      Interval interval = iterator.next();
      sql.bind(StringUtils.format("start%d", i), interval.getStart().toString())
         .bind(StringUtils.format("end%d", i), interval.getEnd().toString());
    }
  }

  final ResultIterator<DataSegment> resultIterator = sql
      .map((index, r, ctx) -> JacksonUtils.readValue(jsonMapper, r.getBytes(1), DataSegment.class))
      .iterator();

  return CloseableIterators.wrap(
      Iterators.filter(
          resultIterator,
          dataSegment -> {
            if (intervals.isEmpty()) {
              return true;
            } else {
              // Must re-check that the interval matches even when comparing endpoints as strings,
              // because the segment interval may not be string-comparable. (Consider a query
              // interval like "2000-01-01/3000-01-01" and a segment interval like "20010/20011".)
              for (Interval interval : intervals) {
                if (matchMode.apply(interval, dataSegment.getInterval())) {
                  return true;
                }
              }
              return false;
            }
          }
      ),
      resultIterator
  );
}
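The final filter above is needed because the SQL fast path compares interval endpoints as strings, and lexicographic order of ISO-8601 timestamps only matches chronological order while every year has four digits. A minimal, self-contained sketch of the false positive the comment describes, using plain JDK string comparison (class name hypothetical):

public class StringIntervalFalsePositive {
  public static void main(String[] args) {
    // Endpoints for a segment interval "20010/20011" and a query interval "2000-01-01/3000-01-01".
    String segStart = "20010-01-01T00:00:00.000Z";
    String queryStart = "2000-01-01T00:00:00.000Z";
    String queryEnd = "3000-01-01T00:00:00.000Z";
    // As strings, the five-digit year sorts between the two query endpoints...
    System.out.println(segStart.compareTo(queryStart) > 0); // true
    System.out.println(segStart.compareTo(queryEnd) < 0);   // true
    // ...even though the year 20010 falls long after 3000, so a pure SQL string
    // match would wrongly accept this segment. Hence the in-process re-check.
  }
}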
Use of org.apache.druid.java.util.common.Intervals in project druid by druid-io.
The class TimeseriesQueryRunnerTest, method testTimeseriesWithTimestampResultFieldContextForMapResponse.
@Test
public void testTimeseriesWithTimestampResultFieldContextForMapResponse()
{
  Granularity gran = Granularities.DAY;
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(gran)
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .aggregators(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.INDEX_DOUBLE_SUM, QueryRunnerTestHelper.QUALITY_UNIQUES)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .descending(descending)
      .context(makeContext(ImmutableMap.of(
          TimeseriesQuery.CTX_TIMESTAMP_RESULT_FIELD, TIMESTAMP_RESULT_FIELD_NAME,
          TimeseriesQuery.SKIP_EMPTY_BUCKETS, true
      )))
      .build();
  Assert.assertEquals(TIMESTAMP_RESULT_FIELD_NAME, query.getTimestampResultField());

  Iterable<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();

  final String[] expectedIndex = descending
      ? QueryRunnerTestHelper.EXPECTED_FULL_ON_INDEX_VALUES_DESC
      : QueryRunnerTestHelper.EXPECTED_FULL_ON_INDEX_VALUES;
  final String[] expectedIndexToUse = Arrays.stream(expectedIndex)
      .filter(eachIndex -> !"0.0".equals(eachIndex))
      .toArray(String[]::new);
  final DateTime expectedLast = descending ? QueryRunnerTestHelper.EARLIEST : QueryRunnerTestHelper.LAST;

  int count = 0;
  Result lastResult = null;
  for (Result<TimeseriesResultValue> result : results) {
    DateTime current = result.getTimestamp();
    Assert.assertFalse(
        StringUtils.format("Timestamp[%s] > expectedLast[%s]", current, expectedLast),
        descending ? current.isBefore(expectedLast) : current.isAfter(expectedLast)
    );

    final TimeseriesResultValue value = result.getValue();
    Assert.assertEquals(value.getLongMetric(TIMESTAMP_RESULT_FIELD_NAME), current.getMillis(), 0);
    Assert.assertEquals(
        result.toString(),
        QueryRunnerTestHelper.SKIPPED_DAY.equals(current) ? 0L : 13L,
        value.getLongMetric("rows").longValue()
    );

    if (!QueryRunnerTestHelper.SKIPPED_DAY.equals(current)) {
      Assert.assertEquals(
          result.toString(),
          Doubles.tryParse(expectedIndexToUse[count]).doubleValue(),
          value.getDoubleMetric("index").doubleValue(),
          value.getDoubleMetric("index").doubleValue() * 1e-6
      );
      Assert.assertEquals(
          result.toString(),
          new Double(expectedIndexToUse[count]) + 13L + 1L,
          value.getDoubleMetric("addRowsIndexConstant"),
          value.getDoubleMetric("addRowsIndexConstant") * 1e-6
      );
      Assert.assertEquals(value.getDoubleMetric("uniques"), 9.0d, 0.02);
    } else if (NullHandling.replaceWithDefault()) {
      Assert.assertEquals(
          result.toString(),
          0.0D,
          value.getDoubleMetric("index").doubleValue(),
          value.getDoubleMetric("index").doubleValue() * 1e-6
      );
      Assert.assertEquals(
          result.toString(),
          new Double(expectedIndexToUse[count]) + 1L,
          value.getDoubleMetric("addRowsIndexConstant"),
          value.getDoubleMetric("addRowsIndexConstant") * 1e-6
      );
      Assert.assertEquals(0.0D, value.getDoubleMetric("uniques"), 0.02);
    } else {
      Assert.assertNull(result.toString(), value.getDoubleMetric("index"));
      Assert.assertNull(result.toString(), value.getDoubleMetric("addRowsIndexConstant"));
      Assert.assertEquals(value.getDoubleMetric("uniques"), 0.0d, 0.02);
    }

    lastResult = result;
    ++count;
  }
  Assert.assertEquals(lastResult.toString(), expectedLast, lastResult.getTimestamp());
}
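The floating-point assertions above pass a delta of value * 1e-6, a tolerance relative to the value's magnitude rather than a fixed absolute error, so large index sums are allowed proportionally larger rounding drift. A minimal sketch of the pattern (JUnit 4; the numbers are hypothetical):

import org.junit.Assert;

public class RelativeToleranceSketch {
  public static void main(String[] args) {
    double expected = 5351.814783D;
    double actual = 5351.814780D; // tiny rounding difference in the last digits
    // The allowed error scales with the value under test: one part per million.
    Assert.assertEquals(expected, actual, actual * 1e-6);
    System.out.println("within one part per million");
  }
}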
Use of org.apache.druid.java.util.common.Intervals in project druid by druid-io.
The class TopNQueryRunnerTest, method testTopNBySegment.
@Test
public void testTopNBySegment()
{
  final HashMap<String, Object> specialContext = new HashMap<String, Object>();
  specialContext.put(QueryContexts.BY_SEGMENT_KEY, "true");
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
      .metric(QueryRunnerTestHelper.INDEX_METRIC)
      .threshold(4)
      .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .aggregators(commonAggregators)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .context(specialContext)
      .build();

  List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
      new Result<>(
          DateTimes.of("2011-04-01T00:00:00.000Z"),
          new TopNResultValue(Arrays.<Map<String, Object>>asList(
              ImmutableMap.of("addRowsIndexConstant", 5356.814783D, "index", 5351.814783D, QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "uniques", QueryRunnerTestHelper.UNIQUES_2, "rows", 4L),
              ImmutableMap.of("addRowsIndexConstant", 4880.669692D, "index", 4875.669692D, QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "uniques", QueryRunnerTestHelper.UNIQUES_2, "rows", 4L),
              ImmutableMap.of("addRowsIndexConstant", 2250.876812D, "index", 2231.876812D, QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "uniques", QueryRunnerTestHelper.UNIQUES_9, "rows", 18L)
          ))
      )
  );

  Sequence<Result<TopNResultValue>> results = runWithMerge(query);
  List<Result<BySegmentTopNResultValue>> resultList = results.map((Result<TopNResultValue> input) -> {
    // Stupid type erasure
    Object val = input.getValue();
    if (val instanceof BySegmentResultValue) {
      BySegmentResultValue bySegVal = (BySegmentResultValue) val;
      return new Result<>(
          input.getTimestamp(),
          new BySegmentTopNResultValue(
              Lists.transform(bySegVal.getResults(), res -> {
                if (Preconditions.checkNotNull(res) instanceof Result) {
                  Result theResult = (Result) res;
                  Object resVal = theResult.getValue();
                  if (resVal instanceof TopNResultValue) {
                    return new Result<>(theResult.getTimestamp(), (TopNResultValue) resVal);
                  }
                }
                throw new IAE("Bad input: [%s]", res);
              }),
              bySegVal.getSegmentId(),
              bySegVal.getInterval()
          )
      );
    }
    throw new ISE("Bad type");
  }).toList();

  Result<BySegmentTopNResultValue> result = resultList.get(0);
  TestHelper.assertExpectedResults(expectedResults, result.getValue().getResults());
}
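Because QueryContexts.BY_SEGMENT_KEY is set, each result arrives wrapped in a BySegmentResultValue that carries the producing segment's id and interval alongside the ordinary TopN rows, which is why the mapping above unwraps two layers. A short sketch of reading the wrapper's extra fields, using only the getters already exercised above (the helper class and import paths are assumptions):

import java.util.List;
import org.apache.druid.query.BySegmentResultValue;
import org.joda.time.Interval;

class BySegmentInspection {
  // Sketch: surface the metadata the by-segment wrapper adds on top of plain results.
  static void describe(BySegmentResultValue<?> bySegVal) {
    String segmentId = bySegVal.getSegmentId();  // identifier of the producing segment
    Interval interval = bySegVal.getInterval();  // time interval that segment covers
    List<?> rows = bySegVal.getResults();        // the per-segment TopN results themselves
    System.out.println(segmentId + " covers " + interval + " with " + rows.size() + " row(s)");
  }
}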
Use of org.apache.druid.java.util.common.Intervals in project druid by druid-io.
The class TimeseriesQueryRunnerTest, method testTimeseriesWithTimestampResultFieldContextForArrayResponse.
@Test
public void testTimeseriesWithTimestampResultFieldContextForArrayResponse()
{
  Granularity gran = Granularities.DAY;
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(gran)
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .aggregators(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.INDEX_DOUBLE_SUM, QueryRunnerTestHelper.QUALITY_UNIQUES)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .descending(descending)
      .context(makeContext(ImmutableMap.of(
          TimeseriesQuery.CTX_TIMESTAMP_RESULT_FIELD, TIMESTAMP_RESULT_FIELD_NAME,
          TimeseriesQuery.SKIP_EMPTY_BUCKETS, true
      )))
      .build();
  Assert.assertEquals(TIMESTAMP_RESULT_FIELD_NAME, query.getTimestampResultField());

  QueryToolChest<Result<TimeseriesResultValue>, TimeseriesQuery> toolChest = new TimeseriesQueryQueryToolChest();
  RowSignature rowSignature = toolChest.resultArraySignature(query);
  Assert.assertNotNull(rowSignature);
  List<String> columnNames = rowSignature.getColumnNames();
  Assert.assertNotNull(columnNames);
  Assert.assertEquals(6, columnNames.size());
  Assert.assertEquals("__time", columnNames.get(0));
  Assert.assertEquals(TIMESTAMP_RESULT_FIELD_NAME, columnNames.get(1));
  Assert.assertEquals("rows", columnNames.get(2));
  Assert.assertEquals("index", columnNames.get(3));
  Assert.assertEquals("uniques", columnNames.get(4));
  Assert.assertEquals("addRowsIndexConstant", columnNames.get(5));

  Sequence<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query));
  Sequence<Object[]> resultsAsArrays = toolChest.resultsAsArrays(query, results);
  Assert.assertNotNull(resultsAsArrays);

  final String[] expectedIndex = descending
      ? QueryRunnerTestHelper.EXPECTED_FULL_ON_INDEX_VALUES_DESC
      : QueryRunnerTestHelper.EXPECTED_FULL_ON_INDEX_VALUES;
  final String[] expectedIndexToUse = Arrays.stream(expectedIndex)
      .filter(eachIndex -> !"0.0".equals(eachIndex))
      .toArray(String[]::new);
  final Long expectedLast = descending
      ? QueryRunnerTestHelper.EARLIEST.getMillis()
      : QueryRunnerTestHelper.LAST.getMillis();

  int count = 0;
  Object[] lastResult = null;
  for (Object[] result : resultsAsArrays.toList()) {
    Long current = (Long) result[0];
    Assert.assertFalse(
        StringUtils.format("Timestamp[%s] > expectedLast[%s]", current, expectedLast),
        descending ? current < expectedLast : current > expectedLast
    );
    Assert.assertEquals((Long) result[1], current, 0);
    Assert.assertEquals(QueryRunnerTestHelper.SKIPPED_DAY.getMillis() == current ? (Long) 0L : (Long) 13L, result[2]);

    if (QueryRunnerTestHelper.SKIPPED_DAY.getMillis() != current) {
      Assert.assertEquals(Doubles.tryParse(expectedIndexToUse[count]).doubleValue(), (Double) result[3], (Double) result[3] * 1e-6);
      Assert.assertEquals((Double) result[4], 9.0d, 0.02);
      Assert.assertEquals(new Double(expectedIndexToUse[count]) + 13L + 1L, (Double) result[5], (Double) result[5] * 1e-6);
    } else if (NullHandling.replaceWithDefault()) {
      Assert.assertEquals(0.0D, (Double) result[3], (Double) result[3] * 1e-6);
      Assert.assertEquals(0.0D, (Double) result[4], 0.02);
      Assert.assertEquals(new Double(expectedIndexToUse[count]) + 1L, (Double) result[5], (Double) result[5] * 1e-6);
    } else {
      Assert.assertNull(result[3]);
      Assert.assertEquals((Double) result[4], 0.0, 0.02);
      Assert.assertNull(result[5]);
    }

    lastResult = result;
    ++count;
  }
  Assert.assertEquals(expectedLast, lastResult[0]);
}
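resultsAsArrays flattens each result into an Object[] whose positions follow the row signature asserted above, which is what makes index-based reads like result[3] valid. A small sketch of turning that signature into a name-to-position map so the reads become self-documenting (the helper class is hypothetical):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

class ColumnPositions {
  // Sketch: build a column-name -> array-index lookup from a signature's column names.
  static Map<String, Integer> of(List<String> columnNames) {
    Map<String, Integer> position = new HashMap<>();
    for (int i = 0; i < columnNames.size(); i++) {
      position.put(columnNames.get(i), i);
    }
    return position;
  }
}

// Usage against a row from resultsAsArrays:
//   Map<String, Integer> pos = ColumnPositions.of(rowSignature.getColumnNames());
//   Object indexValue = row[pos.get("index")]; // equivalent to result[3] above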
Use of org.apache.druid.java.util.common.Intervals in project druid by druid-io.
The class CompactSegmentsTest, method testRunWithLockedIntervalsNoSkip.
@Test
public void testRunWithLockedIntervalsNoSkip()
{
  Mockito.when(COORDINATOR_CONFIG.getCompactionSkipLockedIntervals()).thenReturn(false);

  final TestDruidLeaderClient leaderClient = new TestDruidLeaderClient(JSON_MAPPER);
  leaderClient.start();
  HttpIndexingServiceClient indexingServiceClient = new HttpIndexingServiceClient(JSON_MAPPER, leaderClient);

  // Lock all intervals for all the dataSources
  final String datasource0 = DATA_SOURCE_PREFIX + 0;
  leaderClient.lockedIntervals.computeIfAbsent(datasource0, k -> new ArrayList<>()).add(Intervals.of("2017/2018"));
  final String datasource1 = DATA_SOURCE_PREFIX + 1;
  leaderClient.lockedIntervals.computeIfAbsent(datasource1, k -> new ArrayList<>()).add(Intervals.of("2017/2018"));
  final String datasource2 = DATA_SOURCE_PREFIX + 2;
  leaderClient.lockedIntervals.computeIfAbsent(datasource2, k -> new ArrayList<>()).add(Intervals.of("2017/2018"));

  // Verify that no locked intervals are skipped
  CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, indexingServiceClient);
  int maxTaskSlots = partitionsSpec instanceof SingleDimensionPartitionsSpec ? 5 : 3;
  final CoordinatorStats stats = doCompactSegments(compactSegments, createCompactionConfigs(1), maxTaskSlots);
  Assert.assertEquals(3, stats.getGlobalStat(CompactSegments.COMPACTION_TASK_COUNT));
  Assert.assertEquals(3, leaderClient.submittedCompactionTasks.size());
  leaderClient.submittedCompactionTasks.forEach(
      task -> System.out.println(task.getDataSource() + " : " + task.getIoConfig().getInputSpec().getInterval())
  );

  // Verify that tasks are submitted for the latest interval of each dataSource
  final Map<String, Interval> datasourceToInterval = new HashMap<>();
  leaderClient.submittedCompactionTasks.forEach(
      task -> datasourceToInterval.put(task.getDataSource(), task.getIoConfig().getInputSpec().getInterval())
  );
  Assert.assertEquals(Intervals.of("2017-01-09T00:00:00Z/2017-01-09T12:00:00Z"), datasourceToInterval.get(datasource0));
  Assert.assertEquals(Intervals.of("2017-01-09T00:00:00Z/2017-01-09T12:00:00Z"), datasourceToInterval.get(datasource1));
  Assert.assertEquals(Intervals.of("2017-01-09T00:00:00Z/2017-01-09T12:00:00Z"), datasourceToInterval.get(datasource2));
}
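Every interval literal above goes through Intervals.of, which parses an ISO-8601 interval string into a Joda-Time Interval in UTC, so the year shorthand "2017/2018" and the fully spelled-out endpoints in the assertions are interchangeable notations. A minimal sketch (class name hypothetical; the printed form assumes Joda's default UTC toString):

import org.apache.druid.java.util.common.Intervals;
import org.joda.time.Interval;

public class IntervalsOfSketch {
  public static void main(String[] args) {
    // Year-only shorthand expands to full UTC instants.
    Interval lockedYear = Intervals.of("2017/2018");
    System.out.println(lockedYear); // 2017-01-01T00:00:00.000Z/2018-01-01T00:00:00.000Z

    // The half-day interval asserted in the test, spelled out explicitly.
    Interval halfDay = Intervals.of("2017-01-09T00:00:00Z/2017-01-09T12:00:00Z");
    System.out.println(lockedYear.contains(halfDay)); // true: the locked year covers it
  }
}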