use of org.apache.druid.query.dimension.ExtractionDimensionSpec in project druid by druid-io.
the class CardinalityAggregatorTest method testSerde.
@Test
public void testSerde() throws Exception {
  CardinalityAggregatorFactory factory = new CardinalityAggregatorFactory(
      "billy",
      null,
      ImmutableList.of(
          new DefaultDimensionSpec("b", "b"),
          new DefaultDimensionSpec("a", "a"),
          new DefaultDimensionSpec("c", "c")
      ),
      true,
      true
  );
  ObjectMapper objectMapper = new DefaultObjectMapper();
  Assert.assertEquals(factory, objectMapper.readValue(objectMapper.writeValueAsString(factory), AggregatorFactory.class));

  String fieldNamesOnly = "{"
      + "\"type\":\"cardinality\","
      + "\"name\":\"billy\","
      + "\"fields\":[\"b\",\"a\",\"c\"],"
      + "\"byRow\":true,"
      + "\"round\":true"
      + "}";
  Assert.assertEquals(factory, objectMapper.readValue(fieldNamesOnly, AggregatorFactory.class));

  CardinalityAggregatorFactory factory2 = new CardinalityAggregatorFactory(
      "billy",
      ImmutableList.of(
          new ExtractionDimensionSpec("b", "b", new RegexDimExtractionFn(".*", false, null)),
          new RegexFilteredDimensionSpec(new DefaultDimensionSpec("a", "a"), ".*"),
          new DefaultDimensionSpec("c", "c")
      ),
      true
  );
  Assert.assertEquals(factory2, objectMapper.readValue(objectMapper.writeValueAsString(factory2), AggregatorFactory.class));
}
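For comparison, the extraction-based spec used in factory2 can be round-tripped through Jackson on its own. This is a minimal sketch, assuming the same DefaultObjectMapper and that DimensionSpec (the base type of ExtractionDimensionSpec) is the polymorphic deserialization target, analogous to AggregatorFactory above:

// Sketch: standalone serde round-trip of the extraction spec used in factory2.
// Assumes DimensionSpec is the registered Jackson polymorphic base type.
DimensionSpec spec = new ExtractionDimensionSpec("b", "b", new RegexDimExtractionFn(".*", false, null));
ObjectMapper mapper = new DefaultObjectMapper();
Assert.assertEquals(spec, mapper.readValue(mapper.writeValueAsString(spec), DimensionSpec.class));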
use of org.apache.druid.query.dimension.ExtractionDimensionSpec in project druid by druid-io.
the class TopNQueryRunnerTest method testFullOnTopNLongTimeColumnWithExFn.
@Test
public void testFullOnTopNLongTimeColumnWithExFn() {
  String jsFn = "function(str) { return 'super-' + str; }";
  ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance());
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .dimension(new ExtractionDimensionSpec(ColumnHolder.TIME_COLUMN_NAME, "time_alias", jsExtractionFn))
      .metric("maxIndex")
      .threshold(4)
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .aggregators(Lists.newArrayList(Iterables.concat(
          commonAggregators,
          Lists.newArrayList(
              new DoubleMaxAggregatorFactory("maxIndex", "index"),
              new DoubleMinAggregatorFactory("minIndex", "index")
          )
      )))
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .build();
  List<Result<TopNResultValue>> expectedResults = Collections.singletonList(new Result<>(
      DateTimes.of("2011-01-12T00:00:00.000Z"),
      new TopNResultValue(Arrays.<Map<String, Object>>asList(
          ImmutableMap.<String, Object>builder()
              .put("time_alias", "super-1296345600000")
              .put(QueryRunnerTestHelper.INDEX_METRIC, 5497.331253051758D)
              .put("rows", 13L)
              .put("addRowsIndexConstant", 5511.331253051758D)
              .put("uniques", QueryRunnerTestHelper.UNIQUES_9)
              .put("maxIndex", 1870.061029D)
              .put("minIndex", 97.02391052246094D)
              .build(),
          ImmutableMap.<String, Object>builder()
              .put("time_alias", "super-1298678400000")
              .put(QueryRunnerTestHelper.INDEX_METRIC, 6541.463027954102D)
              .put("rows", 13L)
              .put("addRowsIndexConstant", 6555.463027954102D)
              .put("uniques", QueryRunnerTestHelper.UNIQUES_9)
              .put("maxIndex", 1862.737933D)
              .put("minIndex", 83.099365234375D)
              .build(),
          ImmutableMap.<String, Object>builder()
              .put("time_alias", "super-1301529600000")
              .put(QueryRunnerTestHelper.INDEX_METRIC, 6814.467971801758D)
              .put("rows", 13L)
              .put("addRowsIndexConstant", 6828.467971801758D)
              .put("uniques", QueryRunnerTestHelper.UNIQUES_9)
              .put("maxIndex", 1734.27490234375D)
              .put("minIndex", 93.39083862304688D)
              .build(),
          ImmutableMap.<String, Object>builder()
              .put("time_alias", "super-1294876800000")
              .put(QueryRunnerTestHelper.INDEX_METRIC, 6077.949111938477D)
              .put("rows", 13L)
              .put("addRowsIndexConstant", 6091.949111938477D)
              .put("uniques", QueryRunnerTestHelper.UNIQUES_9)
              .put("maxIndex", 1689.0128173828125D)
              .put("minIndex", 94.87471008300781D)
              .build()
      ))
  ));
  assertExpectedResults(expectedResults, query);
}
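As a sanity check on the extraction itself, independent of the TopN machinery: the JavaScript function simply prefixes the millisecond value of __time with 'super-', which is why the time_alias values above look like "super-1296345600000". A minimal sketch, assuming ExtractionFn exposes apply(long) alongside apply(String):

// Sketch: the extraction applied directly to a millisecond timestamp.
ExtractionFn fn = new JavaScriptExtractionFn("function(str) { return 'super-' + str; }", false, JavaScriptConfig.getEnabledInstance());
// 1296345600000 is one of the row timestamps seen in the expected results above.
Assert.assertEquals("super-1296345600000", fn.apply(1296345600000L));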
use of org.apache.druid.query.dimension.ExtractionDimensionSpec in project druid by druid-io.
the class TopNQueryRunnerTest method testTopNWithEmptyStringProducingDimExtractionFn.
/**
 * This test exists only to show what the current behavior is and not necessarily to define that this is
 * correct behavior. In fact, the behavior when returning the empty string from a DimExtractionFn is, by
 * contract, undefined, so this can do anything.
 */
@Test
public void testTopNWithEmptyStringProducingDimExtractionFn() {
  final ExtractionFn emptyStringDimExtraction = new DimExtractionFn() {
    @Override
    public byte[] getCacheKey() {
      return new byte[] { (byte) 0xFF };
    }

    @Override
    public String apply(String dimValue) {
      return "total_market".equals(dimValue) ? "" : dimValue;
    }

    @Override
    public boolean preservesOrdering() {
      return false;
    }

    @Override
    public ExtractionType getExtractionType() {
      return ExtractionType.MANY_TO_ONE;
    }
  };

  final TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .metric("rows")
      .threshold(4)
      .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .aggregators(commonAggregators)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .dimension(new ExtractionDimensionSpec(
          QueryRunnerTestHelper.MARKET_DIMENSION,
          QueryRunnerTestHelper.MARKET_DIMENSION,
          emptyStringDimExtraction
      ))
      .build();

  List<Result<TopNResultValue>> expectedResults = Collections.singletonList(new Result<>(
      DateTimes.of("2011-04-01T00:00:00.000Z"),
      new TopNResultValue(Arrays.asList(
          ImmutableMap.of(
              QueryRunnerTestHelper.MARKET_DIMENSION, "spot",
              "rows", 18L,
              "index", 2231.876812D,
              "addRowsIndexConstant", 2250.876812D,
              "uniques", QueryRunnerTestHelper.UNIQUES_9
          ),
          QueryRunnerTestHelper.orderedMap(
              QueryRunnerTestHelper.MARKET_DIMENSION, "",
              "rows", 4L,
              "index", 5351.814783D,
              "addRowsIndexConstant", 5356.814783D,
              "uniques", QueryRunnerTestHelper.UNIQUES_2
          ),
          ImmutableMap.of(
              QueryRunnerTestHelper.MARKET_DIMENSION, "upfront",
              "rows", 4L,
              "index", 4875.669692D,
              "addRowsIndexConstant", 4880.669692D,
              "uniques", QueryRunnerTestHelper.UNIQUES_2
          )
      ))
  ));
  assertExpectedResults(expectedResults, query);
}
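Since the anonymous extraction function only rewrites a single value, its behavior can be checked directly; a minimal sketch using the function defined above:

// Sketch: "total_market" collapses to the empty string, everything else passes through,
// which is why those rows appear under the empty-string key in the expected TopN result.
Assert.assertEquals("", emptyStringDimExtraction.apply("total_market"));
Assert.assertEquals("spot", emptyStringDimExtraction.apply("spot"));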
use of org.apache.druid.query.dimension.ExtractionDimensionSpec in project druid by druid-io.
the class TopNQueryRunnerTest method testTopNDimExtractionTimeToOneLong.
@Test
public void testTopNDimExtractionTimeToOneLong() {
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .dimension(new ExtractionDimensionSpec(
          ColumnHolder.TIME_COLUMN_NAME,
          "t",
          ColumnType.LONG,
          new JavaScriptExtractionFn("function(f) { return \"42\"; }", false, JavaScriptConfig.getEnabledInstance())
      ))
      .metric("rows")
      .threshold(10)
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .aggregators(commonAggregators)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .build();
  List<Result<TopNResultValue>> expectedResults = Collections.singletonList(new Result<>(
      DateTimes.of("2011-01-12T00:00:00.000Z"),
      new TopNResultValue(Collections.<Map<String, Object>>singletonList(
          ImmutableMap.of(
              "addRowsIndexConstant", 504542.5071372986D,
              "index", 503332.5071372986D,
              "t", 42L,
              "uniques", QueryRunnerTestHelper.UNIQUES_9,
              "rows", 1209L
          )
      ))
  ));
  List<Result<TopNResultValue>> list = runWithMerge(query).toList();
  Assert.assertEquals(list.size(), 1);
  Assert.assertEquals("Didn't merge results", list.get(0).getValue().getValue().size(), 1);
  TestHelper.assertExpectedResults(expectedResults, list, "Failed to match");
}
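The only difference from the untyped variants elsewhere in this class is the explicit output type: because the spec declares ColumnType.LONG, the constant string "42" produced by the JavaScript function surfaces as the long 42L under "t" in the result row. A minimal sketch of constructing the same spec standalone, assuming DimensionSpec.getOutputType() reports the declared column type:

// Sketch: the typed extraction spec used in the query above, built on its own.
// getOutputType() returning ColumnType is an assumption based on the constructor used here.
DimensionSpec timeToConstant = new ExtractionDimensionSpec(
    ColumnHolder.TIME_COLUMN_NAME,
    "t",
    ColumnType.LONG,
    new JavaScriptExtractionFn("function(f) { return \"42\"; }", false, JavaScriptConfig.getEnabledInstance())
);
Assert.assertEquals(ColumnType.LONG, timeToConstant.getOutputType());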
use of org.apache.druid.query.dimension.ExtractionDimensionSpec in project druid by druid-io.
the class TopNQueryRunnerTest method testTopNDimExtractionToOne.
@Test
public void testTopNDimExtractionToOne() {
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .dimension(new ExtractionDimensionSpec(
          QueryRunnerTestHelper.MARKET_DIMENSION,
          QueryRunnerTestHelper.MARKET_DIMENSION,
          new JavaScriptExtractionFn("function(f) { return \"POTATO\"; }", false, JavaScriptConfig.getEnabledInstance())
      ))
      .metric("rows")
      .threshold(10)
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .aggregators(commonAggregators)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .build();
  List<Result<TopNResultValue>> expectedResults = Collections.singletonList(new Result<>(
      DateTimes.of("2011-01-12T00:00:00.000Z"),
      new TopNResultValue(Collections.<Map<String, Object>>singletonList(
          ImmutableMap.of(
              "addRowsIndexConstant", 504542.5071372986D,
              "index", 503332.5071372986D,
              QueryRunnerTestHelper.MARKET_DIMENSION, "POTATO",
              "uniques", QueryRunnerTestHelper.UNIQUES_9,
              "rows", 1209L
          )
      ))
  ));
  List<Result<TopNResultValue>> list = runWithMerge(query).toList();
  Assert.assertEquals(list.size(), 1);
  Assert.assertEquals("Didn't merge results", list.get(0).getValue().getValue().size(), 1);
  TestHelper.assertExpectedResults(expectedResults, list, "Failed to match");
}
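Both "to one" tests rely on an extraction that ignores its input, so every market value lands in the same bucket and the merged TopN reduces to a single row whose "rows" count (1209L) covers the whole interval. A minimal sketch of that many-to-one behavior, using only apply(String):

// Sketch: every input collapses to the same output value.
ExtractionFn toOne = new JavaScriptExtractionFn("function(f) { return \"POTATO\"; }", false, JavaScriptConfig.getEnabledInstance());
Assert.assertEquals("POTATO", toOne.apply("spot"));
Assert.assertEquals("POTATO", toOne.apply("upfront"));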