
Example 56 with MapBasedRow

Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.

From the class InDimFilterTest, the method testContainsNullWhenValuesSetIsTreeSet:

@Test
public void testContainsNullWhenValuesSetIsTreeSet() {
    // Regression test for NullPointerException caused by programmatically-generated InDimFilters that use
    // TreeSets with natural comparators. These Sets throw NullPointerException on contains(null).
    // InDimFilter wraps these contains methods in null-checking lambdas.
    final TreeSet<String> values = new TreeSet<>();
    values.add("foo");
    values.add("bar");
    final InDimFilter filter = new InDimFilter("dim", values, null);
    final Map<String, Object> row = new HashMap<>();
    row.put("dim", null);
    final RowBasedColumnSelectorFactory<MapBasedRow> columnSelectorFactory = RowBasedColumnSelectorFactory.create(
        RowAdapters.standardRow(),
        () -> new MapBasedRow(0, row),
        RowSignature.builder().add("dim", ColumnType.STRING).build(),
        true
    );
    final ValueMatcher matcher = filter.toFilter().makeMatcher(columnSelectorFactory);
    // This would throw an exception without InDimFilter's null-checking lambda wrapping.
    Assert.assertFalse(matcher.matches());
    row.put("dim", "foo");
    // Now it should match.
    Assert.assertTrue(matcher.matches());
    row.put("dim", "fox");
    // Now it *shouldn't* match.
    Assert.assertFalse(matcher.matches());
}
Also used: MapBasedRow (org.apache.druid.data.input.MapBasedRow), HashMap (java.util.HashMap), TreeSet (java.util.TreeSet), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
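
The failure this test guards against comes from TreeSet itself: a TreeSet ordered by natural comparison calls compareTo during lookups, so contains(null) throws NullPointerException. Below is a minimal, self-contained sketch of that behavior and of a null-checking wrapper in the spirit of the lambda the comment describes; it is an illustration, not Druid's actual InDimFilter code.

import java.util.Set;
import java.util.TreeSet;
import java.util.function.Predicate;

public class NullSafeContainsSketch {
    public static void main(String[] args) {
        // A TreeSet with a natural-ordering comparator cannot be probed with null.
        final Set<String> values = new TreeSet<>();
        values.add("foo");
        values.add("bar");
        // values.contains(null) would throw NullPointerException here.

        // Wrapping the lookup in a null-checking predicate avoids the exception and
        // treats null as "not contained", which is what the test above asserts.
        final Predicate<String> nullSafeContains = v -> v != null && values.contains(v);

        System.out.println(nullSafeContains.test(null));  // false, no exception
        System.out.println(nullSafeContains.test("foo")); // true
        System.out.println(nullSafeContains.test("fox")); // false
    }
}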

Example 57 with MapBasedRow

Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.

From the class TestHelper, the method assertTimeseriesResultValue:

private static void assertTimeseriesResultValue(String msg, Result expected, Result actual) {
    // Custom equals check to get fuzzy comparison of numerics, useful because different groupBy strategies don't
    // always generate exactly the same results (different merge ordering / float vs double)
    Assert.assertEquals(StringUtils.format("%s: timestamp", msg), expected.getTimestamp(), actual.getTimestamp());
    TimeseriesResultValue expectedVal = (TimeseriesResultValue) expected.getValue();
    TimeseriesResultValue actualVal = (TimeseriesResultValue) actual.getValue();
    final Map<String, Object> expectedMap = expectedVal.getBaseObject();
    final Map<String, Object> actualMap = actualVal.getBaseObject();
    assertRow(
        msg,
        new MapBasedRow(expected.getTimestamp(), expectedMap),
        new MapBasedRow(actual.getTimestamp(), actualMap)
    );
}
Also used: MapBasedRow (org.apache.druid.data.input.MapBasedRow), TimeseriesResultValue (org.apache.druid.query.timeseries.TimeseriesResultValue)
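
The "fuzzy comparison of numerics" the comment mentions can be pictured as a delta-tolerant map comparison. The sketch below is a hypothetical helper written for illustration under that assumption; it is not the assertRow method TestHelper actually delegates to, which handles more cases.

import java.util.Map;
import org.junit.Assert;

public class FuzzyRowAssertSketch {
    // Hypothetical helper: compares two row maps, tolerating small floating-point drift
    // that can arise from different merge orderings or float-vs-double arithmetic.
    static void assertFuzzyEquals(String msg, Map<String, Object> expected, Map<String, Object> actual) {
        Assert.assertEquals(msg + ": key set", expected.keySet(), actual.keySet());
        for (Map.Entry<String, Object> entry : expected.entrySet()) {
            final Object expectedValue = entry.getValue();
            final Object actualValue = actual.get(entry.getKey());
            if (expectedValue instanceof Number && actualValue instanceof Number) {
                // Numeric values only need to agree within a small tolerance.
                Assert.assertEquals(
                    msg + ": " + entry.getKey(),
                    ((Number) expectedValue).doubleValue(),
                    ((Number) actualValue).doubleValue(),
                    1e-6
                );
            } else {
                // Everything else must match exactly.
                Assert.assertEquals(msg + ": " + entry.getKey(), expectedValue, actualValue);
            }
        }
    }
}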

Example 58 with MapBasedRow

Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.

From the class LimitedBufferHashGrouperTest, the method testIteratorOrderByDim:

@Test
public void testIteratorOrderByDim() {
    final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
    final LimitedBufferHashGrouper<Integer> grouper = makeGrouperWithOrderBy(columnSelectorFactory, "value", OrderByColumnSpec.Direction.ASCENDING);
    for (int i = 0; i < NUM_ROWS; i++) {
        // limited grouper iterator will always sort by keys in ascending order, even if the heap was sorted by values
        // so, we aggregate with keys and values both descending so that the results are not re-ordered by key
        columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", NUM_ROWS - i + KEY_BASE)));
        Assert.assertTrue(String.valueOf(NUM_ROWS - i + KEY_BASE), grouper.aggregate(NUM_ROWS - i + KEY_BASE).isOk());
    }
    List<Grouper.Entry<Integer>> iterated = Lists.newArrayList(grouper.iterator(true));
    Assert.assertEquals(LIMIT, iterated.size());
    for (int i = 0; i < LIMIT; i++) {
        Assert.assertEquals(KEY_BASE + i + 1L, iterated.get(i).getValues()[0]);
    }
}
Also used: MapBasedRow (org.apache.druid.data.input.MapBasedRow), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
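
The comment in testIteratorOrderByDim is easier to follow with the key arithmetic written out. The sketch below uses assumed values for NUM_ROWS, KEY_BASE, and LIMIT (the real test class defines its own constants) to show why aggregating the keys in descending order still lets the key-ordered iterator emit the LIMIT smallest keys in ascending order.

public class OrderByDimArithmeticSketch {
    public static void main(String[] args) {
        // Assumed constants, for illustration only.
        final int NUM_ROWS = 1000;
        final int KEY_BASE = 100000;
        final int LIMIT = 100;

        // The loop aggregates key NUM_ROWS - i + KEY_BASE for i = 0..NUM_ROWS-1,
        // i.e. keys descending from KEY_BASE + NUM_ROWS down to KEY_BASE + 1.
        final int firstKey = NUM_ROWS + KEY_BASE;      // i = 0
        final int lastKey = 1 + KEY_BASE;              // i = NUM_ROWS - 1
        System.out.println(firstKey + " .. " + lastKey);

        // Ordering ascending by "value" and keeping only LIMIT entries retains the
        // smallest keys, and the limited iterator emits them in ascending key order,
        // which is what iterated.get(i).getValues()[0] == KEY_BASE + i + 1 asserts.
        for (int i = 0; i < LIMIT; i++) {
            System.out.println(KEY_BASE + i + 1L);     // 100001, 100002, ...
        }
    }
}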

Example 59 with MapBasedRow

Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.

From the class LimitedBufferHashGrouperTest, the method testIteratorOrderByAggsDesc:

@Test
public void testIteratorOrderByAggsDesc() {
    final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
    final LimitedBufferHashGrouper<Integer> grouper = makeGrouperWithOrderBy(columnSelectorFactory, "valueSum", OrderByColumnSpec.Direction.DESCENDING);
    for (int i = 0; i < NUM_ROWS; i++) {
        // limited grouper iterator will always sort by keys in ascending order, even if the heap was sorted by values
        // so, we aggregate with keys descending and values ascending so that the results are not re-ordered by key
        columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", i + 1)));
        Assert.assertTrue(String.valueOf(NUM_ROWS - i + KEY_BASE), grouper.aggregate(NUM_ROWS - i + KEY_BASE).isOk());
    }
    List<Grouper.Entry<Integer>> iterated = Lists.newArrayList(grouper.iterator(true));
    Assert.assertEquals(LIMIT, iterated.size());
    for (int i = 0; i < LIMIT; i++) {
        Assert.assertEquals((long) NUM_ROWS - i, iterated.get(i).getValues()[0]);
    }
}
Also used: MapBasedRow (org.apache.druid.data.input.MapBasedRow), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
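
In testIteratorOrderByAggsDesc the pairing is inverted: row i contributes value i + 1 to key NUM_ROWS - i + KEY_BASE, so the largest sums land on the smallest keys. A brief sketch, with the same assumed constants as above, of why the descending-by-valueSum result therefore already appears in ascending key order.

public class OrderByAggsDescArithmeticSketch {
    public static void main(String[] args) {
        // Assumed constants, for illustration only.
        final int NUM_ROWS = 1000;
        final int KEY_BASE = 100000;
        final int LIMIT = 100;

        // i = 0            -> key KEY_BASE + NUM_ROWS (largest), value 1 (smallest)
        // i = NUM_ROWS - 1 -> key KEY_BASE + 1 (smallest), value NUM_ROWS (largest)
        // Keeping the LIMIT largest sums keeps values NUM_ROWS, NUM_ROWS - 1, ...,
        // which sit on keys KEY_BASE + 1, KEY_BASE + 2, ...: already ascending, so the
        // key-ordered iterator does not reshuffle them.
        for (int i = 0; i < LIMIT; i++) {
            System.out.println((long) NUM_ROWS - i);   // 1000, 999, ... matches the assertion
        }
    }
}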

Example 60 with MapBasedRow

Use of org.apache.druid.data.input.MapBasedRow in project druid by druid-io.

From the class LimitedBufferHashGrouperTest, the method testIteratorOrderByAggs:

@Test
public void testIteratorOrderByAggs() {
    final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
    final LimitedBufferHashGrouper<Integer> grouper = makeGrouperWithOrderBy(columnSelectorFactory, "valueSum", OrderByColumnSpec.Direction.ASCENDING);
    for (int i = 0; i < NUM_ROWS; i++) {
        // limited grouper iterator will always sort by keys in ascending order, even if the heap was sorted by values
        // so, we aggregate with keys and values both descending so that the results are not re-ordered by key
        columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", NUM_ROWS - i)));
        Assert.assertTrue(String.valueOf(NUM_ROWS - i + KEY_BASE), grouper.aggregate(NUM_ROWS - i + KEY_BASE).isOk());
    }
    List<Grouper.Entry<Integer>> iterated = Lists.newArrayList(grouper.iterator(true));
    Assert.assertEquals(LIMIT, iterated.size());
    for (int i = 0; i < LIMIT; i++) {
        Assert.assertEquals(i + 1L, iterated.get(i).getValues()[0]);
    }
}
Also used: MapBasedRow (org.apache.druid.data.input.MapBasedRow), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
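
testIteratorOrderByAggs is the mirror image: the aggregated values NUM_ROWS - i descend along with the keys, so the ascending order-by keeps the LIMIT smallest sums, which again sit on the ascending keys from KEY_BASE + 1 upward. A last short sketch, under the same assumptions, of the sequence the assertion expects.

public class OrderByAggsAscArithmeticSketch {
    public static void main(String[] args) {
        // Assumed constants, for illustration only.
        final int NUM_ROWS = 1000;
        final int LIMIT = 100;

        // Row i contributes value NUM_ROWS - i; ordering ascending by valueSum and
        // applying the limit keeps the sums 1, 2, ..., LIMIT, so the test expects
        // iterated.get(i).getValues()[0] == i + 1.
        for (int i = 0; i < LIMIT; i++) {
            System.out.println(i + 1L);
        }
    }
}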

Aggregations

MapBasedRow (org.apache.druid.data.input.MapBasedRow): 65
Test (org.junit.Test): 50
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 36
ArrayList (java.util.ArrayList): 21
Row (org.apache.druid.data.input.Row): 16
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 16
GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest): 16
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 15
HashMap (java.util.HashMap): 13
DimensionSpec (org.apache.druid.query.dimension.DimensionSpec): 12
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 10
List (java.util.List): 9
ResultRow (org.apache.druid.query.groupby.ResultRow): 9
LongMeanAveragerFactory (org.apache.druid.query.movingaverage.averagers.LongMeanAveragerFactory): 9
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 8
File (java.io.File): 7
ByteBuffer (java.nio.ByteBuffer): 6
GroupByQueryConfig (org.apache.druid.query.groupby.GroupByQueryConfig): 6
TimeseriesResultValue (org.apache.druid.query.timeseries.TimeseriesResultValue): 6
IOException (java.io.IOException): 5