
Example 71 with MapBasedInputRow

Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io, from the class DruidSegmentReaderTest, method testReaderAutoTimestampFormat.

@Test
public void testReaderAutoTimestampFormat() throws IOException {
    final DruidSegmentReader reader = new DruidSegmentReader(
        makeInputEntity(Intervals.of("2000/P1D")),
        indexIO,
        new TimestampSpec("__time", "auto", DateTimes.of("1971")),
        new DimensionsSpec(
            ImmutableList.of(
                StringDimensionSchema.create("s"),
                new DoubleDimensionSchema("d")
            )
        ),
        ColumnsFilter.all(),
        null,
        temporaryFolder.newFolder()
    );
    Assert.assertEquals(
        ImmutableList.of(
            new MapBasedInputRow(
                DateTimes.of("2000"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T").getMillis())
                    .put("s", "foo")
                    .put("d", 1.23d)
                    .put("cnt", 1L)
                    .put("met_s", makeHLLC("foo"))
                    .build()
            ),
            new MapBasedInputRow(
                DateTimes.of("2000T01"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T01").getMillis())
                    .put("s", "bar")
                    .put("d", 4.56d)
                    .put("cnt", 1L)
                    .put("met_s", makeHLLC("bar"))
                    .build()
            )
        ),
        readRows(reader)
    );
}
Also used : DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) NullHandlingTest(org.apache.druid.common.config.NullHandlingTest) Test(org.junit.Test)
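
For context, a minimal, self-contained sketch of what MapBasedInputRow itself does, independent of the reader above: it pairs a timestamp with an ordered dimension list and an event map, and serves dimension and raw values back out. The standalone class and printed values are illustrative, not part of the test.

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.java.util.common.DateTimes;

public class MapBasedInputRowSketch {
    public static void main(String[] args) {
        // A row is a timestamp, an ordered list of dimension names, and an event map.
        InputRow row = new MapBasedInputRow(
            DateTimes.of("2000"),
            ImmutableList.of("s", "d"),
            ImmutableMap.<String, Object>of("s", "foo", "d", 1.23d)
        );
        System.out.println(row.getTimestamp());     // 2000-01-01T00:00:00.000Z
        System.out.println(row.getDimension("s"));  // [foo]
        System.out.println(row.getRaw("d"));        // 1.23
    }
}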

Example 72 with MapBasedInputRow

Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io, from the class DruidSegmentReaderTest, method testReaderWithDimensionExclusions.

@Test
public void testReaderWithDimensionExclusions() throws IOException {
    final DruidSegmentReader reader = new DruidSegmentReader(
        makeInputEntity(Intervals.of("2000/P1D")),
        indexIO,
        new TimestampSpec("__time", "millis", DateTimes.of("1971")),
        DimensionsSpec.builder()
                      .setDimensionExclusions(ImmutableList.of("__time", "s", "cnt", "met_s"))
                      .build(),
        ColumnsFilter.all(),
        null,
        temporaryFolder.newFolder()
    );
    Assert.assertEquals(
        ImmutableList.of(
            new MapBasedInputRow(
                DateTimes.of("2000"),
                ImmutableList.of("d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T").getMillis())
                    .put("s", "foo")
                    .put("d", 1.23d)
                    .put("cnt", 1L)
                    .put("met_s", makeHLLC("foo"))
                    .build()
            ),
            new MapBasedInputRow(
                DateTimes.of("2000T01"),
                ImmutableList.of("d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T01").getMillis())
                    .put("s", "bar")
                    .put("d", 4.56d)
                    .put("cnt", 1L)
                    .put("met_s", makeHLLC("bar"))
                    .build()
            )
        ),
        readRows(reader)
    );
}
Also used : TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) NullHandlingTest(org.apache.druid.common.config.NullHandlingTest) Test(org.junit.Test)
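
A hedged sketch of the exclusion behavior in isolation, using only the DimensionsSpec API shown in the test above: with no explicit dimension list, the spec signals schemaless discovery, and the excluded columns are dropped from whatever is discovered, which is why only "d" survives as a dimension in the expected rows.

import com.google.common.collect.ImmutableList;
import org.apache.druid.data.input.impl.DimensionsSpec;

public class DimensionExclusionsSketch {
    public static void main(String[] args) {
        // An exclusion-only spec: no explicit dimension list, so dimensions are
        // discovered from the data, minus the excluded columns.
        DimensionsSpec spec = DimensionsSpec.builder()
            .setDimensionExclusions(ImmutableList.of("__time", "s", "cnt", "met_s"))
            .build();
        System.out.println(spec.getDimensions().isEmpty());  // true: schemaless discovery
        System.out.println(spec.getDimensionExclusions());   // the four excluded columns
    }
}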

Example 73 with MapBasedInputRow

Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io, from the class DruidSegmentReaderTest, method testReaderTimestampFromDouble.

@Test
public void testReaderTimestampFromDouble() throws IOException {
    final DruidSegmentReader reader = new DruidSegmentReader(
        makeInputEntity(Intervals.of("2000/P1D")),
        indexIO,
        new TimestampSpec("d", "posix", null),
        new DimensionsSpec(
            ImmutableList.of(
                StringDimensionSchema.create("s"),
                new DoubleDimensionSchema("d")
            )
        ),
        ColumnsFilter.all(),
        null,
        temporaryFolder.newFolder()
    );
    Assert.assertEquals(
        ImmutableList.of(
            new MapBasedInputRow(
                DateTimes.of("1970-01-01T00:00:01.000Z"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T").getMillis())
                    .put("s", "foo")
                    .put("d", 1.23d)
                    .put("cnt", 1L)
                    .put("met_s", makeHLLC("foo"))
                    .build()
            ),
            new MapBasedInputRow(
                DateTimes.of("1970-01-01T00:00:04.000Z"),
                ImmutableList.of("s", "d"),
                ImmutableMap.<String, Object>builder()
                    .put("__time", DateTimes.of("2000T01").getMillis())
                    .put("s", "bar")
                    .put("d", 4.56d)
                    .put("cnt", 1L)
                    .put("met_s", makeHLLC("bar"))
                    .build()
            )
        ),
        readRows(reader)
    );
}
Also used : DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) NullHandlingTest(org.apache.druid.common.config.NullHandlingTest) Test(org.junit.Test)
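
Why d=1.23 maps to one second past the epoch: the "posix" format reads the timestamp column as seconds since 1970-01-01, and a double value is truncated to whole seconds. A minimal sketch, assuming TimestampSpec's extractTimestamp helper behaves as the test's expected rows imply:

import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.joda.time.DateTime;

public class PosixTimestampSketch {
    public static void main(String[] args) {
        // "posix" interprets the value as epoch seconds; 1.23 truncates to 1 second.
        TimestampSpec spec = new TimestampSpec("d", "posix", null);
        DateTime ts = spec.extractTimestamp(ImmutableMap.<String, Object>of("d", 1.23d));
        System.out.println(ts);  // 1970-01-01T00:00:01.000Z
    }
}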

Example 74 with MapBasedInputRow

Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io, from the class GroupByMultiSegmentTest, method setup.

@Before
public void setup() throws Exception {
    tmpDir = FileUtils.createTempDir();
    InputRow row;
    List<String> dimNames = Arrays.asList("dimA", "metA");
    Map<String, Object> event;
    final IncrementalIndex indexA = makeIncIndex(false);
    incrementalIndices.add(indexA);
    event = new HashMap<>();
    event.put("dimA", "hello");
    event.put("metA", 100);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexA.add(row);
    event = new HashMap<>();
    event.put("dimA", "world");
    event.put("metA", 75);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexA.add(row);
    final File fileA = INDEX_MERGER_V9.persist(indexA, new File(tmpDir, "A"), new IndexSpec(), null);
    QueryableIndex qindexA = INDEX_IO.loadIndex(fileA);
    final IncrementalIndex indexB = makeIncIndex(false);
    incrementalIndices.add(indexB);
    event = new HashMap<>();
    event.put("dimA", "foo");
    event.put("metA", 100);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexB.add(row);
    event = new HashMap<>();
    event.put("dimA", "world");
    event.put("metA", 75);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexB.add(row);
    final File fileB = INDEX_MERGER_V9.persist(indexB, new File(tmpDir, "B"), new IndexSpec(), null);
    QueryableIndex qindexB = INDEX_IO.loadIndex(fileB);
    groupByIndices = Arrays.asList(qindexA, qindexB);
    resourceCloser = Closer.create();
    setupGroupByFactory();
}
Also used : IndexSpec(org.apache.druid.segment.IndexSpec) IncrementalIndex(org.apache.druid.segment.incremental.IncrementalIndex) OnheapIncrementalIndex(org.apache.druid.segment.incremental.OnheapIncrementalIndex) QueryableIndex(org.apache.druid.segment.QueryableIndex) InputRow(org.apache.druid.data.input.InputRow) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) File(java.io.File) Before(org.junit.Before)
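
The makeIncIndex(...) helper is not shown in this excerpt. A hypothetical sketch of what such a helper typically looks like in Druid's groupBy tests, based only on the imports listed above; the metric names and the 1000-row cap are assumptions, not taken from the original test:

import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.incremental.OnheapIncrementalIndex;

// Hypothetical sketch: an on-heap incremental index with a count metric and a
// long sum over "metA". Rollup is passed through from the caller; the tests
// above call makeIncIndex(false), so rows are stored without aggregation.
private IncrementalIndex makeIncIndex(boolean withRollup) {
    return new OnheapIncrementalIndex.Builder()
        .setIndexSchema(
            new IncrementalIndexSchema.Builder()
                .withMetrics(
                    new CountAggregatorFactory("cnt"),
                    new LongSumAggregatorFactory("metASum", "metA")
                )
                .withRollup(withRollup)
                .build()
        )
        .setMaxRowCount(1000)
        .build();
}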

Example 75 with MapBasedInputRow

Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io, from the class GroupByLimitPushDownMultiNodeMergeTest, method setup.

@Before
public void setup() throws Exception {
    tmpDir = FileUtils.createTempDir();
    InputRow row;
    List<String> dimNames = Arrays.asList("dimA", "metA");
    Map<String, Object> event;
    final IncrementalIndex indexA = makeIncIndex(false);
    incrementalIndices.add(indexA);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("metA", 2395L);
    row = new MapBasedInputRow(1505260888888L, dimNames, event);
    indexA.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 8L);
    row = new MapBasedInputRow(1505260800000L, dimNames, event);
    indexA.add(row);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("metA", 5028L);
    row = new MapBasedInputRow(1505264400000L, dimNames, event);
    indexA.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 7L);
    row = new MapBasedInputRow(1505264400400L, dimNames, event);
    indexA.add(row);
    final File fileA = INDEX_MERGER_V9.persist(indexA, new File(tmpDir, "A"), new IndexSpec(), null);
    QueryableIndex qindexA = INDEX_IO.loadIndex(fileA);
    final IncrementalIndex indexB = makeIncIndex(false);
    incrementalIndices.add(indexB);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("metA", 4718L);
    row = new MapBasedInputRow(1505260800000L, dimNames, event);
    indexB.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 18L);
    row = new MapBasedInputRow(1505260800000L, dimNames, event);
    indexB.add(row);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("metA", 2698L);
    row = new MapBasedInputRow(1505264400000L, dimNames, event);
    indexB.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 3L);
    row = new MapBasedInputRow(1505264400000L, dimNames, event);
    indexB.add(row);
    final File fileB = INDEX_MERGER_V9.persist(indexB, new File(tmpDir, "B"), new IndexSpec(), null);
    QueryableIndex qindexB = INDEX_IO.loadIndex(fileB);
    final IncrementalIndex indexC = makeIncIndex(false);
    incrementalIndices.add(indexC);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("metA", 2395L);
    row = new MapBasedInputRow(1505260800000L, dimNames, event);
    indexC.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 8L);
    row = new MapBasedInputRow(1605260800000L, dimNames, event);
    indexC.add(row);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("metA", 5028L);
    row = new MapBasedInputRow(1705264400000L, dimNames, event);
    indexC.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 7L);
    row = new MapBasedInputRow(1805264400000L, dimNames, event);
    indexC.add(row);
    final File fileC = INDEX_MERGER_V9.persist(indexC, new File(tmpDir, "C"), new IndexSpec(), null);
    QueryableIndex qindexC = INDEX_IO.loadIndex(fileC);
    final IncrementalIndex indexD = makeIncIndex(false);
    incrementalIndices.add(indexD);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("metA", 4718L);
    row = new MapBasedInputRow(1505260800000L, dimNames, event);
    indexD.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 18L);
    row = new MapBasedInputRow(1605260800000L, dimNames, event);
    indexD.add(row);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("metA", 2698L);
    row = new MapBasedInputRow(1705264400000L, dimNames, event);
    indexD.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 3L);
    row = new MapBasedInputRow(1805264400000L, dimNames, event);
    indexD.add(row);
    final File fileD = INDEX_MERGER_V9.persist(indexD, new File(tmpDir, "D"), new IndexSpec(), null);
    QueryableIndex qindexD = INDEX_IO.loadIndex(fileD);
    List<String> dimNames2 = Arrays.asList("dimA", "dimB", "metA");
    List<DimensionSchema> dimensions = Arrays.asList(new StringDimensionSchema("dimA"), new StringDimensionSchema("dimB"), new LongDimensionSchema("metA"));
    final IncrementalIndex indexE = makeIncIndex(false, dimensions);
    incrementalIndices.add(indexE);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("dimB", "raw");
    event.put("metA", 5L);
    row = new MapBasedInputRow(1505260800000L, dimNames2, event);
    indexE.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("dimB", "ripe");
    event.put("metA", 9L);
    row = new MapBasedInputRow(1605260800000L, dimNames2, event);
    indexE.add(row);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("dimB", "raw");
    event.put("metA", 3L);
    row = new MapBasedInputRow(1705264400000L, dimNames2, event);
    indexE.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("dimB", "ripe");
    event.put("metA", 7L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexE.add(row);
    event = new HashMap<>();
    event.put("dimA", "grape");
    event.put("dimB", "raw");
    event.put("metA", 5L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexE.add(row);
    event = new HashMap<>();
    event.put("dimA", "apple");
    event.put("dimB", "ripe");
    event.put("metA", 3L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexE.add(row);
    event = new HashMap<>();
    event.put("dimA", "apple");
    event.put("dimB", "raw");
    event.put("metA", 1L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexE.add(row);
    event = new HashMap<>();
    event.put("dimA", "apple");
    event.put("dimB", "ripe");
    event.put("metA", 4L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexE.add(row);
    event = new HashMap<>();
    event.put("dimA", "apple");
    event.put("dimB", "raw");
    event.put("metA", 1L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexE.add(row);
    event = new HashMap<>();
    event.put("dimA", "banana");
    event.put("dimB", "ripe");
    event.put("metA", 4L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexE.add(row);
    event = new HashMap<>();
    event.put("dimA", "orange");
    event.put("dimB", "raw");
    event.put("metA", 9L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexE.add(row);
    event = new HashMap<>();
    event.put("dimA", "peach");
    event.put("dimB", "ripe");
    event.put("metA", 7L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexE.add(row);
    event = new HashMap<>();
    event.put("dimA", "orange");
    event.put("dimB", "raw");
    event.put("metA", 2L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexE.add(row);
    event = new HashMap<>();
    event.put("dimA", "strawberry");
    event.put("dimB", "ripe");
    event.put("metA", 10L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexE.add(row);
    final File fileE = INDEX_MERGER_V9.persist(indexE, new File(tmpDir, "E"), new IndexSpec(), null);
    QueryableIndex qindexE = INDEX_IO.loadIndex(fileE);
    final IncrementalIndex indexF = makeIncIndex(false, dimensions);
    incrementalIndices.add(indexF);
    event = new HashMap<>();
    event.put("dimA", "kiwi");
    event.put("dimB", "raw");
    event.put("metA", 7L);
    row = new MapBasedInputRow(1505260800000L, dimNames2, event);
    indexF.add(row);
    event = new HashMap<>();
    event.put("dimA", "watermelon");
    event.put("dimB", "ripe");
    event.put("metA", 14L);
    row = new MapBasedInputRow(1605260800000L, dimNames2, event);
    indexF.add(row);
    event = new HashMap<>();
    event.put("dimA", "kiwi");
    event.put("dimB", "raw");
    event.put("metA", 8L);
    row = new MapBasedInputRow(1705264400000L, dimNames2, event);
    indexF.add(row);
    event = new HashMap<>();
    event.put("dimA", "kiwi");
    event.put("dimB", "ripe");
    event.put("metA", 8L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexF.add(row);
    event = new HashMap<>();
    event.put("dimA", "lemon");
    event.put("dimB", "raw");
    event.put("metA", 3L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexF.add(row);
    event = new HashMap<>();
    event.put("dimA", "cherry");
    event.put("dimB", "ripe");
    event.put("metA", 2L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexF.add(row);
    event = new HashMap<>();
    event.put("dimA", "cherry");
    event.put("dimB", "raw");
    event.put("metA", 7L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexF.add(row);
    event = new HashMap<>();
    event.put("dimA", "avocado");
    event.put("dimB", "ripe");
    event.put("metA", 12L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexF.add(row);
    event = new HashMap<>();
    event.put("dimA", "cherry");
    event.put("dimB", "raw");
    event.put("metA", 3L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexF.add(row);
    event = new HashMap<>();
    event.put("dimA", "plum");
    event.put("dimB", "ripe");
    event.put("metA", 5L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexF.add(row);
    event = new HashMap<>();
    event.put("dimA", "plum");
    event.put("dimB", "raw");
    event.put("metA", 3L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexF.add(row);
    event = new HashMap<>();
    event.put("dimA", "lime");
    event.put("dimB", "ripe");
    event.put("metA", 7L);
    row = new MapBasedInputRow(1805264400000L, dimNames2, event);
    indexF.add(row);
    final File fileF = INDEX_MERGER_V9.persist(indexF, new File(tmpDir, "F"), new IndexSpec(), null);
    QueryableIndex qindexF = INDEX_IO.loadIndex(fileF);
    groupByIndices = Arrays.asList(qindexA, qindexB, qindexC, qindexD, qindexE, qindexF);
    resourceCloser = Closer.create();
    setupGroupByFactory();
}
Also used : IndexSpec(org.apache.druid.segment.IndexSpec) IncrementalIndex(org.apache.druid.segment.incremental.IncrementalIndex) OnheapIncrementalIndex(org.apache.druid.segment.incremental.OnheapIncrementalIndex) LongDimensionSchema(org.apache.druid.data.input.impl.LongDimensionSchema) StringDimensionSchema(org.apache.druid.data.input.impl.StringDimensionSchema) DimensionSchema(org.apache.druid.data.input.impl.DimensionSchema) QueryableIndex(org.apache.druid.segment.QueryableIndex) InputRow(org.apache.druid.data.input.InputRow) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) File(java.io.File) Before(org.junit.Before)
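
The setup above repeats the same event-map / MapBasedInputRow / add(...) pattern dozens of times. A hypothetical helper (not in the original test) that collapses each four-line group into one call, using only the long-timestamp MapBasedInputRow constructor and IncrementalIndex.add already seen above:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.segment.incremental.IncrementalIndex;

// Hypothetical helper: builds the event map from alternating key/value pairs,
// wraps it in a MapBasedInputRow (epoch-millis constructor), and adds it.
private static void addRow(IncrementalIndex index, long timestampMillis,
                           List<String> dimNames, Object... keyValues) throws Exception {
    Map<String, Object> event = new HashMap<>();
    for (int i = 0; i < keyValues.length; i += 2) {
        event.put((String) keyValues[i], keyValues[i + 1]);
    }
    index.add(new MapBasedInputRow(timestampMillis, dimNames, event));
}

// Usage, replacing the first four lines of the indexA block above:
// addRow(indexA, 1505260888888L, dimNames, "dimA", "pomegranate", "metA", 2395L);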

Aggregations

MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow): 114 usages
Test (org.junit.Test): 77
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 46
IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex): 42
OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex): 38
InputRow (org.apache.druid.data.input.InputRow): 31
File (java.io.File): 24
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 21
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 20
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 19
ArrayList (java.util.ArrayList): 17
HashMap (java.util.HashMap): 15
DateTime (org.joda.time.DateTime): 15
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec): 14
IncrementalIndexTest (org.apache.druid.segment.data.IncrementalIndexTest): 14
Interval (org.joda.time.Interval): 14
IOException (java.io.IOException): 13
DoubleDimensionSchema (org.apache.druid.data.input.impl.DoubleDimensionSchema): 13
IncrementalIndexSchema (org.apache.druid.segment.incremental.IncrementalIndexSchema): 12
ImmutableMap (com.google.common.collect.ImmutableMap): 11