
Example 21 with MapBasedInputRow

Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.

From class IncrementalIndexTest, method testDuplicateDimensions.

@Test(expected = ISE.class)
public void testDuplicateDimensions() throws IndexSizeExceededException {
    IncrementalIndex index = closer.closeLater(indexCreator.createIndex());
    index.add(new MapBasedInputRow(new DateTime().minus(1).getMillis(), Lists.newArrayList("billy", "joe"),
        ImmutableMap.<String, Object>of("billy", "A", "joe", "B")));
    // The second row lists "joe" twice among its dimension names, which triggers the expected ISE.
    index.add(new MapBasedInputRow(new DateTime().minus(1).getMillis(), Lists.newArrayList("billy", "joe", "joe"),
        ImmutableMap.<String, Object>of("billy", "A", "joe", "B")));
}
Also used : MapBasedInputRow(io.druid.data.input.MapBasedInputRow) DateTime(org.joda.time.DateTime) Test(org.junit.Test)
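
For readers new to this API, here is a minimal, self-contained sketch (not taken from the Druid test suite; the class name MapBasedInputRowSketch is illustrative) showing the same three-argument constructor used above (timestamp millis, dimension name list, event map) and how values read back through the InputRow interface:

import com.google.common.collect.ImmutableMap;
import io.druid.data.input.MapBasedInputRow;
import java.util.Arrays;

public class MapBasedInputRowSketch {
    public static void main(String[] args) {
        // Constructor arguments mirror the tests above:
        // timestamp in millis, the ordered list of dimension names, and the event map.
        MapBasedInputRow row = new MapBasedInputRow(
                1481871600000L,
                Arrays.asList("billy", "joe"),
                ImmutableMap.<String, Object>of("billy", "A", "joe", "B"));
        System.out.println(row.getTimestampFromEpoch()); // 1481871600000
        System.out.println(row.getDimensions());         // [billy, joe]
        System.out.println(row.getDimension("billy"));   // [A]
        System.out.println(row.getRaw("joe"));           // B
    }
}

Repeating a dimension name in the list, as the second add in the test above does, is what the index rejects with an ISE.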

Example 22 with MapBasedInputRow

Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.

From class IncrementalIndexTest, method testNullDimensionTransform.

@Test
public void testNullDimensionTransform() throws IndexSizeExceededException {
    IncrementalIndex<?> index = closer.closeLater(indexCreator.createIndex());
    index.add(new MapBasedInputRow(new DateTime().minus(1).getMillis(),
        Lists.newArrayList("string", "float", "long"),
        ImmutableMap.<String, Object>of(
            "string", Arrays.asList("A", null, ""),
            "float", Arrays.asList(Float.MAX_VALUE, null, ""),
            "long", Arrays.asList(Long.MIN_VALUE, null, ""))));
    Row row = index.iterator().next();
    // Nulls and empty strings read back normalized to "", and the multi-value lists come back sorted.
    Assert.assertEquals(Arrays.asList(new String[]{"", "", "A"}), row.getRaw("string"));
    Assert.assertEquals(Arrays.asList(new String[]{"", "", String.valueOf(Float.MAX_VALUE)}), row.getRaw("float"));
    Assert.assertEquals(Arrays.asList(new String[]{"", "", String.valueOf(Long.MIN_VALUE)}), row.getRaw("long"));
}
Also used : MapBasedInputRow(io.druid.data.input.MapBasedInputRow) Row(io.druid.data.input.Row) DateTime(org.joda.time.DateTime) Test(org.junit.Test)
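
As a small follow-up sketch (illustrative, not from the test suite; the class name is hypothetical), the raw MapBasedInputRow simply returns whatever the event map contains, so the normalization asserted above happens inside the IncrementalIndex, not in the row itself:

import com.google.common.collect.ImmutableMap;
import io.druid.data.input.MapBasedInputRow;
import java.util.Arrays;

public class NullDimensionSketch {
    public static void main(String[] args) {
        MapBasedInputRow raw = new MapBasedInputRow(
                System.currentTimeMillis(),
                Arrays.asList("string"),
                ImmutableMap.<String, Object>of("string", Arrays.asList("A", null, "")));
        // getRaw returns the untouched event value, nulls and all.
        System.out.println(raw.getRaw("string")); // [A, null, ]
        // Only after index.add(...) do the assertions above see ["", "", "A"]:
        // nulls and empty strings normalized to "" and the multi-value list sorted.
    }
}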

Example 23 with MapBasedInputRow

Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.

From class OnheapIncrementalIndexTest, method testMultithreadAddFacts.

@Test
public void testMultithreadAddFacts() throws Exception {
    // Index is capped at MAX_ROWS and carries a single long-max aggregator over the "max" column.
    final OnheapIncrementalIndex index = new OnheapIncrementalIndex(
        0, Granularities.MINUTE, new AggregatorFactory[]{new LongMaxAggregatorFactory("max", "max")}, MAX_ROWS);
    final Random random = new Random();
    final int addThreadCount = 2;
    Thread[] addThreads = new Thread[addThreadCount];
    // Writer threads: each one adds MAX_ROWS / addThreadCount rows concurrently.
    for (int i = 0; i < addThreadCount; ++i) {
        addThreads[i] = new Thread(new Runnable() {

            @Override
            public void run() {
                try {
                    for (int j = 0; j < MAX_ROWS / addThreadCount; ++j) {
                        index.add(new MapBasedInputRow(0, Lists.newArrayList("billy"),
                            ImmutableMap.<String, Object>of("billy", random.nextLong(), "max", 1)));
                    }
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
        });
        addThreads[i].start();
    }
    final AtomicInteger checkFailedCount = new AtomicInteger(0);
    // Reader thread: repeatedly scans the facts table and counts any row whose
    // aggregated "max" value is not 1, which would indicate a lost update.
    Thread checkThread = new Thread(new Runnable() {

        @Override
        public void run() {
            while (!Thread.interrupted()) {
                for (Map.Entry<IncrementalIndex.TimeAndDims, Integer> row : index.getFacts().entrySet()) {
                    if (index.getMetricLongValue(row.getValue(), 0) != 1) {
                        checkFailedCount.addAndGet(1);
                    }
                }
            }
        }
    });
    checkThread.start();
    for (int i = 0; i < addThreadCount; ++i) {
        addThreads[i].join();
    }
    checkThread.interrupt();
    Assert.assertEquals(0, checkFailedCount.get());
}
Also used : Random(java.util.Random) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) MapBasedInputRow(io.druid.data.input.MapBasedInputRow) LongMaxAggregatorFactory(io.druid.query.aggregation.LongMaxAggregatorFactory) Test(org.junit.Test)
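
A single-threaded companion sketch of the same pattern may make the check thread easier to follow. This is an assumption-laden sketch: the Granularities import below is a guess (the snippets on this page do not show import statements), the class name is illustrative, and the row cap of 1000 is arbitrary:

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import io.druid.data.input.MapBasedInputRow;
import io.druid.java.util.common.granularity.Granularities; // assumed package
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongMaxAggregatorFactory;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.OnheapIncrementalIndex;
import java.util.Map;

public class AddFactsSketch {
    public static void main(String[] args) throws Exception {
        // Same constructor form as the test above: min timestamp, granularity,
        // aggregators, and a maximum row count.
        OnheapIncrementalIndex index = new OnheapIncrementalIndex(
                0,
                Granularities.MINUTE,
                new AggregatorFactory[]{new LongMaxAggregatorFactory("max", "max")},
                1000);
        index.add(new MapBasedInputRow(0, Lists.newArrayList("billy"),
                ImmutableMap.<String, Object>of("billy", 42L, "max", 1)));
        // The check thread in the test walks the facts table the same way and expects
        // every row's only aggregator (index 0) to hold the value 1.
        for (Map.Entry<IncrementalIndex.TimeAndDims, Integer> row : index.getFacts().entrySet()) {
            System.out.println(index.getMetricLongValue(row.getValue(), 0)); // 1
        }
        index.close();
    }
}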

Example 24 with MapBasedInputRow

Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.

From class IncrementalIndexTest, method populateIndex.

public static void populateIndex(long timestamp, IncrementalIndex index) throws IndexSizeExceededException {
    index.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"),
        ImmutableMap.<String, Object>of("dim1", "1", "dim2", "2")));
    index.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"),
        ImmutableMap.<String, Object>of("dim1", "3", "dim2", "4")));
}
Also used : MapBasedInputRow(io.druid.data.input.MapBasedInputRow)
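
A hypothetical caller of this helper might look like the sketch below (illustrative only; the index is built with the schema-based constructor shown in Example 25, and the Granularities import is assumed since imports are not shown in the snippets above):

import io.druid.java.util.common.granularity.Granularities; // assumed package
import io.druid.query.aggregation.Aggregator;
import io.druid.segment.data.IncrementalIndexTest;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.IncrementalIndexSchema;
import io.druid.segment.incremental.OnheapIncrementalIndex;

public class PopulateIndexSketch {
    public static void main(String[] args) throws Exception {
        IncrementalIndex<Aggregator> index = new OnheapIncrementalIndex(
                new IncrementalIndexSchema.Builder().withQueryGranularity(Granularities.NONE).build(),
                true,
                10);
        // The helper adds two distinct rows ({dim1=1, dim2=2} and {dim1=3, dim2=4}) at this timestamp.
        IncrementalIndexTest.populateIndex(1481871600000L, index);
        System.out.println(index.size()); // 2
        index.close();
    }
}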

Example 25 with MapBasedInputRow

Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.

From class IncrementalIndexTest, method testDynamicSchemaRollup.

@Test
public void testDynamicSchemaRollup() throws IndexSizeExceededException {
    IncrementalIndex<Aggregator> index = new OnheapIncrementalIndex(
        new IncrementalIndexSchema.Builder().withQueryGranularity(Granularities.NONE).build(), true, 10);
    closer.closeLater(index);
    index.add(new MapBasedInputRow(1481871600000L, Arrays.asList("name", "host"),
        ImmutableMap.<String, Object>of("name", "name1", "host", "host")));
    index.add(new MapBasedInputRow(1481871670000L, Arrays.asList("name", "table"),
        ImmutableMap.<String, Object>of("name", "name2", "table", "table")));
    index.add(new MapBasedInputRow(1481871600000L, Arrays.asList("name", "host"),
        ImmutableMap.<String, Object>of("name", "name1", "host", "host")));
    // The first and third rows are identical, so they roll up into a single stored row.
    Assert.assertEquals(2, index.size());
}
Also used : OnheapIncrementalIndex(io.druid.segment.incremental.OnheapIncrementalIndex) Aggregator(io.druid.query.aggregation.Aggregator) MapBasedInputRow(io.druid.data.input.MapBasedInputRow) IncrementalIndexSchema(io.druid.segment.incremental.IncrementalIndexSchema) Test(org.junit.Test)
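
As an illustrative follow-up (not part of the original test), iterating the index after the three adds would show the two surviving rows, since the first and third rows were identical and rolled up into one. This fragment assumes the Row import seen in Example 22 and is meant to sit inside the test body:

        // Appended inside testDynamicSchemaRollup, after the three index.add(...) calls above.
        for (Row row : index) {
            // Two lines print: one row for name1/host and one for name2/table.
            System.out.println(row.getTimestampFromEpoch() + " name=" + row.getRaw("name"));
        }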

Aggregations

MapBasedInputRow (io.druid.data.input.MapBasedInputRow): 73
Test (org.junit.Test): 51
DateTime (org.joda.time.DateTime): 38
OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex): 32
IncrementalIndex (io.druid.segment.incremental.IncrementalIndex): 30
File (java.io.File): 19
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 13
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 12
InputRow (io.druid.data.input.InputRow): 11
IncrementalIndexTest (io.druid.segment.data.IncrementalIndexTest): 11
Interval (org.joda.time.Interval): 11
IOException (java.io.IOException): 10
DimensionsSpec (io.druid.data.input.impl.DimensionsSpec): 9
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 8
Row (io.druid.data.input.Row): 7
TaskStatus (io.druid.indexing.common.TaskStatus): 7
TaskToolbox (io.druid.indexing.common.TaskToolbox): 7
TestIndexerMetadataStorageCoordinator (io.druid.indexing.test.TestIndexerMetadataStorageCoordinator): 7
SpatialDimensionSchema (io.druid.data.input.impl.SpatialDimensionSchema): 6
Pair (io.druid.java.util.common.Pair): 6