Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
From class StringFirstTimeseriesQueryTest, method setUp:
@Before
public void setUp() throws IndexSizeExceededException
{
  // Register the serde for the stringFirst complex metric before building the index.
  final SerializablePairLongStringSerde serde = new SerializablePairLongStringSerde();
  ComplexMetrics.registerSerde(serde.getTypeName(), serde);

  // Note: withMetrics(...) replaces any previously set metrics, so both factories
  // must be passed in a single call.
  incrementalIndex = new OnheapIncrementalIndex.Builder()
      .setIndexSchema(
          new IncrementalIndexSchema.Builder()
              .withQueryGranularity(Granularities.SECOND)
              .withMetrics(
                  new CountAggregatorFactory("cnt"),
                  new StringFirstAggregatorFactory(FIRST_CLIENT_TYPE, CLIENT_TYPE, null, 1024)
              )
              .build()
      )
      .setMaxRowCount(1000)
      .build();

  // Two rows at TIME1 and one at TIME2; stringFirst keeps the earliest value per bucket.
  incrementalIndex.add(new MapBasedInputRow(
      TIME1,
      Lists.newArrayList(VISITOR_ID, CLIENT_TYPE),
      ImmutableMap.of(VISITOR_ID, "0", CLIENT_TYPE, "iphone")
  ));
  incrementalIndex.add(new MapBasedInputRow(
      TIME1,
      Lists.newArrayList(VISITOR_ID, CLIENT_TYPE),
      ImmutableMap.of(VISITOR_ID, "1", CLIENT_TYPE, "iphone")
  ));
  incrementalIndex.add(new MapBasedInputRow(
      TIME2,
      Lists.newArrayList(VISITOR_ID, CLIENT_TYPE),
      ImmutableMap.of(VISITOR_ID, "0", CLIENT_TYPE, "android")
  ));

  // Persist the in-memory index and memory-map it for querying.
  queryableIndex = TestIndex.persistRealtimeAndLoadMMapped(incrementalIndex);
}
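This setUp relies on constants defined elsewhere in the test class (TIME1, TIME2, VISITOR_ID, CLIENT_TYPE, FIRST_CLIENT_TYPE). A minimal sketch of plausible definitions, with every concrete value an assumption for illustration:

// Hypothetical fixture values; the real test class defines its own.
private static final String VISITOR_ID = "visitor_id";
private static final String CLIENT_TYPE = "client_type";
private static final String FIRST_CLIENT_TYPE = "first_client_type";

// Timestamps one second apart, so SECOND query granularity puts them in separate buckets.
private static final DateTime TIME1 = DateTimes.of("2016-03-04T22:00:00.000Z");
private static final DateTime TIME2 = DateTimes.of("2016-03-04T22:00:01.000Z");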
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
From class StringLastTimeseriesQueryTest, method setUp:
@Before
public void setUp() throws IndexSizeExceededException
{
  // Register the serde for the stringLast complex metric before building the index.
  final SerializablePairLongStringSerde serde = new SerializablePairLongStringSerde();
  ComplexMetrics.registerSerde(serde.getTypeName(), serde);

  // As above, both metrics must go into a single withMetrics(...) call.
  incrementalIndex = new OnheapIncrementalIndex.Builder()
      .setIndexSchema(
          new IncrementalIndexSchema.Builder()
              .withQueryGranularity(Granularities.SECOND)
              .withMetrics(
                  new CountAggregatorFactory("cnt"),
                  new StringLastAggregatorFactory(LAST_CLIENT_TYPE, CLIENT_TYPE, null, 1024)
              )
              .build()
      )
      .setMaxRowCount(1000)
      .build();

  // Two rows at TIME1 and one at TIME2; stringLast keeps the latest value per bucket.
  incrementalIndex.add(new MapBasedInputRow(
      TIME1,
      Lists.newArrayList(VISITOR_ID, CLIENT_TYPE),
      ImmutableMap.of(VISITOR_ID, "0", CLIENT_TYPE, "iphone")
  ));
  incrementalIndex.add(new MapBasedInputRow(
      TIME1,
      Lists.newArrayList(VISITOR_ID, CLIENT_TYPE),
      ImmutableMap.of(VISITOR_ID, "1", CLIENT_TYPE, "iphone")
  ));
  incrementalIndex.add(new MapBasedInputRow(
      TIME2,
      Lists.newArrayList(VISITOR_ID, CLIENT_TYPE),
      ImmutableMap.of(VISITOR_ID, "0", CLIENT_TYPE, "android")
  ));

  queryableIndex = TestIndex.persistRealtimeAndLoadMMapped(incrementalIndex);
}
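As a usage sketch that is not part of the original test: once persisted, the index could be queried for the latest client type with a timeseries query. The datasource name and interval below are assumptions; LAST_CLIENT_TYPE and CLIENT_TYPE are the constants the test already uses:

// Assumed usage sketch built with the standard Druids timeseries builder.
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource("testing")                  // assumed datasource name
    .granularity(Granularities.ALL)
    .intervals("2016-03-04/2016-03-05")     // assumed interval
    .aggregators(ImmutableList.of(new StringLastAggregatorFactory(LAST_CLIENT_TYPE, CLIENT_TYPE, null, 1024)))
    .build();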
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
From class SearchQueryRunnerTest, method testSearchWithNullValueInDimension:
@Test
public void testSearchWithNullValueInDimension() throws Exception
{
  IncrementalIndex index = new OnheapIncrementalIndex.Builder()
      .setIndexSchema(
          new IncrementalIndexSchema.Builder()
              .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis())
              .build()
      )
      .setMaxRowCount(10)
      .build();

  // The first row has no "table" dimension, so its value for that column is null.
  index.add(new MapBasedInputRow(
      1481871600000L,
      Arrays.asList("name", "host"),
      ImmutableMap.of("name", "name1", "host", "host")
  ));
  index.add(new MapBasedInputRow(
      1481871670000L,
      Arrays.asList("name", "table"),
      ImmutableMap.of("name", "name2", "table", "table")
  ));

  SearchQuery searchQuery = Druids.newSearchQueryBuilder()
      .dimensions(new DefaultDimensionSpec("table", "table"))
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .context(ImmutableMap.of("searchStrategy", "cursorOnly"))
      .build();

  QueryRunnerFactory factory = new SearchQueryRunnerFactory(
      SELECTOR,
      TOOL_CHEST,
      QueryRunnerTestHelper.NOOP_QUERYWATCHER
  );
  QueryRunner runner = factory.createRunner(
      new QueryableIndexSegment(TestIndex.persistRealtimeAndLoadMMapped(index), SegmentId.dummy("asdf"))
  );

  List<SearchHit> expectedHits = new ArrayList<>();
  expectedHits.add(new SearchHit("table", "table", 1));
  expectedHits.add(new SearchHit("table", NullHandling.defaultStringValue(), 1));
  checkSearchQuery(searchQuery, runner, expectedHits);
}
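One point worth spelling out: the first ingested row never carries the table dimension, so the cursor-only strategy reads a null for that row's table value. That is why the second expected hit's value is NullHandling.defaultStringValue() rather than a literal: the exact expected string depends on the configured null-handling mode.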
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
From class IncrementalIndexIngestionTest, method testMultithreadAddFacts:
@Test
public void testMultithreadAddFacts() throws Exception
{
  final IncrementalIndex index = indexCreator.createIndex(
      new IncrementalIndexSchema.Builder()
          .withQueryGranularity(Granularities.MINUTE)
          .withMetrics(new LongMaxAggregatorFactory("max", "max"))
          .build()
  );

  // Writer threads: each adds MAX_ROWS / addThreadCount rows with a random "billy"
  // dimension and a constant max = 1, so every row's aggregated value must read back as 1.
  final int addThreadCount = 2;
  Thread[] addThreads = new Thread[addThreadCount];
  for (int i = 0; i < addThreadCount; ++i) {
    addThreads[i] = new Thread(new Runnable()
    {
      @Override
      public void run()
      {
        final Random random = ThreadLocalRandom.current();
        try {
          for (int j = 0; j < MAX_ROWS / addThreadCount; ++j) {
            index.add(new MapBasedInputRow(
                0,
                Collections.singletonList("billy"),
                ImmutableMap.of("billy", random.nextLong(), "max", 1)
            ));
          }
        }
        catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
    });
    addThreads[i].start();
  }

  // Checker thread: concurrently scans every fact row and counts any row whose
  // "max" metric is not 1, which would indicate a partially published row.
  final AtomicInteger checkFailedCount = new AtomicInteger(0);
  Thread checkThread = new Thread(new Runnable()
  {
    @Override
    public void run()
    {
      while (!Thread.interrupted()) {
        for (IncrementalIndexRow row : index.getFacts().keySet()) {
          if (index.getMetricLongValue(row.getRowIndex(), 0) != 1) {
            checkFailedCount.addAndGet(1);
          }
        }
      }
    }
  });
  checkThread.start();

  for (int i = 0; i < addThreadCount; ++i) {
    addThreads[i].join();
  }
  checkThread.interrupt();

  Assert.assertEquals(0, checkFailedCount.get());
}
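The test depends on a MAX_ROWS constant and an indexCreator helper defined elsewhere in the test class. A plausible sketch of the constant (the value is an assumption; it only needs to be large enough to keep the writers busy while the checker scans):

// Hypothetical row budget; the real test class defines its own.
private static final int MAX_ROWS = 100_000;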
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
From class IncrementalIndexIngestionTest, method testOnHeapIncrementalIndexClose:
@Test
public void testOnHeapIncrementalIndexClose() throws Exception
{
  // Prepare the mock & set the close() call-count expectation to 1
  Aggregator mockedAggregator = EasyMock.createMock(LongMaxAggregator.class);
  mockedAggregator.close();
  EasyMock.expectLastCall().times(1);

  final IncrementalIndex genericIndex = indexCreator.createIndex(
      new IncrementalIndexSchema.Builder()
          .withQueryGranularity(Granularities.MINUTE)
          .withMetrics(new LongMaxAggregatorFactory("max", "max"))
          .build()
  );

  // This test is specific to the on-heap index
  if (!(genericIndex instanceof OnheapIncrementalIndex)) {
    return;
  }
  final OnheapIncrementalIndex index = (OnheapIncrementalIndex) genericIndex;

  index.add(new MapBasedInputRow(
      0,
      Collections.singletonList("billy"),
      ImmutableMap.of("billy", 1, "max", 1)
  ));

  // Override the row's aggregators with the mock
  index.concurrentGet(0)[0] = mockedAggregator;

  // Close the index and validate the expectations
  EasyMock.replay(mockedAggregator);
  index.close();
  EasyMock.verify(mockedAggregator);
}
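A note on the override step: OnheapIncrementalIndex keeps each row's Aggregator[] on the JVM heap, and concurrentGet(offset) exposes that array directly, which is what lets the test swap in the EasyMock mock before calling close(). The verification then confirms that closing the index closes each per-row aggregator exactly once.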