Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
From class IncrementalIndexStorageAdapterTest, method testResetSanity:
@Test
public void testResetSanity() throws IOException {
  IncrementalIndex index = indexCreator.createIndex();
  DateTime t = DateTimes.nowUtc();
  Interval interval = new Interval(t.minusMinutes(1), t.plusMinutes(1));
  index.add(new MapBasedInputRow(t.minus(1).getMillis(), Collections.singletonList("billy"), ImmutableMap.of("billy", "hi")));
  index.add(new MapBasedInputRow(t.minus(1).getMillis(), Collections.singletonList("sally"), ImmutableMap.of("sally", "bo")));
  IncrementalIndexStorageAdapter adapter = new IncrementalIndexStorageAdapter(index);
  for (boolean descending : Arrays.asList(false, true)) {
    Sequence<Cursor> cursorSequence = adapter.makeCursors(
        new SelectorFilter("sally", "bo"),
        interval,
        VirtualColumns.EMPTY,
        Granularities.NONE,
        descending,
        null
    );
    Cursor cursor = cursorSequence.limit(1).toList().get(0);
    DimensionSelector dimSelector;
    dimSelector = cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("sally", "sally"));
    Assert.assertEquals("bo", dimSelector.lookupName(dimSelector.getRow().get(0)));
    index.add(new MapBasedInputRow(t.minus(1).getMillis(), Collections.singletonList("sally"), ImmutableMap.of("sally", "ah")));
    // Cursor reset should not be affected by out of order values
    cursor.reset();
    dimSelector = cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("sally", "sally"));
    Assert.assertEquals("bo", dimSelector.lookupName(dimSelector.getRow().get(0)));
  }
}
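For readers unfamiliar with the row type used throughout these tests: MapBasedInputRow simply wraps an epoch timestamp, an ordered list of dimension names, and the raw event map. A minimal standalone sketch of constructing and reading one back; the class name and the values here are illustrative, not taken from the test above:

import com.google.common.collect.ImmutableMap;
import java.util.Collections;
import org.apache.druid.data.input.MapBasedInputRow;

public class MapBasedInputRowSketch {
  public static void main(String[] args) {
    // Illustrative values only; any single-dimension event works the same way.
    MapBasedInputRow row = new MapBasedInputRow(
        System.currentTimeMillis(),           // event timestamp in millis
        Collections.singletonList("billy"),   // ordered dimension names
        ImmutableMap.of("billy", "hi")        // raw event map
    );
    // The wrapped pieces are readable back through the InputRow interface.
    System.out.println(row.getTimestampFromEpoch()); // the timestamp passed in
    System.out.println(row.getDimension("billy"));   // [hi]
    System.out.println(row.getRaw("billy"));         // hi
  }
}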
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
From class IncrementalIndexStorageAdapterTest, method testCursorDictionaryRaceConditionFix:
@Test
public void testCursorDictionaryRaceConditionFix() throws Exception {
  // Tests the dictionary ID race condition bug described at https://github.com/apache/druid/pull/6340
  final IncrementalIndex index = indexCreator.createIndex();
  final long timestamp = System.currentTimeMillis();
  for (int i = 0; i < 5; i++) {
    index.add(new MapBasedInputRow(timestamp, Collections.singletonList("billy"), ImmutableMap.of("billy", "v1" + i)));
  }
  final StorageAdapter sa = new IncrementalIndexStorageAdapter(index);
  Sequence<Cursor> cursors = sa.makeCursors(
      new DictionaryRaceTestFilter(index, timestamp),
      Intervals.utc(timestamp - 60_000, timestamp + 60_000),
      VirtualColumns.EMPTY,
      Granularities.ALL,
      false,
      null
  );
  final AtomicInteger assertCursorsNotEmpty = new AtomicInteger(0);
  cursors.map(cursor -> {
    DimensionSelector dimSelector = cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("billy", "billy"));
    int cardinality = dimSelector.getValueCardinality();
    int rowNumInCursor = 0;
    while (!cursor.isDone()) {
      IndexedInts row = dimSelector.getRow();
      row.forEach(i -> Assert.assertTrue(i < cardinality));
      cursor.advance();
      rowNumInCursor++;
    }
    Assert.assertEquals(5, rowNumInCursor);
    assertCursorsNotEmpty.incrementAndGet();
    return null;
  }).toList();
  Assert.assertEquals(1, assertCursorsNotEmpty.get());
}
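The while loop above is the assertion at the heart of the race-condition fix: the cardinality snapshot is taken before iteration, and every dictionary id produced while cursoring must stay below that snapshot even if another thread grows the dictionary. A hedged sketch of the same drain pattern pulled out into a helper; the class and method names are illustrative, not Druid API, and only the Cursor, DimensionSelector, and IndexedInts calls already used above are assumed:

import org.apache.druid.segment.Cursor;
import org.apache.druid.segment.DimensionSelector;
import org.apache.druid.segment.data.IndexedInts;

final class CursorWalker {
  private CursorWalker() {}

  // Walks the cursor to completion, checking every dictionary id against a
  // cardinality snapshot taken before iteration; returns the number of rows seen.
  static int drain(Cursor cursor, DimensionSelector selector, int cardinalitySnapshot) {
    int rows = 0;
    while (!cursor.isDone()) {
      IndexedInts ids = selector.getRow();
      ids.forEach(id -> {
        if (id >= cardinalitySnapshot) {
          throw new IllegalStateException("dictionary id " + id + " escaped the snapshot");
        }
      });
      cursor.advance();
      rows++;
    }
    return rows;
  }
}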
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
From class IncrementalIndexStorageAdapterTest, method testCursoringAndIndexUpdationInterleaving:
@Test
public void testCursoringAndIndexUpdationInterleaving() throws Exception {
  final IncrementalIndex index = indexCreator.createIndex();
  final long timestamp = System.currentTimeMillis();
  for (int i = 0; i < 2; i++) {
    index.add(new MapBasedInputRow(timestamp, Collections.singletonList("billy"), ImmutableMap.of("billy", "v1" + i)));
  }
  final StorageAdapter sa = new IncrementalIndexStorageAdapter(index);
  Sequence<Cursor> cursors = sa.makeCursors(
      null,
      Intervals.utc(timestamp - 60_000, timestamp + 60_000),
      VirtualColumns.EMPTY,
      Granularities.ALL,
      false,
      null
  );
  final AtomicInteger assertCursorsNotEmpty = new AtomicInteger(0);
  cursors.map(cursor -> {
    DimensionSelector dimSelector = cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("billy", "billy"));
    int cardinality = dimSelector.getValueCardinality();
    // the index gets more rows at this point, while the other thread is iterating over the cursor
    try {
      for (int i = 0; i < 1; i++) {
        index.add(new MapBasedInputRow(timestamp, Collections.singletonList("billy"), ImmutableMap.of("billy", "v2" + i)));
      }
    } catch (Exception ex) {
      throw new RuntimeException(ex);
    }
    int rowNumInCursor = 0;
    // and then cursoring continues in the other thread
    while (!cursor.isDone()) {
      IndexedInts row = dimSelector.getRow();
      row.forEach(i -> Assert.assertTrue(i < cardinality));
      cursor.advance();
      rowNumInCursor++;
    }
    Assert.assertEquals(2, rowNumInCursor);
    assertCursorsNotEmpty.incrementAndGet();
    return null;
  }).toList();
  Assert.assertEquals(1, assertCursorsNotEmpty.get());
}
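The same add-loop recurs in several of these tests, so it could be factored into a small helper. A sketch assuming nothing beyond the IncrementalIndex.add and MapBasedInputRow calls already used above; the helper class itself is illustrative and not part of the test class:

import com.google.common.collect.ImmutableMap;
import java.util.Collections;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.segment.incremental.IncrementalIndex;

final class IndexRows {
  private IndexRows() {}

  // Appends `count` rows with a single string dimension whose values are prefix + 0 .. prefix + (count - 1).
  static void addRows(IncrementalIndex index, long timestamp, String dim, String prefix, int count) throws Exception {
    for (int i = 0; i < count; i++) {
      index.add(new MapBasedInputRow(timestamp, Collections.singletonList(dim), ImmutableMap.of(dim, prefix + i)));
    }
  }
}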
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
From class IncrementalIndexStorageAdapterTest, method testCursoringAndSnapshot:
@Test
public void testCursoringAndSnapshot() throws Exception {
  final IncrementalIndex index = indexCreator.createIndex();
  final long timestamp = System.currentTimeMillis();
  for (int i = 0; i < 2; i++) {
    index.add(new MapBasedInputRow(timestamp, Collections.singletonList("billy"), ImmutableMap.of("billy", "v0" + i)));
  }
  final StorageAdapter sa = new IncrementalIndexStorageAdapter(index);
  Sequence<Cursor> cursors = sa.makeCursors(
      null,
      Intervals.utc(timestamp - 60_000, timestamp + 60_000),
      VirtualColumns.EMPTY,
      Granularities.ALL,
      false,
      null
  );
  final AtomicInteger assertCursorsNotEmpty = new AtomicInteger(0);
  cursors.map(cursor -> {
    DimensionSelector dimSelector1A = cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("billy", "billy"));
    int cardinalityA = dimSelector1A.getValueCardinality();
    // the index gets more rows at this point, while the other thread is iterating over the cursor
    try {
      index.add(new MapBasedInputRow(timestamp, Collections.singletonList("billy"), ImmutableMap.of("billy", "v1")));
    } catch (Exception ex) {
      throw new RuntimeException(ex);
    }
    DimensionSelector dimSelector1B = cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("billy", "billy"));
    // the index gets more rows at this point, while the other thread is iterating over the cursor
    try {
      index.add(new MapBasedInputRow(timestamp, Collections.singletonList("billy"), ImmutableMap.of("billy", "v2")));
      index.add(new MapBasedInputRow(timestamp, Collections.singletonList("billy2"), ImmutableMap.of("billy2", "v3")));
    } catch (Exception ex) {
      throw new RuntimeException(ex);
    }
    DimensionSelector dimSelector1C = cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("billy", "billy"));
    DimensionSelector dimSelector2D = cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("billy2", "billy2"));
    // the index gets more rows at this point, while the other thread is iterating over the cursor
    try {
      index.add(new MapBasedInputRow(timestamp, Collections.singletonList("billy"), ImmutableMap.of("billy", "v3")));
      index.add(new MapBasedInputRow(timestamp, Collections.singletonList("billy3"), ImmutableMap.of("billy3", "")));
    } catch (Exception ex) {
      throw new RuntimeException(ex);
    }
    DimensionSelector dimSelector3E = cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("billy3", "billy3"));
    int rowNumInCursor = 0;
    // and then cursoring continues in the other thread
    while (!cursor.isDone()) {
      IndexedInts rowA = dimSelector1A.getRow();
      rowA.forEach(i -> Assert.assertTrue(i < cardinalityA));
      IndexedInts rowB = dimSelector1B.getRow();
      rowB.forEach(i -> Assert.assertTrue(i < cardinalityA));
      IndexedInts rowC = dimSelector1C.getRow();
      rowC.forEach(i -> Assert.assertTrue(i < cardinalityA));
      IndexedInts rowD = dimSelector2D.getRow();
      // no null id, so we should get an empty dims array
      Assert.assertEquals(0, rowD.size());
      IndexedInts rowE = dimSelector3E.getRow();
      if (NullHandling.replaceWithDefault()) {
        Assert.assertEquals(1, rowE.size());
        // the null id
        Assert.assertEquals(0, rowE.get(0));
      } else {
        Assert.assertEquals(0, rowE.size());
      }
      cursor.advance();
      rowNumInCursor++;
    }
    Assert.assertEquals(2, rowNumInCursor);
    assertCursorsNotEmpty.incrementAndGet();
    return null;
  }).toList();
  Assert.assertEquals(1, assertCursorsNotEmpty.get());
}
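The dictionary ids returned by getRow() can be mapped back to strings with DimensionSelector.lookupName, the same call testResetSanity uses; for a selector over a dimension the cursor's rows do not contain, the id list is empty, or holds only the null id, depending on null handling, as the assertions above show. A small illustrative sketch of that mapping; the helper class name is not Druid API, only the selector and IndexedInts calls already shown are assumed:

import java.util.ArrayList;
import java.util.List;
import org.apache.druid.segment.DimensionSelector;
import org.apache.druid.segment.data.IndexedInts;

final class SelectorValues {
  private SelectorValues() {}

  // Converts the selector's current row of dictionary ids into the dimension's string values.
  static List<String> currentRowValues(DimensionSelector selector) {
    IndexedInts ids = selector.getRow();
    List<String> values = new ArrayList<>(ids.size());
    for (int i = 0; i < ids.size(); i++) {
      values.add(selector.lookupName(ids.get(i))); // may be null for the null id
    }
    return values;
  }
}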
Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.
From class ApproximateHistogramFoldingSerdeTest, method testExtractor:
@Test
public void testExtractor() {
  final ApproximateHistogramFoldingSerde serde = new ApproximateHistogramFoldingSerde();
  final ComplexMetricExtractor extractor = serde.getExtractor();
  final Map<String, Object> theMap = new HashMap<>();
  theMap.put("nullValue", null);
  theMap.put("listValue", ImmutableList.of("1.0", 2, 3.0));
  theMap.put("stringValue", "1.0");
  theMap.put("numberValue", 1.0);
  final MapBasedInputRow row = new MapBasedInputRow(0L, ImmutableList.of(), theMap);
  Assert.assertEquals("nullValue", new ApproximateHistogram(0), extractor.extractValue(row, "nullValue"));
  Assert.assertEquals("missingValue", new ApproximateHistogram(0), extractor.extractValue(row, "missingValue"));
  Assert.assertEquals("listValue", makeHistogram(1, 2, 3), extractor.extractValue(row, "listValue"));
  Assert.assertEquals("stringValue", makeHistogram(1), extractor.extractValue(row, "stringValue"));
  Assert.assertEquals("numberValue", makeHistogram(1), extractor.extractValue(row, "numberValue"));
}
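makeHistogram is a private helper of the test class that is not shown here. A plausible sketch of what it does, assuming ApproximateHistogram's no-argument constructor and its offer(float) method; this is an illustration of the expected behavior, not the verbatim helper:

// Hypothetical reconstruction of the test-class helper, sitting inside the test class.
private static ApproximateHistogram makeHistogram(float... values) {
  ApproximateHistogram histogram = new ApproximateHistogram();
  for (float v : values) {
    histogram.offer(v); // fold each value into the histogram
  }
  return histogram;
}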