use of org.apache.druid.query.aggregation.SerializablePairLongString in project druid by druid-io.
the class StringLastAggregator method aggregate.
@Override
public void aggregate() {
  if (timeSelector.isNull()) {
    return;
  }

  if (needsFoldCheck) {
    // Less efficient code path when folding is a possibility (we must read the value selector first just in case
    // it's a foldable object).
    final SerializablePairLongString inPair = StringFirstLastUtils.readPairFromSelectors(timeSelector, valueSelector);
    if (inPair != null && inPair.lhs >= lastTime) {
      lastTime = inPair.lhs;
      lastValue = StringUtils.fastLooseChop(inPair.rhs, maxStringBytes);
    }
  } else {
    final long time = timeSelector.getLong();
    if (time >= lastTime) {
      final String value = DimensionHandlerUtils.convertObjectToString(valueSelector.getObject());
      lastTime = time;
      lastValue = StringUtils.fastLooseChop(value, maxStringBytes);
    }
  }
}
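The method above reads and mutates aggregator state (lastTime, lastValue) and branches on needsFoldCheck, none of which is declared in the excerpt. A minimal sketch of how that state could be declared and initialized, assuming constructor parameters matching the names used in the method (the DateTimes.MIN sentinel for lastTime is an assumption):

// Plausible surrounding state for StringLastAggregator; not part of the excerpt above.
private final BaseLongColumnValueSelector timeSelector;
private final BaseObjectColumnValueSelector<?> valueSelector;
private final int maxStringBytes;
private final boolean needsFoldCheck;

private long lastTime;
private String lastValue;

public StringLastAggregator(BaseLongColumnValueSelector timeSelector, BaseObjectColumnValueSelector<?> valueSelector, int maxStringBytes, boolean needsFoldCheck) {
  this.timeSelector = timeSelector;
  this.valueSelector = valueSelector;
  this.maxStringBytes = maxStringBytes;
  this.needsFoldCheck = needsFoldCheck;

  // Assumption: start from the minimum representable time so any observed row replaces the initial value.
  lastTime = DateTimes.MIN.getMillis();
  lastValue = null;
}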
use of org.apache.druid.query.aggregation.SerializablePairLongString in project druid by druid-io.
the class StringFirstBufferAggregatorTest method testBufferAggregate.
@Test
public void testBufferAggregate() {
  final long[] timestamps = {1526724600L, 1526724700L, 1526724800L, 1526725900L, 1526725000L};
  final String[] strings = {"AAAA", "BBBB", "CCCC", "DDDD", "EEEE"};
  Integer maxStringBytes = 1024;

  TestLongColumnSelector longColumnSelector = new TestLongColumnSelector(timestamps);
  TestObjectColumnSelector<String> objectColumnSelector = new TestObjectColumnSelector<>(strings);

  StringFirstAggregatorFactory factory = new StringFirstAggregatorFactory("billy", "billy", null, maxStringBytes);
  StringFirstBufferAggregator agg = new StringFirstBufferAggregator(longColumnSelector, objectColumnSelector, maxStringBytes, false);

  ByteBuffer buf = ByteBuffer.allocate(factory.getMaxIntermediateSize());
  int position = 0;

  agg.init(buf, position);
  //noinspection ForLoopReplaceableByForEach
  for (int i = 0; i < timestamps.length; i++) {
    aggregateBuffer(longColumnSelector, objectColumnSelector, agg, buf, position);
  }

  SerializablePairLongString sp = (SerializablePairLongString) agg.get(buf, position);

  Assert.assertEquals("expected first string value", strings[0], sp.rhs);
  Assert.assertEquals("first string timestamp is the smallest", Long.valueOf(timestamps[0]), sp.lhs);
}
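The aggregateBuffer(...) helper used in the loop is not part of the excerpt. A plausible sketch, assuming the test selectors expose an increment() method that advances them to the next row:

// Hypothetical helper: apply the current row to the buffered aggregation state, then move both selectors forward.
private void aggregateBuffer(TestLongColumnSelector timeSelector, TestObjectColumnSelector<String> valueSelector, StringFirstBufferAggregator agg, ByteBuffer buf, int position) {
  agg.aggregate(buf, position);
  timeSelector.increment();
  valueSelector.increment();
}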
use of org.apache.druid.query.aggregation.SerializablePairLongString in project druid by druid-io.
the class StringFirstLastUtilsTest method testWritePairThenReadPairAtMiddleBuffer.
@Test
public void testWritePairThenReadPairAtMiddleBuffer() {
  int positionAtMiddle = 60;

  ByteBuffer buf = ByteBuffer.allocate(BUFFER_CAPACITY);
  StringFirstLastUtils.writePair(buf, positionAtMiddle, PAIR_TO_WRITE, MAX_BYTE_TO_WRITE);

  SerializablePairLongString actual = StringFirstLastUtils.readPair(buf, positionAtMiddle);
  Assert.assertEquals(PAIR_TO_WRITE, actual);
}
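BUFFER_CAPACITY, PAIR_TO_WRITE, and MAX_BYTE_TO_WRITE are test fixtures defined outside the excerpt. Example values like the following would make it self-contained; the exact numbers and pair contents are assumptions:

// Hypothetical fixture constants: any capacity large enough to hold the serialized pair at offset 60 works.
private static final int BUFFER_CAPACITY = 1024;
private static final int MAX_BYTE_TO_WRITE = 1024;
private static final SerializablePairLongString PAIR_TO_WRITE = new SerializablePairLongString(1526724600L, "AAAA");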
use of org.apache.druid.query.aggregation.SerializablePairLongString in project druid by druid-io.
the class StringLastAggregationTest method testCombine.
@Test
public void testCombine() {
  SerializablePairLongString pair1 = new SerializablePairLongString(1467225000L, "AAAA");
  SerializablePairLongString pair2 = new SerializablePairLongString(1467240000L, "BBBB");

  Assert.assertEquals(pair2, stringLastAggFactory.combine(pair1, pair2));
}
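combine(...) keeps the pair with the greater timestamp, so pair2 is the expected result. The stringLastAggFactory field comes from test setup that is not shown; a minimal sketch, where the aggregator name, field name, and byte limit are assumptions:

// Hypothetical setup for the combine test above.
private final StringLastAggregatorFactory stringLastAggFactory = new StringLastAggregatorFactory("stringLast", "stringCol", null, 1024);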
use of org.apache.druid.query.aggregation.SerializablePairLongString in project druid by druid-io.
the class StringLastTimeseriesQueryTest method testTimeseriesQuery.
@Test
public void testTimeseriesQuery() {
  TimeseriesQueryEngine engine = new TimeseriesQueryEngine();

  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
                                .granularity(QueryRunnerTestHelper.ALL_GRAN)
                                .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
                                .aggregators(
                                    ImmutableList.of(
                                        new StringLastAggregatorFactory("nonfolding", CLIENT_TYPE, null, 1024),
                                        new StringLastAggregatorFactory("folding", LAST_CLIENT_TYPE, null, 1024),
                                        new StringLastAggregatorFactory("nonexistent", "nonexistent", null, 1024),
                                        new StringLastAggregatorFactory("numeric", "cnt", null, 1024)
                                    )
                                )
                                .build();

  List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(
      new Result<>(
          TIME1,
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder()
                          .put("nonfolding", new SerializablePairLongString(TIME2.getMillis(), "android"))
                          .put("folding", new SerializablePairLongString(TIME2.getMillis(), "android"))
                          .put("nonexistent", new SerializablePairLongString(DateTimes.MIN.getMillis(), null))
                          .put("numeric", new SerializablePairLongString(DateTimes.MIN.getMillis(), null))
                          .build()
          )
      )
  );

  final Iterable<Result<TimeseriesResultValue>> iiResults =
      engine.process(query, new IncrementalIndexStorageAdapter(incrementalIndex)).toList();
  final Iterable<Result<TimeseriesResultValue>> qiResults =
      engine.process(query, new QueryableIndexStorageAdapter(queryableIndex)).toList();

  TestHelper.assertExpectedResults(expectedResults, iiResults, "incremental index");
  TestHelper.assertExpectedResults(expectedResults, qiResults, "queryable index");
}
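The test relies on fixtures (CLIENT_TYPE, LAST_CLIENT_TYPE, TIME1, TIME2, incrementalIndex, queryableIndex) built in test setup that is omitted from the excerpt. Hypothetical constants of the kind the test would use (construction of the incremental and queryable indexes from sample rows is not shown):

// Hypothetical fixture constants; the actual column names and timestamps come from the omitted test setup.
private static final String CLIENT_TYPE = "client_type";
private static final String LAST_CLIENT_TYPE = "last_client_type";
private static final DateTime TIME1 = DateTimes.of("2016-03-04T00:00:00.000Z");
private static final DateTime TIME2 = DateTimes.of("2016-03-04T01:00:00.000Z");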