Example usage of org.apache.druid.query.aggregation.Aggregator in the Druid project (druid-io), from class DoublesSketchToRankPostAggregatorTest, method emptySketch:
@Test
public void emptySketch() {
  // Build a sketch from a selector that never yields a value, so the sketch stays empty.
  final TestDoubleColumnSelectorImpl emptySelector = new TestDoubleColumnSelectorImpl(null);
  final Aggregator sketchAggregator = new DoublesSketchBuildAggregator(emptySelector, 8);

  // Expose the (empty) sketch to the post-aggregator under the name "sketch".
  final Map<String, Object> metricValues = new HashMap<>();
  metricValues.put("sketch", sketchAggregator.get());

  final PostAggregator rankPostAgg =
      new DoublesSketchToRankPostAggregator("rank", new FieldAccessPostAggregator("field", "sketch"), 0);

  // The rank of any value within an empty sketch is undefined, so NaN is expected.
  final double computedRank = (double) rankPostAgg.compute(metricValues);
  Assert.assertTrue(Double.isNaN(computedRank));
}
Example usage of org.apache.druid.query.aggregation.Aggregator in the Druid project (druid-io), from class SketchToStringPostAggregatorTest, method testCompute:
@Test
public void testCompute() {
  // An empty sketch is sufficient here: we only need a summary string, not real data,
  // so there is no need to iterate over the selector.
  final TestObjectColumnSelector emptySelector = new TestObjectColumnSelector(new Object[0]);
  final Aggregator sketchAggregator = new SketchAggregator(emptySelector, 4096);

  // Hand the sketch to the post-aggregator under the field name "sketch".
  final Map<String, Object> metricValues = new HashMap<>();
  metricValues.put("sketch", sketchAggregator.get());

  final PostAggregator summaryPostAgg =
      new SketchToStringPostAggregator("summary", new FieldAccessPostAggregator("field", "sketch"));

  // Even an empty sketch produces a non-null textual summary containing "SUMMARY".
  final String summaryText = (String) summaryPostAgg.compute(metricValues);
  Assert.assertNotNull(summaryText);
  Assert.assertTrue(summaryText.contains("SUMMARY"));
}
Example usage of org.apache.druid.query.aggregation.Aggregator in the Druid project (druid-io), from class InputRowSerde, method toBytes:
/**
 * Serializes one {@link InputRow} into a byte array: the timestamp first, then every
 * dimension (count-prefixed), then one aggregated value per {@link AggregatorFactory}.
 * Parse errors encountered along the way are collected rather than thrown, so a single
 * bad field does not discard the whole row.
 *
 * @param typeHelperMap per-dimension serialization helpers; dimensions absent from the
 *                      map fall back to string serialization
 * @param row           the row to serialize
 * @param aggs          aggregator factories whose intermediate values are written out
 * @return the serialized bytes plus any parse-exception messages accumulated
 * @throws RuntimeException wrapping any {@link IOException} from the underlying output
 */
public static SerializeResult toBytes(final Map<String, IndexSerdeTypeHelper> typeHelperMap, final InputRow row, AggregatorFactory[] aggs) {
try {
List<String> parseExceptionMessages = new ArrayList<>();
ByteArrayDataOutput out = ByteStreams.newDataOutput();
// write timestamp
out.writeLong(row.getTimestampFromEpoch());
// writing all dimensions
List<String> dimList = row.getDimensions();
WritableUtils.writeVInt(out, dimList.size());
for (String dim : dimList) {
IndexSerdeTypeHelper typeHelper = typeHelperMap.get(dim);
if (typeHelper == null) {
// Dimensions without an explicit type helper are serialized as strings.
typeHelper = STRING_HELPER;
}
writeString(dim, out);
try {
typeHelper.serialize(out, row.getRaw(dim));
} catch (ParseException pe) {
// Record the failure and keep going; the dimension name was already written.
parseExceptionMessages.add(pe.getMessage());
}
}
// writing all metrics
Supplier<InputRow> supplier = () -> row;
WritableUtils.writeVInt(out, aggs.length);
for (AggregatorFactory aggFactory : aggs) {
String k = aggFactory.getName();
writeString(k, out);
// try-with-resources guarantees the aggregator is closed even if serialization fails.
try (Aggregator agg = aggFactory.factorize(IncrementalIndex.makeColumnSelectorFactory(VirtualColumns.EMPTY, aggFactory, supplier, true))) {
try {
agg.aggregate();
} catch (ParseException e) {
// "aggregate" can throw ParseExceptions if a selector expects something but gets something else.
log.debug(e, "Encountered parse error, skipping aggregator[%s].", k);
parseExceptionMessages.add(e.getMessage());
}
final ColumnType type = aggFactory.getIntermediateType();
// A null-marker byte precedes every metric value so the reader can tell null from data.
if (agg.isNull()) {
out.writeByte(NullHandling.IS_NULL_BYTE);
} else {
out.writeByte(NullHandling.IS_NOT_NULL_BYTE);
// Dispatch on the aggregator's intermediate type; complex types delegate to their serde.
if (type.is(ValueType.FLOAT)) {
out.writeFloat(agg.getFloat());
} else if (type.is(ValueType.LONG)) {
WritableUtils.writeVLong(out, agg.getLong());
} else if (type.is(ValueType.DOUBLE)) {
out.writeDouble(agg.getDouble());
} else if (type.is(ValueType.COMPLEX)) {
Object val = agg.get();
ComplexMetricSerde serde = getComplexMetricSerde(type.getComplexTypeName());
writeBytes(serde.toBytes(val), out);
} else {
throw new IAE("Unable to serialize type[%s]", type.asTypeString());
}
}
}
}
return new SerializeResult(out.toByteArray(), parseExceptionMessages);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
Example usage of org.apache.druid.query.aggregation.Aggregator in the Druid project (druid-io), from class FinalizingFieldAccessPostAggregatorTest, method testComputeWithoutFinalizing:
/**
 * Verifies that computing a {@link FinalizingFieldAccessPostAggregator} over raw
 * (un-finalized) aggregator output throws {@link UnsupportedOperationException}.
 */
@Test(expected = UnsupportedOperationException.class)
public void testComputeWithoutFinalizing() {
  String aggName = "rows";
  // Count three rows so the raw aggregator value is 3.
  Aggregator agg = new CountAggregator();
  agg.aggregate();
  agg.aggregate();
  agg.aggregate();

  Map<String, Object> metricValues = new HashMap<>();
  metricValues.put(aggName, agg.get());

  FinalizingFieldAccessPostAggregator postAgg = new FinalizingFieldAccessPostAggregator("final_rows", aggName);
  // Long.valueOf replaces the deprecated Long(long) constructor. The assertion is not
  // expected to be reached: compute() should throw before the comparison happens.
  Assert.assertEquals(Long.valueOf(3L), postAgg.compute(metricValues));
}
Example usage of org.apache.druid.query.aggregation.Aggregator in the Druid project (druid-io), from class FloatLastAggregationTest, method testFloatLastAggregatorWithTimeColumn:
@Test
public void testFloatLastAggregatorWithTimeColumn() {
  final Aggregator agg =
      new FloatLastAggregatorFactory("billy", "nilly", "customTime").factorize(colSelectorFactory);

  // Feed four rows through the aggregator.
  for (int i = 0; i < 4; i++) {
    aggregate(agg);
  }

  final Pair<Long, Float> lastPair = (Pair<Long, Float>) agg.get();

  // Per the test fixtures, the "last" row (by the custom time column) is index 1.
  Assert.assertEquals(customTimes[1], lastPair.lhs.longValue());
  Assert.assertEquals(floats[1], lastPair.rhs, 0.0001);
  Assert.assertEquals((long) floats[1], agg.getLong());
  Assert.assertEquals(floats[1], agg.getFloat(), 0.0001);
}
Aggregations