Example usage of org.apache.druid.data.input.Row from the druid project (druid-io):
class PrefetchableTextFilesFirehoseFactoryTest, method testReconnectWithCache.
@Test
public void testReconnectWithCache() throws IOException {
  // Factory configured with a 2048-byte cache and no prefetch; reconnecting
  // should reuse cached files rather than refetching them.
  final TestPrefetchableTextFilesFirehoseFactory factory = TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 2048, 0);
  final File firehoseTmpDir = createFirehoseTmpDir("testReconnectWithCache");
  for (int attempt = 0; attempt < 5; attempt++) {
    final List<Row> readRows = new ArrayList<>();
    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
      // From the second connect onward, both files should already be cached.
      if (attempt > 0) {
        Assert.assertEquals(FILE_SIZE * 2, factory.getCacheManager().getTotalCachedBytes());
      }
      while (firehose.hasMore()) {
        readRows.add(firehose.nextRow());
      }
    }
    // Every reconnect must yield the full expected data and keep 2 cache files.
    assertResult(readRows);
    assertNumRemainingCacheFiles(firehoseTmpDir, 2);
  }
}
Example usage of org.apache.druid.data.input.Row from the druid project (druid-io):
class PrefetchableTextFilesFirehoseFactoryTest, method testWithSmallCacheAndLargeFetch.
@Test
public void testWithSmallCacheAndLargeFetch() throws IOException {
  // Cache (1024 bytes) is smaller than the fetch buffer (2048 bytes), so only
  // one file fits in the cache after reading everything.
  final TestPrefetchableTextFilesFirehoseFactory factory = TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 1024, 2048);
  final File firehoseTmpDir = createFirehoseTmpDir("testWithSmallCacheAndLargeFetch");
  final List<Row> readRows = new ArrayList<>();
  try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
    while (firehose.hasMore()) {
      readRows.add(firehose.nextRow());
    }
  }
  assertResult(readRows);
  // Only a single file should remain cached due to the small cache size.
  assertNumRemainingCacheFiles(firehoseTmpDir, 1);
}
Example usage of org.apache.druid.data.input.Row from the druid project (druid-io):
class MovingAverageQueryToolChest, method makePostComputeManipulatorFn.
@Override
public Function<Row, Row> makePostComputeManipulatorFn(MovingAverageQuery query, MetricManipulationFn fn) {
  // Returns a transform that applies {@code fn} to every aggregator and averager
  // value of a result row, producing a new row with the manipulated values.
  // Idiom fix: lambda replaces the verbose anonymous Function class, and the
  // duplicated if/else null-handling collapses to a conditional expression.
  return result -> {
    MapBasedRow mRow = (MapBasedRow) result;
    final Map<String, Object> values = new HashMap<>(mRow.getEvent());
    for (AggregatorFactory agg : query.getAggregatorSpecs()) {
      final Object aggVal = values.get(agg.getName());
      // A missing/null metric is stored explicitly as null, matching prior behavior.
      values.put(agg.getName(), aggVal == null ? null : fn.manipulate(agg, aggVal));
    }
    for (AveragerFactory<?, ?> avg : query.getAveragerSpecs()) {
      final Object avgVal = values.get(avg.getName());
      values.put(
          avg.getName(),
          avgVal == null ? null : fn.manipulate(new AveragerFactoryWrapper<>(avg, avg.getName() + "_"), avgVal)
      );
    }
    return new MapBasedRow(result.getTimestamp(), values);
  };
}
Example usage of org.apache.druid.data.input.Row from the druid project (druid-io):
class IncrementalIndexMultiValueSpecTest, method test.
@Test
public void test() throws IndexSizeExceededException {
  // Three string dimensions, one per multi-value handling mode, so we can
  // compare ARRAY (as-is), SORTED_ARRAY (sorted, dups kept), and SORTED_SET
  // (sorted, dups removed) on identical input values.
  DimensionsSpec dimensionsSpec = new DimensionsSpec(Arrays.asList(new StringDimensionSchema("string1", DimensionSchema.MultiValueHandling.ARRAY, true), new StringDimensionSchema("string2", DimensionSchema.MultiValueHandling.SORTED_ARRAY, true), new StringDimensionSchema("string3", DimensionSchema.MultiValueHandling.SORTED_SET, true)));
  IncrementalIndexSchema schema = new IncrementalIndexSchema(0, new TimestampSpec("ds", "auto", null), Granularities.ALL, VirtualColumns.EMPTY, dimensionsSpec, new AggregatorFactory[0], false);
  // Synthetic event: every key sharing a type prefix yields the same value list.
  Map<String, Object> eventValues = new HashMap<String, Object>() {
    @Override
    public Object get(Object key) {
      final String name = (String) key;
      if (name.startsWith("string")) {
        return Arrays.asList("xsd", "aba", "fds", "aba");
      }
      if (name.startsWith("float")) {
        return Arrays.asList(3.92f, -2.76f, 42.153f, Float.NaN, -2.76f, -2.76f);
      }
      if (name.startsWith("long")) {
        return Arrays.asList(-231238789L, 328L, 923L, 328L, -2L, 0L);
      }
      return null;
    }
  };
  IncrementalIndex index = indexCreator.createIndex(schema);
  index.add(new MapBasedInputRow(0, Arrays.asList("string1", "string2", "string3", "float1", "float2", "float3", "long1", "long2", "long3"), eventValues));
  Row firstRow = index.iterator().next();
  // ARRAY keeps input order and duplicates.
  Assert.assertEquals(Lists.newArrayList("xsd", "aba", "fds", "aba"), firstRow.getRaw("string1"));
  // SORTED_ARRAY sorts but keeps duplicates.
  Assert.assertEquals(Lists.newArrayList("aba", "aba", "fds", "xsd"), firstRow.getRaw("string2"));
  // SORTED_SET sorts and deduplicates.
  Assert.assertEquals(Lists.newArrayList("aba", "fds", "xsd"), firstRow.getRaw("string3"));
}
Example usage of org.apache.druid.data.input.Row from the druid project (druid-io):
class DoubleMeanAggregationTest, method testBufferAggretatorUsingGroupByQuery.
@Test
@Parameters(method = "doVectorize")
public void testBufferAggretatorUsingGroupByQuery(boolean doVectorize) throws Exception {
  // Build a groupBy query computing three double means: over a double column,
  // a single-value numeric string dimension, and a multi-value one.
  GroupByQuery query = new GroupByQuery.Builder()
      .setDataSource("test")
      .setGranularity(Granularities.ALL)
      .setInterval("1970/2050")
      .setAggregatorSpecs(
          new DoubleMeanAggregatorFactory("meanOnDouble", SimpleTestIndex.DOUBLE_COL),
          new DoubleMeanAggregatorFactory("meanOnString", SimpleTestIndex.SINGLE_VALUE_DOUBLE_AS_STRING_DIM),
          new DoubleMeanAggregatorFactory("meanOnMultiValue", SimpleTestIndex.MULTI_VALUE_DOUBLE_AS_STRING_DIM)
      )
      .setContext(ImmutableMap.of(QueryContexts.VECTORIZE_KEY, doVectorize))
      .build();
  // do json serialization and deserialization of query to ensure there are no serde issues
  ObjectMapper jsonMapper = groupByQueryTestHelper.getObjectMapper();
  query = (GroupByQuery) jsonMapper.readValue(jsonMapper.writeValueAsString(query), Query.class);
  Sequence<ResultRow> seq = groupByQueryTestHelper.runQueryOnSegmentsObjs(segments, query);
  Row result = Iterables.getOnlyElement(seq.toList()).toMapBasedRow(query);
  Assert.assertEquals(6.2d, result.getMetric("meanOnDouble").doubleValue(), 0.0001d);
  Assert.assertEquals(6.2d, result.getMetric("meanOnString").doubleValue(), 0.0001d);
  Assert.assertEquals(4.1333d, result.getMetric("meanOnMultiValue").doubleValue(), 0.0001d);
}
Aggregations