Example 11 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

The class GroupByQueryQueryToolChest, method decorateObjectMapper:

@Override
public ObjectMapper decorateObjectMapper(final ObjectMapper objectMapper, final GroupByQuery query) {
    final boolean resultAsArray = query.getContextBoolean(GroupByQueryConfig.CTX_KEY_ARRAY_RESULT_ROWS, false);
    // Serializer that writes array- or map-based rows as appropriate, based on the "resultAsArray" setting.
    final JsonSerializer<ResultRow> serializer = new JsonSerializer<ResultRow>() {

        @Override
        public void serialize(final ResultRow resultRow, final JsonGenerator jg, final SerializerProvider serializers) throws IOException {
            if (resultAsArray) {
                jg.writeObject(resultRow.getArray());
            } else {
                jg.writeObject(resultRow.toMapBasedRow(query));
            }
        }
    };
    // Deserializer that can deserialize either array- or map-based rows.
    final JsonDeserializer<ResultRow> deserializer = new JsonDeserializer<ResultRow>() {

        @Override
        public ResultRow deserialize(final JsonParser jp, final DeserializationContext ctxt) throws IOException {
            if (jp.isExpectedStartObjectToken()) {
                final Row row = jp.readValueAs(Row.class);
                return ResultRow.fromLegacyRow(row, query);
            } else {
                return ResultRow.of(jp.readValueAs(Object[].class));
            }
        }
    };
    class GroupByResultRowModule extends SimpleModule {

        private GroupByResultRowModule() {
            addSerializer(ResultRow.class, serializer);
            addDeserializer(ResultRow.class, deserializer);
        }
    }
    final ObjectMapper newObjectMapper = objectMapper.copy();
    newObjectMapper.registerModule(new GroupByResultRowModule());
    return newObjectMapper;
}
Also used: JsonSerializer (com.fasterxml.jackson.databind.JsonSerializer), JsonDeserializer (com.fasterxml.jackson.databind.JsonDeserializer), JsonGenerator (com.fasterxml.jackson.core.JsonGenerator), DeserializationContext (com.fasterxml.jackson.databind.DeserializationContext), Row (org.apache.druid.data.input.Row), SerializerProvider (com.fasterxml.jackson.databind.SerializerProvider), SimpleModule (com.fasterxml.jackson.databind.module.SimpleModule), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), JsonParser (com.fasterxml.jackson.core.JsonParser)
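For context, a minimal sketch of how the decorated mapper could be exercised is shown below. The toolChest, baseMapper, and query variables are assumptions for illustration, and the placeholder values passed to ResultRow.of would have to line up with the query's actual row signature.

// Hypothetical usage sketch, not taken from the Druid codebase.
// Assumes a GroupByQueryQueryToolChest `toolChest`, a base ObjectMapper `baseMapper`,
// and a GroupByQuery `query` (optionally with CTX_KEY_ARRAY_RESULT_ROWS in its context).
final ObjectMapper decorated = toolChest.decorateObjectMapper(baseMapper, query);

// Serialization writes the array form or the legacy map-based form, depending on the context flag.
final ResultRow row = ResultRow.of(0L, "someDimensionValue", 10L); // placeholder values
final String json = decorated.writeValueAsString(row);

// Deserialization inspects the first JSON token, so either form parses back into a ResultRow.
final ResultRow roundTripped = decorated.readValue(json, ResultRow.class);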

Example 12 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

The class PrefetchableTextFilesFirehoseFactoryTest, method testWithoutCache:

@Test
public void testWithoutCache() throws IOException {
    final TestPrefetchableTextFilesFirehoseFactory factory = TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 0, 2048);
    final List<Row> rows = new ArrayList<>();
    final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCache");
    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
        while (firehose.hasMore()) {
            rows.add(firehose.nextRow());
        }
    }
    Assert.assertEquals(0, factory.getCacheManager().getTotalCachedBytes());
    assertResult(rows);
    assertNumRemainingCacheFiles(firehoseTmpDir, 0);
}
Also used: Firehose (org.apache.druid.data.input.Firehose), ArrayList (java.util.ArrayList), Row (org.apache.druid.data.input.Row), File (java.io.File), Test (org.junit.Test)
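The tests that follow repeat the same drain-the-firehose loop with different factory configurations. As a rough sketch (the readAllRows helper is hypothetical, not part of the Druid test code), the shared pattern could be factored out like this, relying only on the connect, hasMore, and nextRow calls seen above:

// Hypothetical helper, not part of the Druid codebase: drains a Firehose into a list of rows.
private static <T extends InputRowParser> List<Row> readAllRows(
        final FirehoseFactory<T> factory,
        final T parser,
        final File firehoseTmpDir
) throws IOException {
    final List<Row> rows = new ArrayList<>();
    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
        while (firehose.hasMore()) {
            rows.add(firehose.nextRow());
        }
    }
    return rows;
}

Also used: Firehose (org.apache.druid.data.input.Firehose), FirehoseFactory (org.apache.druid.data.input.FirehoseFactory), InputRowParser (org.apache.druid.data.input.impl.InputRowParser), ArrayList (java.util.ArrayList), Row (org.apache.druid.data.input.Row), File (java.io.File)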

Example 13 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

The class PrefetchableTextFilesFirehoseFactoryTest, method testWithoutCacheAndFetchAgainstConnectionReset:

@Test
public void testWithoutCacheAndFetchAgainstConnectionReset() throws IOException {
    final TestPrefetchableTextFilesFirehoseFactory factory = TestPrefetchableTextFilesFirehoseFactory.withConnectionResets(TEST_DIR, 0, 0, 2);
    final List<Row> rows = new ArrayList<>();
    final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCacheAndFetch");
    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
        while (firehose.hasMore()) {
            rows.add(firehose.nextRow());
        }
    }
    Assert.assertEquals(0, factory.getCacheManager().getTotalCachedBytes());
    assertResult(rows);
    assertNumRemainingCacheFiles(firehoseTmpDir, 0);
}
Also used: Firehose (org.apache.druid.data.input.Firehose), ArrayList (java.util.ArrayList), Row (org.apache.druid.data.input.Row), File (java.io.File), Test (org.junit.Test)

Example 14 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

The class PrefetchableTextFilesFirehoseFactoryTest, method testWithLargeCacheAndSmallFetch:

@Test
public void testWithLargeCacheAndSmallFetch() throws IOException {
    final TestPrefetchableTextFilesFirehoseFactory factory = TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 2048, 1024);
    final List<Row> rows = new ArrayList<>();
    final File firehoseTmpDir = createFirehoseTmpDir("testWithLargeCacheAndSmallFetch");
    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
        while (firehose.hasMore()) {
            rows.add(firehose.nextRow());
        }
    }
    assertResult(rows);
    assertNumRemainingCacheFiles(firehoseTmpDir, 2);
}
Also used: Firehose (org.apache.druid.data.input.Firehose), ArrayList (java.util.ArrayList), Row (org.apache.druid.data.input.Row), File (java.io.File), Test (org.junit.Test)

Example 15 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

The class PrefetchableTextFilesFirehoseFactoryTest, method testRetry:

@Test
public void testRetry() throws IOException {
    final TestPrefetchableTextFilesFirehoseFactory factory = TestPrefetchableTextFilesFirehoseFactory.withOpenExceptions(TEST_DIR, 1);
    final List<Row> rows = new ArrayList<>();
    final File firehoseTmpDir = createFirehoseTmpDir("testRetry");
    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
        while (firehose.hasMore()) {
            rows.add(firehose.nextRow());
        }
    }
    assertResult(rows);
    assertNumRemainingCacheFiles(firehoseTmpDir, 2);
}
Also used: Firehose (org.apache.druid.data.input.Firehose), ArrayList (java.util.ArrayList), Row (org.apache.druid.data.input.Row), File (java.io.File), Test (org.junit.Test)
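Assuming the hypothetical readAllRows helper sketched after Example 12, each of these tests reduces to draining one factory configuration and asserting on the result, for example:

// Hypothetical reuse of the readAllRows sketch with factory configurations from Examples 12-15.
assertResult(readAllRows(TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 2048, 1024), PARSER, firehoseTmpDir));
assertResult(readAllRows(TestPrefetchableTextFilesFirehoseFactory.withOpenExceptions(TEST_DIR, 1), PARSER, firehoseTmpDir));

This keeps each test focused on its factory configuration and on the cache assertions that follow the read loop.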

Aggregations (classes used together with Row, with usage counts)

Row (org.apache.druid.data.input.Row): 54
Test (org.junit.Test): 44
ArrayList (java.util.ArrayList): 32
MapBasedRow (org.apache.druid.data.input.MapBasedRow): 21
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 21
File (java.io.File): 18
Firehose (org.apache.druid.data.input.Firehose): 15
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 15
HashMap (java.util.HashMap): 13
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 13
MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow): 12
DimensionSpec (org.apache.druid.query.dimension.DimensionSpec): 11
List (java.util.List): 10
LongMeanAveragerFactory (org.apache.druid.query.movingaverage.averagers.LongMeanAveragerFactory): 9
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 8
InputRow (org.apache.druid.data.input.InputRow): 8
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 7
IOException (java.io.IOException): 6
GroupByQueryConfig (org.apache.druid.query.groupby.GroupByQueryConfig): 6
Function (com.google.common.base.Function): 5