Search in sources :

Example 51 with Row

use of org.apache.druid.data.input.Row in project druid by druid-io.

From the class SqlInputSourceTest, the method assertResult:

/**
 * Verifies that the rows read back match the expected fixture data: each SQL statement is
 * expected to contribute exactly 10 rows, with minute-of-hour, dimension "a" and dimension "b"
 * all equal to the row's index i (0..9).
 *
 * @param rows the rows produced by the input source under test
 * @param sqls the SQL statements that produced them; each must yield 10 rows
 */
private void assertResult(List<Row> rows, List<String> sqls) {
    Assert.assertEquals(10 * sqls.size(), rows.size());
    // Impose a deterministic order before asserting positionally:
    // by timestamp, then by dimensions "a" and "b".
    // Integer.parseInt avoids the boxed Integer.valueOf(...).intValue() round trip.
    rows.sort(Comparator.comparing(Row::getTimestamp)
                        .thenComparingInt(r -> Integer.parseInt(r.getDimension("a").get(0)))
                        .thenComparingInt(r -> Integer.parseInt(r.getDimension("b").get(0))));
    int rowCount = 0;
    for (int i = 0; i < 10; i++) {
        // Loop-invariant for the inner loop, so compute it once per i.
        // NOTE: "0%s" only zero-pads single-digit minutes; valid while i < 10.
        final String expectedTimestamp = StringUtils.format("2011-01-12T00:0%s:00.000Z", i);
        for (int j = 0; j < sqls.size(); j++) {
            // Each of the sqls.size() statements contributes one row per value of i.
            final Row row = rows.get(rowCount);
            Assert.assertEquals(expectedTimestamp, row.getTimestamp().toString());
            Assert.assertEquals(i, Integer.parseInt(row.getDimension("a").get(0)));
            Assert.assertEquals(i, Integer.parseInt(row.getDimension("b").get(0)));
            rowCount++;
        }
    }
}
Also used : Module(com.fasterxml.jackson.databind.Module) JsonProperty(com.fasterxml.jackson.annotation.JsonProperty) Arrays(java.util.Arrays) MetadataStorageConnectorConfig(org.apache.druid.metadata.MetadataStorageConnectorConfig) InputSplit(org.apache.druid.data.input.InputSplit) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) InputRowListPlusRawValues(org.apache.druid.data.input.InputRowListPlusRawValues) InputRowSchema(org.apache.druid.data.input.InputRowSchema) ArrayList(java.util.ArrayList) Row(org.apache.druid.data.input.Row) JsonTypeName(com.fasterxml.jackson.annotation.JsonTypeName) ImmutableList(com.google.common.collect.ImmutableList) DBI(org.skife.jdbi.v2.DBI) InputSourceReader(org.apache.druid.data.input.InputSourceReader) CloseableIterator(org.apache.druid.java.util.common.parsers.CloseableIterator) FileUtils(org.apache.druid.java.util.common.FileUtils) Before(org.junit.Before) AfterClass(org.junit.AfterClass) ImmutableSet(com.google.common.collect.ImmutableSet) SQLFirehoseDatabaseConnector(org.apache.druid.metadata.SQLFirehoseDatabaseConnector) JdbcAccessSecurityConfig(org.apache.druid.server.initialization.JdbcAccessSecurityConfig) ColumnsFilter(org.apache.druid.data.input.ColumnsFilter) InputFormat(org.apache.druid.data.input.InputFormat) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) StringUtils(org.apache.druid.java.util.common.StringUtils) Set(java.util.Set) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) Test(org.junit.Test) IOException(java.io.IOException) EasyMock(org.easymock.EasyMock) EqualsVerifier(nl.jqno.equalsverifier.EqualsVerifier) Collectors(java.util.stream.Collectors) File(java.io.File) Objects(java.util.Objects) InputRow(org.apache.druid.data.input.InputRow) BasicDataSource(org.apache.commons.dbcp2.BasicDataSource) TestHelper(org.apache.druid.segment.TestHelper) List(java.util.List) Rule(org.junit.Rule) Stream(java.util.stream.Stream) 
TestDerbyConnector(org.apache.druid.metadata.TestDerbyConnector) Assert(org.junit.Assert) Comparator(java.util.Comparator) Row(org.apache.druid.data.input.Row) InputRow(org.apache.druid.data.input.InputRow)

Example 52 with Row

use of org.apache.druid.data.input.Row in project druid by druid-io.

From the class SqlFirehoseFactoryTest, the method assertResult:

/**
 * Verifies that the rows read by the firehose match the expected fixture data: each SQL
 * statement is expected to contribute exactly 10 rows, with minute-of-hour, dimension "a"
 * and dimension "b" all equal to the row's index i (0..9).
 *
 * @param rows the rows produced by the firehose under test
 * @param sqls the SQL statements that produced them; each must yield 10 rows
 */
private void assertResult(List<Row> rows, List<String> sqls) {
    Assert.assertEquals(10 * sqls.size(), rows.size());
    // Impose a deterministic order before asserting positionally:
    // by timestamp, then by dimensions "a" and "b".
    // Integer.parseInt avoids the boxed Integer.valueOf(...).intValue() round trip.
    rows.sort(Comparator.comparing(Row::getTimestamp)
                        .thenComparingInt(r -> Integer.parseInt(r.getDimension("a").get(0)))
                        .thenComparingInt(r -> Integer.parseInt(r.getDimension("b").get(0))));
    int rowCount = 0;
    for (int i = 0; i < 10; i++) {
        // Loop-invariant for the inner loop, so compute it once per i.
        // NOTE: "0%s" only zero-pads single-digit minutes; valid while i < 10.
        final String expectedTimestamp = StringUtils.format("2011-01-12T00:0%s:00.000Z", i);
        for (int j = 0; j < sqls.size(); j++) {
            // Each of the sqls.size() statements contributes one row per value of i.
            final Row row = rows.get(rowCount);
            Assert.assertEquals(expectedTimestamp, row.getTimestamp().toString());
            Assert.assertEquals(i, Integer.parseInt(row.getDimension("a").get(0)));
            Assert.assertEquals(i, Integer.parseInt(row.getDimension("b").get(0)));
            rowCount++;
        }
    }
}
Also used : Arrays(java.util.Arrays) BeforeClass(org.junit.BeforeClass) TimeAndDimsParseSpec(org.apache.druid.data.input.impl.TimeAndDimsParseSpec) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) ArrayList(java.util.ArrayList) Row(org.apache.druid.data.input.Row) ImmutableList(com.google.common.collect.ImmutableList) FileUtils(org.apache.druid.java.util.common.FileUtils) SqlTestUtils(org.apache.druid.metadata.input.SqlTestUtils) AfterClass(org.junit.AfterClass) MapInputRowParser(org.apache.druid.data.input.impl.MapInputRowParser) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) StringUtils(org.apache.druid.java.util.common.StringUtils) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) Test(org.junit.Test) IOException(java.io.IOException) InputRowParser(org.apache.druid.data.input.impl.InputRowParser) File(java.io.File) Firehose(org.apache.druid.data.input.Firehose) TestHelper(org.apache.druid.segment.TestHelper) List(java.util.List) Rule(org.junit.Rule) TestDerbyConnector(org.apache.druid.metadata.TestDerbyConnector) Assert(org.junit.Assert) Comparator(java.util.Comparator) TransformSpec(org.apache.druid.segment.transform.TransformSpec) Row(org.apache.druid.data.input.Row)

Example 53 with Row

use of org.apache.druid.data.input.Row in project druid by druid-io.

From the class SqlFirehoseFactoryTest, the method testWithoutCache:

@Test
public void testWithoutCache() throws IOException {
    // Back the firehose with a fresh Derby table holding 10 fixture rows.
    derbyConnector = derbyConnectorRule.getConnector();
    final SqlTestUtils testUtils = new SqlTestUtils(derbyConnector);
    testUtils.createAndUpdateTable(TABLE_NAME_1, 10);
    // The 0L argument presumably disables the on-disk cache — the test asserts
    // that no cache files remain afterwards; confirm against the factory ctor.
    final SqlFirehoseFactory factory =
        new SqlFirehoseFactory(SQLLIST1, 0L, null, null, null, true, testUtils.getDerbyFirehoseConnector(), mapper);
    final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCache");
    // Drain every row the firehose offers into a list for verification.
    final List<Row> rows = new ArrayList<>();
    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
        while (firehose.hasMore()) {
            rows.add(firehose.nextRow());
        }
    }
    assertResult(rows, SQLLIST1);
    assertNumRemainingCacheFiles(firehoseTmpDir, 0);
    testUtils.dropTable(TABLE_NAME_1);
}
Also used : SqlTestUtils(org.apache.druid.metadata.input.SqlTestUtils) Firehose(org.apache.druid.data.input.Firehose) ArrayList(java.util.ArrayList) Row(org.apache.druid.data.input.Row) File(java.io.File) Test(org.junit.Test)

Example 54 with Row

use of org.apache.druid.data.input.Row in project druid by druid-io.

From the class SqlFirehoseFactoryTest, the method testWithCacheAndFetch:

@Test
public void testWithCacheAndFetch() throws IOException {
    // Back the firehose with two fresh Derby tables, 10 fixture rows each.
    derbyConnector = derbyConnectorRule.getConnector();
    final SqlTestUtils testUtils = new SqlTestUtils(derbyConnector);
    testUtils.createAndUpdateTable(TABLE_NAME_1, 10);
    testUtils.createAndUpdateTable(TABLE_NAME_2, 10);
    // NOTE(review): this ctor passes 0L in a different position than testWithoutCache,
    // and the test expects 2 cache files to remain — confirm parameter semantics
    // against the SqlFirehoseFactory constructor.
    final SqlFirehoseFactory factory =
        new SqlFirehoseFactory(SQLLIST2, null, null, 0L, null, true, testUtils.getDerbyFirehoseConnector(), mapper);
    final File firehoseTmpDir = createFirehoseTmpDir("testWithCacheAndFetch");
    // Drain every row the firehose offers into a list for verification.
    final List<Row> rows = new ArrayList<>();
    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
        while (firehose.hasMore()) {
            rows.add(firehose.nextRow());
        }
    }
    assertResult(rows, SQLLIST2);
    // One cached file per SQL statement is expected to remain on disk.
    assertNumRemainingCacheFiles(firehoseTmpDir, 2);
    testUtils.dropTable(TABLE_NAME_1);
    testUtils.dropTable(TABLE_NAME_2);
}
Also used : SqlTestUtils(org.apache.druid.metadata.input.SqlTestUtils) Firehose(org.apache.druid.data.input.Firehose) ArrayList(java.util.ArrayList) Row(org.apache.druid.data.input.Row) File(java.io.File) Test(org.junit.Test)

Aggregations

Row (org.apache.druid.data.input.Row)54 Test (org.junit.Test)44 ArrayList (java.util.ArrayList)32 MapBasedRow (org.apache.druid.data.input.MapBasedRow)21 InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest)21 File (java.io.File)18 Firehose (org.apache.druid.data.input.Firehose)15 LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory)15 HashMap (java.util.HashMap)13 DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec)13 MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow)12 DimensionSpec (org.apache.druid.query.dimension.DimensionSpec)11 List (java.util.List)10 LongMeanAveragerFactory (org.apache.druid.query.movingaverage.averagers.LongMeanAveragerFactory)9 ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)8 InputRow (org.apache.druid.data.input.InputRow)8 GroupByQuery (org.apache.druid.query.groupby.GroupByQuery)7 IOException (java.io.IOException)6 GroupByQueryConfig (org.apache.druid.query.groupby.GroupByQueryConfig)6 Function (com.google.common.base.Function)5