
Example 26 with JSONPathSpec

Use of org.apache.druid.java.util.common.parsers.JSONPathSpec in project druid by druid-io.

From the class OrcReaderTest, method testDate2038.

// This test is migrated from OrcHadoopInputRowParserTest
@Test
public void testDate2038() throws IOException {
    final InputEntityReader reader = createReader(new TimestampSpec("time", "millis", null), DimensionsSpec.builder().setDimensionExclusions(Collections.singletonList("time")).build(), new OrcInputFormat(new JSONPathSpec(true, null), null, new Configuration()), "example/TestOrcFile.testDate2038.orc");
    try (CloseableIterator<InputRow> iterator = reader.read()) {
        int actualRowCount = 0;
        Assert.assertTrue(iterator.hasNext());
        final InputRow row = iterator.next();
        actualRowCount++;
        Assert.assertEquals(1, row.getDimensions().size());
        Assert.assertEquals(DateTimes.of("2038-05-05T12:34:56.1Z"), row.getTimestamp());
        Assert.assertEquals("2038-12-25T00:00:00.000Z", Iterables.getOnlyElement(row.getDimension("date")));
        while (iterator.hasNext()) {
            actualRowCount++;
            iterator.next();
        }
        Assert.assertEquals(212000, actualRowCount);
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec), InputRow (org.apache.druid.data.input.InputRow), JSONPathSpec (org.apache.druid.java.util.common.parsers.JSONPathSpec), InputEntityReader (org.apache.druid.data.input.InputEntityReader), Test (org.junit.Test)
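
The flattenSpec used here is new JSONPathSpec(true, null): field discovery is enabled and no explicit field specs are declared, so the ORC columns themselves become the discovered fields. Below is a minimal sketch of how that spec serializes, assuming a plain Jackson ObjectMapper can handle the Jackson annotations on JSONPathSpec; the class and main method are illustrative and not part of the Druid tests.

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.java.util.common.parsers.JSONPathSpec;

public class FlattenSpecSerdeSketch {
    public static void main(String[] args) throws Exception {
        // Same spec as in testDate2038: discover fields automatically, declare none explicitly.
        final JSONPathSpec flattenSpec = new JSONPathSpec(true, null);
        // Expected to produce something like {"useFieldDiscovery":true,"fields":[]}.
        final ObjectMapper mapper = new ObjectMapper();
        System.out.println(mapper.writeValueAsString(flattenSpec));
    }
}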

Example 27 with JSONPathSpec

Use of org.apache.druid.java.util.common.parsers.JSONPathSpec in project druid by druid-io.

From the class OrcReaderTest, method testTest2.

// This test is migrated from OrcHadoopInputRowParserTest
@Test
public void testTest2() throws IOException {
    final InputFormat inputFormat = new OrcInputFormat(
        new JSONPathSpec(
            true,
            Collections.singletonList(new JSONPathFieldSpec(JSONPathFieldType.PATH, "col7-subcol7", "$.col7.subcol7"))
        ),
        null,
        new Configuration()
    );
    final InputEntityReader reader = createReader(
        new TimestampSpec("timestamp", "auto", null),
        new DimensionsSpec(null),
        inputFormat,
        "example/test_2.orc"
    );
    try (CloseableIterator<InputRow> iterator = reader.read()) {
        Assert.assertTrue(iterator.hasNext());
        final InputRow row = iterator.next();
        Assert.assertEquals(DateTimes.of("2016-01-01T00:00:00.000Z"), row.getTimestamp());
        Assert.assertEquals("bar", Iterables.getOnlyElement(row.getDimension("col1")));
        Assert.assertEquals(ImmutableList.of("dat1", "dat2", "dat3"), row.getDimension("col2"));
        Assert.assertEquals("1.1", Iterables.getOnlyElement(row.getDimension("col3")));
        Assert.assertEquals("2", Iterables.getOnlyElement(row.getDimension("col4")));
        Assert.assertEquals("3.5", Iterables.getOnlyElement(row.getDimension("col5")));
        Assert.assertTrue(row.getDimension("col6").isEmpty());
        Assert.assertFalse(iterator.hasNext());
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), InputFormat (org.apache.druid.data.input.InputFormat), TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec), InputRow (org.apache.druid.data.input.InputRow), JSONPathSpec (org.apache.druid.java.util.common.parsers.JSONPathSpec), DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec), JSONPathFieldSpec (org.apache.druid.java.util.common.parsers.JSONPathFieldSpec), InputEntityReader (org.apache.druid.data.input.InputEntityReader), Test (org.junit.Test)
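
Example 27 mixes discovery with one explicit PATH field, col7-subcol7, extracted from the nested struct col7. An explicit field can be declared either through the JSONPathFieldSpec constructor, as above, or through the factory methods used later in testFlattenParse; the sketch below shows both forms and assumes createNestedField maps to the PATH field type (the class and method names are illustrative).

import java.util.Arrays;
import java.util.List;
import org.apache.druid.java.util.common.parsers.JSONPathFieldSpec;
import org.apache.druid.java.util.common.parsers.JSONPathFieldType;
import org.apache.druid.java.util.common.parsers.JSONPathSpec;

public class FieldSpecSketch {
    // Builds the same flattenSpec two ways: constructor form and factory form.
    static List<JSONPathSpec> buildSpecs() {
        // Constructor form, as in testTest2: field type, output name, JSONPath expression.
        JSONPathFieldSpec viaConstructor =
            new JSONPathFieldSpec(JSONPathFieldType.PATH, "col7-subcol7", "$.col7.subcol7");
        // Factory form, as in testFlattenParse.
        JSONPathFieldSpec viaFactory =
            JSONPathFieldSpec.createNestedField("col7-subcol7", "$.col7.subcol7");
        return Arrays.asList(
            new JSONPathSpec(true, Arrays.asList(viaConstructor)),
            new JSONPathSpec(true, Arrays.asList(viaFactory))
        );
    }
}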

Example 28 with JSONPathSpec

Use of org.apache.druid.java.util.common.parsers.JSONPathSpec in project druid by druid-io.

From the class OrcReaderTest, method testOrcSplitElim.

// This test is migrated from OrcHadoopInputRowParserTest
@Test
public void testOrcSplitElim() throws IOException {
    final InputEntityReader reader = createReader(new TimestampSpec("ts", "millis", null), new DimensionsSpec(null), new OrcInputFormat(new JSONPathSpec(true, null), null, new Configuration()), "example/orc_split_elim.orc");
    try (CloseableIterator<InputRow> iterator = reader.read()) {
        int actualRowCount = 0;
        Assert.assertTrue(iterator.hasNext());
        final InputRow row = iterator.next();
        actualRowCount++;
        Assert.assertEquals(DateTimes.of("1969-12-31T16:00:00.0Z"), row.getTimestamp());
        Assert.assertEquals("2", Iterables.getOnlyElement(row.getDimension("userid")));
        Assert.assertEquals("foo", Iterables.getOnlyElement(row.getDimension("string1")));
        Assert.assertEquals("0.8", Iterables.getOnlyElement(row.getDimension("subtype")));
        Assert.assertEquals("1.2", Iterables.getOnlyElement(row.getDimension("decimal1")));
        while (iterator.hasNext()) {
            actualRowCount++;
            iterator.next();
        }
        Assert.assertEquals(25000, actualRowCount);
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec), InputRow (org.apache.druid.data.input.InputRow), DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec), JSONPathSpec (org.apache.druid.java.util.common.parsers.JSONPathSpec), InputEntityReader (org.apache.druid.data.input.InputEntityReader), Test (org.junit.Test)

Example 29 with JSONPathSpec

Use of org.apache.druid.java.util.common.parsers.JSONPathSpec in project druid by druid-io.

From the class InputRowParserSerdeTest, method testFlattenParse.

@Test
public void testFlattenParse() throws Exception {
    List<JSONPathFieldSpec> fields = new ArrayList<>();
    fields.add(JSONPathFieldSpec.createNestedField("foobar1", "$.foo.bar1"));
    fields.add(JSONPathFieldSpec.createNestedField("foobar2", "$.foo.bar2"));
    fields.add(JSONPathFieldSpec.createNestedField("baz0", "$.baz[0]"));
    fields.add(JSONPathFieldSpec.createNestedField("baz1", "$.baz[1]"));
    fields.add(JSONPathFieldSpec.createNestedField("baz2", "$.baz[2]"));
    fields.add(JSONPathFieldSpec.createNestedField("hey0barx", "$.hey[0].barx"));
    fields.add(JSONPathFieldSpec.createNestedField("metA", "$.met.a"));
    fields.add(JSONPathFieldSpec.createNestedField("missing", "$.nonexistent.nested.field"));
    fields.add(JSONPathFieldSpec.createRootField("timestamp"));
    fields.add(JSONPathFieldSpec.createRootField("foo.bar1"));
    JSONPathSpec flattenSpec = new JSONPathSpec(true, fields);
    final StringInputRowParser parser = new StringInputRowParser(
        new JSONParseSpec(
            new TimestampSpec("timestamp", "iso", null),
            DimensionsSpec.EMPTY,
            flattenSpec,
            null,
            null
        ),
        null
    );
    final StringInputRowParser parser2 = jsonMapper.readValue(jsonMapper.writeValueAsBytes(parser), StringInputRowParser.class);
    final InputRow parsed = parser2.parse("{\"blah\":[4,5,6], \"newmet\":5, \"foo\":{\"bar1\":\"aaa\", \"bar2\":\"bbb\"}, \"baz\":[1,2,3], \"timestamp\":\"2999\", \"foo.bar1\":\"Hello world!\", \"hey\":[{\"barx\":\"asdf\"}], \"met\":{\"a\":456}}");
    Assert.assertEquals(ImmutableList.of("foobar1", "foobar2", "baz0", "baz1", "baz2", "hey0barx", "metA", "missing", "timestamp", "foo.bar1", "blah", "newmet", "baz"), parsed.getDimensions());
    Assert.assertEquals(ImmutableList.of("aaa"), parsed.getDimension("foobar1"));
    Assert.assertEquals(ImmutableList.of("bbb"), parsed.getDimension("foobar2"));
    Assert.assertEquals(ImmutableList.of("1"), parsed.getDimension("baz0"));
    Assert.assertEquals(ImmutableList.of("2"), parsed.getDimension("baz1"));
    Assert.assertEquals(ImmutableList.of("3"), parsed.getDimension("baz2"));
    Assert.assertEquals(ImmutableList.of("Hello world!"), parsed.getDimension("foo.bar1"));
    Assert.assertEquals(ImmutableList.of("asdf"), parsed.getDimension("hey0barx"));
    Assert.assertEquals(ImmutableList.of("456"), parsed.getDimension("metA"));
    Assert.assertEquals(ImmutableList.of("5"), parsed.getDimension("newmet"));
    Assert.assertEquals(ImmutableList.of(), parsed.getDimension("missing"));
    Assert.assertEquals(DateTimes.of("2999").getMillis(), parsed.getTimestampFromEpoch());
    String testSpec = "{\"enabled\": true,\"useFieldDiscovery\": true, \"fields\": [\"parseThisRootField\"]}";
    final JSONPathSpec parsedSpec = jsonMapper.readValue(testSpec, JSONPathSpec.class);
    List<JSONPathFieldSpec> fieldSpecs = parsedSpec.getFields();
    Assert.assertEquals(JSONPathFieldType.ROOT, fieldSpecs.get(0).getType());
    Assert.assertEquals("parseThisRootField", fieldSpecs.get(0).getName());
    Assert.assertEquals("parseThisRootField", fieldSpecs.get(0).getExpr());
}
Also used: ArrayList (java.util.ArrayList), InputRow (org.apache.druid.data.input.InputRow), JSONPathSpec (org.apache.druid.java.util.common.parsers.JSONPathSpec), JSONPathFieldSpec (org.apache.druid.java.util.common.parsers.JSONPathFieldSpec), Test (org.junit.Test)
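
The tail of this test already shows reading a flattenSpec back from JSON; writing one out is symmetric. A minimal round-trip sketch, assuming JSONPathSpec defines value-based equals (if it does not, comparing the serialized strings is the fallback); the class name is illustrative.

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Collections;
import org.apache.druid.java.util.common.parsers.JSONPathFieldSpec;
import org.apache.druid.java.util.common.parsers.JSONPathSpec;

public class FlattenSpecRoundTripSketch {
    public static void main(String[] args) throws Exception {
        final ObjectMapper mapper = new ObjectMapper();
        // One explicit nested field plus automatic discovery, mirroring testFlattenParse.
        final JSONPathSpec original = new JSONPathSpec(
            true,
            Collections.singletonList(JSONPathFieldSpec.createNestedField("foobar1", "$.foo.bar1"))
        );
        // Write to JSON and read it back; both specs should describe the same flattening.
        final String json = mapper.writeValueAsString(original);
        final JSONPathSpec roundTripped = mapper.readValue(json, JSONPathSpec.class);
        System.out.println(json);
        System.out.println(original.equals(roundTripped));
    }
}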

Example 30 with JSONPathSpec

Use of org.apache.druid.java.util.common.parsers.JSONPathSpec in project druid by druid-io.

From the class JSONParseSpecTest, method testParseRowWithConditional.

@Test
public void testParseRowWithConditional() {
    final JSONParseSpec parseSpec = new JSONParseSpec(
        new TimestampSpec("timestamp", "iso", null),
        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo"))),
        new JSONPathSpec(
            true,
            ImmutableList.of(
                new JSONPathFieldSpec(JSONPathFieldType.PATH, "foo", "$.[?(@.maybe_object)].maybe_object.foo.test"),
                new JSONPathFieldSpec(JSONPathFieldType.PATH, "baz", "$.maybe_object_2.foo.test"),
                new JSONPathFieldSpec(JSONPathFieldType.PATH, "bar", "$.[?(@.something_else)].something_else.foo")
            )
        ),
        null,
        false
    );
    final Map<String, Object> expected = new HashMap<>();
    expected.put("foo", new ArrayList());
    expected.put("baz", null);
    expected.put("bar", Collections.singletonList("test"));
    final Parser<String, Object> parser = parseSpec.makeParser();
    final Map<String, Object> parsedRow = parser.parseToMap("{\"something_else\": {\"foo\": \"test\"}}");
    Assert.assertNotNull(parsedRow);
    Assert.assertEquals(expected, parsedRow);
}
Also used: HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), JSONPathSpec (org.apache.druid.java.util.common.parsers.JSONPathSpec), JSONPathFieldSpec (org.apache.druid.java.util.common.parsers.JSONPathFieldSpec), Test (org.junit.Test)
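
The expected map above captures how JSONPath results surface from the parser: a filter expression that matches nothing comes back as an empty list, a definite path into a missing object comes back as null, and the filter on something_else, which does match, comes back as a singleton list. The sketch below feeds a parseSpec of the same shape a row where maybe_object is present and just prints the result instead of asserting on it, since the exact container type of the extracted value should be confirmed against the parser; the class name is illustrative.

import com.google.common.collect.ImmutableList;
import java.util.Map;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.JSONParseSpec;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.java.util.common.parsers.JSONPathFieldSpec;
import org.apache.druid.java.util.common.parsers.JSONPathFieldType;
import org.apache.druid.java.util.common.parsers.JSONPathSpec;
import org.apache.druid.java.util.common.parsers.Parser;

public class ConditionalPathSketch {
    public static void main(String[] args) {
        // Same shape as testParseRowWithConditional, reduced to the single conditional field.
        final JSONParseSpec parseSpec = new JSONParseSpec(
            new TimestampSpec("timestamp", "iso", null),
            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo"))),
            new JSONPathSpec(
                true,
                ImmutableList.of(
                    new JSONPathFieldSpec(JSONPathFieldType.PATH, "foo", "$.[?(@.maybe_object)].maybe_object.foo.test")
                )
            ),
            null,
            false
        );
        // This time the conditional path matches, so "foo" should carry the extracted value.
        final Parser<String, Object> parser = parseSpec.makeParser();
        final Map<String, Object> parsedRow =
            parser.parseToMap("{\"maybe_object\": {\"foo\": {\"test\": \"hello\"}}}");
        System.out.println(parsedRow.get("foo"));
    }
}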

Aggregations

JSONPathSpec (org.apache.druid.java.util.common.parsers.JSONPathSpec): 53
JSONPathFieldSpec (org.apache.druid.java.util.common.parsers.JSONPathFieldSpec): 44
Test (org.junit.Test): 42
InputEntityReader (org.apache.druid.data.input.InputEntityReader): 33
InputRow (org.apache.druid.data.input.InputRow): 32
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec): 30
InputRowSchema (org.apache.druid.data.input.InputRowSchema): 28
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 24
InputRowListPlusRawValues (org.apache.druid.data.input.InputRowListPlusRawValues): 17
ArrayList (java.util.ArrayList): 7
JSONParseSpec (org.apache.druid.data.input.impl.JSONParseSpec): 6
JsonInputFormat (org.apache.druid.data.input.impl.JsonInputFormat): 6
Configuration (org.apache.hadoop.conf.Configuration): 6
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 5
Before (org.junit.Before): 5
StringDimensionSchema (org.apache.druid.data.input.impl.StringDimensionSchema): 4
Module (com.fasterxml.jackson.databind.Module): 3
BigDecimal (java.math.BigDecimal): 3
DefaultObjectMapper (org.apache.druid.jackson.DefaultObjectMapper): 3
HashMap (java.util.HashMap): 2