
Example 6 with TimestampSpec

Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io.

From the class IncrementalIndexMultiValueSpecTest, method test:

@Test
public void test() throws IndexSizeExceededException {
    // One string dimension per multi-value handling mode: ARRAY keeps input
    // order and duplicates, SORTED_ARRAY sorts but keeps duplicates, and
    // SORTED_SET sorts and removes duplicates.
    DimensionsSpec dimensionsSpec = new DimensionsSpec(
        Arrays.<DimensionSchema>asList(
            new StringDimensionSchema("string1", DimensionSchema.MultiValueHandling.ARRAY),
            new StringDimensionSchema("string2", DimensionSchema.MultiValueHandling.SORTED_ARRAY),
            new StringDimensionSchema("string3", DimensionSchema.MultiValueHandling.SORTED_SET)
        ),
        null,
        null
    );
    IncrementalIndexSchema schema = new IncrementalIndexSchema(
        0,
        new TimestampSpec("ds", "auto", null),
        Granularities.ALL,
        VirtualColumns.EMPTY,
        dimensionsSpec,
        new AggregatorFactory[0],
        false
    );
    // Fake row data: every "string*" key resolves to the same unsorted list
    // with a duplicate, so the assertions below isolate the handling mode.
    Map<String, Object> map = new HashMap<String, Object>() {

        @Override
        public Object get(Object key) {
            if (((String) key).startsWith("string")) {
                return Arrays.asList("xsd", "aba", "fds", "aba");
            }
            if (((String) key).startsWith("float")) {
                return Arrays.<Float>asList(3.92f, -2.76f, 42.153f, Float.NaN, -2.76f, -2.76f);
            }
            if (((String) key).startsWith("long")) {
                return Arrays.<Long>asList(-231238789L, 328L, 923L, 328L, -2L, 0L);
            }
            return null;
        }
    };
    IncrementalIndex<?> index = new OnheapIncrementalIndex(schema, true, 10000);
    index.add(new MapBasedInputRow(
        0,
        Arrays.asList("string1", "string2", "string3", "float1", "float2", "float3", "long1", "long2", "long3"),
        map
    ));
    Row row = index.iterator().next();
    // ARRAY: input order and duplicates preserved.
    Assert.assertEquals(Lists.newArrayList("xsd", "aba", "fds", "aba"), row.getRaw("string1"));
    // SORTED_ARRAY: sorted, duplicates preserved.
    Assert.assertEquals(Lists.newArrayList("aba", "aba", "fds", "xsd"), row.getRaw("string2"));
    // SORTED_SET: sorted, duplicates removed.
    Assert.assertEquals(Lists.newArrayList("aba", "fds", "xsd"), row.getRaw("string3"));
}
Also used : HashMap(java.util.HashMap) StringDimensionSchema(io.druid.data.input.impl.StringDimensionSchema) TimestampSpec(io.druid.data.input.impl.TimestampSpec) DimensionsSpec(io.druid.data.input.impl.DimensionsSpec) MapBasedInputRow(io.druid.data.input.MapBasedInputRow) Row(io.druid.data.input.Row) Test(org.junit.Test)
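
A note on the TimestampSpec("ds", "auto", null) used above: "auto" asks Druid to detect the timestamp format. The following minimal sketch is not from the Druid sources (the input maps are hypothetical, and it assumes Guava's ImmutableMap and Joda-Time's DateTime on the classpath); it illustrates how extractTimestamp, TimestampSpec's helper for pulling a DateTime out of a raw row, should behave with format auto-detection:

// Sketch (assumed behavior): "auto" detects ISO-8601 strings and epoch millis.
TimestampSpec spec = new TimestampSpec("ds", "auto", null);
DateTime fromIso = spec.extractTimestamp(ImmutableMap.<String, Object>of("ds", "2014-01-01T00:00:00.000Z"));
DateTime fromMillis = spec.extractTimestamp(ImmutableMap.<String, Object>of("ds", 1388534400000L));
// Both should resolve to the same instant, 2014-01-01T00:00:00.000Z UTC.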

Example 7 with TimestampSpec

Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io.

From the class DataSchema, method getParser:

@JsonIgnore
public InputRowParser getParser() {
    if (parser == null) {
        log.warn("No parser has been specified");
        return null;
    }
    final InputRowParser inputRowParser = jsonMapper.convertValue(this.parser, InputRowParser.class);
    // Metric input fields and metric names are excluded from dimensions by default.
    final Set<String> dimensionExclusions = Sets.newHashSet();
    for (AggregatorFactory aggregator : aggregators) {
        dimensionExclusions.addAll(aggregator.requiredFields());
        dimensionExclusions.add(aggregator.getName());
    }
    if (inputRowParser.getParseSpec() != null) {
        final DimensionsSpec dimensionsSpec = inputRowParser.getParseSpec().getDimensionsSpec();
        final TimestampSpec timestampSpec = inputRowParser.getParseSpec().getTimestampSpec();
        // exclude timestamp from dimensions by default, unless explicitly included in the list of dimensions
        if (timestampSpec != null) {
            final String timestampColumn = timestampSpec.getTimestampColumn();
            // dimensionsSpec may be null; it is only null-checked further below.
            if (!(dimensionsSpec != null && dimensionsSpec.hasCustomDimensions() && dimensionsSpec.getDimensionNames().contains(timestampColumn))) {
                dimensionExclusions.add(timestampColumn);
            }
        }
        if (dimensionsSpec != null) {
            final Set<String> metSet = Sets.newHashSet();
            for (AggregatorFactory aggregator : aggregators) {
                metSet.add(aggregator.getName());
            }
            final Set<String> dimSet = Sets.newHashSet(dimensionsSpec.getDimensionNames());
            final Set<String> overlap = Sets.intersection(metSet, dimSet);
            if (!overlap.isEmpty()) {
                throw new IAE("Cannot have overlapping dimensions and metrics of the same name. Please change the name of the metric. Overlap: %s", overlap);
            }
            // Explicitly declared dimensions always win over computed exclusions.
            return inputRowParser.withParseSpec(
                inputRowParser.getParseSpec().withDimensionsSpec(
                    dimensionsSpec.withDimensionExclusions(Sets.difference(dimensionExclusions, dimSet))
                )
            );
        } else {
            return inputRowParser;
        }
    } else {
        log.warn("No parseSpec in parser has been specified.");
        return inputRowParser;
    }
}
Also used : TimestampSpec(io.druid.data.input.impl.TimestampSpec) DimensionsSpec(io.druid.data.input.impl.DimensionsSpec) InputRowParser(io.druid.data.input.impl.InputRowParser) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) IAE(io.druid.java.util.common.IAE) JsonIgnore(com.fasterxml.jackson.annotation.JsonIgnore)
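
The exclusion bookkeeping in getParser reduces to two Guava set operations: Sets.intersection to reject metric/dimension name collisions, and Sets.difference so that explicitly declared dimensions are never excluded. A self-contained sketch with hypothetical column names (not taken from the Druid sources):

import com.google.common.collect.Sets;
import java.util.Set;

public class ExclusionSketch {
    public static void main(String[] args) {
        // Hypothetical schema: explicit dimensions, computed exclusions, metric names.
        Set<String> dimSet = Sets.newHashSet("page", "language", "timestamp");
        Set<String> exclusions = Sets.newHashSet("timestamp", "count", "added");
        Set<String> metSet = Sets.newHashSet("count", "added");
        // A metric name that is also a dimension would trigger the IAE above.
        System.out.println(Sets.intersection(metSet, dimSet)); // []
        // Explicit dimensions win: "timestamp" stays a dimension, so only
        // "count" and "added" remain excluded (iteration order unspecified).
        System.out.println(Sets.difference(exclusions, dimSet));
    }
}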

Example 8 with TimestampSpec

Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io.

From the class RealtimePlumberSchoolTest, method setUp:

@Before
public void setUp() throws Exception {
    tmpDir = Files.createTempDir();
    ObjectMapper jsonMapper = new DefaultObjectMapper();
    // Two schemas that are identical except for segment granularity: HOUR vs YEAR.
    schema = new DataSchema(
        "test",
        jsonMapper.convertValue(new StringInputRowParser(new JSONParseSpec(new TimestampSpec("timestamp", "auto", null), new DimensionsSpec(null, null, null), null, null), null), Map.class),
        new AggregatorFactory[] { new CountAggregatorFactory("rows") },
        new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null),
        jsonMapper
    );
    schema2 = new DataSchema(
        "test",
        jsonMapper.convertValue(new StringInputRowParser(new JSONParseSpec(new TimestampSpec("timestamp", "auto", null), new DimensionsSpec(null, null, null), null, null), null), Map.class),
        new AggregatorFactory[] { new CountAggregatorFactory("rows") },
        new UniformGranularitySpec(Granularities.YEAR, Granularities.NONE, null),
        jsonMapper
    );
    // Mock out everything the plumber school depends on.
    announcer = EasyMock.createMock(DataSegmentAnnouncer.class);
    announcer.announceSegment(EasyMock.<DataSegment>anyObject());
    EasyMock.expectLastCall().anyTimes();
    segmentPublisher = EasyMock.createNiceMock(SegmentPublisher.class);
    dataSegmentPusher = EasyMock.createNiceMock(DataSegmentPusher.class);
    handoffNotifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
    handoffNotifier = EasyMock.createNiceMock(SegmentHandoffNotifier.class);
    EasyMock.expect(handoffNotifierFactory.createSegmentHandoffNotifier(EasyMock.anyString())).andReturn(handoffNotifier).anyTimes();
    EasyMock.expect(handoffNotifier.registerSegmentHandoffCallback(EasyMock.<SegmentDescriptor>anyObject(), EasyMock.<Executor>anyObject(), EasyMock.<Runnable>anyObject())).andReturn(true).anyTimes();
    emitter = EasyMock.createMock(ServiceEmitter.class);
    EasyMock.replay(announcer, segmentPublisher, dataSegmentPusher, handoffNotifierFactory, handoffNotifier, emitter);
    tuningConfig = new RealtimeTuningConfig(1, null, null, null, new IntervalStartVersioningPolicy(), rejectionPolicy, null, null, null, buildV9Directly, 0, 0, false, null);
    realtimePlumberSchool = new RealtimePlumberSchool(
        emitter,
        new DefaultQueryRunnerFactoryConglomerate(Maps.<Class<? extends Query>, QueryRunnerFactory>newHashMap()),
        dataSegmentPusher, announcer, segmentPublisher, handoffNotifierFactory,
        MoreExecutors.sameThreadExecutor(),
        TestHelper.getTestIndexMerger(), TestHelper.getTestIndexMergerV9(), TestHelper.getTestIndexIO(),
        MapCache.create(0), FireDepartmentTest.NO_CACHE_CONFIG, TestHelper.getObjectMapper()
    );
    metrics = new FireDepartmentMetrics();
    plumber = (RealtimePlumber) realtimePlumberSchool.findPlumber(schema, tuningConfig, metrics);
}
Also used : ServiceEmitter(com.metamx.emitter.service.ServiceEmitter) DataSegmentPusher(io.druid.segment.loading.DataSegmentPusher) DataSegmentAnnouncer(io.druid.server.coordination.DataSegmentAnnouncer) DefaultQueryRunnerFactoryConglomerate(io.druid.query.DefaultQueryRunnerFactoryConglomerate) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) RealtimeTuningConfig(io.druid.segment.indexing.RealtimeTuningConfig) DataSchema(io.druid.segment.indexing.DataSchema) UniformGranularitySpec(io.druid.segment.indexing.granularity.UniformGranularitySpec) SegmentPublisher(io.druid.segment.realtime.SegmentPublisher) Executor(java.util.concurrent.Executor) FireDepartmentMetrics(io.druid.segment.realtime.FireDepartmentMetrics) SegmentDescriptor(io.druid.query.SegmentDescriptor) StringInputRowParser(io.druid.data.input.impl.StringInputRowParser) TimestampSpec(io.druid.data.input.impl.TimestampSpec) DimensionsSpec(io.druid.data.input.impl.DimensionsSpec) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) JSONParseSpec(io.druid.data.input.impl.JSONParseSpec) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Before(org.junit.Before)
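
Since DataSchema stores the parser as a Map and only converts it to an InputRowParser on demand (see getParser above), one way to inspect the JSON form of the parser built in setUp is to serialize it directly. A hypothetical snippet reusing the test's jsonMapper, with the output shape assumed rather than captured from a real run:

// Hypothetical: print the JSON spec equivalent to the parser in setUp().
StringInputRowParser parser = new StringInputRowParser(
    new JSONParseSpec(new TimestampSpec("timestamp", "auto", null), new DimensionsSpec(null, null, null), null, null),
    null
);
System.out.println(jsonMapper.writeValueAsString(parser));
// Expect output along the lines of:
// {"type":"string","parseSpec":{"format":"json","timestampSpec":{"column":"timestamp","format":"auto",...},...}}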

Example 9 with TimestampSpec

Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io.

From the class DataSchemaTest, method testExplicitInclude:

@Test
public void testExplicitInclude() throws Exception {
    Map<String, Object> parser = jsonMapper.convertValue(
        new StringInputRowParser(
            new JSONParseSpec(
                new TimestampSpec("time", "auto", null),
                // "time" and "col2" are declared as dimensions, so neither will be
                // auto-excluded below, even though one is the timestamp column and
                // the other feeds metric2.
                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "col2")), ImmutableList.of("dimC"), null),
                null, null
            ),
            null
        ),
        new TypeReference<Map<String, Object>>() {}
    );
    DataSchema schema = new DataSchema(
        "test",
        parser,
        new AggregatorFactory[] { new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric2", "col2") },
        new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))),
        jsonMapper
    );
    Assert.assertEquals(
        ImmutableSet.of("dimC", "col1", "metric1", "metric2"),
        schema.getParser().getParseSpec().getDimensionsSpec().getDimensionExclusions()
    );
}
Also used : DoubleSumAggregatorFactory(io.druid.query.aggregation.DoubleSumAggregatorFactory) StringInputRowParser(io.druid.data.input.impl.StringInputRowParser) TimestampSpec(io.druid.data.input.impl.TimestampSpec) DimensionsSpec(io.druid.data.input.impl.DimensionsSpec) JSONParseSpec(io.druid.data.input.impl.JSONParseSpec) Map(java.util.Map) ArbitraryGranularitySpec(io.druid.segment.indexing.granularity.ArbitraryGranularitySpec) Test(org.junit.Test)
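
For contrast, consider a hypothetical variant of the test above with the explicit dimension list removed (a sketch of expected behavior, not a test from the Druid sources). With schemaless dimensions, getParser's defaults kick in and the timestamp column joins the exclusions:

// Hypothetical: no explicit dimensions, so the timestamp column "time", the
// metric input "col1", and the metric name "metric1" should all be excluded
// alongside the declared exclusion "dimC".
Map<String, Object> parser = jsonMapper.convertValue(
    new StringInputRowParser(
        new JSONParseSpec(
            new TimestampSpec("time", "auto", null),
            new DimensionsSpec(null, ImmutableList.of("dimC"), null),
            null, null
        ),
        null
    ),
    new TypeReference<Map<String, Object>>() {}
);
DataSchema schema = new DataSchema(
    "test", parser,
    new AggregatorFactory[] { new DoubleSumAggregatorFactory("metric1", "col1") },
    new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))),
    jsonMapper
);
// Expected exclusions: "dimC", "time", "col1", "metric1".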

Example 10 with TimestampSpec

Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io.

From the class DataSchemaTest, method testSerde:

@Test
public void testSerde() throws Exception {
    String jsonStr = "{"
        + "\"dataSource\":\"test\","
        + "\"parser\":{\"type\":\"string\",\"parseSpec\":{\"format\":\"json\","
        + "\"timestampSpec\":{\"column\":\"xXx\", \"format\": \"auto\", \"missingValue\": null},"
        + "\"dimensionsSpec\":{\"dimensions\":[], \"dimensionExclusions\":[]},"
        + "\"flattenSpec\":{\"useFieldDiscovery\":true, \"fields\":[]},"
        + "\"featureSpec\":{}},\"encoding\":\"UTF-8\"},"
        + "\"metricsSpec\":[{\"type\":\"doubleSum\",\"name\":\"metric1\",\"fieldName\":\"col1\"}],"
        + "\"granularitySpec\":{\"type\":\"arbitrary\","
        + "\"queryGranularity\":{\"type\":\"duration\",\"duration\":86400000,\"origin\":\"1970-01-01T00:00:00.000Z\"},"
        + "\"intervals\":[\"2014-01-01T00:00:00.000Z/2015-01-01T00:00:00.000Z\"]}}";
    // Parse, serialize, and parse again: the round trip must preserve the spec.
    DataSchema actual = jsonMapper.readValue(jsonMapper.writeValueAsString(jsonMapper.readValue(jsonStr, DataSchema.class)), DataSchema.class);
    Assert.assertEquals(actual.getDataSource(), "test");
    // A null timestamp format defaults to "auto", matching the JSON above.
    Assert.assertEquals(actual.getParser().getParseSpec(), new JSONParseSpec(new TimestampSpec("xXx", null, null), new DimensionsSpec(null, Arrays.asList("metric1", "xXx", "col1"), null), null, null));
    Assert.assertEquals(actual.getAggregators(), new AggregatorFactory[] { new DoubleSumAggregatorFactory("metric1", "col1") });
    Assert.assertEquals(actual.getGranularitySpec(), new ArbitraryGranularitySpec(new DurationGranularity(86400000, null), ImmutableList.of(Interval.parse("2014/2015"))));
}
Also used : DoubleSumAggregatorFactory(io.druid.query.aggregation.DoubleSumAggregatorFactory) TimestampSpec(io.druid.data.input.impl.TimestampSpec) DimensionsSpec(io.druid.data.input.impl.DimensionsSpec) DurationGranularity(io.druid.java.util.common.granularity.DurationGranularity) JSONParseSpec(io.druid.data.input.impl.JSONParseSpec) ArbitraryGranularitySpec(io.druid.segment.indexing.granularity.ArbitraryGranularitySpec) Test(org.junit.Test)
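
The write-then-read sequence in testSerde is the usual serde-stability pattern; factored out as a hypothetical helper (not part of the Druid test):

// Hypothetical helper: serialize, re-parse, and return the round-tripped value.
static <T> T roundTrip(ObjectMapper mapper, T value, Class<T> clazz) throws java.io.IOException {
    return mapper.readValue(mapper.writeValueAsString(value), clazz);
}

The assertions then compare the round-tripped DataSchema's parts against freshly constructed specs, which works because TimestampSpec, DimensionsSpec, JSONParseSpec, and the granularity specs implement value equality.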

Aggregations

TimestampSpec (io.druid.data.input.impl.TimestampSpec): 40 uses
DimensionsSpec (io.druid.data.input.impl.DimensionsSpec): 31 uses
JSONParseSpec (io.druid.data.input.impl.JSONParseSpec): 16 uses
StringInputRowParser (io.druid.data.input.impl.StringInputRowParser): 16 uses
Test (org.junit.Test): 15 uses
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 12 uses
Map (java.util.Map): 11 uses
DataSchema (io.druid.segment.indexing.DataSchema): 10 uses
UniformGranularitySpec (io.druid.segment.indexing.granularity.UniformGranularitySpec): 10 uses
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 9 uses
DoubleSumAggregatorFactory (io.druid.query.aggregation.DoubleSumAggregatorFactory): 8 uses
DateTime (org.joda.time.DateTime): 8 uses
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 7 uses
ArrayList (java.util.ArrayList): 7 uses
CSVParseSpec (io.druid.data.input.impl.CSVParseSpec): 6 uses
StringDimensionSchema (io.druid.data.input.impl.StringDimensionSchema): 6 uses
TimeAndDimsParseSpec (io.druid.data.input.impl.TimeAndDimsParseSpec): 6 uses
OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex): 6 uses
InputRowParser (io.druid.data.input.impl.InputRowParser): 5 uses
MapInputRowParser (io.druid.data.input.impl.MapInputRowParser): 5 uses