Search in sources :

Example 1 with DimensionSchema

use of org.apache.druid.data.input.impl.DimensionSchema in project druid by druid-io.

In class SegmentAnalyzerTest, the method testIncrementalWorksHelper:

/**
 * Analyzes a single incremental-index test segment and verifies the per-column
 * metadata returned by a segment-metadata query.
 *
 * @param analyses the analysis types to request; when null, cardinality is expected
 *                 to have been computed for string dimensions (> 0), otherwise 0
 */
private void testIncrementalWorksHelper(EnumSet<SegmentMetadataQuery.AnalysisType> analyses) {
    final List<SegmentAnalysis> results = getSegmentAnalysises(
        new IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), SegmentId.dummy("ds")),
        analyses
    );
    // Exactly one analysis per segment, identified by the segment id.
    Assert.assertEquals(1, results.size());
    final SegmentAnalysis analysis = results.get(0);
    Assert.assertEquals(SegmentId.dummy("ds").toString(), analysis.getId());
    final Map<String, ColumnAnalysis> columns = analysis.getColumns();
    // +3: presumably __time plus extra columns not listed in TestIndex.COLUMNS — TODO confirm against TestIndex.
    Assert.assertEquals(TestIndex.COLUMNS.length + 3, columns.size());
    for (DimensionSchema schema : TestIndex.DIMENSION_SCHEMAS) {
        final String dimension = schema.getName();
        final ColumnAnalysis columnAnalysis = columns.get(dimension);
        final boolean isString = schema.getColumnType().is(ValueType.STRING);
        Assert.assertEquals(dimension, schema.getColumnType().toString(), columnAnalysis.getType());
        Assert.assertEquals(dimension, 0, columnAnalysis.getSize());
        if (isString) {
            // Cardinality is reported only for string columns; whether a real value is
            // computed depends on the requested analysis types.
            if (analyses == null) {
                Assert.assertTrue(dimension, columnAnalysis.getCardinality() > 0);
            } else {
                Assert.assertEquals(dimension, 0, columnAnalysis.getCardinality().longValue());
            }
        } else {
            Assert.assertNull(dimension, columnAnalysis.getCardinality());
        }
    }
    // The double- and float-metric checks were duplicated verbatim; share one helper.
    for (String metric : TestIndex.DOUBLE_METRICS) {
        assertNumericMetricColumn(columns, metric, ValueType.DOUBLE);
    }
    for (String metric : TestIndex.FLOAT_METRICS) {
        assertNumericMetricColumn(columns, metric, ValueType.FLOAT);
    }
}

/**
 * Asserts that a numeric metric column reports the expected type, zero size, and
 * no cardinality (cardinality applies to string columns only).
 */
private static void assertNumericMetricColumn(
    Map<String, ColumnAnalysis> columns,
    String metric,
    ValueType expectedType
) {
    final ColumnAnalysis columnAnalysis = columns.get(metric);
    Assert.assertEquals(metric, expectedType.name(), columnAnalysis.getType());
    Assert.assertEquals(metric, 0, columnAnalysis.getSize());
    Assert.assertNull(metric, columnAnalysis.getCardinality());
}
Also used : IncrementalIndexSegment(org.apache.druid.segment.IncrementalIndexSegment) ColumnAnalysis(org.apache.druid.query.metadata.metadata.ColumnAnalysis) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) DimensionSchema(org.apache.druid.data.input.impl.DimensionSchema)

Example 2 with DimensionSchema

use of org.apache.druid.data.input.impl.DimensionSchema in project druid by druid-io.

In class QueryableIndexColumnCapabilitiesTest, the method setup:

@BeforeClass
public static void setup() throws IOException {
    // Schema: string dims d1/d2 plus numeric dims d3 (double), d4 (float), d5 (long).
    final List<DimensionSchema> dimensions = ImmutableList.<DimensionSchema>builder()
            .addAll(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1", "d2")))
            .add(new DoubleDimensionSchema("d3"))
            .add(new FloatDimensionSchema("d4"))
            .add(new LongDimensionSchema("d5"))
            .build();
    final MapInputRowParser rowParser = new MapInputRowParser(
            new TimeAndDimsParseSpec(
                    new TimestampSpec("time", "auto", null),
                    new DimensionsSpec(dimensions)
            )
    );
    // One sum aggregator per numeric dim, plus a row count and a hyper-unique on d1.
    final AggregatorFactory[] metricsSpecs = new AggregatorFactory[] {
            new CountAggregatorFactory("cnt"),
            new DoubleSumAggregatorFactory("m1", "d3"),
            new FloatSumAggregatorFactory("m2", "d4"),
            new LongSumAggregatorFactory("m3", "d5"),
            new HyperUniquesAggregatorFactory("m4", "d1")
    };
    // A single fully-populated event; d2 is multi-valued.
    final Map<String, Object> event = ImmutableMap.<String, Object>builder()
            .put("time", DateTimes.nowUtc().getMillis())
            .put("d1", "some string")
            .put("d2", ImmutableList.of("some", "list"))
            .put("d3", 1.234)
            .put("d4", 1.234f)
            .put("d5", 10L)
            .build();
    final List<InputRow> rows = new ArrayList<>();
    rows.add(Iterables.getOnlyElement(rowParser.parseBatch(event)));
    final IndexBuilder builder = IndexBuilder.create()
            .rows(rows)
            .schema(new IncrementalIndexSchema.Builder()
                    .withMetrics(metricsSpecs)
                    .withDimensionsSpec(rowParser)
                    .withRollup(false)
                    .build())
            .tmpDir(temporaryFolder.newFolder());
    INC_INDEX = builder.buildIncrementalIndex();
    MMAP_INDEX = builder.buildMMappedIndex();
    // Second dataset: the same event plus a row where every nullable column is null
    // (or an empty list for d2), to exercise null handling in column capabilities.
    final List<InputRow> rowsWithNulls = new ArrayList<>();
    rowsWithNulls.add(Iterables.getOnlyElement(rowParser.parseBatch(event)));
    final Map<String, Object> eventWithNulls = new HashMap<>();
    eventWithNulls.put("time", DateTimes.nowUtc().getMillis());
    eventWithNulls.put("d1", null);
    eventWithNulls.put("d2", ImmutableList.of());
    eventWithNulls.put("d3", null);
    eventWithNulls.put("d4", null);
    eventWithNulls.put("d5", null);
    rowsWithNulls.add(Iterables.getOnlyElement(rowParser.parseBatch(eventWithNulls)));
    final IndexBuilder builderWithNulls = IndexBuilder.create()
            .rows(rowsWithNulls)
            .schema(new IncrementalIndexSchema.Builder()
                    .withMetrics(metricsSpecs)
                    .withDimensionsSpec(rowParser)
                    .withRollup(false)
                    .build())
            .tmpDir(temporaryFolder.newFolder());
    INC_INDEX_WITH_NULLS = builderWithNulls.buildIncrementalIndex();
    MMAP_INDEX_WITH_NULLS = builderWithNulls.buildMMappedIndex();
}
Also used : MapInputRowParser(org.apache.druid.data.input.impl.MapInputRowParser) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) HashMap(java.util.HashMap) LongDimensionSchema(org.apache.druid.data.input.impl.LongDimensionSchema) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) ArrayList(java.util.ArrayList) FloatDimensionSchema(org.apache.druid.data.input.impl.FloatDimensionSchema) FloatSumAggregatorFactory(org.apache.druid.query.aggregation.FloatSumAggregatorFactory) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) HyperUniquesAggregatorFactory(org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) FloatSumAggregatorFactory(org.apache.druid.query.aggregation.FloatSumAggregatorFactory) DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) LongDimensionSchema(org.apache.druid.data.input.impl.LongDimensionSchema) FloatDimensionSchema(org.apache.druid.data.input.impl.FloatDimensionSchema) DimensionSchema(org.apache.druid.data.input.impl.DimensionSchema) TimeAndDimsParseSpec(org.apache.druid.data.input.impl.TimeAndDimsParseSpec) DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) HyperUniquesAggregatorFactory(org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory) InputRow(org.apache.druid.data.input.InputRow) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) BeforeClass(org.junit.BeforeClass)

Example 3 with DimensionSchema

use of org.apache.druid.data.input.impl.DimensionSchema in project druid by druid-io.

In class InputRowSerde, the method getTypeHelperMap:

/**
 * Builds a map from dimension name to the serde type helper matching that
 * dimension's column type.
 *
 * @param dimensionsSpec spec whose dimensions are inspected
 * @return one {@code IndexSerdeTypeHelper} entry per declared dimension
 * @throws IAE if a dimension has a column type other than STRING, LONG, FLOAT, or DOUBLE
 */
public static Map<String, IndexSerdeTypeHelper> getTypeHelperMap(DimensionsSpec dimensionsSpec) {
    final Map<String, IndexSerdeTypeHelper> typeHelperMap = new HashMap<>();
    for (DimensionSchema schema : dimensionsSpec.getDimensions()) {
        typeHelperMap.put(schema.getName(), typeHelperFor(schema));
    }
    return typeHelperMap;
}

/** Resolves the shared helper instance for a single dimension's column type. */
private static IndexSerdeTypeHelper typeHelperFor(DimensionSchema schema) {
    switch(schema.getColumnType().getType()) {
        case STRING:
            return STRING_HELPER;
        case LONG:
            return LONG_HELPER;
        case FLOAT:
            return FLOAT_HELPER;
        case DOUBLE:
            return DOUBLE_HELPER;
        default:
            throw new IAE("Invalid type: [%s]", schema.getColumnType());
    }
}
Also used : HashMap(java.util.HashMap) IAE(org.apache.druid.java.util.common.IAE) DimensionSchema(org.apache.druid.data.input.impl.DimensionSchema)

Example 4 with DimensionSchema

use of org.apache.druid.data.input.impl.DimensionSchema in project druid by druid-io.

In class CompactionTask, the method createDimensionsSpec:

/**
 * Builds a {@link DimensionsSpec} covering every dimension present in the given
 * (index, segment) pairs, with each dimension's position and schema taken from the
 * most recent segment in which it appears.
 *
 * @param queryableIndices segments to compact paired with their queryable indexes;
 *                         NOTE: this list is sorted in place by segment interval
 * @return a DimensionsSpec listing each unique dimension exactly once
 */
private static DimensionsSpec createDimensionsSpec(List<NonnullPair<QueryableIndex, DataSegment>> queryableIndices) {
    // Bidirectional map: dimension name <-> its position in the final ordering.
    final BiMap<String, Integer> uniqueDims = HashBiMap.create();
    final Map<String, DimensionSchema> dimensionSchemaMap = new HashMap<>();
    // Here, we try to retain the order of dimensions as they were specified since the order of dimensions may be
    // optimized for performance.
    // Dimensions are extracted from the recent segments to the older ones because recent segments are likely to be
    // queried more frequently, and thus the performance should be optimized for recent ones rather than old ones.
    // sort timelineSegments in order of interval, see https://github.com/apache/druid/pull/9905
    queryableIndices.sort((o1, o2) -> Comparators.intervalsByStartThenEnd().compare(o1.rhs.getInterval(), o2.rhs.getInterval()));
    int index = 0;
    // Reverse iteration visits the newest interval first, so the first sighting of a
    // dimension (which fixes its position and schema) comes from the most recent segment.
    for (NonnullPair<QueryableIndex, DataSegment> pair : Lists.reverse(queryableIndices)) {
        final QueryableIndex queryableIndex = pair.lhs;
        final Map<String, DimensionHandler> dimensionHandlerMap = queryableIndex.getDimensionHandlers();
        for (String dimension : queryableIndex.getAvailableDimensions()) {
            final ColumnHolder columnHolder = Preconditions.checkNotNull(queryableIndex.getColumnHolder(dimension), "Cannot find column for dimension[%s]", dimension);
            if (!uniqueDims.containsKey(dimension)) {
                final DimensionHandler dimensionHandler = Preconditions.checkNotNull(dimensionHandlerMap.get(dimension), "Cannot find dimensionHandler for dimension[%s]", dimension);
                uniqueDims.put(dimension, index++);
                dimensionSchemaMap.put(dimension, createDimensionSchema(dimension, columnHolder.getCapabilities(), dimensionHandler.getMultivalueHandling()));
            }
        }
    }
    // Emit schemas in position order by walking the inverse (position -> name) view.
    final BiMap<Integer, String> orderedDims = uniqueDims.inverse();
    final List<DimensionSchema> dimensionSchemas = IntStream.range(0, orderedDims.size()).mapToObj(i -> {
        final String dimName = orderedDims.get(i);
        return Preconditions.checkNotNull(dimensionSchemaMap.get(dimName), "Cannot find dimension[%s] from dimensionSchemaMap", dimName);
    }).collect(Collectors.toList());
    return new DimensionsSpec(dimensionSchemas);
}
Also used : Verify(org.apache.curator.shaded.com.google.common.base.Verify) TaskToolbox(org.apache.druid.indexing.common.TaskToolbox) JsonProperty(com.fasterxml.jackson.annotation.JsonProperty) SegmentCacheManagerFactory(org.apache.druid.indexing.common.SegmentCacheManagerFactory) Comparators(org.apache.druid.java.util.common.guava.Comparators) LongDimensionSchema(org.apache.druid.data.input.impl.LongDimensionSchema) DimensionHandlerUtils(org.apache.druid.segment.DimensionHandlerUtils) IndexSpec(org.apache.druid.segment.IndexSpec) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) JodaUtils(org.apache.druid.java.util.common.JodaUtils) TaskActionClient(org.apache.druid.indexing.common.actions.TaskActionClient) Map(java.util.Map) DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) AppenderatorsManager(org.apache.druid.segment.realtime.appenderator.AppenderatorsManager) IAE(org.apache.druid.java.util.common.IAE) MultiValueHandling(org.apache.druid.data.input.impl.DimensionSchema.MultiValueHandling) BiMap(com.google.common.collect.BiMap) JacksonInject(com.fasterxml.jackson.annotation.JacksonInject) Property(org.apache.druid.indexer.Property) RetryPolicyFactory(org.apache.druid.indexing.common.RetryPolicyFactory) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) SplitHintSpec(org.apache.druid.data.input.SplitHintSpec) Segments(org.apache.druid.indexing.overlord.Segments) QueryableIndex(org.apache.druid.segment.QueryableIndex) StringUtils(org.apache.druid.java.util.common.StringUtils) ISE(org.apache.druid.java.util.common.ISE) Collectors(java.util.stream.Collectors) StringDimensionSchema(org.apache.druid.data.input.impl.StringDimensionSchema) LockGranularity(org.apache.druid.indexing.common.LockGranularity) PartitionHolder(org.apache.druid.timeline.partition.PartitionHolder) List(java.util.List) DimensionSchema(org.apache.druid.data.input.impl.DimensionSchema) 
UniformGranularitySpec(org.apache.druid.segment.indexing.granularity.UniformGranularitySpec) IndexTuningConfig(org.apache.druid.indexing.common.task.IndexTask.IndexTuningConfig) DataSegment(org.apache.druid.timeline.DataSegment) Entry(java.util.Map.Entry) ColumnCapabilities(org.apache.druid.segment.column.ColumnCapabilities) TransformSpec(org.apache.druid.segment.transform.TransformSpec) Logger(org.apache.druid.java.util.common.logger.Logger) IntStream(java.util.stream.IntStream) Granularity(org.apache.druid.java.util.common.granularity.Granularity) DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) Intervals(org.apache.druid.java.util.common.Intervals) Duration(org.joda.time.Duration) SegmentLoadingException(org.apache.druid.segment.loading.SegmentLoadingException) SegmentWriteOutMediumFactory(org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory) HashMap(java.util.HashMap) ParallelIndexSupervisorTask(org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexSupervisorTask) TaskStatus(org.apache.druid.indexer.TaskStatus) TuningConfig(org.apache.druid.segment.indexing.TuningConfig) ArrayList(java.util.ArrayList) PartitionChunk(org.apache.druid.timeline.partition.PartitionChunk) Interval(org.joda.time.Interval) Lists(com.google.common.collect.Lists) ColumnHolder(org.apache.druid.segment.column.ColumnHolder) ImmutableList(com.google.common.collect.ImmutableList) FloatDimensionSchema(org.apache.druid.data.input.impl.FloatDimensionSchema) SettableSupplier(org.apache.druid.common.guava.SettableSupplier) CoordinatorClient(org.apache.druid.client.coordinator.CoordinatorClient) JsonIgnore(com.fasterxml.jackson.annotation.JsonIgnore) CompactSegments(org.apache.druid.server.coordinator.duty.CompactSegments) DruidInputSource(org.apache.druid.indexing.input.DruidInputSource) Nonnull(javax.annotation.Nonnull) PartitionsSpec(org.apache.druid.indexer.partitions.PartitionsSpec) Nullable(javax.annotation.Nullable) 
ClientCompactionTaskTransformSpec(org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec) VersionedIntervalTimeline(org.apache.druid.timeline.VersionedIntervalTimeline) RE(org.apache.druid.java.util.common.RE) NonnullPair(org.apache.druid.java.util.common.NonnullPair) GranularitySpec(org.apache.druid.segment.indexing.granularity.GranularitySpec) TimelineObjectHolder(org.apache.druid.timeline.TimelineObjectHolder) Include(com.fasterxml.jackson.annotation.JsonInclude.Include) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) ParallelIndexTuningConfig(org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTuningConfig) IOException(java.io.IOException) ClientCompactionTaskQuery(org.apache.druid.client.indexing.ClientCompactionTaskQuery) File(java.io.File) HashBiMap(com.google.common.collect.HashBiMap) ClientCompactionTaskGranularitySpec(org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec) GranularityType(org.apache.druid.java.util.common.granularity.GranularityType) DimensionHandler(org.apache.druid.segment.DimensionHandler) TreeMap(java.util.TreeMap) Checks(org.apache.druid.indexer.Checks) JsonCreator(com.fasterxml.jackson.annotation.JsonCreator) JsonInclude(com.fasterxml.jackson.annotation.JsonInclude) AppendableIndexSpec(org.apache.druid.segment.incremental.AppendableIndexSpec) Preconditions(com.google.common.base.Preconditions) VisibleForTesting(com.google.common.annotations.VisibleForTesting) ParallelIndexIngestionSpec(org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexIngestionSpec) RetrieveUsedSegmentsAction(org.apache.druid.indexing.common.actions.RetrieveUsedSegmentsAction) ParallelIndexIOConfig(org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexIOConfig) IndexIO(org.apache.druid.segment.IndexIO) DataSchema(org.apache.druid.segment.indexing.DataSchema) Collections(java.util.Collections) ColumnHolder(org.apache.druid.segment.column.ColumnHolder) 
DimensionHandler(org.apache.druid.segment.DimensionHandler) HashMap(java.util.HashMap) LongDimensionSchema(org.apache.druid.data.input.impl.LongDimensionSchema) StringDimensionSchema(org.apache.druid.data.input.impl.StringDimensionSchema) DimensionSchema(org.apache.druid.data.input.impl.DimensionSchema) DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) FloatDimensionSchema(org.apache.druid.data.input.impl.FloatDimensionSchema) DataSegment(org.apache.druid.timeline.DataSegment) QueryableIndex(org.apache.druid.segment.QueryableIndex) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec)

Example 5 with DimensionSchema

use of org.apache.druid.data.input.impl.DimensionSchema in project druid by druid-io.

In class CompactionTaskTest, the method testCreateDimensionSchema:

@Test
public void testCreateDimensionSchema() {
    final String dimensionName = "dim";
    // Register a custom handler so the complex column type below resolves to the
    // extension schema rather than failing.
    DimensionHandlerUtils.registerDimensionHandlerProvider(ExtensionDimensionHandler.TYPE_NAME, d -> new ExtensionDimensionHandler(d));
    // NOTE(review): the original chained setDictionaryValuesUnique(true) twice; the
    // redundant duplicate has been removed. If the second call was intended to be
    // setDictionaryValuesSorted(true), restore that instead.
    DimensionSchema stringSchema = CompactionTask.createDimensionSchema(
        dimensionName,
        ColumnCapabilitiesImpl.createSimpleSingleValueStringColumnCapabilities()
            .setHasBitmapIndexes(true)
            .setDictionaryEncoded(true)
            .setDictionaryValuesUnique(true),
        DimensionSchema.MultiValueHandling.SORTED_SET
    );
    Assert.assertTrue(stringSchema instanceof StringDimensionSchema);
    // Each simple numeric capability maps to its matching dimension schema subtype.
    DimensionSchema floatSchema = CompactionTask.createDimensionSchema(dimensionName, ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.FLOAT), null);
    Assert.assertTrue(floatSchema instanceof FloatDimensionSchema);
    DimensionSchema doubleSchema = CompactionTask.createDimensionSchema(dimensionName, ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.DOUBLE), null);
    Assert.assertTrue(doubleSchema instanceof DoubleDimensionSchema);
    DimensionSchema longSchema = CompactionTask.createDimensionSchema(dimensionName, ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.LONG), null);
    Assert.assertTrue(longSchema instanceof LongDimensionSchema);
    // Complex types dispatch through the registered extension handler.
    DimensionSchema extensionSchema = CompactionTask.createDimensionSchema(dimensionName, ColumnCapabilitiesImpl.createSimpleNumericColumnCapabilities(ColumnType.ofComplex(ExtensionDimensionHandler.TYPE_NAME)), null);
    Assert.assertTrue(extensionSchema instanceof ExtensionDimensionSchema);
}
Also used : DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) LongDimensionSchema(org.apache.druid.data.input.impl.LongDimensionSchema) FloatDimensionSchema(org.apache.druid.data.input.impl.FloatDimensionSchema) StringDimensionSchema(org.apache.druid.data.input.impl.StringDimensionSchema) DimensionSchema(org.apache.druid.data.input.impl.DimensionSchema) LongDimensionSchema(org.apache.druid.data.input.impl.LongDimensionSchema) DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) FloatDimensionSchema(org.apache.druid.data.input.impl.FloatDimensionSchema) StringDimensionSchema(org.apache.druid.data.input.impl.StringDimensionSchema) Test(org.junit.Test)

Aggregations

DimensionSchema (org.apache.druid.data.input.impl.DimensionSchema)17 ArrayList (java.util.ArrayList)10 DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec)9 StringDimensionSchema (org.apache.druid.data.input.impl.StringDimensionSchema)9 TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec)7 AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory)7 CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory)6 LongDimensionSchema (org.apache.druid.data.input.impl.LongDimensionSchema)5 DataSchema (org.apache.druid.segment.indexing.DataSchema)5 UniformGranularitySpec (org.apache.druid.segment.indexing.granularity.UniformGranularitySpec)5 DoubleDimensionSchema (org.apache.druid.data.input.impl.DoubleDimensionSchema)4 FloatDimensionSchema (org.apache.druid.data.input.impl.FloatDimensionSchema)4 HashMap (java.util.HashMap)3 ImmutableList (com.google.common.collect.ImmutableList)2 File (java.io.File)2 HashSet (java.util.HashSet)2 ClientCompactionTaskGranularitySpec (org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec)2 MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow)2 ParseSpec (org.apache.druid.data.input.impl.ParseSpec)2 HadoopDruidIndexerConfig (org.apache.druid.indexer.HadoopDruidIndexerConfig)2