Example 21 with ColumnAnalysis

Use of io.druid.query.metadata.metadata.ColumnAnalysis in project druid by druid-io.

From the class SegmentAnalyzer, the method analyzeStringColumn:

private ColumnAnalysis analyzeStringColumn(final ColumnCapabilities capabilities, final StorageAdapter storageAdapter, final String columnName) {
    int cardinality = 0;
    long size = 0;
    Comparable min = null;
    Comparable max = null;
    if (analyzingCardinality()) {
        cardinality = storageAdapter.getDimensionCardinality(columnName);
    }
    if (analyzingSize()) {
        final long start = storageAdapter.getMinTime().getMillis();
        final long end = storageAdapter.getMaxTime().getMillis();
        final Sequence<Cursor> cursors = storageAdapter.makeCursors(null, new Interval(start, end), VirtualColumns.EMPTY, Granularities.ALL, false);
        size = cursors.accumulate(0L, new Accumulator<Long, Cursor>() {

            @Override
            public Long accumulate(Long accumulated, Cursor cursor) {
                DimensionSelector selector = cursor.makeDimensionSelector(new DefaultDimensionSpec(columnName, columnName));
                if (selector == null) {
                    return accumulated;
                }
                long current = accumulated;
                while (!cursor.isDone()) {
                    final IndexedInts vals = selector.getRow();
                    for (int i = 0; i < vals.size(); ++i) {
                        final String dimVal = selector.lookupName(vals.get(i));
                        if (dimVal != null && !dimVal.isEmpty()) {
                            current += StringUtils.estimatedBinaryLengthAsUTF8(dimVal);
                        }
                    }
                    cursor.advance();
                }
                return current;
            }
        });
    }
    if (analyzingMinMax()) {
        min = storageAdapter.getMinValue(columnName);
        max = storageAdapter.getMaxValue(columnName);
    }
    return new ColumnAnalysis(capabilities.getType().name(), capabilities.hasMultipleValues(), size, cardinality, min, max, null);
}
Also used: Accumulator (io.druid.java.util.common.guava.Accumulator), DimensionSelector (io.druid.segment.DimensionSelector), Cursor (io.druid.segment.Cursor), DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec), IndexedInts (io.druid.segment.data.IndexedInts), ColumnAnalysis (io.druid.query.metadata.metadata.ColumnAnalysis), Interval (org.joda.time.Interval)
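
The size pass above walks every row through a Cursor and sums StringUtils.estimatedBinaryLengthAsUTF8 over the non-null, non-empty dimension values. As an illustration of the same accumulation logic outside Druid's Sequence/Cursor machinery, here is a minimal, self-contained sketch; the estimateUtf8Bytes helper is a hypothetical stand-in for Druid's estimator, which may use a cheaper approximation:

import java.util.Arrays;
import java.util.List;

public class SizeEstimateSketch {

    // Hypothetical stand-in for StringUtils.estimatedBinaryLengthAsUTF8:
    // counts exact UTF-8 bytes per char (1, 2, or 3; surrogate pairs -> 4).
    static long estimateUtf8Bytes(String s) {
        long bytes = 0;
        for (int i = 0; i < s.length(); i++) {
            char c = s.charAt(i);
            if (c < 0x80) {
                bytes += 1;
            } else if (c < 0x800) {
                bytes += 2;
            } else if (Character.isHighSurrogate(c)) {
                bytes += 4; // a surrogate pair encodes to 4 UTF-8 bytes
                i++;        // skip the low surrogate
            } else {
                bytes += 3;
            }
        }
        return bytes;
    }

    public static void main(String[] args) {
        // Analogous to folding over cursor rows: skip null/empty values,
        // sum the estimated byte size of everything else.
        List<String> dimValues = Arrays.asList("foo", "", null, "bär");
        long size = 0L;
        for (String dimVal : dimValues) {
            if (dimVal != null && !dimVal.isEmpty()) {
                size += estimateUtf8Bytes(dimVal);
            }
        }
        System.out.println("estimated size in bytes: " + size); // 3 + 4 = 7
    }
}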

Example 22 with ColumnAnalysis

Use of io.druid.query.metadata.metadata.ColumnAnalysis in project druid by druid-io.

From the class SegmentMetadataQueryQueryToolChest, the method mergeAnalyses:

@VisibleForTesting
public static SegmentAnalysis mergeAnalyses(final SegmentAnalysis arg1, final SegmentAnalysis arg2, boolean lenientAggregatorMerge) {
    if (arg1 == null) {
        return arg2;
    }
    if (arg2 == null) {
        return arg1;
    }
    List<Interval> newIntervals = null;
    if (arg1.getIntervals() != null) {
        newIntervals = Lists.newArrayList();
        newIntervals.addAll(arg1.getIntervals());
    }
    if (arg2.getIntervals() != null) {
        if (newIntervals == null) {
            newIntervals = Lists.newArrayList();
        }
        newIntervals.addAll(arg2.getIntervals());
    }
    final Map<String, ColumnAnalysis> leftColumns = arg1.getColumns();
    final Map<String, ColumnAnalysis> rightColumns = arg2.getColumns();
    Map<String, ColumnAnalysis> columns = Maps.newTreeMap();
    Set<String> rightColumnNames = Sets.newHashSet(rightColumns.keySet());
    for (Map.Entry<String, ColumnAnalysis> entry : leftColumns.entrySet()) {
        final String columnName = entry.getKey();
        columns.put(columnName, entry.getValue().fold(rightColumns.get(columnName)));
        rightColumnNames.remove(columnName);
    }
    for (String columnName : rightColumnNames) {
        columns.put(columnName, rightColumns.get(columnName));
    }
    final Map<String, AggregatorFactory> aggregators = Maps.newHashMap();
    if (lenientAggregatorMerge) {
        // Merge each aggregator individually, ignoring nulls
        for (SegmentAnalysis analysis : ImmutableList.of(arg1, arg2)) {
            if (analysis.getAggregators() != null) {
                for (Map.Entry<String, AggregatorFactory> entry : analysis.getAggregators().entrySet()) {
                    final String aggregatorName = entry.getKey();
                    final AggregatorFactory aggregator = entry.getValue();
                    AggregatorFactory merged = aggregators.get(aggregatorName);
                    if (merged != null) {
                        try {
                            merged = merged.getMergingFactory(aggregator);
                        } catch (AggregatorFactoryNotMergeableException e) {
                            merged = null;
                        }
                    } else {
                        merged = aggregator;
                    }
                    aggregators.put(aggregatorName, merged);
                }
            }
        }
    } else {
        final AggregatorFactory[] aggs1 = arg1.getAggregators() != null ? arg1.getAggregators().values().toArray(new AggregatorFactory[arg1.getAggregators().size()]) : null;
        final AggregatorFactory[] aggs2 = arg2.getAggregators() != null ? arg2.getAggregators().values().toArray(new AggregatorFactory[arg2.getAggregators().size()]) : null;
        final AggregatorFactory[] merged = AggregatorFactory.mergeAggregators(Arrays.asList(aggs1, aggs2));
        if (merged != null) {
            for (AggregatorFactory aggregator : merged) {
                aggregators.put(aggregator.getName(), aggregator);
            }
        }
    }
    final TimestampSpec timestampSpec = TimestampSpec.mergeTimestampSpec(Lists.newArrayList(arg1.getTimestampSpec(), arg2.getTimestampSpec()));
    final Granularity queryGranularity = Granularity.mergeGranularities(Lists.newArrayList(arg1.getQueryGranularity(), arg2.getQueryGranularity()));
    final String mergedId;
    if (arg1.getId() != null && arg2.getId() != null && arg1.getId().equals(arg2.getId())) {
        mergedId = arg1.getId();
    } else {
        mergedId = "merged";
    }
    final Boolean rollup;
    if (arg1.isRollup() != null && arg2.isRollup() != null && arg1.isRollup().equals(arg2.isRollup())) {
        rollup = arg1.isRollup();
    } else {
        rollup = null;
    }
    return new SegmentAnalysis(mergedId, newIntervals, columns, arg1.getSize() + arg2.getSize(), arg1.getNumRows() + arg2.getNumRows(), aggregators.isEmpty() ? null : aggregators, timestampSpec, queryGranularity, rollup);
}
Also used: AggregatorFactory (io.druid.query.aggregation.AggregatorFactory), Granularity (io.druid.java.util.common.granularity.Granularity), AggregatorFactoryNotMergeableException (io.druid.query.aggregation.AggregatorFactoryNotMergeableException), ColumnAnalysis (io.druid.query.metadata.metadata.ColumnAnalysis), TimestampSpec (io.druid.data.input.impl.TimestampSpec), SegmentAnalysis (io.druid.query.metadata.metadata.SegmentAnalysis), Map (java.util.Map), Interval (org.joda.time.Interval), VisibleForTesting (com.google.common.annotations.VisibleForTesting)
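
The per-column merge above relies on ColumnAnalysis.fold plus the rightColumnNames bookkeeping to ensure that left-only, right-only, and shared columns all land in the result. A minimal sketch of that map-merge pattern, assuming a simplified SimpleColumn whose fold just adds sizes (illustrative only; Druid's real fold also reconciles types, cardinality, and min/max):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

public class MergeColumnsSketch {

    // Illustrative stand-in for ColumnAnalysis: a size that folds by addition.
    static class SimpleColumn {
        final long size;
        SimpleColumn(long size) { this.size = size; }

        // fold(null) returns this, mirroring how left-only columns survive the merge
        SimpleColumn fold(SimpleColumn rhs) {
            return rhs == null ? this : new SimpleColumn(this.size + rhs.size);
        }
    }

    static Map<String, SimpleColumn> merge(Map<String, SimpleColumn> left, Map<String, SimpleColumn> right) {
        Map<String, SimpleColumn> columns = new TreeMap<>();
        Set<String> rightOnly = new HashSet<>(right.keySet());
        for (Map.Entry<String, SimpleColumn> entry : left.entrySet()) {
            // Shared columns are folded; right.get() is null for left-only columns.
            columns.put(entry.getKey(), entry.getValue().fold(right.get(entry.getKey())));
            rightOnly.remove(entry.getKey());
        }
        for (String name : rightOnly) {
            columns.put(name, right.get(name)); // right-only columns pass through unchanged
        }
        return columns;
    }

    public static void main(String[] args) {
        Map<String, SimpleColumn> left = new HashMap<>();
        left.put("country", new SimpleColumn(100));
        left.put("city", new SimpleColumn(40));
        Map<String, SimpleColumn> right = new HashMap<>();
        right.put("country", new SimpleColumn(60));
        right.put("device", new SimpleColumn(25));
        merge(left, right).forEach((k, v) -> System.out.println(k + " -> " + v.size));
        // city -> 40, country -> 160, device -> 25
    }
}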

Example 23 with ColumnAnalysis

Use of io.druid.query.metadata.metadata.ColumnAnalysis in project druid by druid-io.

From the class SegmentMetadataQueryRunnerFactory, the method createRunner:

@Override
public QueryRunner<SegmentAnalysis> createRunner(final Segment segment) {
    return new QueryRunner<SegmentAnalysis>() {

        @Override
        public Sequence<SegmentAnalysis> run(Query<SegmentAnalysis> inQ, Map<String, Object> responseContext) {
            SegmentMetadataQuery query = (SegmentMetadataQuery) inQ;
            final SegmentAnalyzer analyzer = new SegmentAnalyzer(query.getAnalysisTypes());
            final Map<String, ColumnAnalysis> analyzedColumns = analyzer.analyze(segment);
            final long numRows = analyzer.numRows(segment);
            long totalSize = 0;
            if (analyzer.analyzingSize()) {
                // Initialize with the size of the whitespace: one byte per column per row
                totalSize = analyzedColumns.size() * numRows;
            }
            Map<String, ColumnAnalysis> columns = Maps.newTreeMap();
            ColumnIncluderator includerator = query.getToInclude();
            for (Map.Entry<String, ColumnAnalysis> entry : analyzedColumns.entrySet()) {
                final String columnName = entry.getKey();
                final ColumnAnalysis column = entry.getValue();
                if (!column.isError()) {
                    totalSize += column.getSize();
                }
                if (includerator.include(columnName)) {
                    columns.put(columnName, column);
                }
            }
            List<Interval> retIntervals = query.analyzingInterval() ? Arrays.asList(segment.getDataInterval()) : null;
            final Map<String, AggregatorFactory> aggregators;
            Metadata metadata = null;
            if (query.hasAggregators()) {
                metadata = segment.asStorageAdapter().getMetadata();
                if (metadata != null && metadata.getAggregators() != null) {
                    aggregators = Maps.newHashMap();
                    for (AggregatorFactory aggregator : metadata.getAggregators()) {
                        aggregators.put(aggregator.getName(), aggregator);
                    }
                } else {
                    aggregators = null;
                }
            } else {
                aggregators = null;
            }
            final TimestampSpec timestampSpec;
            if (query.hasTimestampSpec()) {
                if (metadata == null) {
                    metadata = segment.asStorageAdapter().getMetadata();
                }
                timestampSpec = metadata != null ? metadata.getTimestampSpec() : null;
            } else {
                timestampSpec = null;
            }
            final Granularity queryGranularity;
            if (query.hasQueryGranularity()) {
                if (metadata == null) {
                    metadata = segment.asStorageAdapter().getMetadata();
                }
                queryGranularity = metadata != null ? metadata.getQueryGranularity() : null;
            } else {
                queryGranularity = null;
            }
            Boolean rollup = null;
            if (query.hasRollup()) {
                if (metadata == null) {
                    metadata = segment.asStorageAdapter().getMetadata();
                }
                rollup = metadata != null ? metadata.isRollup() : null;
                if (rollup == null) {
                    // this segment was built before the no-rollup feature existed,
                    // so it was built with rollup
                    rollup = Boolean.TRUE;
                }
            }
            return Sequences.simple(Arrays.asList(new SegmentAnalysis(segment.getIdentifier(), retIntervals, columns, totalSize, numRows, aggregators, timestampSpec, queryGranularity, rollup)));
        }
    };
}
Also used: BaseQuery (io.druid.query.BaseQuery), SegmentMetadataQuery (io.druid.query.metadata.metadata.SegmentMetadataQuery), Query (io.druid.query.Query), Metadata (io.druid.segment.Metadata), AggregatorFactory (io.druid.query.aggregation.AggregatorFactory), Granularity (io.druid.java.util.common.granularity.Granularity), ColumnIncluderator (io.druid.query.metadata.metadata.ColumnIncluderator), QueryRunner (io.druid.query.QueryRunner), ConcatQueryRunner (io.druid.query.ConcatQueryRunner), ColumnAnalysis (io.druid.query.metadata.metadata.ColumnAnalysis), TimestampSpec (io.druid.data.input.impl.TimestampSpec), SegmentAnalysis (io.druid.query.metadata.metadata.SegmentAnalysis), Map (java.util.Map), Interval (org.joda.time.Interval)
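
Note how the loop above counts every non-error column toward totalSize but only returns the columns the ColumnIncluderator accepts. A minimal sketch of that filter-but-still-count pattern, using a plain Set in place of a ListColumnIncluderator (the names and the negative-size error marker are illustrative assumptions, not Druid's API):

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

public class IncluderatorSketch {
    public static void main(String[] args) {
        // Analyzed sizes per column; a negative size marks a hypothetical error column.
        Map<String, Long> analyzedColumns = new HashMap<>();
        analyzedColumns.put("__time", 80L);
        analyzedColumns.put("country", 120L);
        analyzedColumns.put("broken", -1L);

        // Stand-in for ListColumnIncluderator: include only the listed columns.
        Set<String> toInclude = new HashSet<>(Arrays.asList("__time", "country"));

        long totalSize = 0;
        Map<String, Long> columns = new TreeMap<>();
        for (Map.Entry<String, Long> entry : analyzedColumns.entrySet()) {
            boolean isError = entry.getValue() < 0;
            if (!isError) {
                totalSize += entry.getValue(); // every healthy column counts toward the total
            }
            if (toInclude.contains(entry.getKey())) {
                columns.put(entry.getKey(), entry.getValue()); // but only requested ones are returned
            }
        }
        System.out.println("totalSize=" + totalSize + ", returned=" + columns.keySet());
        // totalSize=200, returned=[__time, country]
    }
}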

Aggregations

ColumnAnalysis (io.druid.query.metadata.metadata.ColumnAnalysis) 23
SegmentAnalysis (io.druid.query.metadata.metadata.SegmentAnalysis) 16
Test (org.junit.Test) 12
QueryRunner (io.druid.query.QueryRunner) 9
ListColumnIncluderator (io.druid.query.metadata.metadata.ListColumnIncluderator) 9
FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner) 8
QueryToolChest (io.druid.query.QueryToolChest) 8
ExecutorService (java.util.concurrent.ExecutorService) 8
SegmentMetadataQuery (io.druid.query.metadata.metadata.SegmentMetadataQuery) 6
Interval (org.joda.time.Interval) 4
TimestampSpec (io.druid.data.input.impl.TimestampSpec) 3
Query (io.druid.query.Query) 3
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory) 3
Map (java.util.Map) 3
DimensionSchema (io.druid.data.input.impl.DimensionSchema) 2
Granularity (io.druid.java.util.common.granularity.Granularity) 2
TableDataSource (io.druid.query.TableDataSource) 2
ComplexColumn (io.druid.segment.column.ComplexColumn) 2
ValueType (io.druid.segment.column.ValueType) 2
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper) 1