Search in sources:

Example 6 with IAE

use of org.apache.druid.java.util.common.IAE in project druid by druid-io.

From class VSizeColumnarInts, method writeToBuffer:

/**
 * Writes each value of {@code ints} into {@code buffer}, using exactly {@code numBytes}
 * bytes per value, then resets the buffer's position to 0.
 *
 * @param buffer   destination buffer; its position is reset to 0 after writing
 * @param ints     values to serialize; each must be in [0, maxValue]
 * @param numBytes how many low-order bytes of each int to write
 * @param maxValue largest value the caller claims is present; used only for validation
 * @throws IAE if any value is negative or greater than {@code maxValue}
 */
private static void writeToBuffer(ByteBuffer buffer, IndexedInts ints, int numBytes, int maxValue) {
    // Scratch buffer: ByteBuffer.putInt writes big-endian by default, so the
    // numBytes least-significant bytes of the value end up at the tail of the array.
    ByteBuffer helperBuffer = ByteBuffer.allocate(Integer.BYTES);
    for (int i = 0, size = ints.size(); i < size; i++) {
        int val = ints.get(i);
        if (val < 0) {
            // The check accepts zero, so the message says "non-negative"
            // (previously it incorrectly claimed values must be "positive").
            throw new IAE("integer values must be non-negative, got[%d], i[%d]", val, i);
        }
        if (val > maxValue) {
            throw new IAE("val[%d] > maxValue[%d], please don't lie about maxValue.  i[%d]", val, maxValue, i);
        }
        helperBuffer.putInt(0, val);
        // Copy only the numBytes least-significant (trailing, big-endian) bytes.
        buffer.put(helperBuffer.array(), Integer.BYTES - numBytes, numBytes);
    }
    buffer.position(0);
}
Also used : IAE(org.apache.druid.java.util.common.IAE) ByteBuffer(java.nio.ByteBuffer)

Example 7 with IAE

use of org.apache.druid.java.util.common.IAE in project druid by druid-io.

From class VSizeColumnarInts, method readFromByteBuffer:

/**
 * Deserializes a {@code VSizeColumnarInts} from {@code buffer}, starting at the
 * buffer's current position. On success the source buffer's position is advanced
 * past the serialized payload, and the returned instance wraps a read-only view
 * of that payload.
 *
 * @throws IAE if the leading version byte does not match {@code VERSION}
 */
public static VSizeColumnarInts readFromByteBuffer(ByteBuffer buffer) {
    final byte versionFromBuffer = buffer.get();
    if (VERSION != versionFromBuffer) {
        throw new IAE("Unknown version[%s]", versionFromBuffer);
    }
    final int numBytes = buffer.get();
    final int payloadSize = buffer.getInt();
    // Take a read-only view limited to exactly the payload bytes, then advance the
    // source buffer past the payload so callers can continue reading what follows.
    final ByteBuffer payload = buffer.asReadOnlyBuffer();
    payload.limit(payload.position() + payloadSize);
    buffer.position(payload.limit());
    return new VSizeColumnarInts(payload, numBytes);
}
Also used : IAE(org.apache.druid.java.util.common.IAE) ByteBuffer(java.nio.ByteBuffer)

Example 8 with IAE

use of org.apache.druid.java.util.common.IAE in project druid by druid-io.

From class TopNQueryRunnerTest, method testTopNBySegment:

// Verifies that a TopN query issued with the "bySegment" context flag returns results
// wrapped per segment (BySegmentTopNResultValue), and that the unwrapped per-segment
// rows match the expected market/index aggregates for the first-to-third interval.
@Test
public void testTopNBySegment() {
    // Query context that enables by-segment result wrapping for this query only.
    final HashMap<String, Object> specialContext = new HashMap<String, Object>();
    specialContext.put(QueryContexts.BY_SEGMENT_KEY, "true");
    // Top-4 markets by index over the shared test datasource/interval.
    TopNQuery query = new TopNQueryBuilder().dataSource(QueryRunnerTestHelper.DATA_SOURCE).granularity(QueryRunnerTestHelper.ALL_GRAN).dimension(QueryRunnerTestHelper.MARKET_DIMENSION).metric(QueryRunnerTestHelper.INDEX_METRIC).threshold(4).intervals(QueryRunnerTestHelper.FIRST_TO_THIRD).aggregators(commonAggregators).postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT).context(specialContext).build();
    // Expected rows after unwrapping: total_market, upfront, spot (descending by index).
    List<Result<TopNResultValue>> expectedResults = Collections.singletonList(new Result<>(DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue(Arrays.<Map<String, Object>>asList(ImmutableMap.of("addRowsIndexConstant", 5356.814783D, "index", 5351.814783D, QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "uniques", QueryRunnerTestHelper.UNIQUES_2, "rows", 4L), ImmutableMap.of("addRowsIndexConstant", 4880.669692D, "index", 4875.669692D, QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "uniques", QueryRunnerTestHelper.UNIQUES_2, "rows", 4L), ImmutableMap.of("addRowsIndexConstant", 2250.876812D, "index", 2231.876812D, QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "uniques", QueryRunnerTestHelper.UNIQUES_9, "rows", 18L)))));
    Sequence<Result<TopNResultValue>> results = runWithMerge(query);
    // The by-segment wrapper erases generics, so each result value is re-checked and
    // re-wrapped with explicit instanceof checks before comparison.
    List<Result<BySegmentTopNResultValue>> resultList = results.map((Result<TopNResultValue> input) -> {
        // Stupid type erasure
        Object val = input.getValue();
        if (val instanceof BySegmentResultValue) {
            BySegmentResultValue bySegVal = (BySegmentResultValue) val;
            return new Result<>(input.getTimestamp(), new BySegmentTopNResultValue(Lists.transform(bySegVal.getResults(), res -> {
                if (Preconditions.checkNotNull(res) instanceof Result) {
                    Result theResult = (Result) res;
                    Object resVal = theResult.getValue();
                    if (resVal instanceof TopNResultValue) {
                        return new Result<>(theResult.getTimestamp(), (TopNResultValue) resVal);
                    }
                }
                // An inner element that isn't a Result<TopNResultValue> means the
                // by-segment plumbing is broken.
                throw new IAE("Bad input: [%s]", res);
            }), bySegVal.getSegmentId(), bySegVal.getInterval()));
        }
        // With BY_SEGMENT_KEY set, every top-level value must be a BySegmentResultValue.
        throw new ISE("Bad type");
    }).toList();
    // Compare the unwrapped rows of the first (only) segment result.
    Result<BySegmentTopNResultValue> result = resultList.get(0);
    TestHelper.assertExpectedResults(expectedResults, result.getValue().getResults());
}
Also used : QueryPlus(org.apache.druid.query.QueryPlus) Arrays(java.util.Arrays) ExtractionFn(org.apache.druid.query.extraction.ExtractionFn) ByteBuffer(java.nio.ByteBuffer) Pair(org.apache.druid.java.util.common.Pair) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) DimExtractionFn(org.apache.druid.query.extraction.DimExtractionFn) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) Map(java.util.Map) QueryRunner(org.apache.druid.query.QueryRunner) IAE(org.apache.druid.java.util.common.IAE) ExtractionDimensionSpec(org.apache.druid.query.dimension.ExtractionDimensionSpec) Parameterized(org.junit.runners.Parameterized) AndDimFilter(org.apache.druid.query.filter.AndDimFilter) DateTimes(org.apache.druid.java.util.common.DateTimes) Sequence(org.apache.druid.java.util.common.guava.Sequence) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) Longs(com.google.common.primitives.Longs) HyperUniquesAggregatorFactory(org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory) AfterClass(org.junit.AfterClass) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableMap(com.google.common.collect.ImmutableMap) Closer(org.apache.druid.java.util.common.io.Closer) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) FloatMaxAggregatorFactory(org.apache.druid.query.aggregation.FloatMaxAggregatorFactory) ISE(org.apache.druid.java.util.common.ISE) TestExprMacroTable(org.apache.druid.query.expression.TestExprMacroTable) HyperUniqueFinalizingPostAggregator(org.apache.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator) FloatLastAggregatorFactory(org.apache.druid.query.aggregation.last.FloatLastAggregatorFactory) RegexDimExtractionFn(org.apache.druid.query.extraction.RegexDimExtractionFn) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) 
QueryContexts(org.apache.druid.query.QueryContexts) ExprMacroTable(org.apache.druid.math.expr.ExprMacroTable) BySegmentResultValue(org.apache.druid.query.BySegmentResultValue) BySegmentResultValueClass(org.apache.druid.query.BySegmentResultValueClass) List(java.util.List) StringFormatExtractionFn(org.apache.druid.query.extraction.StringFormatExtractionFn) CloseableStupidPool(org.apache.druid.collections.CloseableStupidPool) ExpressionPostAggregator(org.apache.druid.query.aggregation.post.ExpressionPostAggregator) DimFilter(org.apache.druid.query.filter.DimFilter) QueryRunnerTestHelper(org.apache.druid.query.QueryRunnerTestHelper) DimensionSpec(org.apache.druid.query.dimension.DimensionSpec) Doubles(com.google.common.primitives.Doubles) DoubleFirstAggregatorFactory(org.apache.druid.query.aggregation.first.DoubleFirstAggregatorFactory) Iterables(com.google.common.collect.Iterables) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) Intervals(org.apache.druid.java.util.common.Intervals) JavaScriptExtractionFn(org.apache.druid.query.extraction.JavaScriptExtractionFn) FilteredAggregatorFactory(org.apache.druid.query.aggregation.FilteredAggregatorFactory) RunWith(org.junit.runner.RunWith) HashMap(java.util.HashMap) JavaScriptConfig(org.apache.druid.js.JavaScriptConfig) ArrayList(java.util.ArrayList) MapLookupExtractor(org.apache.druid.query.extraction.MapLookupExtractor) Lists(com.google.common.collect.Lists) ColumnHolder(org.apache.druid.segment.column.ColumnHolder) ImmutableList(com.google.common.collect.ImmutableList) LookupExtractionFn(org.apache.druid.query.lookup.LookupExtractionFn) DoubleMinAggregatorFactory(org.apache.druid.query.aggregation.DoubleMinAggregatorFactory) TestQueryRunners(org.apache.druid.query.TestQueryRunners) LongLastAggregatorFactory(org.apache.druid.query.aggregation.last.LongLastAggregatorFactory) StringComparators(org.apache.druid.query.ordering.StringComparators) 
LongFirstAggregatorFactory(org.apache.druid.query.aggregation.first.LongFirstAggregatorFactory) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) FloatMinAggregatorFactory(org.apache.druid.query.aggregation.FloatMinAggregatorFactory) ListFilteredDimensionSpec(org.apache.druid.query.dimension.ListFilteredDimensionSpec) BoundDimFilter(org.apache.druid.query.filter.BoundDimFilter) ExpressionVirtualColumn(org.apache.druid.segment.virtual.ExpressionVirtualColumn) ExpectedException(org.junit.rules.ExpectedException) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) FloatFirstAggregatorFactory(org.apache.druid.query.aggregation.first.FloatFirstAggregatorFactory) Nullable(javax.annotation.Nullable) DoubleMaxAggregatorFactory(org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory) CardinalityAggregatorFactory(org.apache.druid.query.aggregation.cardinality.CardinalityAggregatorFactory) ExpressionLambdaAggregatorFactory(org.apache.druid.query.aggregation.ExpressionLambdaAggregatorFactory) ResponseContext(org.apache.druid.query.context.ResponseContext) TimeFormatExtractionFn(org.apache.druid.query.extraction.TimeFormatExtractionFn) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test) IOException(java.io.IOException) ExtractionDimFilter(org.apache.druid.query.filter.ExtractionDimFilter) StrlenExtractionFn(org.apache.druid.query.extraction.StrlenExtractionFn) Granularities(org.apache.druid.java.util.common.granularity.Granularities) Result(org.apache.druid.query.Result) TestHelper(org.apache.druid.segment.TestHelper) Rule(org.junit.Rule) NullHandling(org.apache.druid.common.config.NullHandling) ColumnType(org.apache.druid.segment.column.ColumnType) Preconditions(com.google.common.base.Preconditions) Assert(org.junit.Assert) Collections(java.util.Collections) BySegmentResultValue(org.apache.druid.query.BySegmentResultValue) HashMap(java.util.HashMap) 
IAE(org.apache.druid.java.util.common.IAE) Result(org.apache.druid.query.Result) ISE(org.apache.druid.java.util.common.ISE) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)

Example 9 with IAE

use of org.apache.druid.java.util.common.IAE in project druid by druid-io.

From class JoinableFactoryWrapper, method computeJoinDataSourceCacheKey:

/**
 * Computes a cache key prefix for a join data source. The key covers the data sources on
 * the RHS of the join as well as query-specific constructs such as the base table filter,
 * and can be used for segment-level or result-level caching.
 *
 * @param dataSourceAnalysis analysis of the join datasource
 *
 * @return a non-empty key when every participating datasource supports caching;
 *         {@link Optional#empty()} when any clause's datasource does not
 *
 * @throws IAE if the analysis contains no join clauses (i.e. this is not a join data source)
 */
public Optional<byte[]> computeJoinDataSourceCacheKey(final DataSourceAnalysis dataSourceAnalysis) {
    final List<PreJoinableClause> joinClauses = dataSourceAnalysis.getPreJoinableClauses();
    if (joinClauses.isEmpty()) {
        throw new IAE("No join clauses to build the cache key for data source [%s]", dataSourceAnalysis.getDataSource());
    }
    final CacheKeyBuilder builder = new CacheKeyBuilder(JOIN_OPERATION);
    // The base table filter participates in the key when present.
    dataSourceAnalysis.getJoinBaseTableFilter().ifPresent(builder::appendCacheable);
    for (final PreJoinableClause clause : joinClauses) {
        final Optional<byte[]> clauseKey = joinableFactory.computeJoinCacheKey(clause.getDataSource(), clause.getCondition());
        if (!clauseKey.isPresent()) {
            // Encountered a data source which didn't support cache yet
            log.debug("skipping caching for join since [%s] does not support caching", clause.getDataSource());
            return Optional.empty();
        }
        // Fold in the clause's own key plus everything that affects join semantics:
        // the condition expression, the prefix, and the join type.
        builder.appendByteArray(clauseKey.get());
        builder.appendString(clause.getCondition().getOriginalExpression());
        builder.appendString(clause.getPrefix());
        builder.appendString(clause.getJoinType().name());
    }
    return Optional.of(builder.build());
}
Also used : PreJoinableClause(org.apache.druid.query.planning.PreJoinableClause) CacheKeyBuilder(org.apache.druid.query.cache.CacheKeyBuilder) IAE(org.apache.druid.java.util.common.IAE)

Example 10 with IAE

use of org.apache.druid.java.util.common.IAE in project druid by druid-io.

From class DictionaryEncodedColumnPartSerde, method getDeserializer:

@Override
public Deserializer getDeserializer() {
    // Anonymous Deserializer that reconstructs a dictionary-encoded string column from
    // its serialized form: version byte, optional flags, dictionary, row values, and
    // optional bitmap and spatial indexes. The reads are strictly order-dependent —
    // each step consumes bytes the next step relies on.
    return new Deserializer() {

        @Override
        public void read(ByteBuffer buffer, ColumnBuilder builder, ColumnConfig columnConfig) {
            final VERSION rVersion = VERSION.fromByte(buffer.get());
            final int rFlags;
            if (rVersion.compareTo(VERSION.COMPRESSED) >= 0) {
                // COMPRESSED and later formats carry an explicit flags int.
                rFlags = buffer.getInt();
            } else {
                // Older formats encode multi-value-ness in the version byte itself.
                rFlags = rVersion.equals(VERSION.UNCOMPRESSED_MULTI_VALUE) ? Feature.MULTI_VALUE.getMask() : NO_FLAGS;
            }
            final boolean hasMultipleValues = Feature.MULTI_VALUE.isSet(rFlags) || Feature.MULTI_VALUE_V3.isSet(rFlags);
            // Duplicate the first buffer since we are reading the dictionary twice.
            final GenericIndexed<String> rDictionary = GenericIndexed.read(buffer.duplicate(), GenericIndexed.STRING_STRATEGY, builder.getFileMapper());
            final GenericIndexed<ByteBuffer> rDictionaryUtf8 = GenericIndexed.read(buffer, GenericIndexed.BYTE_BUFFER_STRATEGY, builder.getFileMapper());
            builder.setType(ValueType.STRING);
            // Exactly one of the two suppliers is populated, depending on whether a row
            // may hold multiple values; the other stays null.
            final WritableSupplier<ColumnarInts> rSingleValuedColumn;
            final WritableSupplier<ColumnarMultiInts> rMultiValuedColumn;
            if (hasMultipleValues) {
                rMultiValuedColumn = readMultiValuedColumn(rVersion, buffer, rFlags);
                rSingleValuedColumn = null;
            } else {
                rSingleValuedColumn = readSingleValuedColumn(rVersion, buffer);
                rMultiValuedColumn = null;
            }
            // A null first dictionary entry is the signal that the column contains nulls.
            final String firstDictionaryEntry = rDictionary.get(0);
            DictionaryEncodedColumnSupplier dictionaryEncodedColumnSupplier = new DictionaryEncodedColumnSupplier(rDictionary, rDictionaryUtf8, rSingleValuedColumn, rMultiValuedColumn, columnConfig.columnCacheSizeBytes());
            builder.setHasMultipleValues(hasMultipleValues).setHasNulls(firstDictionaryEntry == null).setDictionaryEncodedColumnSupplier(dictionaryEncodedColumnSupplier);
            if (!Feature.NO_BITMAP_INDEX.isSet(rFlags)) {
                GenericIndexed<ImmutableBitmap> rBitmaps = GenericIndexed.read(buffer, bitmapSerdeFactory.getObjectStrategy(), builder.getFileMapper());
                builder.setBitmapIndex(new StringBitmapIndexColumnPartSupplier(bitmapSerdeFactory.getBitmapFactory(), rBitmaps, rDictionary));
            }
            // Any trailing bytes are the optional spatial (R-tree) index.
            if (buffer.hasRemaining()) {
                ImmutableRTree rSpatialIndex = new ImmutableRTreeObjectStrategy(bitmapSerdeFactory.getBitmapFactory()).fromByteBufferWithSize(buffer);
                builder.setSpatialIndex(new SpatialIndexColumnPartSupplier(rSpatialIndex));
            }
        }

        // Reads the row-value ints of a single-valued column; the concrete
        // representation is selected by the version byte.
        private WritableSupplier<ColumnarInts> readSingleValuedColumn(VERSION version, ByteBuffer buffer) {
            switch(version) {
                case UNCOMPRESSED_SINGLE_VALUE:
                case UNCOMPRESSED_WITH_FLAGS:
                    return VSizeColumnarInts.readFromByteBuffer(buffer);
                case COMPRESSED:
                    return CompressedVSizeColumnarIntsSupplier.fromByteBuffer(buffer, byteOrder);
                default:
                    throw new IAE("Unsupported single-value version[%s]", version);
            }
        }

        // Reads the row-value int arrays of a multi-valued column; both the version
        // byte and the flags select the concrete representation.
        private WritableSupplier<ColumnarMultiInts> readMultiValuedColumn(VERSION version, ByteBuffer buffer, int flags) {
            switch(version) {
                case UNCOMPRESSED_MULTI_VALUE:
                    {
                        return VSizeColumnarMultiInts.readFromByteBuffer(buffer);
                    }
                case UNCOMPRESSED_WITH_FLAGS:
                    {
                        if (Feature.MULTI_VALUE.isSet(flags)) {
                            return VSizeColumnarMultiInts.readFromByteBuffer(buffer);
                        } else {
                            throw new IAE("Unrecognized multi-value flag[%d] for version[%s]", flags, version);
                        }
                    }
                case COMPRESSED:
                    {
                        // MULTI_VALUE and MULTI_VALUE_V3 use different compressed layouts.
                        if (Feature.MULTI_VALUE.isSet(flags)) {
                            return CompressedVSizeColumnarMultiIntsSupplier.fromByteBuffer(buffer, byteOrder);
                        } else if (Feature.MULTI_VALUE_V3.isSet(flags)) {
                            return V3CompressedVSizeColumnarMultiIntsSupplier.fromByteBuffer(buffer, byteOrder);
                        } else {
                            throw new IAE("Unrecognized multi-value flag[%d] for version[%s]", flags, version);
                        }
                    }
                default:
                    throw new IAE("Unsupported multi-value version[%s]", version);
            }
        }
    };
}
Also used : ColumnConfig(org.apache.druid.segment.column.ColumnConfig) ImmutableBitmap(org.apache.druid.collections.bitmap.ImmutableBitmap) ColumnarInts(org.apache.druid.segment.data.ColumnarInts) VSizeColumnarInts(org.apache.druid.segment.data.VSizeColumnarInts) IAE(org.apache.druid.java.util.common.IAE) ByteBuffer(java.nio.ByteBuffer) ColumnarMultiInts(org.apache.druid.segment.data.ColumnarMultiInts) VSizeColumnarMultiInts(org.apache.druid.segment.data.VSizeColumnarMultiInts) ImmutableRTree(org.apache.druid.collections.spatial.ImmutableRTree) ImmutableRTreeObjectStrategy(org.apache.druid.segment.data.ImmutableRTreeObjectStrategy) ColumnBuilder(org.apache.druid.segment.column.ColumnBuilder)

Aggregations

IAE (org.apache.druid.java.util.common.IAE)115 ISE (org.apache.druid.java.util.common.ISE)23 IOException (java.io.IOException)20 ByteBuffer (java.nio.ByteBuffer)19 ArrayList (java.util.ArrayList)16 List (java.util.List)14 Expr (org.apache.druid.math.expr.Expr)14 Nullable (javax.annotation.Nullable)12 ColumnType (org.apache.druid.segment.column.ColumnType)10 HashSet (java.util.HashSet)8 Map (java.util.Map)8 Interval (org.joda.time.Interval)8 VisibleForTesting (com.google.common.annotations.VisibleForTesting)7 HashMap (java.util.HashMap)7 AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory)7 File (java.io.File)6 Iterables (com.google.common.collect.Iterables)5 Arrays (java.util.Arrays)5 Test (org.junit.Test)5 ImmutableMap (com.google.common.collect.ImmutableMap)4