Example usage of org.apache.druid.java.util.common.IAE in the druid-io/druid project: class VSizeColumnarInts, method writeToBuffer.
/**
 * Serializes each value of {@code ints} into {@code buffer} using exactly {@code numBytes}
 * bytes per value, then rewinds {@code buffer} to position 0.
 *
 * @throws IAE if a value is negative or exceeds {@code maxValue}
 */
private static void writeToBuffer(ByteBuffer buffer, IndexedInts ints, int numBytes, int maxValue) {
    // Scratch buffer: the value is written big-endian here, and only its trailing
    // (least-significant) numBytes bytes are copied into the output buffer.
    final ByteBuffer scratch = ByteBuffer.allocate(Integer.BYTES);
    final int size = ints.size();
    for (int index = 0; index < size; index++) {
        final int value = ints.get(index);
        if (value < 0) {
            throw new IAE("integer values must be positive, got[%d], i[%d]", value, index);
        }
        if (value > maxValue) {
            throw new IAE("val[%d] > maxValue[%d], please don't lie about maxValue. i[%d]", value, maxValue, index);
        }
        scratch.putInt(0, value);
        // Big-endian layout puts the significant low-order bytes at the tail of the array.
        buffer.put(scratch.array(), Integer.BYTES - numBytes, numBytes);
    }
    buffer.position(0);
}
Example usage of org.apache.druid.java.util.common.IAE in the druid-io/druid project: class VSizeColumnarInts, method readFromByteBuffer.
/**
 * Deserializes a VSizeColumnarInts from {@code buffer}, starting at the buffer's current
 * position. On return, {@code buffer}'s position is advanced past the consumed bytes.
 *
 * @throws IAE if the leading version byte is not the expected {@code VERSION}
 */
public static VSizeColumnarInts readFromByteBuffer(ByteBuffer buffer) {
    final byte versionFromBuffer = buffer.get();
    // Guard clause: reject anything but the single supported version up front.
    if (VERSION != versionFromBuffer) {
        throw new IAE("Unknown version[%s]", versionFromBuffer);
    }
    final int numBytes = buffer.get();
    final int size = buffer.getInt();
    // Read-only view scoped to exactly this column's payload; the source buffer is
    // advanced past the payload so the caller can continue reading what follows.
    final ByteBuffer payload = buffer.asReadOnlyBuffer();
    payload.limit(payload.position() + size);
    buffer.position(payload.limit());
    return new VSizeColumnarInts(payload, numBytes);
}
Example usage of org.apache.druid.java.util.common.IAE in the druid-io/druid project: class TopNQueryRunnerTest, method testTopNBySegment.
/**
 * Verifies that a TopN query issued with the by-segment context flag returns its results
 * wrapped in {@link BySegmentTopNResultValue}, and that the per-segment rows match the
 * expected top-4 markets for the first-to-third interval.
 */
@Test
public void testTopNBySegment() {
    // Ask the query stack to return per-segment results rather than merged ones.
    final HashMap<String, Object> bySegmentContext = new HashMap<String, Object>();
    bySegmentContext.put(QueryContexts.BY_SEGMENT_KEY, "true");

    final TopNQuery query = new TopNQueryBuilder()
        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .granularity(QueryRunnerTestHelper.ALL_GRAN)
        .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
        .metric(QueryRunnerTestHelper.INDEX_METRIC)
        .threshold(4)
        .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .aggregators(commonAggregators)
        .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
        .context(bySegmentContext)
        .build();

    final List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
        new Result<>(
            DateTimes.of("2011-04-01T00:00:00.000Z"),
            new TopNResultValue(Arrays.<Map<String, Object>>asList(
                ImmutableMap.of("addRowsIndexConstant", 5356.814783D, "index", 5351.814783D, QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "uniques", QueryRunnerTestHelper.UNIQUES_2, "rows", 4L),
                ImmutableMap.of("addRowsIndexConstant", 4880.669692D, "index", 4875.669692D, QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "uniques", QueryRunnerTestHelper.UNIQUES_2, "rows", 4L),
                ImmutableMap.of("addRowsIndexConstant", 2250.876812D, "index", 2231.876812D, QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "uniques", QueryRunnerTestHelper.UNIQUES_9, "rows", 18L)))));

    final Sequence<Result<TopNResultValue>> merged = runWithMerge(query);

    // Re-wrap the by-segment payloads with the expected generic types.
    final List<Result<BySegmentTopNResultValue>> resultList = merged.map((Result<TopNResultValue> input) -> {
        // Stupid type erasure
        final Object val = input.getValue();
        if (!(val instanceof BySegmentResultValue)) {
            throw new ISE("Bad type");
        }
        final BySegmentResultValue bySegVal = (BySegmentResultValue) val;
        return new Result<>(
            input.getTimestamp(),
            new BySegmentTopNResultValue(
                Lists.transform(bySegVal.getResults(), res -> {
                    Preconditions.checkNotNull(res);
                    if (res instanceof Result) {
                        final Result inner = (Result) res;
                        final Object innerVal = inner.getValue();
                        if (innerVal instanceof TopNResultValue) {
                            return new Result<>(inner.getTimestamp(), (TopNResultValue) innerVal);
                        }
                    }
                    throw new IAE("Bad input: [%s]", res);
                }),
                bySegVal.getSegmentId(),
                bySegVal.getInterval()));
    }).toList();

    final Result<BySegmentTopNResultValue> result = resultList.get(0);
    TestHelper.assertExpectedResults(expectedResults, result.getValue().getResults());
}
Example usage of org.apache.druid.java.util.common.IAE in the druid-io/druid project: class JoinableFactoryWrapper, method computeJoinDataSourceCacheKey.
/**
 * Computes a cache key prefix for a join data source. This includes the data sources that participate in the RHS of a
 * join as well as any query-specific constructs associated with the join data source, such as a base table filter.
 * The key prefix can be used in the segment-level cache or the result-level cache. The returned Optional is:
 * - Non-empty byte array - there is a join data source involved and caching is possible. The key incorporates the
 *   join condition expression, join prefix, join type, and the cache key returned by the joinable factory for each
 *   {@link PreJoinableClause}
 * - Empty - there is a join but caching is not possible, because at least one participating data source in the
 *   JOIN is not cacheable.
 *
 * @param dataSourceAnalysis analysis of the join data source
 *
 * @return the optional cache key to be used as part of the query cache key
 *
 * @throws IAE if this operation is called on a non-join data source
 */
public Optional<byte[]> computeJoinDataSourceCacheKey(final DataSourceAnalysis dataSourceAnalysis) {
    final List<PreJoinableClause> clauses = dataSourceAnalysis.getPreJoinableClauses();
    if (clauses.isEmpty()) {
        throw new IAE("No join clauses to build the cache key for data source [%s]", dataSourceAnalysis.getDataSource());
    }
    final CacheKeyBuilder keyBuilder = new CacheKeyBuilder(JOIN_OPERATION);
    // Fold the base table filter into the key when present, so the same join with a
    // different base filter does not collide in the cache.
    dataSourceAnalysis.getJoinBaseTableFilter().ifPresent(keyBuilder::appendCacheable);
    for (PreJoinableClause clause : clauses) {
        final Optional<byte[]> bytes = joinableFactory.computeJoinCacheKey(clause.getDataSource(), clause.getCondition());
        if (!bytes.isPresent()) {
            // Encountered a data source which didn't support caching yet; the whole join is uncacheable.
            log.debug("skipping caching for join since [%s] does not support caching", clause.getDataSource());
            return Optional.empty();
        }
        keyBuilder.appendByteArray(bytes.get());
        keyBuilder.appendString(clause.getCondition().getOriginalExpression());
        keyBuilder.appendString(clause.getPrefix());
        keyBuilder.appendString(clause.getJoinType().name());
    }
    return Optional.of(keyBuilder.build());
}
Example usage of org.apache.druid.java.util.common.IAE in the druid-io/druid project: class DictionaryEncodedColumnPartSerde, method getDeserializer.
@Override
public Deserializer getDeserializer() {
    return new Deserializer() {
        /**
         * Reads a serialized dictionary-encoded string column out of {@code buffer} and installs its
         * parts on {@code builder}. The serialized layout is strictly ordered: version byte, optional
         * flags int, dictionary, value column(s), optional bitmap indexes, optional spatial index.
         */
        @Override
        public void read(ByteBuffer buffer, ColumnBuilder builder, ColumnConfig columnConfig) {
            final VERSION rVersion = VERSION.fromByte(buffer.get());
            final int rFlags;
            // Only COMPRESSED (and later) formats carry an explicit flags int; older
            // versions encode multi-value-ness in the version byte itself.
            if (rVersion.compareTo(VERSION.COMPRESSED) >= 0) {
                rFlags = buffer.getInt();
            } else {
                rFlags = rVersion.equals(VERSION.UNCOMPRESSED_MULTI_VALUE) ? Feature.MULTI_VALUE.getMask() : NO_FLAGS;
            }
            final boolean hasMultipleValues = Feature.MULTI_VALUE.isSet(rFlags) || Feature.MULTI_VALUE_V3.isSet(rFlags);
            // Duplicate the first buffer since we are reading the dictionary twice.
            final GenericIndexed<String> rDictionary = GenericIndexed.read(buffer.duplicate(), GenericIndexed.STRING_STRATEGY, builder.getFileMapper());
            final GenericIndexed<ByteBuffer> rDictionaryUtf8 = GenericIndexed.read(buffer, GenericIndexed.BYTE_BUFFER_STRATEGY, builder.getFileMapper());
            builder.setType(ValueType.STRING);
            // Exactly one of the two column suppliers is populated, depending on multi-value-ness.
            final WritableSupplier<ColumnarInts> rSingleValuedColumn;
            final WritableSupplier<ColumnarMultiInts> rMultiValuedColumn;
            if (hasMultipleValues) {
                rMultiValuedColumn = readMultiValuedColumn(rVersion, buffer, rFlags);
                rSingleValuedColumn = null;
            } else {
                rSingleValuedColumn = readSingleValuedColumn(rVersion, buffer);
                rMultiValuedColumn = null;
            }
            // A null first dictionary entry is how this format represents the presence of nulls.
            final String firstDictionaryEntry = rDictionary.get(0);
            DictionaryEncodedColumnSupplier dictionaryEncodedColumnSupplier = new DictionaryEncodedColumnSupplier(rDictionary, rDictionaryUtf8, rSingleValuedColumn, rMultiValuedColumn, columnConfig.columnCacheSizeBytes());
            builder.setHasMultipleValues(hasMultipleValues).setHasNulls(firstDictionaryEntry == null).setDictionaryEncodedColumnSupplier(dictionaryEncodedColumnSupplier);
            // Bitmap indexes follow unless the NO_BITMAP_INDEX flag was set at write time.
            if (!Feature.NO_BITMAP_INDEX.isSet(rFlags)) {
                GenericIndexed<ImmutableBitmap> rBitmaps = GenericIndexed.read(buffer, bitmapSerdeFactory.getObjectStrategy(), builder.getFileMapper());
                builder.setBitmapIndex(new StringBitmapIndexColumnPartSupplier(bitmapSerdeFactory.getBitmapFactory(), rBitmaps, rDictionary));
            }
            // Any trailing bytes are an optional spatial (R-tree) index.
            if (buffer.hasRemaining()) {
                ImmutableRTree rSpatialIndex = new ImmutableRTreeObjectStrategy(bitmapSerdeFactory.getBitmapFactory()).fromByteBufferWithSize(buffer);
                builder.setSpatialIndex(new SpatialIndexColumnPartSupplier(rSpatialIndex));
            }
        }
        /**
         * Reads the single-valued int column for the given serialization {@code version}.
         *
         * @throws IAE for versions that cannot carry a single-valued column
         */
        private WritableSupplier<ColumnarInts> readSingleValuedColumn(VERSION version, ByteBuffer buffer) {
            switch(version) {
                case UNCOMPRESSED_SINGLE_VALUE:
                case UNCOMPRESSED_WITH_FLAGS:
                    return VSizeColumnarInts.readFromByteBuffer(buffer);
                case COMPRESSED:
                    return CompressedVSizeColumnarIntsSupplier.fromByteBuffer(buffer, byteOrder);
                default:
                    throw new IAE("Unsupported single-value version[%s]", version);
            }
        }
        /**
         * Reads the multi-valued int column for the given serialization {@code version} and
         * {@code flags}. COMPRESSED columns additionally distinguish the V3 multi-value layout.
         *
         * @throws IAE if the flags do not match a recognized multi-value layout for the version
         */
        private WritableSupplier<ColumnarMultiInts> readMultiValuedColumn(VERSION version, ByteBuffer buffer, int flags) {
            switch(version) {
                case UNCOMPRESSED_MULTI_VALUE:
                {
                    return VSizeColumnarMultiInts.readFromByteBuffer(buffer);
                }
                case UNCOMPRESSED_WITH_FLAGS:
                {
                    if (Feature.MULTI_VALUE.isSet(flags)) {
                        return VSizeColumnarMultiInts.readFromByteBuffer(buffer);
                    } else {
                        throw new IAE("Unrecognized multi-value flag[%d] for version[%s]", flags, version);
                    }
                }
                case COMPRESSED:
                {
                    if (Feature.MULTI_VALUE.isSet(flags)) {
                        return CompressedVSizeColumnarMultiIntsSupplier.fromByteBuffer(buffer, byteOrder);
                    } else if (Feature.MULTI_VALUE_V3.isSet(flags)) {
                        return V3CompressedVSizeColumnarMultiIntsSupplier.fromByteBuffer(buffer, byteOrder);
                    } else {
                        throw new IAE("Unrecognized multi-value flag[%d] for version[%s]", flags, version);
                    }
                }
                default:
                    throw new IAE("Unsupported multi-value version[%s]", version);
            }
        }
    };
}
Aggregations