Use of org.apache.druid.segment.data.BitmapSerdeFactory in project druid by druid-io.
Class DictionaryEncodedColumnMerger, method writeIndexes:
@Override
public void writeIndexes(@Nullable List<IntBuffer> segmentRowNumConversions) throws IOException
{
  if (!capabilities.hasBitmapIndexes()) {
    return;
  }

  long dimStartTime = System.currentTimeMillis();
  final BitmapSerdeFactory bitmapSerdeFactory = indexSpec.getBitmapSerdeFactory();

  String bmpFilename = StringUtils.format("%s.inverted", dimensionName);
  bitmapWriter = new GenericIndexedWriter<>(
      segmentWriteOutMedium,
      bmpFilename,
      indexSpec.getBitmapSerdeFactory().getObjectStrategy()
  );
  bitmapWriter.open();
  bitmapWriter.setObjectsNotSorted();

  BitmapFactory bitmapFactory = bitmapSerdeFactory.getBitmapFactory();

  ExtendedIndexesMerger extendedIndexesMerger = getExtendedIndexesMerger();
  if (extendedIndexesMerger != null) {
    extendedIndexesMerger.initialize();
  }

  IndexSeeker[] dictIdSeeker = toIndexSeekers(adapters, dimConversions, dimensionName);
  // Iterate over all dictionary IDs of the dimension values in ascending order,
  // which matches the comparison order of the values themselves.
  for (int dictId = 0; dictId < dictionarySize; dictId++) {
    progress.progress();
    MutableBitmap mergedIndexes = mergeBitmaps(segmentRowNumConversions, bitmapFactory, dictIdSeeker, dictId);
    if (extendedIndexesMerger != null) {
      extendedIndexesMerger.mergeIndexes(dictId, mergedIndexes);
    }
  }

  if (extendedIndexesMerger != null) {
    extendedIndexesMerger.write();
  }

  log.debug(
      "Completed dim[%s] inverted with cardinality[%,d] in %,d millis.",
      dimensionName,
      dictionarySize,
      System.currentTimeMillis() - dimStartTime
  );

  if (dictionaryMergeIterator != null) {
    dictionaryMergeIterator.close();
  }
}
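
For reference, the two things the method above pulls from the serde factory, a BitmapFactory for building per-value bitmaps and an ObjectStrategy for writing them out, can be exercised in isolation. A minimal sketch, assuming the standard Druid bitmap classes used in these snippets; the wrapper class and method names are hypothetical:

import org.apache.druid.collections.bitmap.BitmapFactory;
import org.apache.druid.collections.bitmap.ImmutableBitmap;
import org.apache.druid.collections.bitmap.MutableBitmap;
import org.apache.druid.segment.data.BitmapSerdeFactory;
import org.apache.druid.segment.data.ObjectStrategy;
import org.apache.druid.segment.data.RoaringBitmapSerdeFactory;

public class BitmapSerdeSketch
{
  public static byte[] buildAndSerialize(int[] rowIds)
  {
    // The serde factory is the single source for both the in-memory bitmap
    // implementation and the on-disk serialization strategy.
    final BitmapSerdeFactory serdeFactory = new RoaringBitmapSerdeFactory(true);
    final BitmapFactory bitmapFactory = serdeFactory.getBitmapFactory();

    // Collect row numbers into a mutable bitmap, as mergeBitmaps() does per dictionary id.
    final MutableBitmap mutableBitmap = bitmapFactory.makeEmptyMutableBitmap();
    for (int rowId : rowIds) {
      mutableBitmap.add(rowId);
    }

    // Freeze the bitmap and serialize it with the same ObjectStrategy that
    // GenericIndexedWriter is given in writeIndexes() above.
    final ImmutableBitmap immutableBitmap = bitmapFactory.makeImmutableBitmap(mutableBitmap);
    final ObjectStrategy<ImmutableBitmap> strategy = serdeFactory.getObjectStrategy();
    return strategy.toBytes(immutableBitmap);
  }
}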
Use of org.apache.druid.segment.data.BitmapSerdeFactory in project druid by druid-io.
Class StringDimensionMergerV9, method makeColumnDescriptor:
@Override
public ColumnDescriptor makeColumnDescriptor()
{
  // Now write everything
  boolean hasMultiValue = capabilities.hasMultipleValues().isTrue();
  final CompressionStrategy compressionStrategy = indexSpec.getDimensionCompression();
  final BitmapSerdeFactory bitmapSerdeFactory = indexSpec.getBitmapSerdeFactory();

  final ColumnDescriptor.Builder builder = ColumnDescriptor.builder();
  builder.setValueType(ValueType.STRING);
  builder.setHasMultipleValues(hasMultiValue);

  final DictionaryEncodedColumnPartSerde.SerializerBuilder partBuilder = DictionaryEncodedColumnPartSerde
      .serializerBuilder()
      .withDictionary(dictionaryWriter)
      .withValue(encodedValueSerializer, hasMultiValue, compressionStrategy != CompressionStrategy.UNCOMPRESSED)
      .withBitmapSerdeFactory(bitmapSerdeFactory)
      .withBitmapIndex(bitmapWriter)
      .withSpatialIndex(spatialWriter)
      .withByteOrder(IndexIO.BYTE_ORDER);

  return builder.addSerde(partBuilder.build()).build();
}
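
The bitmapSerdeFactory consumed above is simply whatever the segment's IndexSpec carries. A minimal sketch of that wiring, using the four-argument IndexSpec constructor seen in the test snippet below; the wrapper class is hypothetical:

import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.data.BitmapSerdeFactory;
import org.apache.druid.segment.data.ConciseBitmapSerdeFactory;

public class IndexSpecSketch
{
  public static BitmapSerdeFactory chooseBitmapSerde()
  {
    // Store bitmap indexes in CONCISE format; the trailing nulls keep the default
    // dimension compression, metric compression, and long encoding.
    final IndexSpec indexSpec = new IndexSpec(new ConciseBitmapSerdeFactory(), null, null, null);

    // This is the factory that makeColumnDescriptor() reads via indexSpec.getBitmapSerdeFactory().
    return indexSpec.getBitmapSerdeFactory();
  }
}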
Use of org.apache.druid.segment.data.BitmapSerdeFactory in project druid by druid-io.
Class BaseFilterTest, method makeConstructors:
public static Collection<Object[]> makeConstructors()
{
  final List<Object[]> constructors = new ArrayList<>();

  final Map<String, BitmapSerdeFactory> bitmapSerdeFactories = ImmutableMap.of(
      "concise", new ConciseBitmapSerdeFactory(),
      "roaring", new RoaringBitmapSerdeFactory(true)
  );

  final Map<String, SegmentWriteOutMediumFactory> segmentWriteOutMediumFactories = ImmutableMap.of(
      "tmpFile segment write-out medium", TmpFileSegmentWriteOutMediumFactory.instance(),
      "off-heap memory segment write-out medium", OffHeapMemorySegmentWriteOutMediumFactory.instance()
  );

  final Map<String, Function<IndexBuilder, Pair<StorageAdapter, Closeable>>> finishers =
      ImmutableMap.<String, Function<IndexBuilder, Pair<StorageAdapter, Closeable>>>builder()
          .put("incremental", input -> {
            final IncrementalIndex index = input.buildIncrementalIndex();
            return Pair.of(new IncrementalIndexStorageAdapter(index), index);
          })
          .put("mmapped", input -> {
            final QueryableIndex index = input.buildMMappedIndex();
            return Pair.of(new QueryableIndexStorageAdapter(index), index);
          })
          .put("mmappedMerged", input -> {
            final QueryableIndex index = input.buildMMappedMergedIndex();
            return Pair.of(new QueryableIndexStorageAdapter(index), index);
          })
          .put(
              "rowBasedWithoutTypeSignature",
              input -> Pair.of(input.buildRowBasedSegmentWithoutTypeSignature().asStorageAdapter(), () -> {})
          )
          .put(
              "rowBasedWithTypeSignature",
              input -> Pair.of(input.buildRowBasedSegmentWithTypeSignature().asStorageAdapter(), () -> {})
          )
          .build();

  for (Map.Entry<String, BitmapSerdeFactory> bitmapSerdeFactoryEntry : bitmapSerdeFactories.entrySet()) {
    for (Map.Entry<String, SegmentWriteOutMediumFactory> segmentWriteOutMediumFactoryEntry : segmentWriteOutMediumFactories.entrySet()) {
      for (Map.Entry<String, Function<IndexBuilder, Pair<StorageAdapter, Closeable>>> finisherEntry : finishers.entrySet()) {
        for (boolean cnf : ImmutableList.of(false, true)) {
          for (boolean optimize : ImmutableList.of(false, true)) {
            final String testName = StringUtils.format(
                "bitmaps[%s], indexMerger[%s], finisher[%s], cnf[%s], optimize[%s]",
                bitmapSerdeFactoryEntry.getKey(),
                segmentWriteOutMediumFactoryEntry.getKey(),
                finisherEntry.getKey(),
                cnf,
                optimize
            );
            final IndexBuilder indexBuilder = IndexBuilder
                .create()
                .schema(DEFAULT_INDEX_SCHEMA)
                .indexSpec(new IndexSpec(bitmapSerdeFactoryEntry.getValue(), null, null, null))
                .segmentWriteOutMediumFactory(segmentWriteOutMediumFactoryEntry.getValue());
            constructors.add(new Object[]{testName, indexBuilder, finisherEntry.getValue(), cnf, optimize});
          }
        }
      }
    }
  }

  return constructors;
}
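
The returned collection is typically handed to JUnit 4's Parameterized runner. A hedged sketch of such a consumer follows; the class is hypothetical and does not mirror the actual BaseFilterTest subclasses, only the Object[] layout built above:

import java.io.Closeable;
import java.util.Collection;
import java.util.function.Function;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.segment.IndexBuilder;
import org.apache.druid.segment.StorageAdapter;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

// Hypothetical consumer: the runner instantiates this class once per Object[]
// produced by makeConstructors(), naming each case after the first element.
@RunWith(Parameterized.class)
public class ExampleParameterizedTest
{
  private final String testName;
  private final IndexBuilder indexBuilder;
  private final Function<IndexBuilder, Pair<StorageAdapter, Closeable>> finisher;
  private final boolean cnf;
  private final boolean optimize;

  // Parameter order must match the Object[] layout built in makeConstructors().
  public ExampleParameterizedTest(
      String testName,
      IndexBuilder indexBuilder,
      Function<IndexBuilder, Pair<StorageAdapter, Closeable>> finisher,
      boolean cnf,
      boolean optimize
  )
  {
    this.testName = testName;
    this.indexBuilder = indexBuilder;
    this.finisher = finisher;
    this.cnf = cnf;
    this.optimize = optimize;
  }

  @Parameterized.Parameters(name = "{0}")
  public static Collection<Object[]> constructorFeeder()
  {
    return BaseFilterTest.makeConstructors();
  }

  @Test
  public void smokeTest()
  {
    // Each combination of bitmap serde, write-out medium, finisher, cnf, and optimize
    // arrives here as one named test case.
  }
}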