Use of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in project druid by druid-io.
From the class LikeFilterBenchmark, method setup:
@Setup
public void setup() {
  step = (END_INT - START_INT) / cardinality;
  final BitmapFactory bitmapFactory = new RoaringBitmapFactory();
  final BitmapSerdeFactory serdeFactory = new RoaringBitmapSerdeFactory(null);
  final List<Integer> ints = generateInts();
  final GenericIndexed<String> dictionary = GenericIndexed.fromIterable(
      FluentIterable.from(ints).transform(new Function<Integer, String>() {
        @Override
        public String apply(Integer i) {
          return i.toString();
        }
      }),
      GenericIndexed.STRING_STRATEGY
  );
  final BitmapIndex bitmapIndex = new StringBitmapIndexColumnPartSupplier(
      bitmapFactory,
      GenericIndexed.fromIterable(
          FluentIterable.from(ints).transform(new Function<Integer, ImmutableBitmap>() {
            @Override
            public ImmutableBitmap apply(Integer i) {
              // One bitmap per dictionary entry: value i is placed in row (i - START_INT) / step.
              final MutableBitmap mutableBitmap = bitmapFactory.makeEmptyMutableBitmap();
              mutableBitmap.add((i - START_INT) / step);
              return bitmapFactory.makeImmutableBitmap(mutableBitmap);
            }
          }),
          serdeFactory.getObjectStrategy()
      ),
      dictionary
  ).get();
  selector = new MockBitmapIndexSelector(dictionary, bitmapFactory, bitmapIndex);
}
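The serde factory above is used only for its ObjectStrategy, which is what writes each per-value bitmap into the column. A minimal round-trip sketch of that strategy, assuming the toBytes/fromByteBuffer pair shown here matches the Druid version these benchmarks target (class and variable names are illustrative):

import java.nio.ByteBuffer;
import org.apache.druid.collections.bitmap.ImmutableBitmap;
import org.apache.druid.collections.bitmap.MutableBitmap;
import org.apache.druid.collections.bitmap.RoaringBitmapFactory;
import org.apache.druid.segment.data.ObjectStrategy;
import org.apache.druid.segment.data.RoaringBitmapSerdeFactory;

public class BitmapSerdeRoundTrip {
  public static void main(String[] args) {
    final RoaringBitmapFactory bitmapFactory = new RoaringBitmapFactory();
    final MutableBitmap mutable = bitmapFactory.makeEmptyMutableBitmap();
    mutable.add(3);
    mutable.add(7);
    final ImmutableBitmap original = bitmapFactory.makeImmutableBitmap(mutable);

    // The serde factory's ObjectStrategy turns bitmaps into bytes and back.
    final ObjectStrategy<ImmutableBitmap> strategy = new RoaringBitmapSerdeFactory(null).getObjectStrategy();
    final byte[] bytes = strategy.toBytes(original);
    final ImmutableBitmap restored = strategy.fromByteBuffer(ByteBuffer.wrap(bytes), bytes.length);
    System.out.println(restored.get(3) && restored.get(7)); // expected: true
  }
}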
Use of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in project hive by apache.
From the class TestDruidRecordWriter, method testWrite:
// Test is failing due to a Guava dependency; Druid 0.13.0 should depend less on Guava.
@Ignore
@Test
public void testWrite() throws IOException, SegmentLoadingException {
  final String dataSourceName = "testDataSource";
  final File segmentOutputDir = temporaryFolder.newFolder();
  final File workingDir = temporaryFolder.newFolder();
  Configuration config = new Configuration();
  final InputRowParser inputRowParser = new MapInputRowParser(
      new TimeAndDimsParseSpec(
          new TimestampSpec(DruidConstants.DEFAULT_TIMESTAMP_COLUMN, "auto", null),
          new DimensionsSpec(ImmutableList.of(new StringDimensionSchema("host")), null, null)
      )
  );
  final Map<String, Object> parserMap = objectMapper.convertValue(
      inputRowParser,
      new TypeReference<Map<String, Object>>() {}
  );
  DataSchema dataSchema = new DataSchema(
      dataSourceName,
      parserMap,
      new AggregatorFactory[] {
          new LongSumAggregatorFactory("visited_sum", "visited_sum"),
          new HyperUniquesAggregatorFactory("unique_hosts", "unique_hosts")
      },
      new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, ImmutableList.of(INTERVAL_FULL)),
      null,
      objectMapper
  );
  // Roaring bitmaps with run-length compression on serialization.
  IndexSpec indexSpec = new IndexSpec(new RoaringBitmapSerdeFactory(true), null, null, null);
  RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
      null, null, null, null, temporaryFolder.newFolder(), null, null, null, null,
      indexSpec, null, null, 0, 0, null, null, 0L, null, null
  );
  LocalFileSystem localFileSystem = FileSystem.getLocal(config);
  DataSegmentPusher dataSegmentPusher = new LocalDataSegmentPusher(new LocalDataSegmentPusherConfig() {
    @Override
    public File getStorageDirectory() {
      return segmentOutputDir;
    }
  });
  Path segmentDescriptorPath = new Path(workingDir.getAbsolutePath(), DruidStorageHandler.SEGMENTS_DESCRIPTOR_DIR_NAME);
  DruidRecordWriter druidRecordWriter = new DruidRecordWriter(
      dataSchema, tuningConfig, dataSegmentPusher, 20, segmentDescriptorPath, localFileSystem
  );
  List<DruidWritable> druidWritables = expectedRows.stream()
      .map(input -> new DruidWritable(
          ImmutableMap.<String, Object>builder()
              .putAll(input)
              .put(
                  Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME,
                  Granularities.DAY
                      .bucketStart(new DateTime((long) input.get(DruidConstants.DEFAULT_TIMESTAMP_COLUMN)))
                      .getMillis()
              )
              .build()
      ))
      .collect(Collectors.toList());
  for (DruidWritable druidWritable : druidWritables) {
    druidRecordWriter.write(druidWritable);
  }
  druidRecordWriter.close(false);
  List<DataSegment> dataSegmentList = DruidStorageHandlerUtils.getCreatedSegments(segmentDescriptorPath, config);
  Assert.assertEquals(1, dataSegmentList.size());
  File tmpUnzippedSegmentDir = temporaryFolder.newFolder();
  new LocalDataSegmentPuller().getSegmentFiles(dataSegmentList.get(0), tmpUnzippedSegmentDir);
  final QueryableIndex queryableIndex = DruidStorageHandlerUtils.INDEX_IO.loadIndex(tmpUnzippedSegmentDir);
  QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(queryableIndex);
  Firehose firehose = new IngestSegmentFirehose(
      ImmutableList.of(new WindowedStorageAdapter(adapter, adapter.getInterval())),
      null,
      ImmutableList.of("host"),
      ImmutableList.of("visited_sum", "unique_hosts"),
      null
  );
  List<InputRow> rows = Lists.newArrayList();
  while (firehose.hasMore()) {
    rows.add(firehose.nextRow());
  }
  verifyRows(expectedRows, rows);
}
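This test constructs its IndexSpec with new RoaringBitmapSerdeFactory(true), i.e. compressRunOnSerialization enabled. Because BitmapSerdeFactory is a polymorphic Jackson type, the same choice is normally expressed in ingestion specs as JSON. A sketch of producing that JSON from the Java object, assuming a plain Jackson ObjectMapper is enough for these annotations (the commented output shape is an expectation, not verified output):

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.data.RoaringBitmapSerdeFactory;

public class IndexSpecJsonSketch {
  public static void main(String[] args) throws Exception {
    // Nulls fall back to the IndexSpec defaults for compression and long encoding.
    final IndexSpec indexSpec = new IndexSpec(new RoaringBitmapSerdeFactory(true), null, null, null);
    // Expected shape, roughly:
    // {"bitmap":{"type":"roaring","compressRunOnSerialization":true},
    //  "dimensionCompression":"lz4","metricCompression":"lz4","longEncoding":"longs"}
    System.out.println(new ObjectMapper().writeValueAsString(indexSpec));
  }
}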
Use of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in project druid by druid-io.
From the class BoundFilterBenchmark, method setup:
@Setup
public void setup() {
  step = (END_INT - START_INT) / cardinality;
  final BitmapFactory bitmapFactory = new RoaringBitmapFactory();
  final BitmapSerdeFactory serdeFactory = new RoaringBitmapSerdeFactory(null);
  final List<Integer> ints = generateInts();
  final GenericIndexed<String> dictionary = GenericIndexed.fromIterable(
      FluentIterable.from(ints).transform(i -> i.toString()),
      GenericIndexed.STRING_STRATEGY
  );
  final BitmapIndex bitmapIndex = new StringBitmapIndexColumnPartSupplier(
      bitmapFactory,
      GenericIndexed.fromIterable(
          FluentIterable.from(ints).transform(new Function<Integer, ImmutableBitmap>() {
            @Override
            public ImmutableBitmap apply(Integer i) {
              final MutableBitmap mutableBitmap = bitmapFactory.makeEmptyMutableBitmap();
              mutableBitmap.add((i - START_INT) / step);
              return bitmapFactory.makeImmutableBitmap(mutableBitmap);
            }
          }),
          serdeFactory.getObjectStrategy()
      ),
      dictionary
  ).get();
  selector = new MockBitmapIndexSelector(dictionary, bitmapFactory, bitmapIndex);
}
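Both this benchmark and LikeFilterBenchmark build the value dictionary with GenericIndexed before attaching one bitmap per entry. A self-contained sketch of reading such a dictionary back, using only fromIterable, size, get, and indexOf (the values are illustrative; indexOf assumes the input iterable was sorted, as generateInts() is expected to guarantee):

import com.google.common.collect.ImmutableList;
import org.apache.druid.segment.data.GenericIndexed;

public class DictionarySketch {
  public static void main(String[] args) {
    // Sorted input enables reverse lookup via indexOf().
    final GenericIndexed<String> dictionary = GenericIndexed.fromIterable(
        ImmutableList.of("10", "20", "30"),
        GenericIndexed.STRING_STRATEGY
    );
    System.out.println(dictionary.size());        // 3
    System.out.println(dictionary.get(1));        // 20
    System.out.println(dictionary.indexOf("30")); // 2
  }
}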
Use of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in project druid by druid-io.
From the class DimensionPredicateFilterBenchmark, method setup:
@Setup
public void setup() {
  final BitmapFactory bitmapFactory = new RoaringBitmapFactory();
  final BitmapSerdeFactory serdeFactory = new RoaringBitmapSerdeFactory(null);
  final List<Integer> ints = generateInts();
  final GenericIndexed<String> dictionary = GenericIndexed.fromIterable(
      FluentIterable.from(ints).transform(new Function<Integer, String>() {
        @Override
        public String apply(Integer i) {
          return i.toString();
        }
      }),
      GenericIndexed.STRING_STRATEGY
  );
  final BitmapIndex bitmapIndex = new StringBitmapIndexColumnPartSupplier(
      bitmapFactory,
      GenericIndexed.fromIterable(
          FluentIterable.from(ints).transform(new Function<Integer, ImmutableBitmap>() {
            @Override
            public ImmutableBitmap apply(Integer i) {
              final MutableBitmap mutableBitmap = bitmapFactory.makeEmptyMutableBitmap();
              mutableBitmap.add(i - START_INT);
              return bitmapFactory.makeImmutableBitmap(mutableBitmap);
            }
          }),
          serdeFactory.getObjectStrategy()
      ),
      dictionary
  ).get();
  selector = new MockBitmapIndexSelector(dictionary, bitmapFactory, bitmapIndex);
}
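Note the one difference from the two setups above: each value i maps to row i - START_INT with no division by step, so dictionary ids and row ids coincide one-to-one. A hypothetical read of the resulting index, assuming the legacy BitmapIndex interface (getCardinality, getIndex, getBitmap) that these benchmarks compile against; the literal "1000" is illustrative:

// 'bitmapIndex' is the value built in setup() above.
final int distinctValues = bitmapIndex.getCardinality();
final int id = bitmapIndex.getIndex("1000"); // dictionary id, negative if the value is absent
if (id >= 0) {
  final ImmutableBitmap rows = bitmapIndex.getBitmap(id);
  System.out.println(rows.size()); // each value lands in exactly one row here
}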
Use of org.apache.druid.segment.data.RoaringBitmapSerdeFactory in project druid by druid-io.
From the class ParallelIndexTuningConfigTest, method testSerdeWithMaxNumSubTasksAndMaxNumConcurrentSubTasks:
@Test
public void testSerdeWithMaxNumSubTasksAndMaxNumConcurrentSubTasks() {
  expectedException.expect(IllegalArgumentException.class);
  expectedException.expectMessage("Can't use both maxNumSubTasks and maxNumConcurrentSubTasks");
  final int maxNumSubTasks = 250;
  final ParallelIndexTuningConfig tuningConfig = new ParallelIndexTuningConfig(
      null, null, null, 10, 1000L, null, null, null, null,
      new DynamicPartitionsSpec(100, 100L),
      new IndexSpec(
          new RoaringBitmapSerdeFactory(true),
          CompressionStrategy.UNCOMPRESSED,
          CompressionStrategy.LZF,
          LongEncodingStrategy.LONGS
      ),
      new IndexSpec(),
      1, false, true, 10000L,
      OffHeapMemorySegmentWriteOutMediumFactory.instance(),
      maxNumSubTasks, maxNumSubTasks,
      100, 20L, new Duration(3600), 128,
      null, null, false, null, null, null, null, null
  );
}
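The same value (250) is passed in both the maxNumSubTasks and maxNumConcurrentSubTasks positions, and the test expects the constructor to reject that combination. A hypothetical paraphrase of the guard the test pins down, not the actual Druid source:

import com.google.common.base.Preconditions;
import javax.annotation.Nullable;

// Sketch of the constructor-time check implied by the expected IllegalArgumentException.
static void checkSubTaskSettings(@Nullable Integer maxNumSubTasks, @Nullable Integer maxNumConcurrentSubTasks) {
  Preconditions.checkArgument(
      maxNumSubTasks == null || maxNumConcurrentSubTasks == null,
      "Can't use both maxNumSubTasks and maxNumConcurrentSubTasks"
  );
}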