Use of io.druid.java.util.common.io.smoosh.SmooshedWriter in project druid by druid-io.
In class CompressedVSizeIntsIndexedWriterTest, the method checkV2SerializedSizeAndData:
private void checkV2SerializedSizeAndData(int chunkSize) throws Exception {
  File tmpDirectory = FileUtils.getTempDirectory();
  FileSmoosher smoosher = new FileSmoosher(tmpDirectory);
  int maxValue = vals.length > 0 ? Ints.max(vals) : 0;
  GenericIndexedWriter genericIndexed = new GenericIndexedWriter<>(
      ioPeon,
      "test",
      CompressedByteBufferObjectStrategy.getBufferForOrder(
          byteOrder,
          compressionStrategy,
          chunkSize * VSizeIndexedInts.getNumBytesForMax(maxValue)
              + CompressedVSizeIntsIndexedSupplier.bufferPadding(VSizeIndexedInts.getNumBytesForMax(maxValue))
      ),
      Longs.BYTES * 10000
  );
  CompressedVSizeIntsIndexedWriter writer = new CompressedVSizeIntsIndexedWriter(
      ioPeon,
      "test",
      maxValue,
      chunkSize,
      byteOrder,
      compressionStrategy,
      genericIndexed
  );
  // write the values, then stream the serialized form into the smoosher
  writer.open();
  for (int val : vals) {
    writer.add(val);
  }
  writer.close();
  final SmooshedWriter channel = smoosher.addWithSmooshedWriter("test", writer.getSerializedSize());
  writer.writeToChannel(channel, smoosher);
  channel.close();
  smoosher.close();
  // map the smooshed files back and check that every value round-trips
  SmooshedFileMapper mapper = Smoosh.map(tmpDirectory);
  CompressedVSizeIntsIndexedSupplier supplierFromByteBuffer =
      CompressedVSizeIntsIndexedSupplier.fromByteBuffer(mapper.mapFile("test"), byteOrder, mapper);
  IndexedInts indexedInts = supplierFromByteBuffer.get();
  for (int i = 0; i < vals.length; ++i) {
    assertEquals(vals[i], indexedInts.get(i));
  }
  CloseQuietly.close(indexedInts);
  mapper.close();
}
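The test above follows the smoosh round trip used throughout these examples: reserve a named entry with addWithSmooshedWriter, stream the serialized bytes into it, close the smoosher, then map the directory back with Smoosh.map and look the entry up by name. Below is a minimal sketch of that round trip in isolation, using only the smoosh calls shown above; the directory and payload bytes are placeholders and not part of the original test fixtures.

import io.druid.java.util.common.io.smoosh.FileSmoosher;
import io.druid.java.util.common.io.smoosh.Smoosh;
import io.druid.java.util.common.io.smoosh.SmooshedFileMapper;
import io.druid.java.util.common.io.smoosh.SmooshedWriter;

import java.io.File;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class SmooshRoundTripSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical working directory; the real tests use a temp directory instead.
    File dir = new File("smoosh-sketch");
    dir.mkdirs();
    byte[] payload = "hello smoosh".getBytes(StandardCharsets.UTF_8);

    // Write side: reserve exactly payload.length bytes under the name "test",
    // write into the returned channel, then close the writer and the smoosher.
    FileSmoosher smoosher = new FileSmoosher(dir);
    SmooshedWriter channel = smoosher.addWithSmooshedWriter("test", payload.length);
    channel.write(ByteBuffer.wrap(payload));
    channel.close();
    smoosher.close();

    // Read side: map the smooshed files and look the entry up by name.
    SmooshedFileMapper mapper = Smoosh.map(dir);
    ByteBuffer mapped = mapper.mapFile("test");
    byte[] roundTripped = new byte[mapped.remaining()];
    mapped.get(roundTripped);
    System.out.println(new String(roundTripped, StandardCharsets.UTF_8));
    mapper.close();
  }
}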
Use of io.druid.java.util.common.io.smoosh.SmooshedWriter in project druid by druid-io.
In class CompressedIntsIndexedWriterTest, the method checkV2SerializedSizeAndData:
private void checkV2SerializedSizeAndData(int chunkFactor) throws Exception {
  File tmpDirectory = Files.createTempDirectory(String.format("CompressedIntsIndexedWriterTest_%d", chunkFactor)).toFile();
  FileSmoosher smoosher = new FileSmoosher(tmpDirectory);
  final IOPeon ioPeon = new TmpFileIOPeon();
  try {
    CompressedIntsIndexedWriter writer = new CompressedIntsIndexedWriter(
        chunkFactor,
        compressionStrategy,
        new GenericIndexedWriter<>(
            ioPeon,
            "test",
            CompressedIntBufferObjectStrategy.getBufferForOrder(byteOrder, compressionStrategy, chunkFactor),
            Longs.BYTES * 10000
        )
    );
    // write the values, then stream the serialized form into the smoosher
    writer.open();
    for (int val : vals) {
      writer.add(val);
    }
    writer.close();
    final SmooshedWriter channel = smoosher.addWithSmooshedWriter("test", writer.getSerializedSize());
    writer.writeToChannel(channel, smoosher);
    channel.close();
    smoosher.close();
    SmooshedFileMapper mapper = Smoosh.map(tmpDirectory);
    // read from ByteBuffer and check values
    CompressedIntsIndexedSupplier supplierFromByteBuffer =
        CompressedIntsIndexedSupplier.fromByteBuffer(mapper.mapFile("test"), byteOrder, mapper);
    IndexedInts indexedInts = supplierFromByteBuffer.get();
    assertEquals(vals.length, indexedInts.size());
    for (int i = 0; i < vals.length; ++i) {
      assertEquals(vals[i], indexedInts.get(i));
    }
    CloseQuietly.close(indexedInts);
    mapper.close();
  } finally {
    ioPeon.close();
  }
}
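When the serialized payload is already fully materialized in memory, FileSmoosher also offers add(name, ByteBuffer), which sizes the entry from the buffer itself instead of requiring the caller to pre-compute a serialized size the way addWithSmooshedWriter does. A short hedged sketch follows, reusing the tmpDirectory idea from the test above; the entry name and bytes are placeholders.

// Sketch only: assumes FileSmoosher.add(String, ByteBuffer) from the same smoosh package;
// the entry name and contents are placeholders, not taken from the original test.
FileSmoosher smoosher = new FileSmoosher(tmpDirectory);
smoosher.add("meta", ByteBuffer.wrap(new byte[]{1, 2, 3}));
smoosher.close();

SmooshedFileMapper mapper = Smoosh.map(tmpDirectory);
ByteBuffer meta = mapper.mapFile("meta");   // the three bytes back, positioned at 0
mapper.close();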
Use of io.druid.java.util.common.io.smoosh.SmooshedWriter in project druid by druid-io.
In class IndexMergerV9, the method makeIndexBinary:
private void makeIndexBinary(
    final FileSmoosher v9Smoosher,
    final List<IndexableAdapter> adapters,
    final File outDir,
    final List<String> mergedDimensions,
    final List<String> mergedMetrics,
    final ProgressIndicator progress,
    final IndexSpec indexSpec,
    final List<DimensionMerger> mergers
) throws IOException {
  final String section = "make index.drd";
  progress.startSection(section);
  long startTime = System.currentTimeMillis();
  // collect the final column and dimension names, skipping dimensions whose merger can be skipped
  final Set<String> finalDimensions = Sets.newLinkedHashSet();
  final Set<String> finalColumns = Sets.newLinkedHashSet();
  finalColumns.addAll(mergedMetrics);
  for (int i = 0; i < mergedDimensions.size(); ++i) {
    if (mergers.get(i).canSkip()) {
      continue;
    }
    finalColumns.add(mergedDimensions.get(i));
    finalDimensions.add(mergedDimensions.get(i));
  }
  GenericIndexed<String> cols = GenericIndexed.fromIterable(finalColumns, GenericIndexed.STRING_STRATEGY);
  GenericIndexed<String> dims = GenericIndexed.fromIterable(finalDimensions, GenericIndexed.STRING_STRATEGY);
  final String bitmapSerdeFactoryType = mapper.writeValueAsString(indexSpec.getBitmapSerdeFactory());
  // 16 bytes = two longs for the data interval's start and end millis
  final long numBytes = cols.getSerializedSize() + dims.getSerializedSize() + 16
      + serializerUtils.getSerializedStringByteSize(bitmapSerdeFactoryType);
  final SmooshedWriter writer = v9Smoosher.addWithSmooshedWriter("index.drd", numBytes);
  cols.writeToChannel(writer);
  dims.writeToChannel(writer);
  DateTime minTime = new DateTime(JodaUtils.MAX_INSTANT);
  DateTime maxTime = new DateTime(JodaUtils.MIN_INSTANT);
  for (IndexableAdapter index : adapters) {
    minTime = JodaUtils.minDateTime(minTime, index.getDataInterval().getStart());
    maxTime = JodaUtils.maxDateTime(maxTime, index.getDataInterval().getEnd());
  }
  final Interval dataInterval = new Interval(minTime, maxTime);
  serializerUtils.writeLong(writer, dataInterval.getStartMillis());
  serializerUtils.writeLong(writer, dataInterval.getEndMillis());
  serializerUtils.writeString(writer, bitmapSerdeFactoryType);
  writer.close();
  IndexIO.checkFileSize(new File(outDir, "index.drd"));
  log.info("Completed index.drd in %,d millis.", System.currentTimeMillis() - startTime);
  progress.stopSection(section);
}
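For context, the write order of index.drd (column names, dimension names, interval start and end millis, bitmap serde type) is what the V9 loader consumes back from the mapped entry. Below is a hedged sketch of that read side; it assumes GenericIndexed.read(ByteBuffer, ObjectStrategy), SerializerUtils.readString(ByteBuffer), and a smooshedFiles mapper plus a Jackson mapper from the surrounding loader context, so treat it as an illustration rather than the exact loader code.

// Read-side sketch: consume index.drd in the same order it was written above.
// smooshedFiles, serializerUtils and mapper are assumed from the loader context.
ByteBuffer indexBuffer = smooshedFiles.mapFile("index.drd");

final GenericIndexed<String> cols = GenericIndexed.read(indexBuffer, GenericIndexed.STRING_STRATEGY);
final GenericIndexed<String> dims = GenericIndexed.read(indexBuffer, GenericIndexed.STRING_STRATEGY);

// These two longs are the 16 bytes accounted for in numBytes.
final Interval dataInterval = new Interval(indexBuffer.getLong(), indexBuffer.getLong());

final BitmapSerdeFactory segmentBitmapSerdeFactory = mapper.readValue(
    serializerUtils.readString(indexBuffer),
    BitmapSerdeFactory.class
);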