Example usage of io.trino.orc.protobuf.CodedInputStream in the Trino project (trinodb): class TestOrcBloomFilters, method testOrcHiveBloomFilterSerde.
@Test
public void testOrcHiveBloomFilterSerde() throws Exception {
    // Build a filter containing a single known entry and sanity-check membership
    // before serializing it.
    BloomFilter writtenFilter = new BloomFilter(1000L, 0.05);
    writtenFilter.add(TEST_STRING);
    assertTrue(writtenFilter.test(TEST_STRING));
    assertTrue(writtenFilter.testSlice(wrappedBuffer(TEST_STRING)));

    // Serialize through the uncompressed metadata writer.
    CompressedMetadataWriter metadataWriter = new CompressedMetadataWriter(new OrcMetadataWriter(WriterIdentification.TRINO), CompressionKind.NONE, 1024);
    Slice serializedFilters = metadataWriter.writeBloomFilters(ImmutableList.of(writtenFilter));

    // Round trip through the reader API and verify the deserialized filter
    // reports the same membership, sizing, and bit set as the original.
    InputStream serializedInput = serializedFilters.getInput();
    OrcMetadataReader metadataReader = new OrcMetadataReader();
    List<BloomFilter> readFilters = metadataReader.readBloomFilterIndexes(serializedInput);
    assertEquals(readFilters.size(), 1);
    BloomFilter readFilter = readFilters.get(0);
    assertTrue(readFilter.test(TEST_STRING));
    assertTrue(readFilter.testSlice(wrappedBuffer(TEST_STRING)));
    assertFalse(readFilter.test(TEST_STRING_NOT_WRITTEN));
    assertFalse(readFilter.testSlice(wrappedBuffer(TEST_STRING_NOT_WRITTEN)));
    assertEquals(writtenFilter.getNumBits(), readFilter.getNumBits());
    assertEquals(writtenFilter.getNumHashFunctions(), readFilter.getNumHashFunctions());
    // Validate bit set
    assertTrue(Arrays.equals(readFilter.getBitSet(), writtenFilter.getBitSet()));

    // Read directly: allows better inspection of the bit sets (helped to fix a lot of bugs)
    CodedInputStream protoInput = CodedInputStream.newInstance(serializedFilters.getBytes());
    OrcProto.BloomFilterIndex deserializedIndex = OrcProto.BloomFilterIndex.parseFrom(protoInput);
    List<OrcProto.BloomFilter> protoFilters = deserializedIndex.getBloomFilterList();
    assertEquals(protoFilters.size(), 1);
    OrcProto.BloomFilter protoFilter = protoFilters.get(0);
    // Validate contents of ORC bloom filter bit set
    assertTrue(Arrays.equals(Longs.toArray(protoFilter.getBitsetList()), writtenFilter.getBitSet()));
    // hash functions
    assertEquals(writtenFilter.getNumHashFunctions(), protoFilter.getNumHashFunctions());
    // bit size
    assertEquals(writtenFilter.getBitSet().length, protoFilter.getBitsetCount());
}
Example usage of io.trino.orc.protobuf.CodedInputStream in the Trino project (trinodb): class OrcMetadataReader, method readBloomFilterIndexes.
@Override
public List<BloomFilter> readBloomFilterIndexes(InputStream inputStream) throws IOException {
    // Decode the protobuf BloomFilterIndex message directly from the stream.
    OrcProto.BloomFilterIndex index = OrcProto.BloomFilterIndex.parseFrom(CodedInputStream.newInstance(inputStream));

    ImmutableList.Builder<BloomFilter> bloomFilters = ImmutableList.builder();
    for (OrcProto.BloomFilter protoFilter : index.getBloomFilterList()) {
        long[] bits;
        if (protoFilter.hasUtf8Bitset()) {
            // Bit set encoded as raw little-endian bytes; reinterpret as longs.
            // Note: any trailing bytes beyond a multiple of 8 are ignored.
            ByteString utf8Bitset = protoFilter.getUtf8Bitset();
            bits = new long[utf8Bitset.size() / 8];
            utf8Bitset.asReadOnlyByteBuffer().order(ByteOrder.LITTLE_ENDIAN).asLongBuffer().get(bits);
        }
        else {
            // Bit set encoded as a repeated uint64 field.
            bits = Longs.toArray(protoFilter.getBitsetList());
        }
        bloomFilters.add(new BloomFilter(bits, protoFilter.getNumHashFunctions()));
    }
    return bloomFilters.build();
}
Example usage of io.trino.orc.protobuf.CodedInputStream in the Trino project (trinodb): class OrcMetadataReader, method readPostScript.
@Override
public PostScript readPostScript(InputStream inputStream) throws IOException {
    // Parse the protobuf PostScript message and translate it into the
    // internal PostScript representation.
    OrcProto.PostScript postScript = OrcProto.PostScript.parseFrom(CodedInputStream.newInstance(inputStream));
    return new PostScript(
            postScript.getVersionList(),
            postScript.getFooterLength(),
            postScript.getMetadataLength(),
            toCompression(postScript.getCompression()),
            postScript.getCompressionBlockSize(),
            toHiveWriterVersion(postScript.getWriterVersion()));
}
Example usage of io.trino.orc.protobuf.CodedInputStream in the Trino project (trinodb): class OrcMetadataReader, method readMetadata.
@Override
public Metadata readMetadata(HiveWriterVersion hiveWriterVersion, InputStream inputStream) throws IOException {
    CodedInputStream input = CodedInputStream.newInstance(inputStream);
    // Stripe statistics can be large, so raise the protobuf size limit
    // above the library default before parsing.
    input.setSizeLimit(PROTOBUF_MESSAGE_MAX_LIMIT);
    OrcProto.Metadata protoMetadata = OrcProto.Metadata.parseFrom(input);
    List<OrcProto.StripeStatistics> stripeStats = protoMetadata.getStripeStatsList();
    return new Metadata(toStripeStatistics(hiveWriterVersion, stripeStats));
}
Example usage of io.trino.orc.protobuf.CodedInputStream in the Trino project (trinodb): class OrcMetadataReader, method readStripeFooter.
@Override
public StripeFooter readStripeFooter(ColumnMetadata<OrcType> types, InputStream inputStream, ZoneId legacyFileTimeZone) throws IOException {
    // Parse the protobuf StripeFooter message from the stream.
    OrcProto.StripeFooter stripeFooter = OrcProto.StripeFooter.parseFrom(CodedInputStream.newInstance(inputStream));

    // An empty writer timezone means the file predates timezone recording;
    // fall back to the legacy file time zone in that case.
    ZoneId fileTimeZone = Optional.ofNullable(emptyToNull(stripeFooter.getWriterTimezone()))
            .map(ZoneId::of)
            .orElse(legacyFileTimeZone);

    return new StripeFooter(
            toStream(stripeFooter.getStreamsList()),
            toColumnEncoding(stripeFooter.getColumnsList()),
            fileTimeZone);
}
Aggregations