Use of org.apache.hadoop.hbase.io.FSDataInputStreamWrapper in project hbase by apache.
The testDataBlockEncryption method of the class TestHFileEncryption.
@Test
public void testDataBlockEncryption() throws IOException {
  final int blocks = 10;
  final int[] blockSizes = new int[blocks];
  // Pick a random payload size for each block, between 1 KiB and 64 KiB,
  // expressed as a count of ints.
  for (int i = 0; i < blocks; i++) {
    blockSizes[i] = (1024 + RNG.nextInt(1024 * 63)) / Bytes.SIZEOF_INT;
  }
  // Round-trip the blocks under every supported compression algorithm, since
  // encryption is applied to the (possibly compressed) block payload.
  for (Compression.Algorithm compression : HBaseCommonTestingUtil.COMPRESSION_ALGORITHMS) {
    Path path = new Path(TEST_UTIL.getDataTestDir(), "block_v3_" + compression + "_AES");
    LOG.info("testDataBlockEncryption: encryption=AES compression=" + compression);
    long totalSize = 0;
    HFileContext fileContext = new HFileContextBuilder()
      .withCompression(compression)
      .withEncryptionContext(cryptoContext)
      .build();
    // Write the encrypted blocks, accumulating the total on-disk size.
    FSDataOutputStream os = fs.create(path);
    try {
      for (int i = 0; i < blocks; i++) {
        totalSize += writeBlock(TEST_UTIL.getConfiguration(), os, fileContext, blockSizes[i]);
      }
    } finally {
      os.close();
    }
    // Re-open the file through an FSDataInputStreamWrapper and verify that
    // each block reads back (decrypts) correctly at its expected offset.
    FSDataInputStream is = fs.open(path);
    ReaderContext context = new ReaderContextBuilder()
      .withInputStreamWrapper(new FSDataInputStreamWrapper(is))
      .withFilePath(path)
      .withFileSystem(fs)
      .withFileSize(totalSize)
      .build();
    try {
      HFileBlock.FSReaderImpl hbr = new HFileBlock.FSReaderImpl(context, fileContext,
        ByteBuffAllocator.HEAP, TEST_UTIL.getConfiguration());
      long pos = 0;
      for (int i = 0; i < blocks; i++) {
        pos += readAndVerifyBlock(pos, fileContext, hbr, blockSizes[i]);
      }
    } finally {
      is.close();
    }
  }
}
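For context, FSDataInputStreamWrapper can also open the stream itself when constructed from a FileSystem and a Path, rather than wrapping an already-open FSDataInputStream as the test above does. Below is a minimal sketch of that variant, assuming an HBase client classpath and an existing file at the supplied path; the class name WrapperSketch and the command-line argument are illustrative only, not part of the HBase test.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;

public class WrapperSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path path = new Path(args[0]); // path to an existing file (illustrative)
    // Constructed this way, the wrapper opens and owns the underlying stream(s).
    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fs, path);
    try {
      // getStream(false) returns the stream without HBase-level checksum support.
      FSDataInputStream in = wrapper.getStream(false);
      System.out.println("first byte: " + in.read());
    } finally {
      // Closing the wrapper releases the streams it opened.
      wrapper.close();
    }
  }
}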