Search in sources :

Example 21 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in the Apache HBase project.

From the class TestRAMCache, method testAtomicRAMCache.

@Test
public void testAtomicRAMCache() throws Exception {
    int size = 100;
    int length = HConstants.HFILEBLOCK_HEADER_SIZE + size;
    byte[] byteArr = new byte[length];
    RAMCache cache = new RAMCache();
    BlockCacheKey key = new BlockCacheKey("file-1", 1);
    MockHFileBlock blk = new MockHFileBlock(BlockType.DATA, size, size, -1, ByteBuffer.wrap(byteArr, 0, size), HFileBlock.FILL_HEADER, -1, 52, -1, new HFileContextBuilder().build(), ByteBuffAllocator.HEAP);
    RAMQueueEntry re = new RAMQueueEntry(key, blk, 1, false);
    // First insert succeeds and returns null; a second insert must return the existing entry.
    Assert.assertNull(cache.putIfAbsent(key, re));
    // JUnit convention: expected value first, actual second (was reversed).
    Assert.assertEquals(re, cache.putIfAbsent(key, re));
    CountDownLatch latch = new CountDownLatch(1);
    blk.setLatch(latch);
    AtomicBoolean error = new AtomicBoolean(false);
    // Reader thread: get(key) blocks inside the mock block until the latch is released.
    Thread t1 = new Thread(() -> {
        try {
            cache.get(key);
        } catch (Exception e) {
            error.set(true);
        }
    });
    t1.start();
    // Give the reader time to reach its blocking point inside get().
    Thread.sleep(200);
    AtomicBoolean removed = new AtomicBoolean(false);
    // Remover thread: remove(key) must not complete while the reader holds the block.
    Thread t2 = new Thread(() -> {
        cache.remove(key);
        removed.set(true);
    });
    t2.start();
    Thread.sleep(200);
    // While the reader is still blocked, the remove must not have finished.
    Assert.assertFalse(removed.get());
    latch.countDown();
    // Join both workers before asserting so the checks cannot race against them
    // (the original slept 200ms here, which was flaky on slow machines).
    t1.join();
    t2.join();
    Assert.assertTrue(removed.get());
    Assert.assertFalse(error.get());
}
Also used : AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) RAMQueueEntry(org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.RAMQueueEntry) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) RAMCache(org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.RAMCache) CountDownLatch(java.util.concurrent.CountDownLatch) BlockCacheKey(org.apache.hadoop.hbase.io.hfile.BlockCacheKey) Test(org.junit.Test)

Example 22 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in the Apache HBase project.

From the class TestBulkLoadReplication, method createHFileForFamilies.

/**
 * Writes a single-cell HFile to a temp file and returns its absolute path.
 * @param row row key of the single cell written
 * @param value cell value
 * @param clusterConfig configuration used to build the HFile writer
 * @return absolute path of the created HFile
 * @throws IOException if writing the file fails
 */
private String createHFileForFamilies(byte[] row, byte[] value, Configuration clusterConfig) throws IOException {
    CellBuilder cellBuilder = CellBuilderFactory.create(CellBuilderType.DEEP_COPY);
    cellBuilder.setRow(row).setFamily(TestReplicationBase.famName).setQualifier(Bytes.toBytes("1")).setValue(value).setType(Cell.Type.Put);
    HFile.WriterFactory hFileFactory = HFile.getWriterFactoryNoCache(clusterConfig);
    // TODO We need a way to do this without creating files
    File hFileLocation = testFolder.newFile();
    // try-with-resources replaces the nested try/finally: the writer is closed
    // before the stream, and both are closed even if append() throws.
    try (FSDataOutputStream out = new FSDataOutputStream(new FileOutputStream(hFileLocation), null)) {
        hFileFactory.withOutputStream(out);
        hFileFactory.withFileContext(new HFileContextBuilder().build());
        try (HFile.Writer writer = hFileFactory.create()) {
            writer.append(new KeyValue(cellBuilder.build()));
        }
    }
    return hFileLocation.getAbsoluteFile().getAbsolutePath();
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) CellBuilder(org.apache.hadoop.hbase.CellBuilder) FileOutputStream(java.io.FileOutputStream) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) HFile(org.apache.hadoop.hbase.io.hfile.HFile) HFile(org.apache.hadoop.hbase.io.hfile.HFile) File(java.io.File)

Example 23 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in the Apache HBase project.

From the class TestBulkloadBase, method createHFileForFamilies.

/**
 * Writes a single-cell HFile (random row/qualifier/value, given family) to a
 * uniquely named temp file and returns its absolute path.
 * @param family column family for the single cell written
 * @return absolute path of the created HFile
 * @throws IOException if writing the file fails
 */
private String createHFileForFamilies(byte[] family) throws IOException {
    HFile.WriterFactory hFileFactory = HFile.getWriterFactoryNoCache(conf);
    // TODO We need a way to do this without creating files
    File hFileLocation = testFolder.newFile(generateUniqueName(null));
    // try-with-resources replaces the nested try/finally: the writer is closed
    // before the stream, and both are closed even if append() throws.
    try (FSDataOutputStream out = new FSDataOutputStream(new FileOutputStream(hFileLocation), null)) {
        hFileFactory.withOutputStream(out);
        hFileFactory.withFileContext(new HFileContextBuilder().build());
        try (HFile.Writer writer = hFileFactory.create()) {
            writer.append(new KeyValue(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(randomBytes).setFamily(family).setQualifier(randomBytes).setTimestamp(0L).setType(KeyValue.Type.Put.getCode()).setValue(randomBytes).build()));
        }
    }
    return hFileLocation.getAbsoluteFile().getAbsolutePath();
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) FileOutputStream(java.io.FileOutputStream) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) HFile(org.apache.hadoop.hbase.io.hfile.HFile) HFile(org.apache.hadoop.hbase.io.hfile.HFile) File(java.io.File)

Example 24 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in the Apache HBase project.

From the class DataBlockEncodingTool, method checkStatistics.

/**
 * Check statistics for given HFile for different data block encoders.
 * @param scanner Of file which will be compressed.
 * @param kvLimit Maximal count of KeyValue which will be processed.
 * @throws IOException thrown if scanner is invalid
 */
public void checkStatistics(final KeyValueScanner scanner, final int kvLimit) throws IOException {
    scanner.seek(KeyValue.LOWESTKEY);
    KeyValue currentKV;
    byte[] previousKey = null;
    byte[] currentKey;
    DataBlockEncoding[] encodings = DataBlockEncoding.values();
    ByteArrayOutputStream uncompressedOutputStream = new ByteArrayOutputStream();
    // Hoisted out of the loop: the original allocated a new DataOutputStream
    // wrapper on every iteration just to call writeVLong.
    DataOutputStream uncompressedDataStream = new DataOutputStream(uncompressedOutputStream);
    int j = 0;
    while ((currentKV = KeyValueUtil.ensureKeyValue(scanner.next())) != null && j < kvLimit) {
        // Iterates through key/value pairs
        j++;
        currentKey = currentKV.getKey();
        if (previousKey != null) {
            // Count the shared key prefix with the previous key as redundancy.
            for (int i = 0; i < previousKey.length && i < currentKey.length && previousKey[i] == currentKey[i]; ++i) {
                totalKeyRedundancyLength++;
            }
        }
        // Every cell's raw bytes go into the uncompressed stream.
        uncompressedOutputStream.write(currentKV.getBuffer(), currentKV.getOffset(), currentKV.getLength());
        // if the cell tagsLen equals 0, it means other cells may have tags:
        // pad with tagsLen = 0 so tagless cells stay compatible with tagged ones.
        if (USE_TAG && currentKV.getTagsLength() == 0) {
            uncompressedOutputStream.write(Bytes.toBytes((short) 0));
        }
        if (includesMemstoreTS) {
            WritableUtils.writeVLong(uncompressedDataStream, currentKV.getSequenceId());
        }
        previousKey = currentKey;
        int kLen = currentKV.getKeyLength();
        int vLen = currentKV.getValueLength();
        int cfLen = currentKV.getFamilyLength();
        // Everything that is neither key nor value bytes counts as prefix overhead.
        int restLen = currentKV.getLength() - kLen - vLen;
        totalKeyLength += kLen;
        totalValueLength += vLen;
        totalPrefixLength += restLen;
        totalCFLength += cfLen;
    }
    rawKVs = uncompressedOutputStream.toByteArray();
    // Build one EncodedDataBlock per real encoding (NONE is skipped).
    for (DataBlockEncoding encoding : encodings) {
        if (encoding == DataBlockEncoding.NONE) {
            continue;
        }
        DataBlockEncoder d = encoding.getEncoder();
        HFileContext meta = new HFileContextBuilder().withDataBlockEncoding(encoding).withCompression(Compression.Algorithm.NONE).withIncludesMvcc(includesMemstoreTS).withIncludesTags(USE_TAG).build();
        codecs.add(new EncodedDataBlock(conf, d, encoding, rawKVs, meta));
    }
}
Also used : DataBlockEncoding(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding) KeyValue(org.apache.hadoop.hbase.KeyValue) DataOutputStream(java.io.DataOutputStream) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) ByteArrayOutputStream(java.io.ByteArrayOutputStream) DataBlockEncoder(org.apache.hadoop.hbase.io.encoding.DataBlockEncoder) EncodedDataBlock(org.apache.hadoop.hbase.io.encoding.EncodedDataBlock) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)

Example 25 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in the Apache HBase project.

From the class TestBulkLoadReplicationHFileRefs, method createHFileForFamilies.

/**
 * Writes a single-cell HFile (fixed row/qualifier/value fields, given family)
 * to a temp file and returns its absolute path.
 * @param family column family for the single cell written
 * @return absolute path of the created HFile
 * @throws IOException if writing the file fails
 */
private String createHFileForFamilies(byte[] family) throws IOException {
    CellBuilder cellBuilder = CellBuilderFactory.create(CellBuilderType.DEEP_COPY);
    cellBuilder.setRow(row).setFamily(family).setQualifier(qualifier).setValue(value).setType(Cell.Type.Put);
    HFile.WriterFactory hFileFactory = HFile.getWriterFactoryNoCache(UTIL1.getConfiguration());
    File hFileLocation = testFolder.newFile();
    // try-with-resources replaces the nested try/finally: the writer is closed
    // before the stream, and both are closed even if append() throws.
    try (FSDataOutputStream out = new FSDataOutputStream(new FileOutputStream(hFileLocation), null)) {
        hFileFactory.withOutputStream(out);
        hFileFactory.withFileContext(new HFileContextBuilder().build());
        try (HFile.Writer writer = hFileFactory.create()) {
            writer.append(new KeyValue(cellBuilder.build()));
        }
    }
    return hFileLocation.getAbsoluteFile().getAbsolutePath();
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) CellBuilder(org.apache.hadoop.hbase.CellBuilder) FileOutputStream(java.io.FileOutputStream) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) HFile(org.apache.hadoop.hbase.io.hfile.HFile) HFile(org.apache.hadoop.hbase.io.hfile.HFile) File(java.io.File)

Aggregations

HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder)89 HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)82 Path (org.apache.hadoop.fs.Path)52 Test (org.junit.Test)48 KeyValue (org.apache.hadoop.hbase.KeyValue)39 CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig)27 FileSystem (org.apache.hadoop.fs.FileSystem)26 Cell (org.apache.hadoop.hbase.Cell)17 HFile (org.apache.hadoop.hbase.io.hfile.HFile)16 ByteBuffer (java.nio.ByteBuffer)15 Configuration (org.apache.hadoop.conf.Configuration)14 HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner)12 StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter)12 DataOutputStream (java.io.DataOutputStream)6 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)6 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)6 DataBlockEncoding (org.apache.hadoop.hbase.io.encoding.DataBlockEncoding)6 ByteArrayOutputStream (java.io.ByteArrayOutputStream)5 IOException (java.io.IOException)5 ArrayList (java.util.ArrayList)5