
Example 6 with ByteBufferKeyValue

Use of org.apache.hadoop.hbase.ByteBufferKeyValue in project hbase by apache.

Class TestWALCellCodecWithCompression, method createOffheapKV.

private ByteBufferKeyValue createOffheapKV(int noOfTags) {
    byte[] row = Bytes.toBytes("myRow");
    byte[] cf = Bytes.toBytes("myCF");
    byte[] q = Bytes.toBytes("myQualifier");
    byte[] value = Bytes.toBytes("myValue");
    List<Tag> tags = new ArrayList<>(noOfTags);
    for (int i = 1; i <= noOfTags; i++) {
        tags.add(new ArrayBackedTag((byte) i, Bytes.toBytes("tagValue" + i)));
    }
    KeyValue kv = new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, tags);
    ByteBuffer dbb = ByteBuffer.allocateDirect(kv.getBuffer().length);
    dbb.put(kv.getBuffer());
    return new ByteBufferKeyValue(dbb, 0, kv.getBuffer().length);
}
Also used: ByteBufferKeyValue (org.apache.hadoop.hbase.ByteBufferKeyValue), KeyValue (org.apache.hadoop.hbase.KeyValue), ArrayList (java.util.ArrayList), ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag), Tag (org.apache.hadoop.hbase.Tag), ByteBuffer (java.nio.ByteBuffer)
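For reference, here is a minimal standalone sketch of the same pattern (not taken from the HBase test; the class name OffheapKVSketch and the printed output are illustrative only): copy an on-heap KeyValue into a direct buffer, wrap it as a ByteBufferKeyValue, and read it back through the standard Cell API.

import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class OffheapKVSketch {
    public static void main(String[] args) {
        KeyValue kv = new KeyValue(Bytes.toBytes("myRow"), Bytes.toBytes("myCF"),
            Bytes.toBytes("myQualifier"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("myValue"));
        // Copy the on-heap KeyValue bytes into a direct (off-heap) buffer.
        ByteBuffer dbb = ByteBuffer.allocateDirect(kv.getBuffer().length);
        dbb.put(kv.getBuffer());
        // Wrap the direct buffer; the cell's data now lives off-heap.
        ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, 0, kv.getBuffer().length);
        // The off-heap cell reads back the same row and value as the original.
        System.out.println(Bytes.toString(CellUtil.cloneRow(offheapKV)));   // myRow
        System.out.println(Bytes.toString(CellUtil.cloneValue(offheapKV))); // myValue
    }
}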

Example 7 with ByteBufferKeyValue

Use of org.apache.hadoop.hbase.ByteBufferKeyValue in project hbase by apache.

Class TestTagCompressionContext, method createOffheapKVWithTags.

private Cell createOffheapKVWithTags(int noOfTags) {
    List<Tag> tags = new ArrayList<>();
    for (int i = 0; i < noOfTags; i++) {
        tags.add(new ArrayBackedTag((byte) i, "tagValue" + i));
    }
    KeyValue kv = new KeyValue(ROW, CF, Q, 1234L, V, tags);
    ByteBuffer dbb = ByteBuffer.allocateDirect(kv.getBuffer().length);
    ByteBufferUtils.copyFromArrayToBuffer(dbb, kv.getBuffer(), 0, kv.getBuffer().length);
    ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, 0, kv.getBuffer().length, 0);
    return offheapKV;
}
Also used: ByteBufferKeyValue (org.apache.hadoop.hbase.ByteBufferKeyValue), KeyValue (org.apache.hadoop.hbase.KeyValue), ArrayList (java.util.ArrayList), ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag), Tag (org.apache.hadoop.hbase.Tag), ByteBuffer (java.nio.ByteBuffer)
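A side note on the call above, offered as a reading of these call sites rather than official documentation: the fourth constructor argument is the cell's sequence id, which the three-argument form in Example 6 omits. A hypothetical fragment (dbb and kv as in the snippet above):

// Both wrap the same off-heap bytes; the trailing argument supplies the cell's sequence id.
ByteBufferKeyValue withSeqId = new ByteBufferKeyValue(dbb, 0, kv.getBuffer().length, 0);
ByteBufferKeyValue noSeqId = new ByteBufferKeyValue(dbb, 0, kv.getBuffer().length);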

Example 8 with ByteBufferKeyValue

Use of org.apache.hadoop.hbase.ByteBufferKeyValue in project hbase by apache.

Class RedundantKVGenerator, method generateTestExtendedOffheapKeyValues.

/**
   * Generate test data useful to test encoders.
   * @param howMany How many key values should be generated.
   * @param useTags Whether to attach a tag to each generated key value.
   * @return sorted list of key values
   */
public List<Cell> generateTestExtendedOffheapKeyValues(int howMany, boolean useTags) {
    List<Cell> result = new ArrayList<>();
    List<byte[]> rows = generateRows();
    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();
    if (family == null) {
        family = new byte[columnFamilyLength];
        randomizer.nextBytes(family);
    }
    long baseTimestamp = Math.abs(randomizer.nextInt()) / baseTimestampDivide;
    byte[] value = new byte[valueLength];
    for (int i = 0; i < howMany; ++i) {
        long timestamp = baseTimestamp;
        if (timestampDiffSize > 0) {
            timestamp += randomizer.nextInt(timestampDiffSize);
        }
        Integer rowId = randomizer.nextInt(rows.size());
        byte[] row = rows.get(rowId);
        // generate qualifier, sometimes it is same, sometimes similar,
        // occasionally completely different
        byte[] qualifier;
        float qualifierChance = randomizer.nextFloat();
        if (!rowsToQualifier.containsKey(rowId) || qualifierChance > chanceForSameQualifier + chanceForSimilarQualifier) {
            int qualifierLength = averageQualifierLength;
            qualifierLength += randomizer.nextInt(2 * qualifierLengthVariance + 1) - qualifierLengthVariance;
            qualifier = new byte[qualifierLength];
            randomizer.nextBytes(qualifier);
            // add it to map
            if (!rowsToQualifier.containsKey(rowId)) {
                rowsToQualifier.put(rowId, new ArrayList<>());
            }
            rowsToQualifier.get(rowId).add(qualifier);
        } else if (qualifierChance > chanceForSameQualifier) {
            // similar qualifier
            List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
            byte[] originalQualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
            qualifier = new byte[originalQualifier.length];
            int commonPrefix = randomizer.nextInt(qualifier.length);
            System.arraycopy(originalQualifier, 0, qualifier, 0, commonPrefix);
            for (int j = commonPrefix; j < qualifier.length; ++j) {
                qualifier[j] = (byte) (randomizer.nextInt() & 0xff);
            }
            rowsToQualifier.get(rowId).add(qualifier);
        } else {
            // same qualifier
            List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
            qualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
        }
        if (randomizer.nextFloat() < chanceForZeroValue) {
            for (int j = 0; j < value.length; ++j) {
                value[j] = (byte) 0;
            }
        } else {
            randomizer.nextBytes(value);
        }
        if (useTags) {
            KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, value, new Tag[] { new ArrayBackedTag((byte) 1, "value1") });
            ByteBuffer offheapKVBB = ByteBuffer.allocateDirect(keyValue.getLength());
            ByteBufferUtils.copyFromArrayToBuffer(offheapKVBB, keyValue.getBuffer(), keyValue.getOffset(), keyValue.getLength());
            ByteBufferKeyValue offheapKV = new ExtendedOffheapKeyValue(offheapKVBB, 0, keyValue.getLength(), 0);
            result.add(offheapKV);
        } else {
            KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, value);
            ByteBuffer offheapKVBB = ByteBuffer.allocateDirect(keyValue.getLength());
            ByteBufferUtils.copyFromArrayToBuffer(offheapKVBB, keyValue.getBuffer(), keyValue.getOffset(), keyValue.getLength());
            ByteBufferKeyValue offheapKV = new ExtendedOffheapKeyValue(offheapKVBB, 0, keyValue.getLength(), 0);
            result.add(offheapKV);
        }
    }
    Collections.sort(result, CellComparator.COMPARATOR);
    return result;
}
Also used: ByteBufferKeyValue (org.apache.hadoop.hbase.ByteBufferKeyValue), KeyValue (org.apache.hadoop.hbase.KeyValue), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag), ByteBuffer (java.nio.ByteBuffer), List (java.util.List), Cell (org.apache.hadoop.hbase.Cell)
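A hedged usage sketch (hypothetical test method, not from the HBase code base; it assumes the no-argument RedundantKVGenerator constructor and JUnit's assertTrue statically imported): generate a batch of off-heap cells with tags and confirm the returned list is sorted by the same comparator the generator uses.

public void sketchOffheapGeneratorUsage() {
    RedundantKVGenerator generator = new RedundantKVGenerator();
    // 100 cells, each carrying one tag (useTags = true).
    List<Cell> cells = generator.generateTestExtendedOffheapKeyValues(100, true);
    for (int i = 1; i < cells.size(); i++) {
        // The generator sorts with CellComparator.COMPARATOR, so neighbours must be in order.
        assertTrue(CellComparator.COMPARATOR.compare(cells.get(i - 1), cells.get(i)) <= 0);
    }
}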

Example 9 with ByteBufferKeyValue

Use of org.apache.hadoop.hbase.ByteBufferKeyValue in project hbase by apache.

Class TestMemStoreLAB, method testLABRandomAllocation.

/**
   * Test a bunch of random allocations
   */
@Test
public void testLABRandomAllocation() {
    Random rand = new Random();
    MemStoreLAB mslab = new MemStoreLABImpl();
    int expectedOff = 0;
    ByteBuffer lastBuffer = null;
    // Allocate 100,000 cells with random value sizes; each should be copied contiguously
    // into the current chunk, so its offset equals the running total for that chunk.
    for (int i = 0; i < 100000; i++) {
        int valSize = rand.nextInt(1000);
        KeyValue kv = new KeyValue(rk, cf, q, new byte[valSize]);
        int size = KeyValueUtil.length(kv);
        ByteBufferKeyValue newKv = (ByteBufferKeyValue) mslab.copyCellInto(kv);
        if (newKv.getBuffer() != lastBuffer) {
            expectedOff = 0;
            lastBuffer = newKv.getBuffer();
        }
        assertEquals(expectedOff, newKv.getOffset());
        assertTrue("Allocation overruns buffer", newKv.getOffset() + size <= newKv.getBuffer().capacity());
        expectedOff += size;
    }
}
Also used: ByteBufferKeyValue (org.apache.hadoop.hbase.ByteBufferKeyValue), KeyValue (org.apache.hadoop.hbase.KeyValue), Random (java.util.Random), ByteBuffer (java.nio.ByteBuffer), Test (org.junit.Test)
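For completeness, a hypothetical single-allocation sketch (not part of TestMemStoreLAB; it assumes the same imports and JUnit assertions as the test above): copyCellInto hands back an off-heap ByteBufferKeyValue whose contents match the original on-heap cell.

public void sketchSingleCopyCellInto() {
    MemStoreLAB mslab = new MemStoreLABImpl();
    KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("cf"), Bytes.toBytes("q"),
        Bytes.toBytes("value"));
    // The MSLAB copies the cell into one of its chunks and returns an off-heap view of it.
    ByteBufferKeyValue copied = (ByteBufferKeyValue) mslab.copyCellInto(kv);
    assertTrue(Bytes.equals(CellUtil.cloneRow(kv), CellUtil.cloneRow(copied)));
    assertTrue(Bytes.equals(CellUtil.cloneValue(kv), CellUtil.cloneValue(copied)));
}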

Example 10 with ByteBufferKeyValue

Use of org.apache.hadoop.hbase.ByteBufferKeyValue in project hbase by apache.

Class TestMemStoreLAB, method testLABThreading.

/**
   * Test allocation from lots of threads, making sure the results don't
   * overlap in any way
   */
@Test
public void testLABThreading() throws Exception {
    Configuration conf = new Configuration();
    MultithreadedTestUtil.TestContext ctx = new MultithreadedTestUtil.TestContext(conf);
    final AtomicInteger totalAllocated = new AtomicInteger();
    final MemStoreLAB mslab = new MemStoreLABImpl();
    List<List<AllocRecord>> allocations = Lists.newArrayList();
    for (int i = 0; i < 10; i++) {
        final List<AllocRecord> allocsByThisThread = Lists.newLinkedList();
        allocations.add(allocsByThisThread);
        TestThread t = new MultithreadedTestUtil.RepeatingTestThread(ctx) {

            private Random r = new Random();

            @Override
            public void doAnAction() throws Exception {
                int valSize = r.nextInt(1000);
                KeyValue kv = new KeyValue(rk, cf, q, new byte[valSize]);
                int size = KeyValueUtil.length(kv);
                ByteBufferKeyValue newCell = (ByteBufferKeyValue) mslab.copyCellInto(kv);
                totalAllocated.addAndGet(size);
                allocsByThisThread.add(new AllocRecord(newCell.getBuffer(), newCell.getOffset(), size));
            }
        };
        ctx.addThread(t);
    }
    ctx.startThreads();
    while (totalAllocated.get() < 50 * 1024 * 1024 && ctx.shouldRun()) {
        Thread.sleep(10);
    }
    ctx.stop();
    // Partition the allocations by the actual ByteBuffer they point into,
    // make sure offsets are unique for each chunk
    Map<ByteBuffer, Map<Integer, AllocRecord>> mapsByChunk = Maps.newHashMap();
    int sizeCounted = 0;
    for (AllocRecord rec : Iterables.concat(allocations)) {
        sizeCounted += rec.size;
        if (rec.size == 0)
            continue;
        Map<Integer, AllocRecord> mapForThisByteArray = mapsByChunk.get(rec.alloc);
        if (mapForThisByteArray == null) {
            mapForThisByteArray = Maps.newTreeMap();
            mapsByChunk.put(rec.alloc, mapForThisByteArray);
        }
        AllocRecord oldVal = mapForThisByteArray.put(rec.offset, rec);
        assertNull("Already had an entry " + oldVal + " for allocation " + rec, oldVal);
    }
    assertEquals("Sanity check test", sizeCounted, totalAllocated.get());
    // Now check each byte array to make sure allocations don't overlap
    for (Map<Integer, AllocRecord> allocsInChunk : mapsByChunk.values()) {
        int expectedOff = 0;
        for (AllocRecord alloc : allocsInChunk.values()) {
            assertEquals(expectedOff, alloc.offset);
            assertTrue("Allocation overruns buffer", alloc.offset + alloc.size <= alloc.alloc.capacity());
            expectedOff += alloc.size;
        }
    }
}
Also used: ByteBufferKeyValue (org.apache.hadoop.hbase.ByteBufferKeyValue), KeyValue (org.apache.hadoop.hbase.KeyValue), Configuration (org.apache.hadoop.conf.Configuration), HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), TestThread (org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread), MultithreadedTestUtil (org.apache.hadoop.hbase.MultithreadedTestUtil), ByteBuffer (java.nio.ByteBuffer), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), Random (java.util.Random), ArrayList (java.util.ArrayList), List (java.util.List), Map (java.util.Map), Test (org.junit.Test)

Aggregations

ByteBufferKeyValue (org.apache.hadoop.hbase.ByteBufferKeyValue): 10
ByteBuffer (java.nio.ByteBuffer): 8
KeyValue (org.apache.hadoop.hbase.KeyValue): 8
Test (org.junit.Test): 5
ArrayList (java.util.ArrayList): 4
Random (java.util.Random): 3
Configuration (org.apache.hadoop.conf.Configuration): 3
ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag): 3
Cell (org.apache.hadoop.hbase.Cell): 3
List (java.util.List): 2
Path (org.apache.hadoop.fs.Path): 2
Tag (org.apache.hadoop.hbase.Tag): 2
HashMap (java.util.HashMap): 1
Map (java.util.Map): 1
TreeMap (java.util.TreeMap): 1
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 1
FileSystem (org.apache.hadoop.fs.FileSystem): 1
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 1
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 1
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 1