
Example 36 with ArrayBackedTag

Use of org.apache.hadoop.hbase.ArrayBackedTag in the Apache HBase project.

From the class TestTagCompressionContext, the createOffheapKVWithTags method.

private Cell createOffheapKVWithTags(int noOfTags) {
    List<Tag> tags = new ArrayList<>();
    for (int i = 0; i < noOfTags; i++) {
        tags.add(new ArrayBackedTag((byte) i, "tagValue" + i));
    }
    // Build an ordinary on-heap KeyValue carrying the tags.
    KeyValue kv = new KeyValue(ROW, CF, Q, 1234L, V, tags);
    // Copy its backing array into a direct ByteBuffer and wrap it as an off-heap cell.
    ByteBuffer dbb = ByteBuffer.allocateDirect(kv.getBuffer().length);
    ByteBufferUtils.copyFromArrayToBuffer(dbb, kv.getBuffer(), 0, kv.getBuffer().length);
    ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, 0, kv.getBuffer().length, 0);
    return offheapKV;
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ByteBufferKeyValue(org.apache.hadoop.hbase.ByteBufferKeyValue) ArrayList(java.util.ArrayList) Tag(org.apache.hadoop.hbase.Tag) ArrayBackedTag(org.apache.hadoop.hbase.ArrayBackedTag) ByteBuffer(java.nio.ByteBuffer)
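The test method above only constructs the off-heap cell. As a minimal standalone sketch (not taken from TestTagCompressionContext), the tags it attaches could be read back through the Tag interface accessors; the class name here is hypothetical and org.apache.hadoop.hbase.util.Bytes is assumed to be on the classpath.

import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.util.Bytes;

public class ArrayBackedTagRoundTrip {
    public static void main(String[] args) {
        // Build a tag the same way the test does: a one-byte type plus a string value.
        Tag tag = new ArrayBackedTag((byte) 1, "tagValue1");
        // The type byte comes straight back from getType().
        byte type = tag.getType();
        // The value sits in a backing array; use the offset/length accessors to extract it.
        String value = Bytes.toString(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
        System.out.println("type=" + type + ", value=" + value);
    }
}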

Example 37 with ArrayBackedTag

Use of org.apache.hadoop.hbase.ArrayBackedTag in the Apache HBase project.

From the class RedundantKVGenerator, the generateTestExtendedOffheapKeyValues method.

/**
   * Generate test data useful for testing encoders.
   * @param howMany how many key values should be generated
   * @param useTags whether to attach a tag to each generated key value
   * @return a sorted list of key values
   */
public List<Cell> generateTestExtendedOffheapKeyValues(int howMany, boolean useTags) {
    List<Cell> result = new ArrayList<>();
    List<byte[]> rows = generateRows();
    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();
    if (family == null) {
        family = new byte[columnFamilyLength];
        randomizer.nextBytes(family);
    }
    long baseTimestamp = Math.abs(randomizer.nextInt()) / baseTimestampDivide;
    byte[] value = new byte[valueLength];
    for (int i = 0; i < howMany; ++i) {
        long timestamp = baseTimestamp;
        if (timestampDiffSize > 0) {
            timestamp += randomizer.nextInt(timestampDiffSize);
        }
        Integer rowId = randomizer.nextInt(rows.size());
        byte[] row = rows.get(rowId);
        // generate qualifier, sometimes it is same, sometimes similar,
        // occasionally completely different
        byte[] qualifier;
        float qualifierChance = randomizer.nextFloat();
        if (!rowsToQualifier.containsKey(rowId) || qualifierChance > chanceForSameQualifier + chanceForSimilarQualifier) {
            int qualifierLength = averageQualifierLength;
            qualifierLength += randomizer.nextInt(2 * qualifierLengthVariance + 1) - qualifierLengthVariance;
            qualifier = new byte[qualifierLength];
            randomizer.nextBytes(qualifier);
            // add it to map
            if (!rowsToQualifier.containsKey(rowId)) {
                rowsToQualifier.put(rowId, new ArrayList<>());
            }
            rowsToQualifier.get(rowId).add(qualifier);
        } else if (qualifierChance > chanceForSameQualifier) {
            // similar qualifier
            List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
            byte[] originalQualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
            qualifier = new byte[originalQualifier.length];
            int commonPrefix = randomizer.nextInt(qualifier.length);
            System.arraycopy(originalQualifier, 0, qualifier, 0, commonPrefix);
            for (int j = commonPrefix; j < qualifier.length; ++j) {
                qualifier[j] = (byte) (randomizer.nextInt() & 0xff);
            }
            rowsToQualifier.get(rowId).add(qualifier);
        } else {
            // same qualifier
            List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
            qualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
        }
        if (randomizer.nextFloat() < chanceForZeroValue) {
            for (int j = 0; j < value.length; ++j) {
                value[j] = (byte) 0;
            }
        } else {
            randomizer.nextBytes(value);
        }
        if (useTags) {
            KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, value, new Tag[] { new ArrayBackedTag((byte) 1, "value1") });
            ByteBuffer offheapKVBB = ByteBuffer.allocateDirect(keyValue.getLength());
            ByteBufferUtils.copyFromArrayToBuffer(offheapKVBB, keyValue.getBuffer(), keyValue.getOffset(), keyValue.getLength());
            ByteBufferKeyValue offheapKV = new ExtendedOffheapKeyValue(offheapKVBB, 0, keyValue.getLength(), 0);
            result.add(offheapKV);
        } else {
            KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, value);
            ByteBuffer offheapKVBB = ByteBuffer.allocateDirect(keyValue.getLength());
            ByteBufferUtils.copyFromArrayToBuffer(offheapKVBB, keyValue.getBuffer(), keyValue.getOffset(), keyValue.getLength());
            ByteBufferKeyValue offheapKV = new ExtendedOffheapKeyValue(offheapKVBB, 0, keyValue.getLength(), 0);
            result.add(offheapKV);
        }
    }
    Collections.sort(result, CellComparator.COMPARATOR);
    return result;
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ByteBufferKeyValue(org.apache.hadoop.hbase.ByteBufferKeyValue) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) List(java.util.List) ArrayBackedTag(org.apache.hadoop.hbase.ArrayBackedTag) ByteBuffer(java.nio.ByteBuffer) Cell(org.apache.hadoop.hbase.Cell)
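As a hedged usage sketch (not part of RedundantKVGenerator itself), the generated cells could be consumed like this; the generator instance, the count of 100, and the assertion style are illustrative assumptions, while CellComparator.COMPARATOR is the same comparator the method above sorts with.

// Hypothetical caller: generate 100 tagged off-heap cells and verify they come back in sorted order.
RedundantKVGenerator generator = new RedundantKVGenerator();
List<Cell> cells = generator.generateTestExtendedOffheapKeyValues(100, true);
for (int i = 1; i < cells.size(); i++) {
    // The method sorts with CellComparator.COMPARATOR, so consecutive cells must be non-decreasing.
    assert CellComparator.COMPARATOR.compare(cells.get(i - 1), cells.get(i)) <= 0;
}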

Aggregations

ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag): 37
KeyValue (org.apache.hadoop.hbase.KeyValue): 31
Tag (org.apache.hadoop.hbase.Tag): 31
ArrayList (java.util.ArrayList): 23
Cell (org.apache.hadoop.hbase.Cell): 14
Test (org.junit.Test): 9
Put (org.apache.hadoop.hbase.client.Put): 7
IOException (java.io.IOException): 6
ByteBufferKeyValue (org.apache.hadoop.hbase.ByteBufferKeyValue): 6
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 4
DataOutputStream (java.io.DataOutputStream): 4
Path (org.apache.hadoop.fs.Path): 4
ByteArrayInputStream (java.io.ByteArrayInputStream): 3
DataInputStream (java.io.DataInputStream): 3
ByteBuffer (java.nio.ByteBuffer): 3
HashMap (java.util.HashMap): 3
List (java.util.List): 3
Random (java.util.Random): 3
CountingInputStream (com.google.common.io.CountingInputStream): 2
CountingOutputStream (com.google.common.io.CountingOutputStream): 2