
Example 26 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

Class Delete, method addColumn:

/**
   * Delete the specified version of the specified column.
   * @param family family name
   * @param qualifier column qualifier
   * @param timestamp version timestamp
   * @return this for invocation chaining
   */
public Delete addColumn(byte[] family, byte[] qualifier, long timestamp) {
    if (timestamp < 0) {
        throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp);
    }
    // Fetch the cell list for this family, creating it on first use.
    List<Cell> list = familyMap.get(family);
    if (list == null) {
        list = new ArrayList<>(1);
    }
    // A Delete-type KeyValue marks exactly the specified version for deletion.
    KeyValue kv = new KeyValue(this.row, family, qualifier, timestamp, KeyValue.Type.Delete);
    list.add(kv);
    familyMap.put(family, list);
    return this;
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), Cell (org.apache.hadoop.hbase.Cell)
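A minimal usage sketch (not part of the project source; conn, the table name, and the row/family/qualifier bytes are illustrative):

// Illustrative: conn is an open org.apache.hadoop.hbase.client.Connection.
try (Table table = conn.getTable(TableName.valueOf("t1"))) {
    Delete d = new Delete(Bytes.toBytes("row1"));
    // Remove only the cell version written at timestamp 42 in cf:q.
    d.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), 42L);
    table.delete(d);
}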

Example 27 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

Class Put, method addColumn:

/**
   * Add the specified column and value, with the specified timestamp as
   * its version to this Put operation.
   * @param family family name
   * @param qualifier column qualifier
   * @param ts version timestamp
   * @param value column value
   * @return this
   */
public Put addColumn(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value) {
    if (ts < 0) {
        throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
    }
    List<Cell> list = getCellList(family);
    // Trailing null: no tags attached to the created cell.
    KeyValue kv = createPutKeyValue(family, qualifier, ts, value, null);
    list.add(kv);
    familyMap.put(CellUtil.cloneFamily(kv), list);
    return this;
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), IndividualBytesFieldCell (org.apache.hadoop.hbase.IndividualBytesFieldCell), Cell (org.apache.hadoop.hbase.Cell)
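A minimal usage sketch (illustrative; table obtained as in the previous example). The ByteBuffer overload is convenient when the qualifier and value already live in buffers:

Put p = new Put(Bytes.toBytes("row1"));
p.addColumn(Bytes.toBytes("cf"),
        ByteBuffer.wrap(Bytes.toBytes("q")),
        System.currentTimeMillis(),
        ByteBuffer.wrap(Bytes.toBytes("v")));
table.put(p);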

Example 28 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

Class RedundantKVGenerator, method generateTestKeyValues:

/**
   * Generate test data useful to test encoders.
   * @param howMany how many KeyValues should be generated
   * @param useTags whether the generated KeyValues should carry a tag
   * @return sorted list of key values
   */
public List<KeyValue> generateTestKeyValues(int howMany, boolean useTags) {
    List<KeyValue> result = new ArrayList<>();
    List<byte[]> rows = generateRows();
    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();
    if (family == null) {
        family = new byte[columnFamilyLength];
        randomizer.nextBytes(family);
    }
    long baseTimestamp = Math.abs(randomizer.nextInt()) / baseTimestampDivide;
    byte[] value = new byte[valueLength];
    for (int i = 0; i < howMany; ++i) {
        long timestamp = baseTimestamp;
        if (timestampDiffSize > 0) {
            timestamp += randomizer.nextInt(timestampDiffSize);
        }
        Integer rowId = randomizer.nextInt(rows.size());
        byte[] row = rows.get(rowId);
        // generate qualifier, sometimes it is same, sometimes similar,
        // occasionally completely different
        byte[] qualifier;
        float qualifierChance = randomizer.nextFloat();
        if (!rowsToQualifier.containsKey(rowId) || qualifierChance > chanceForSameQualifier + chanceForSimilarQualifier) {
            int qualifierLength = averageQualifierLength;
            qualifierLength += randomizer.nextInt(2 * qualifierLengthVariance + 1) - qualifierLengthVariance;
            qualifier = new byte[qualifierLength];
            randomizer.nextBytes(qualifier);
            // add it to map
            if (!rowsToQualifier.containsKey(rowId)) {
                rowsToQualifier.put(rowId, new ArrayList<>());
            }
            rowsToQualifier.get(rowId).add(qualifier);
        } else if (qualifierChance > chanceForSameQualifier) {
            // similar qualifier
            List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
            byte[] originalQualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
            qualifier = new byte[originalQualifier.length];
            int commonPrefix = randomizer.nextInt(qualifier.length);
            System.arraycopy(originalQualifier, 0, qualifier, 0, commonPrefix);
            for (int j = commonPrefix; j < qualifier.length; ++j) {
                qualifier[j] = (byte) (randomizer.nextInt() & 0xff);
            }
            rowsToQualifier.get(rowId).add(qualifier);
        } else {
            // same qualifier
            List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
            qualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
        }
        if (randomizer.nextFloat() < chanceForZeroValue) {
            for (int j = 0; j < value.length; ++j) {
                value[j] = (byte) 0;
            }
        } else {
            randomizer.nextBytes(value);
        }
        if (useTags) {
            result.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] { new ArrayBackedTag((byte) 1, "value1") }));
        } else {
            result.add(new KeyValue(row, family, qualifier, timestamp, value));
        }
    }
    Collections.sort(result, CellComparator.COMPARATOR);
    return result;
}
Also used: ByteBufferKeyValue (org.apache.hadoop.hbase.ByteBufferKeyValue), KeyValue (org.apache.hadoop.hbase.KeyValue), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), List (java.util.List), ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag), Tag (org.apache.hadoop.hbase.Tag)
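A minimal sketch of driving the generator (illustrative; assumes the default RedundantKVGenerator constructor):

RedundantKVGenerator generator = new RedundantKVGenerator();
// 1000 KeyValues with deliberately redundant rows and qualifiers, no tags;
// the returned list is already sorted, ready to feed an encoder under test.
List<KeyValue> kvs = generator.generateTestKeyValues(1000, false);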

Example 29 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

Class RedundantKVGenerator, method convertKvToByteBuffer:

/**
   * Convert a list of KeyValues to a byte buffer.
   * @param keyValues list of KeyValues to be converted
   * @param includesMemstoreTS whether each cell's sequence id (memstore timestamp)
   *          is appended as a variable-length long
   * @return buffer with content from key values
   */
public static ByteBuffer convertKvToByteBuffer(List<KeyValue> keyValues, boolean includesMemstoreTS) {
    // First pass: compute the exact size needed for the buffer.
    int totalSize = 0;
    for (KeyValue kv : keyValues) {
        totalSize += kv.getLength();
        if (includesMemstoreTS) {
            totalSize += WritableUtils.getVIntSize(kv.getSequenceId());
        }
    }
    // Second pass: copy each KeyValue's backing bytes into the buffer.
    ByteBuffer result = ByteBuffer.allocate(totalSize);
    for (KeyValue kv : keyValues) {
        result.put(kv.getBuffer(), kv.getOffset(), kv.getLength());
        if (includesMemstoreTS) {
            ByteBufferUtils.writeVLong(result, kv.getSequenceId());
        }
    }
    return result;
}
Also used: ByteBufferKeyValue (org.apache.hadoop.hbase.ByteBufferKeyValue), KeyValue (org.apache.hadoop.hbase.KeyValue), ByteBuffer (java.nio.ByteBuffer)
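Combined with the previous example, this flattens generated test data into a single buffer (illustrative sketch):

List<KeyValue> kvs = new RedundantKVGenerator().generateTestKeyValues(100, true);
// Append each cell's sequence id as a vlong after its bytes.
ByteBuffer flat = RedundantKVGenerator.convertKvToByteBuffer(kvs, true);
// The buffer's position sits at its end after writing; rewind before reading.
flat.rewind();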

Example 30 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

Class TestCellCodecWithTags, method testCellWithTag:

@Test
public void testCellWithTag() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    CountingOutputStream cos = new CountingOutputStream(baos);
    DataOutputStream dos = new DataOutputStream(cos);
    Codec codec = new CellCodecWithTags();
    Codec.Encoder encoder = codec.getEncoder(dos);
    final Cell cell1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("1"), new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring1")), new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring2")) });
    final Cell cell2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring3")) });
    final Cell cell3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3"), new Tag[] { new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring4")), new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring5")), new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring6")) });
    encoder.write(cell1);
    encoder.write(cell2);
    encoder.write(cell3);
    encoder.flush();
    dos.close();
    long offset = cos.getCount();
    CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
    DataInputStream dis = new DataInputStream(cis);
    Codec.Decoder decoder = codec.getDecoder(dis);
    assertTrue(decoder.advance());
    Cell c = decoder.current();
    assertTrue(CellUtil.equals(c, cell1));
    List<Tag> tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
    assertEquals(2, tags.size());
    Tag tag = tags.get(0);
    assertEquals(1, tag.getType());
    assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag)));
    tag = tags.get(1);
    assertEquals(2, tag.getType());
    assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag)));
    assertTrue(decoder.advance());
    c = decoder.current();
    assertTrue(CellUtil.equals(c, cell2));
    tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
    assertEquals(1, tags.size());
    tag = tags.get(0);
    assertEquals(1, tag.getType());
    assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag)));
    assertTrue(decoder.advance());
    c = decoder.current();
    assertTrue(CellUtil.equals(c, cell3));
    tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
    assertEquals(3, tags.size());
    tag = tags.get(0);
    assertEquals(2, tag.getType());
    assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag)));
    tag = tags.get(1);
    assertEquals(2, tag.getType());
    assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag)));
    tag = tags.get(2);
    assertEquals(1, tag.getType());
    assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag)));
    assertFalse(decoder.advance());
    dis.close();
    assertEquals(offset, cis.getCount());
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), DataOutputStream (java.io.DataOutputStream), CountingInputStream (com.google.common.io.CountingInputStream), ByteArrayOutputStream (java.io.ByteArrayOutputStream), DataInputStream (java.io.DataInputStream), CountingOutputStream (com.google.common.io.CountingOutputStream), ByteArrayInputStream (java.io.ByteArrayInputStream), ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag), Tag (org.apache.hadoop.hbase.Tag), Cell (org.apache.hadoop.hbase.Cell), Test (org.junit.Test)
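Distilled from the test above, the encode/decode round trip follows this shape (sketch; stream counting, error handling, and assertions omitted):

Codec codec = new CellCodecWithTags();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Codec.Encoder encoder = codec.getEncoder(new DataOutputStream(baos));
// cell is any tagged Cell, e.g. a KeyValue constructed with a Tag[].
encoder.write(cell);
encoder.flush();
Codec.Decoder decoder =
        codec.getDecoder(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
while (decoder.advance()) {
    Cell decoded = decoder.current();
    // Tags survive the round trip with CellCodecWithTags.
}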

Aggregations

KeyValue (org.apache.hadoop.hbase.KeyValue): 552
Test (org.junit.Test): 289
Cell (org.apache.hadoop.hbase.Cell): 193
ArrayList (java.util.ArrayList): 172
Put (org.apache.hadoop.hbase.client.Put): 98
Scan (org.apache.hadoop.hbase.client.Scan): 85
Result (org.apache.hadoop.hbase.client.Result): 70
Configuration (org.apache.hadoop.conf.Configuration): 64
Path (org.apache.hadoop.fs.Path): 55
ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag): 36
Tag (org.apache.hadoop.hbase.Tag): 35
ByteBuffer (java.nio.ByteBuffer): 34
List (java.util.List): 34
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 34
IOException (java.io.IOException): 32
TableName (org.apache.hadoop.hbase.TableName): 32
TreeMap (java.util.TreeMap): 29
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 28
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 28
WALEdit (org.apache.hadoop.hbase.regionserver.wal.WALEdit): 27