Example 6 with TDelete

use of org.apache.hadoop.hbase.thrift2.generated.TDelete in project hbase by apache.

From the class TestThriftHBaseServiceHandler, the method testDeleteSingleTimestamp:

@Test
public void testDeleteSingleTimestamp() throws Exception {
    ThriftHBaseServiceHandler handler = createHandler();
    byte[] rowName = "testDeleteSingleTimestamp".getBytes();
    ByteBuffer table = wrap(tableAname);
    long timestamp1 = System.currentTimeMillis() - 10;
    long timestamp2 = System.currentTimeMillis();
    List<TColumnValue> columnValues = new ArrayList<>(1);
    TColumnValue columnValueA = new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
    columnValueA.setTimestamp(timestamp1);
    columnValues.add(columnValueA);
    TPut put = new TPut(wrap(rowName), columnValues);
    put.setColumnValues(columnValues);
    handler.put(table, put);
    columnValueA.setTimestamp(timestamp2);
    handler.put(table, put);
    TGet get = new TGet(wrap(rowName));
    get.setMaxVersions(2);
    TResult result = handler.get(table, get);
    assertEquals(2, result.getColumnValuesSize());
    TDelete delete = new TDelete(wrap(rowName));
    List<TColumn> deleteColumns = new ArrayList<>(1);
    TColumn deleteColumn = new TColumn(wrap(familyAname));
    deleteColumn.setQualifier(qualifierAname);
    deleteColumns.add(deleteColumn);
    delete.setColumns(deleteColumns);
    delete.setDeleteType(TDeleteType.DELETE_COLUMN);
    handler.deleteSingle(table, delete);
    get = new TGet(wrap(rowName));
    result = handler.get(table, get);
    assertArrayEquals(rowName, result.getRow());
    assertEquals(1, result.getColumnValuesSize());
    // the older timestamp should remain.
    assertEquals(timestamp1, result.getColumnValues().get(0).getTimestamp());
}
Also used: TGet (org.apache.hadoop.hbase.thrift2.generated.TGet), TColumn (org.apache.hadoop.hbase.thrift2.generated.TColumn), ArrayList (java.util.ArrayList), TDelete (org.apache.hadoop.hbase.thrift2.generated.TDelete), TColumnValue (org.apache.hadoop.hbase.thrift2.generated.TColumnValue), ByteBuffer (java.nio.ByteBuffer), TResult (org.apache.hadoop.hbase.thrift2.generated.TResult), TPut (org.apache.hadoop.hbase.thrift2.generated.TPut), Test (org.junit.Test)
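The assertion on the older timestamp works because DELETE_COLUMN removes only the most recent version of the targeted cell. As a hedged variation on the test above (not taken from the hbase sources, and reusing its fixtures: handler, table, rowName, familyAname, qualifierAname, timestamp2), a single specific version could be removed by also setting a timestamp on the TColumn:

// Sketch only: target the version written at timestamp2 and leave the
// timestamp1 version untouched; fixtures are assumed from the test above.
TDelete versionedDelete = new TDelete(wrap(rowName));
TColumn versionedColumn = new TColumn(wrap(familyAname));
versionedColumn.setQualifier(qualifierAname);
versionedColumn.setTimestamp(timestamp2);
List<TColumn> versionedColumns = new ArrayList<>(1);
versionedColumns.add(versionedColumn);
versionedDelete.setColumns(versionedColumns);
versionedDelete.setDeleteType(TDeleteType.DELETE_COLUMN);
handler.deleteSingle(table, versionedDelete);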

Example 7 with TDelete

use of org.apache.hadoop.hbase.thrift2.generated.TDelete in project hbase by apache.

From the class ThriftUtilities, the method deleteFromHBase:

public static TDelete deleteFromHBase(Delete in) {
    TDelete out = new TDelete(ByteBuffer.wrap(in.getRow()));
    List<TColumn> columns = new ArrayList<>(in.getFamilyCellMap().entrySet().size());
    long rowTimestamp = in.getTimeStamp();
    if (rowTimestamp != HConstants.LATEST_TIMESTAMP) {
        out.setTimestamp(rowTimestamp);
    }
    // Map<family, List<KeyValue>>
    for (Map.Entry<byte[], List<org.apache.hadoop.hbase.Cell>> familyEntry : in.getFamilyCellMap().entrySet()) {
        TColumn column = new TColumn(ByteBuffer.wrap(familyEntry.getKey()));
        for (org.apache.hadoop.hbase.Cell cell : familyEntry.getValue()) {
            byte[] family = CellUtil.cloneFamily(cell);
            byte[] qualifier = CellUtil.cloneQualifier(cell);
            long timestamp = cell.getTimestamp();
            if (family != null) {
                column.setFamily(family);
            }
            if (qualifier != null) {
                column.setQualifier(qualifier);
            }
            if (timestamp != HConstants.LATEST_TIMESTAMP) {
                column.setTimestamp(timestamp);
            }
        }
        columns.add(column);
    }
    out.setColumns(columns);
    return out;
}
Also used: TColumn (org.apache.hadoop.hbase.thrift2.generated.TColumn), ArrayList (java.util.ArrayList), TDelete (org.apache.hadoop.hbase.thrift2.generated.TDelete), List (java.util.List), Map (java.util.Map), Cell (org.apache.hadoop.hbase.Cell)
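deleteFromHBase is the outbound half of the translation, turning a client-side Delete into a Thrift TDelete. A minimal sketch of calling it is below; the row, family, qualifier and timestamp values are hypothetical, and Bytes refers to org.apache.hadoop.hbase.util.Bytes:

// Sketch only: build a client Delete and convert it with the method above.
Delete clientDelete = new Delete(Bytes.toBytes("row1"));
clientDelete.addColumn(Bytes.toBytes("family"), Bytes.toBytes("qualifier"), 42L);
TDelete thriftDelete = ThriftUtilities.deleteFromHBase(clientDelete);
// thriftDelete now carries one TColumn with family, qualifier and timestamp set.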

Example 8 with TDelete

use of org.apache.hadoop.hbase.thrift2.generated.TDelete in project hbase by apache.

From the class TestThriftHBaseServiceHandler, the method testAttribute:

@Test
public void testAttribute() throws Exception {
    byte[] rowName = "testAttribute".getBytes();
    byte[] attributeKey = "attribute1".getBytes();
    byte[] attributeValue = "value1".getBytes();
    Map<ByteBuffer, ByteBuffer> attributes = new HashMap<>();
    attributes.put(wrap(attributeKey), wrap(attributeValue));
    TGet tGet = new TGet(wrap(rowName));
    tGet.setAttributes(attributes);
    Get get = getFromThrift(tGet);
    assertArrayEquals(get.getAttribute("attribute1"), attributeValue);
    List<TColumnValue> columnValues = new ArrayList<>(1);
    columnValues.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname)));
    TPut tPut = new TPut(wrap(rowName), columnValues);
    tPut.setAttributes(attributes);
    Put put = putFromThrift(tPut);
    assertArrayEquals(put.getAttribute("attribute1"), attributeValue);
    TScan tScan = new TScan();
    tScan.setAttributes(attributes);
    Scan scan = scanFromThrift(tScan);
    assertArrayEquals(scan.getAttribute("attribute1"), attributeValue);
    List<TColumnIncrement> incrementColumns = new ArrayList<>(1);
    incrementColumns.add(new TColumnIncrement(wrap(familyAname), wrap(qualifierAname)));
    TIncrement tIncrement = new TIncrement(wrap(rowName), incrementColumns);
    tIncrement.setAttributes(attributes);
    Increment increment = incrementFromThrift(tIncrement);
    assertArrayEquals(increment.getAttribute("attribute1"), attributeValue);
    TDelete tDelete = new TDelete(wrap(rowName));
    tDelete.setAttributes(attributes);
    Delete delete = deleteFromThrift(tDelete);
    assertArrayEquals(delete.getAttribute("attribute1"), attributeValue);
}
Also used: Delete (org.apache.hadoop.hbase.client.Delete), TDelete (org.apache.hadoop.hbase.thrift2.generated.TDelete), HashMap (java.util.HashMap), TGet (org.apache.hadoop.hbase.thrift2.generated.TGet), ArrayList (java.util.ArrayList), TIncrement (org.apache.hadoop.hbase.thrift2.generated.TIncrement), TColumnValue (org.apache.hadoop.hbase.thrift2.generated.TColumnValue), ByteBuffer (java.nio.ByteBuffer), TPut (org.apache.hadoop.hbase.thrift2.generated.TPut), Put (org.apache.hadoop.hbase.client.Put), Get (org.apache.hadoop.hbase.client.Get), TScan (org.apache.hadoop.hbase.thrift2.generated.TScan), TColumnIncrement (org.apache.hadoop.hbase.thrift2.generated.TColumnIncrement), Increment (org.apache.hadoop.hbase.client.Increment), Scan (org.apache.hadoop.hbase.client.Scan), Test (org.junit.Test)

Example 9 with TDelete

use of org.apache.hadoop.hbase.thrift2.generated.TDelete in project hbase by apache.

From the class TestThriftHBaseServiceHandler, the method testDeleteMultiple:

@Test
public void testDeleteMultiple() throws Exception {
    ThriftHBaseServiceHandler handler = createHandler();
    ByteBuffer table = wrap(tableAname);
    byte[] rowName1 = "testDeleteMultiple1".getBytes();
    byte[] rowName2 = "testDeleteMultiple2".getBytes();
    List<TColumnValue> columnValues = new ArrayList<>(2);
    columnValues.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname)));
    columnValues.add(new TColumnValue(wrap(familyBname), wrap(qualifierBname), wrap(valueBname)));
    List<TPut> puts = new ArrayList<>(2);
    puts.add(new TPut(wrap(rowName1), columnValues));
    puts.add(new TPut(wrap(rowName2), columnValues));
    handler.putMultiple(table, puts);
    List<TDelete> deletes = new ArrayList<>(2);
    deletes.add(new TDelete(wrap(rowName1)));
    deletes.add(new TDelete(wrap(rowName2)));
    List<TDelete> deleteResults = handler.deleteMultiple(table, deletes);
    // 0 means they were all successfully applied
    assertEquals(0, deleteResults.size());
    assertFalse(handler.exists(table, new TGet(wrap(rowName1))));
    assertFalse(handler.exists(table, new TGet(wrap(rowName2))));
}
Also used: TGet (org.apache.hadoop.hbase.thrift2.generated.TGet), ArrayList (java.util.ArrayList), TDelete (org.apache.hadoop.hbase.thrift2.generated.TDelete), TColumnValue (org.apache.hadoop.hbase.thrift2.generated.TColumnValue), TPut (org.apache.hadoop.hbase.thrift2.generated.TPut), ByteBuffer (java.nio.ByteBuffer), Test (org.junit.Test)
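Per the comment in the test, an empty result list from deleteMultiple means every delete was applied. A hedged sketch of acting on that contract follows (reusing the handler, table and deletes from the test above; the retry is illustrative only, since a failed batch may instead surface as a TIOError):

// Sketch only: a non-empty list would hold the deletes that were not applied.
List<TDelete> notApplied = handler.deleteMultiple(table, deletes);
if (!notApplied.isEmpty()) {
    // handle or retry just the remaining deletes
    handler.deleteMultiple(table, notApplied);
}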

Aggregations

ArrayList (java.util.ArrayList): 9
TDelete (org.apache.hadoop.hbase.thrift2.generated.TDelete): 9
TColumnValue (org.apache.hadoop.hbase.thrift2.generated.TColumnValue): 8
TPut (org.apache.hadoop.hbase.thrift2.generated.TPut): 8
Test (org.junit.Test): 8
ByteBuffer (java.nio.ByteBuffer): 7
TGet (org.apache.hadoop.hbase.thrift2.generated.TGet): 7
TColumn (org.apache.hadoop.hbase.thrift2.generated.TColumn): 5
TResult (org.apache.hadoop.hbase.thrift2.generated.TResult): 5
Delete (org.apache.hadoop.hbase.client.Delete): 2
Increment (org.apache.hadoop.hbase.client.Increment): 2
Put (org.apache.hadoop.hbase.client.Put): 2
TColumnIncrement (org.apache.hadoop.hbase.thrift2.generated.TColumnIncrement): 2
TIncrement (org.apache.hadoop.hbase.thrift2.generated.TIncrement): 2
HashMap (java.util.HashMap): 1
List (java.util.List): 1
Map (java.util.Map): 1
Cell (org.apache.hadoop.hbase.Cell): 1
Get (org.apache.hadoop.hbase.client.Get): 1
Scan (org.apache.hadoop.hbase.client.Scan): 1