Use of org.apache.hadoop.hbase.thrift2.generated.TDelete in project hbase by apache.
In the class TestThriftHBaseServiceHandler, the method testDeleteSingleTimestamp:
@Test
public void testDeleteSingleTimestamp() throws Exception {
  ThriftHBaseServiceHandler handler = createHandler();
  byte[] rowName = "testDeleteSingleTimestamp".getBytes();
  ByteBuffer table = wrap(tableAname);

  long timestamp1 = System.currentTimeMillis() - 10;
  long timestamp2 = System.currentTimeMillis();

  // Write the same cell twice, once at each timestamp, so the row has two versions.
  List<TColumnValue> columnValues = new ArrayList<>(1);
  TColumnValue columnValueA =
      new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
  columnValueA.setTimestamp(timestamp1);
  columnValues.add(columnValueA);
  TPut put = new TPut(wrap(rowName), columnValues);
  put.setColumnValues(columnValues);
  handler.put(table, put);
  columnValueA.setTimestamp(timestamp2);
  handler.put(table, put);

  // Sanity check: both versions are visible.
  TGet get = new TGet(wrap(rowName));
  get.setMaxVersions(2);
  TResult result = handler.get(table, get);
  assertEquals(2, result.getColumnValuesSize());

  // DELETE_COLUMN with no timestamp removes only the latest version of the column.
  TDelete delete = new TDelete(wrap(rowName));
  List<TColumn> deleteColumns = new ArrayList<>(1);
  TColumn deleteColumn = new TColumn(wrap(familyAname));
  deleteColumn.setQualifier(qualifierAname);
  deleteColumns.add(deleteColumn);
  delete.setColumns(deleteColumns);
  delete.setDeleteType(TDeleteType.DELETE_COLUMN);
  handler.deleteSingle(table, delete);

  get = new TGet(wrap(rowName));
  result = handler.get(table, get);
  assertArrayEquals(rowName, result.getRow());
  assertEquals(1, result.getColumnValuesSize());
  // The older timestamp should remain.
  assertEquals(timestamp1, result.getColumnValues().get(0).getTimestamp());
}
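For comparison, here is a minimal sketch of a version-specific delete built with the same Thrift types. It assumes the same handler, table, and test constants as the test above and is not part of the project's tests. With an explicit timestamp set on the TColumn, DELETE_COLUMN would be expected to target only that single version, so the newer cell would survive instead of the older one.

// Hedged sketch: delete only the cell written at timestamp1 by setting an
// explicit timestamp on the column to delete.
TDelete versionDelete = new TDelete(wrap(rowName));
List<TColumn> versionColumns = new ArrayList<>(1);
TColumn versionColumn = new TColumn(wrap(familyAname));
versionColumn.setQualifier(qualifierAname);
versionColumn.setTimestamp(timestamp1);
versionColumns.add(versionColumn);
versionDelete.setColumns(versionColumns);
versionDelete.setDeleteType(TDeleteType.DELETE_COLUMN);
handler.deleteSingle(table, versionDelete);
// A subsequent get would be expected to return only the timestamp2 cell.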
Use of org.apache.hadoop.hbase.thrift2.generated.TDelete in project hbase by apache.
In the class ThriftUtilities, the method deleteFromHBase:
public static TDelete deleteFromHBase(Delete in) {
  TDelete out = new TDelete(ByteBuffer.wrap(in.getRow()));

  List<TColumn> columns = new ArrayList<>(in.getFamilyCellMap().entrySet().size());
  long rowTimestamp = in.getTimeStamp();
  if (rowTimestamp != HConstants.LATEST_TIMESTAMP) {
    out.setTimestamp(rowTimestamp);
  }

  // Map<family, List<KeyValue>>
  for (Map.Entry<byte[], List<org.apache.hadoop.hbase.Cell>> familyEntry :
      in.getFamilyCellMap().entrySet()) {
    TColumn column = new TColumn(ByteBuffer.wrap(familyEntry.getKey()));
    for (org.apache.hadoop.hbase.Cell cell : familyEntry.getValue()) {
      byte[] family = CellUtil.cloneFamily(cell);
      byte[] qualifier = CellUtil.cloneQualifier(cell);
      long timestamp = cell.getTimestamp();
      if (family != null) {
        column.setFamily(family);
      }
      if (qualifier != null) {
        column.setQualifier(qualifier);
      }
      if (timestamp != HConstants.LATEST_TIMESTAMP) {
        column.setTimestamp(timestamp);
      }
    }
    columns.add(column);
  }
  out.setColumns(columns);
  return out;
}
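A hedged usage sketch of the conversion above. The Bytes helper (org.apache.hadoop.hbase.util.Bytes) and the literal row, family, qualifier, and timestamp values are illustrative, and the assertions only restate the mapping shown in deleteFromHBase; this is not an official test.

// Illustrative only: convert a client-side Delete and check the fields that
// deleteFromHBase is expected to populate on the resulting TDelete.
Delete clientDelete = new Delete(Bytes.toBytes("row1"));
clientDelete.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), 42L);

TDelete tDelete = ThriftUtilities.deleteFromHBase(clientDelete);

assertArrayEquals(Bytes.toBytes("row1"), tDelete.getRow());
TColumn converted = tDelete.getColumns().get(0);
assertArrayEquals(Bytes.toBytes("cf"), converted.getFamily());
assertArrayEquals(Bytes.toBytes("q"), converted.getQualifier());
assertEquals(42L, converted.getTimestamp());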
Use of org.apache.hadoop.hbase.thrift2.generated.TDelete in project hbase by apache.
In the class TestThriftHBaseServiceHandler, the method testAttribute:
@Test
public void testAttribute() throws Exception {
  byte[] rowName = "testAttribute".getBytes();
  byte[] attributeKey = "attribute1".getBytes();
  byte[] attributeValue = "value1".getBytes();
  Map<ByteBuffer, ByteBuffer> attributes = new HashMap<>();
  attributes.put(wrap(attributeKey), wrap(attributeValue));

  TGet tGet = new TGet(wrap(rowName));
  tGet.setAttributes(attributes);
  Get get = getFromThrift(tGet);
  assertArrayEquals(get.getAttribute("attribute1"), attributeValue);

  List<TColumnValue> columnValues = new ArrayList<>(1);
  columnValues.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname)));
  TPut tPut = new TPut(wrap(rowName), columnValues);
  tPut.setAttributes(attributes);
  Put put = putFromThrift(tPut);
  assertArrayEquals(put.getAttribute("attribute1"), attributeValue);

  TScan tScan = new TScan();
  tScan.setAttributes(attributes);
  Scan scan = scanFromThrift(tScan);
  assertArrayEquals(scan.getAttribute("attribute1"), attributeValue);

  List<TColumnIncrement> incrementColumns = new ArrayList<>(1);
  incrementColumns.add(new TColumnIncrement(wrap(familyAname), wrap(qualifierAname)));
  TIncrement tIncrement = new TIncrement(wrap(rowName), incrementColumns);
  tIncrement.setAttributes(attributes);
  Increment increment = incrementFromThrift(tIncrement);
  assertArrayEquals(increment.getAttribute("attribute1"), attributeValue);

  TDelete tDelete = new TDelete(wrap(rowName));
  tDelete.setAttributes(attributes);
  Delete delete = deleteFromThrift(tDelete);
  assertArrayEquals(delete.getAttribute("attribute1"), attributeValue);
}
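The conversions exercised above all carry Thrift attributes over to the corresponding client operation. Below is a minimal sketch of how such an attribute copy can be written; the helper name addAttributes and its exact signature are assumptions of this sketch, not necessarily the project's actual code.

// Hedged sketch: copy Thrift attribute map entries onto an HBase operation.
// Get, Scan, Put, Delete, and Increment all extend OperationWithAttributes.
private static void addAttributes(OperationWithAttributes op,
    Map<ByteBuffer, ByteBuffer> attributes) {
  if (attributes == null || attributes.isEmpty()) {
    return;
  }
  for (Map.Entry<ByteBuffer, ByteBuffer> entry : attributes.entrySet()) {
    String name = Bytes.toString(Bytes.getBytes(entry.getKey()));
    byte[] value = Bytes.getBytes(entry.getValue());
    op.setAttribute(name, value);
  }
}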
Use of org.apache.hadoop.hbase.thrift2.generated.TDelete in project hbase by apache.
In the class TestThriftHBaseServiceHandler, the method testDeleteMultiple:
@Test
public void testDeleteMultiple() throws Exception {
  ThriftHBaseServiceHandler handler = createHandler();
  ByteBuffer table = wrap(tableAname);
  byte[] rowName1 = "testDeleteMultiple1".getBytes();
  byte[] rowName2 = "testDeleteMultiple2".getBytes();

  List<TColumnValue> columnValues = new ArrayList<>(2);
  columnValues.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname)));
  columnValues.add(new TColumnValue(wrap(familyBname), wrap(qualifierBname), wrap(valueBname)));
  List<TPut> puts = new ArrayList<>(2);
  puts.add(new TPut(wrap(rowName1), columnValues));
  puts.add(new TPut(wrap(rowName2), columnValues));
  handler.putMultiple(table, puts);

  List<TDelete> deletes = new ArrayList<>(2);
  deletes.add(new TDelete(wrap(rowName1)));
  deletes.add(new TDelete(wrap(rowName2)));
  List<TDelete> deleteResults = handler.deleteMultiple(table, deletes);

  // An empty result list means all deletes were successfully applied.
  assertEquals(0, deleteResults.size());
  assertFalse(handler.exists(table, new TGet(wrap(rowName1))));
  assertFalse(handler.exists(table, new TGet(wrap(rowName2))));
}
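The empty return list is the success signal for deleteMultiple: the method returns the deletes that were not applied. A hedged client-side sketch of acting on that contract (the retry is illustrative, not a pattern taken from the project):

// Illustrative only: a non-empty result identifies the deletes still pending.
List<TDelete> failed = handler.deleteMultiple(table, deletes);
if (!failed.isEmpty()) {
  // Hypothetical single retry of just the failed deletes.
  handler.deleteMultiple(table, failed);
}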