Use of org.apache.hadoop.hbase.thrift2.generated.TColumn in project hbase by apache.
The class TestThriftHBaseServiceHandler, method testScan.
@Test
public void testScan() throws Exception {
  ThriftHBaseServiceHandler handler = createHandler();
  ByteBuffer table = wrap(tableAname);
  // insert data
  TColumnValue columnValue =
    new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
  List<TColumnValue> columnValues = new ArrayList<>(1);
  columnValues.add(columnValue);
  for (int i = 0; i < 10; i++) {
    TPut put = new TPut(wrap(Bytes.toBytes("testScan" + i)), columnValues);
    handler.put(table, put);
  }
  // create scan instance
  TScan scan = new TScan();
  List<TColumn> columns = new ArrayList<>(1);
  TColumn column = new TColumn();
  column.setFamily(familyAname);
  column.setQualifier(qualifierAname);
  columns.add(column);
  scan.setColumns(columns);
  scan.setStartRow(Bytes.toBytes("testScan"));
  scan.setStopRow(Bytes.toBytes("testScan\uffff"));
  // get scanner and rows
  int scanId = handler.openScanner(table, scan);
  List<TResult> results = handler.getScannerRows(scanId, 10);
  assertEquals(10, results.size());
  for (int i = 0; i < 10; i++) {
    // check if the rows are returned and in order
    assertArrayEquals(Bytes.toBytes("testScan" + i), results.get(i).getRow());
  }
  // check that we are at the end of the scan
  results = handler.getScannerRows(scanId, 10);
  assertEquals(0, results.size());
  // close scanner and check that it was indeed closed
  handler.closeScanner(scanId);
  try {
    handler.getScannerRows(scanId, 10);
    fail("Scanner id should be invalid");
  } catch (TIllegalArgument e) {
    // expected: the scanner id is no longer valid after closeScanner
  }
}
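For comparison, here is a minimal sketch of the same open/fetch/close scanner lifecycle driven from a raw Thrift client instead of the handler. The already-connected THBaseService.Client, the table buffer, and the batch size of 100 are assumptions for illustration, not part of the test above.

import java.nio.ByteBuffer;
import java.util.List;
import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
import org.apache.hadoop.hbase.thrift2.generated.TResult;
import org.apache.hadoop.hbase.thrift2.generated.TScan;
import org.apache.hadoop.hbase.util.Bytes;

public class ScannerLifecycleSketch {
  // Assumes `client` is already connected over a TTransport/TProtocol pair.
  static void scanAll(THBaseService.Client client, ByteBuffer table) throws Exception {
    TScan scan = new TScan();
    scan.setStartRow(Bytes.toBytes("testScan"));
    scan.setStopRow(Bytes.toBytes("testScan\uffff"));
    int scannerId = client.openScanner(table, scan);
    try {
      List<TResult> batch;
      // keep fetching until the server returns an empty batch
      while (!(batch = client.getScannerRows(scannerId, 100)).isEmpty()) {
        for (TResult result : batch) {
          System.out.println(Bytes.toString(result.getRow()));
        }
      }
    } finally {
      // always release the server-side scanner
      client.closeScanner(scannerId);
    }
  }
}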
Use of org.apache.hadoop.hbase.thrift2.generated.TColumn in project hbase by apache.
The class ThriftUtilities, method scanFromThrift.
public static Scan scanFromThrift(TScan in) throws IOException {
  Scan out = new Scan();
  if (in.isSetStartRow()) {
    out.withStartRow(in.getStartRow());
  }
  if (in.isSetStopRow()) {
    out.withStopRow(in.getStopRow());
  }
  if (in.isSetCaching()) {
    out.setCaching(in.getCaching());
  }
  if (in.isSetMaxVersions()) {
    out.readVersions(in.getMaxVersions());
  }
  if (in.isSetColumns()) {
    for (TColumn column : in.getColumns()) {
      if (column.isSetQualifier()) {
        out.addColumn(column.getFamily(), column.getQualifier());
      } else {
        out.addFamily(column.getFamily());
      }
    }
  }
  TTimeRange timeRange = in.getTimeRange();
  if (timeRange != null && timeRange.isSetMinStamp() && timeRange.isSetMaxStamp()) {
    out.setTimeRange(timeRange.getMinStamp(), timeRange.getMaxStamp());
  }
  if (in.isSetBatchSize()) {
    out.setBatch(in.getBatchSize());
  }
  if (in.isSetFilterString()) {
    ParseFilter parseFilter = new ParseFilter();
    out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
  }
  if (in.isSetAttributes()) {
    addAttributes(out, in.getAttributes());
  }
  if (in.isSetAuthorizations()) {
    out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
  }
  if (in.isSetReversed()) {
    out.setReversed(in.isReversed());
  }
  if (in.isSetCacheBlocks()) {
    out.setCacheBlocks(in.isCacheBlocks());
  }
  if (in.isSetColFamTimeRangeMap()) {
    Map<ByteBuffer, TTimeRange> colFamTimeRangeMap = in.getColFamTimeRangeMap();
    if (MapUtils.isNotEmpty(colFamTimeRangeMap)) {
      for (Map.Entry<ByteBuffer, TTimeRange> entry : colFamTimeRangeMap.entrySet()) {
        out.setColumnFamilyTimeRange(Bytes.toBytes(entry.getKey()),
          entry.getValue().getMinStamp(), entry.getValue().getMaxStamp());
      }
    }
  }
  if (in.isSetReadType()) {
    out.setReadType(readTypeFromThrift(in.getReadType()));
  }
  if (in.isSetLimit()) {
    out.setLimit(in.getLimit());
  }
  if (in.isSetConsistency()) {
    out.setConsistency(consistencyFromThrift(in.getConsistency()));
  }
  if (in.isSetTargetReplicaId()) {
    out.setReplicaId(in.getTargetReplicaId());
  }
  if (in.isSetFilterBytes()) {
    out.setFilter(filterFromThrift(in.getFilterBytes()));
  }
  return out;
}
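A brief usage sketch of the converter above: build a TScan on the Thrift side with one column, a time range, and a row limit, then convert it to a client-side Scan. The "cf"/"q" names and the limit of 100 are illustrative.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.thrift2.ThriftUtilities;
import org.apache.hadoop.hbase.thrift2.generated.TColumn;
import org.apache.hadoop.hbase.thrift2.generated.TScan;
import org.apache.hadoop.hbase.thrift2.generated.TTimeRange;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanFromThriftSketch {
  public static void main(String[] args) throws Exception {
    TScan tScan = new TScan();
    TColumn column = new TColumn();
    column.setFamily(Bytes.toBytes("cf"));    // illustrative family
    column.setQualifier(Bytes.toBytes("q"));  // illustrative qualifier
    tScan.addToColumns(column);
    // both stamps must be set for the time range to be applied
    tScan.setTimeRange(new TTimeRange().setMinStamp(0L).setMaxStamp(Long.MAX_VALUE));
    tScan.setLimit(100);
    Scan scan = ThriftUtilities.scanFromThrift(tScan);
    System.out.println(scan);  // Scan#toString summarizes rows, columns, time range
  }
}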
Use of org.apache.hadoop.hbase.thrift2.generated.TColumn in project hbase by apache.
The class ThriftUtilities, method getFromHBase.
public static TGet getFromHBase(Get in) {
  TGet out = new TGet();
  out.setRow(in.getRow());
  TTimeRange tTimeRange = new TTimeRange();
  tTimeRange.setMaxStamp(in.getTimeRange().getMax()).setMinStamp(in.getTimeRange().getMin());
  out.setTimeRange(tTimeRange);
  out.setMaxVersions(in.getMaxVersions());
  for (Map.Entry<String, byte[]> attribute : in.getAttributesMap().entrySet()) {
    out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())),
      ByteBuffer.wrap(attribute.getValue()));
  }
  try {
    Authorizations authorizations = in.getAuthorizations();
    if (authorizations != null) {
      TAuthorization tAuthorization = new TAuthorization();
      tAuthorization.setLabels(authorizations.getLabels());
      out.setAuthorizations(tAuthorization);
    }
  } catch (DeserializationException e) {
    throw new RuntimeException(e);
  }
  out.setConsistency(consistencyFromHBase(in.getConsistency()));
  out.setTargetReplicaId(in.getReplicaId());
  out.setCacheBlocks(in.getCacheBlocks());
  out.setStoreLimit(in.getMaxResultsPerColumnFamily());
  out.setStoreOffset(in.getRowOffsetPerColumnFamily());
  out.setExistence_only(in.isCheckExistenceOnly());
  for (Map.Entry<byte[], NavigableSet<byte[]>> family : in.getFamilyMap().entrySet()) {
    if (family.getValue() != null && !family.getValue().isEmpty()) {
      for (byte[] qualifier : family.getValue()) {
        TColumn column = new TColumn();
        column.setFamily(family.getKey());
        column.setQualifier(qualifier);
        out.addToColumns(column);
      }
    } else {
      TColumn column = new TColumn();
      column.setFamily(family.getKey());
      out.addToColumns(column);
    }
  }
  if (in.getFilter() != null) {
    try {
      out.setFilterBytes(filterFromHBase(in.getFilter()));
    } catch (IOException ioE) {
      throw new RuntimeException(ioE);
    }
  }
  return out;
}
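A small sketch driving this converter from the client side: build a Get (the row, family, and qualifier names are illustrative), convert it with getFromHBase, and print the resulting TColumn entries. This exercises both the family-plus-qualifier and family-only branches above.

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.thrift2.ThriftUtilities;
import org.apache.hadoop.hbase.thrift2.generated.TColumn;
import org.apache.hadoop.hbase.thrift2.generated.TGet;
import org.apache.hadoop.hbase.util.Bytes;

public class GetFromHBaseSketch {
  public static void main(String[] args) {
    Get get = new Get(Bytes.toBytes("row1"));
    get.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"));  // family + qualifier
    get.addFamily(Bytes.toBytes("cf2"));                     // whole family, no qualifier
    TGet tGet = ThriftUtilities.getFromHBase(get);
    for (TColumn column : tGet.getColumns()) {
      System.out.println(Bytes.toString(column.getFamily())
        + (column.isSetQualifier() ? ":" + Bytes.toString(column.getQualifier()) : ""));
    }
  }
}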
Use of org.apache.hadoop.hbase.thrift2.generated.TColumn in project hbase by apache.
The class ThriftUtilities, method deleteFromHBase.
public static TDelete deleteFromHBase(Delete in) {
  TDelete out = new TDelete(ByteBuffer.wrap(in.getRow()));
  List<TColumn> columns = new ArrayList<>(in.getFamilyCellMap().entrySet().size());
  long rowTimestamp = in.getTimestamp();
  if (rowTimestamp != HConstants.LATEST_TIMESTAMP) {
    out.setTimestamp(rowTimestamp);
  }
  for (Map.Entry<String, byte[]> attribute : in.getAttributesMap().entrySet()) {
    out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())),
      ByteBuffer.wrap(attribute.getValue()));
  }
  if (in.getDurability() != Durability.USE_DEFAULT) {
    out.setDurability(durabilityFromHBase(in.getDurability()));
  }
  // Delete the whole row
  if (in.getFamilyCellMap().size() == 0) {
    return out;
  }
  TDeleteType type = null;
  for (Map.Entry<byte[], List<Cell>> familyEntry : in.getFamilyCellMap().entrySet()) {
    byte[] family = familyEntry.getKey();
    TColumn column = new TColumn(ByteBuffer.wrap(familyEntry.getKey()));
    for (Cell cell : familyEntry.getValue()) {
      TDeleteType cellDeleteType = deleteTypeFromHBase(cell.getType());
      if (type == null) {
        type = cellDeleteType;
      } else if (type != cellDeleteType) {
        throw new RuntimeException("Only one delete type per Delete is supported, but two were "
          + "found: " + type + " and " + cellDeleteType);
      }
      byte[] qualifier = CellUtil.cloneQualifier(cell);
      long timestamp = cell.getTimestamp();
      column.setFamily(family);
      if (qualifier != null) {
        column.setQualifier(qualifier);
      }
      if (timestamp != HConstants.LATEST_TIMESTAMP) {
        column.setTimestamp(timestamp);
      }
    }
    columns.add(column);
  }
  out.setColumns(columns);
  out.setDeleteType(type);
  return out;
}
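A quick sketch of this converter in use, with illustrative row and column names. It assumes that addColumns on a client Delete produces all-version column-delete cells, which deleteTypeFromHBase should map to DELETE_COLUMNS; the expected outputs below rest on that assumption.

import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.thrift2.ThriftUtilities;
import org.apache.hadoop.hbase.thrift2.generated.TDelete;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteFromHBaseSketch {
  public static void main(String[] args) {
    Delete delete = new Delete(Bytes.toBytes("row1"));
    // addColumns (plural) removes all versions of the column
    delete.addColumns(Bytes.toBytes("cf"), Bytes.toBytes("q"));
    TDelete tDelete = ThriftUtilities.deleteFromHBase(delete);
    System.out.println(tDelete.getDeleteType());     // expected: DELETE_COLUMNS
    System.out.println(tDelete.getColumns().size()); // expected: 1 column entry
  }
}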