Use of org.apache.hadoop.hbase.thrift2.generated.TScan in project hbase by apache.
From the class TestThriftHBaseServiceHandler, method testScanWithColumnFamilyTimeRange.
@Test
public void testScanWithColumnFamilyTimeRange() throws Exception {
  ThriftHBaseServiceHandler handler = createHandler();
  ByteBuffer table = wrap(tableAname);
  // insert data
  TColumnValue familyAColumnValue =
      new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
  TColumnValue familyBColumnValue =
      new TColumnValue(wrap(familyBname), wrap(qualifierBname), wrap(valueBname));
  long minTimestamp = System.currentTimeMillis();
  for (int i = 0; i < 10; i++) {
    familyAColumnValue.setTimestamp(minTimestamp + i);
    familyBColumnValue.setTimestamp(minTimestamp + i);
    List<TColumnValue> columnValues = new ArrayList<>(2);
    columnValues.add(familyAColumnValue);
    columnValues.add(familyBColumnValue);
    TPut put = new TPut(wrap(("testScanWithColumnFamilyTimeRange" + i).getBytes()), columnValues);
    handler.put(table, put);
  }
  // create scan instance with column family time range
  TScan scan = new TScan();
  Map<ByteBuffer, TTimeRange> colFamTimeRangeMap = new HashMap<>(2);
  colFamTimeRangeMap.put(wrap(familyAname), new TTimeRange(minTimestamp + 3, minTimestamp + 5));
  colFamTimeRangeMap.put(wrap(familyBname), new TTimeRange(minTimestamp + 6, minTimestamp + 9));
  scan.setColFamTimeRangeMap(colFamTimeRangeMap);
  // get scanner and rows
  int scanId = handler.openScanner(table, scan);
  List<TResult> results = handler.getScannerRows(scanId, 5);
  assertEquals(5, results.size());
  int familyACount = 0;
  int familyBCount = 0;
  for (TResult result : results) {
    List<TColumnValue> columnValues = result.getColumnValues();
    if (CollectionUtils.isNotEmpty(columnValues)) {
      if (Bytes.equals(familyAname, columnValues.get(0).getFamily())) {
        familyACount++;
      } else if (Bytes.equals(familyBname, columnValues.get(0).getFamily())) {
        familyBCount++;
      }
    }
  }
  assertEquals(2, familyACount);
  assertEquals(3, familyBCount);
  // check that we are at the end of the scan
  results = handler.getScannerRows(scanId, 1);
  assertEquals(0, results.size());
  // close scanner and check that it was indeed closed
  handler.closeScanner(scanId);
  try {
    handler.getScannerRows(scanId, 1);
    fail("Scanner id should be invalid");
  } catch (TIllegalArgument e) {
  }
}
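The per-family time ranges above are half-open intervals: a TTimeRange(min, max) matches cells with min <= timestamp < max, which is why family A's range [minTimestamp+3, minTimestamp+5) yields two rows and family B's [minTimestamp+6, minTimestamp+9) yields three. As a minimal sketch of the same restriction through the native client API, assuming a hypothetical table "tableA" with families "familyA" and "familyB" (connection setup and names are illustrative, not from the test above):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class ColumnFamilyTimeRangeSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("tableA"))) { // hypothetical table
      long min = System.currentTimeMillis() - 10_000;
      Scan scan = new Scan();
      // Half-open interval [min + 3, min + 5): only timestamps min+3 and min+4 match.
      scan.setColumnFamilyTimeRange(Bytes.toBytes("familyA"), min + 3, min + 5);
      scan.setColumnFamilyTimeRange(Bytes.toBytes("familyB"), min + 6, min + 9);
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
          System.out.println(Bytes.toString(result.getRow()));
        }
      }
    }
  }
}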
Use of org.apache.hadoop.hbase.thrift2.generated.TScan in project hbase by apache.
From the class TestThriftHBaseServiceHandler, method testScan.
@Test
public void testScan() throws Exception {
  ThriftHBaseServiceHandler handler = createHandler();
  ByteBuffer table = wrap(tableAname);
  // insert data
  TColumnValue columnValue =
      new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
  List<TColumnValue> columnValues = new ArrayList<>(1);
  columnValues.add(columnValue);
  for (int i = 0; i < 10; i++) {
    TPut put = new TPut(wrap(("testScan" + i).getBytes()), columnValues);
    handler.put(table, put);
  }
  // create scan instance
  TScan scan = new TScan();
  List<TColumn> columns = new ArrayList<>(1);
  TColumn column = new TColumn();
  column.setFamily(familyAname);
  column.setQualifier(qualifierAname);
  columns.add(column);
  scan.setColumns(columns);
  scan.setStartRow("testScan".getBytes());
  // the stop row is exclusive; the "\uffff" suffix makes it sort after every "testScan" row
  scan.setStopRow("testScan\uffff".getBytes());
  // get scanner and rows
  int scanId = handler.openScanner(table, scan);
  List<TResult> results = handler.getScannerRows(scanId, 10);
  assertEquals(10, results.size());
  for (int i = 0; i < 10; i++) {
    // check if the rows are returned and in order
    assertArrayEquals(("testScan" + i).getBytes(), results.get(i).getRow());
  }
  // check that we are at the end of the scan
  results = handler.getScannerRows(scanId, 10);
  assertEquals(0, results.size());
  // close scanner and check that it was indeed closed
  handler.closeScanner(scanId);
  try {
    handler.getScannerRows(scanId, 10);
    fail("Scanner id should be invalid");
  } catch (TIllegalArgument e) {
  }
}
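The scanner lifecycle here is explicit: openScanner returns an int handle, getScannerRows pages through results until an empty batch signals the end, and closeScanner releases the server-side state; calling getScannerRows on a closed handle raises TIllegalArgument. A hedged sketch of the same loop from a standalone Thrift client follows; the host, port, table name, and the framed transport are assumptions that must match how the thrift2 ThriftServer was started, and the TFramedTransport import location corresponds to the pre-0.13 libthrift versions HBase ships:

import java.nio.ByteBuffer;
import java.util.List;
import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
import org.apache.hadoop.hbase.thrift2.generated.TResult;
import org.apache.hadoop.hbase.thrift2.generated.TScan;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class ScanClientSketch {
  public static void main(String[] args) throws Exception {
    // Assumed endpoint; transport and protocol must match the server's configuration.
    TTransport transport = new TFramedTransport(new TSocket("localhost", 9090));
    transport.open();
    THBaseService.Client client = new THBaseService.Client(new TBinaryProtocol(transport));
    try {
      TScan scan = new TScan();
      scan.setStartRow("testScan".getBytes());
      scan.setStopRow("testScan\uffff".getBytes()); // prefix-exclusive upper bound
      int scannerId = client.openScanner(ByteBuffer.wrap("tableA".getBytes()), scan);
      try {
        List<TResult> batch;
        // Page through the scan until an empty batch signals the end.
        while (!(batch = client.getScannerRows(scannerId, 10)).isEmpty()) {
          System.out.println("got " + batch.size() + " rows");
        }
      } finally {
        client.closeScanner(scannerId); // always release server-side scanner state
      }
    } finally {
      transport.close();
    }
  }
}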
Use of org.apache.hadoop.hbase.thrift2.generated.TScan in project hbase by apache.
From the class TestThriftHBaseServiceHandler, method testReverseScan.
@Test
public void testReverseScan() throws Exception {
  ThriftHBaseServiceHandler handler = createHandler();
  ByteBuffer table = wrap(tableAname);
  // insert data
  TColumnValue columnValue =
      new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
  List<TColumnValue> columnValues = new ArrayList<>(1);
  columnValues.add(columnValue);
  for (int i = 0; i < 10; i++) {
    TPut put = new TPut(wrap(("testReverseScan" + i).getBytes()), columnValues);
    handler.put(table, put);
  }
  // create reverse scan instance
  TScan scan = new TScan();
  scan.setReversed(true);
  List<TColumn> columns = new ArrayList<>(1);
  TColumn column = new TColumn();
  column.setFamily(familyAname);
  column.setQualifier(qualifierAname);
  columns.add(column);
  scan.setColumns(columns);
  // for a reversed scan the start row must sort after the stop row
  scan.setStartRow("testReverseScan\uffff".getBytes());
  scan.setStopRow("testReverseScan".getBytes());
  // get scanner and rows
  int scanId = handler.openScanner(table, scan);
  List<TResult> results = handler.getScannerRows(scanId, 10);
  assertEquals(10, results.size());
  for (int i = 0; i < 10; i++) {
    // check if the rows are returned and in order
    assertArrayEquals(("testReverseScan" + (9 - i)).getBytes(), results.get(i).getRow());
  }
  // check that we are at the end of the scan
  results = handler.getScannerRows(scanId, 10);
  assertEquals(0, results.size());
  // close scanner and check that it was indeed closed
  handler.closeScanner(scanId);
  try {
    handler.getScannerRows(scanId, 10);
    fail("Scanner id should be invalid");
  } catch (TIllegalArgument e) {
  }
}
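For a reversed scan the bounds swap roles: iteration runs from the start row backwards to the stop row (exclusive), so the start row must sort after every row to be returned. That is why the "\uffff" suffix moves to the start row here, while the bare prefix serves as the lower bound. A minimal sketch of the same idea as a reusable helper, using the native Scan API under the same assumptions as before:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ReverseScanSketch {
  // Builds a reversed scan over all rows starting with the given prefix.
  static Scan reversePrefixScan(String prefix) {
    Scan scan = new Scan();
    scan.setReversed(true);
    // With setReversed(true) the bounds swap roles: startRow must sort after stopRow.
    scan.setStartRow(Bytes.toBytes(prefix + "\uffff")); // after every row with the prefix
    scan.setStopRow(Bytes.toBytes(prefix));             // before every row with the prefix
    return scan;
  }
}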
Use of org.apache.hadoop.hbase.thrift2.generated.TScan in project hbase by apache.
From the class TestThriftHBaseServiceHandler, method testAttribute.
@Test
public void testAttribute() throws Exception {
  byte[] rowName = "testAttribute".getBytes();
  byte[] attributeKey = "attribute1".getBytes();
  byte[] attributeValue = "value1".getBytes();
  Map<ByteBuffer, ByteBuffer> attributes = new HashMap<>();
  attributes.put(wrap(attributeKey), wrap(attributeValue));
  TGet tGet = new TGet(wrap(rowName));
  tGet.setAttributes(attributes);
  Get get = getFromThrift(tGet);
  assertArrayEquals(get.getAttribute("attribute1"), attributeValue);
  List<TColumnValue> columnValues = new ArrayList<>(1);
  columnValues.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname)));
  TPut tPut = new TPut(wrap(rowName), columnValues);
  tPut.setAttributes(attributes);
  Put put = putFromThrift(tPut);
  assertArrayEquals(put.getAttribute("attribute1"), attributeValue);
  TScan tScan = new TScan();
  tScan.setAttributes(attributes);
  Scan scan = scanFromThrift(tScan);
  assertArrayEquals(scan.getAttribute("attribute1"), attributeValue);
  List<TColumnIncrement> incrementColumns = new ArrayList<>(1);
  incrementColumns.add(new TColumnIncrement(wrap(familyAname), wrap(qualifierAname)));
  TIncrement tIncrement = new TIncrement(wrap(rowName), incrementColumns);
  tIncrement.setAttributes(attributes);
  Increment increment = incrementFromThrift(tIncrement);
  assertArrayEquals(increment.getAttribute("attribute1"), attributeValue);
  TDelete tDelete = new TDelete(wrap(rowName));
  tDelete.setAttributes(attributes);
  Delete delete = deleteFromThrift(tDelete);
  assertArrayEquals(delete.getAttribute("attribute1"), attributeValue);
}
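Attributes are opaque name/byte[] pairs carried alongside an operation, typically read back server-side (for example, by a coprocessor); the test above only verifies that they survive each thrift2 conversion. A minimal sketch of the native attribute API that the conversions target:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class AttributeSketch {
  public static void main(String[] args) {
    Scan scan = new Scan();
    // setAttribute/getAttribute come from OperationWithAttributes,
    // the shared base class of Get, Put, Scan, Increment, and Delete.
    scan.setAttribute("attribute1", Bytes.toBytes("value1"));
    System.out.println(Bytes.toString(scan.getAttribute("attribute1"))); // prints: value1
  }
}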
Use of org.apache.hadoop.hbase.thrift2.generated.TScan in project hbase by apache.
From the class ThriftUtilities, method scanFromThrift.
public static Scan scanFromThrift(TScan in) throws IOException {
  Scan out = new Scan();
  if (in.isSetStartRow()) {
    out.setStartRow(in.getStartRow());
  }
  if (in.isSetStopRow()) {
    out.setStopRow(in.getStopRow());
  }
  if (in.isSetCaching()) {
    out.setCaching(in.getCaching());
  }
  if (in.isSetMaxVersions()) {
    out.setMaxVersions(in.getMaxVersions());
  }
  if (in.isSetColumns()) {
    for (TColumn column : in.getColumns()) {
      if (column.isSetQualifier()) {
        out.addColumn(column.getFamily(), column.getQualifier());
      } else {
        out.addFamily(column.getFamily());
      }
    }
  }
  TTimeRange timeRange = in.getTimeRange();
  if (timeRange != null && timeRange.isSetMinStamp() && timeRange.isSetMaxStamp()) {
    out.setTimeRange(timeRange.getMinStamp(), timeRange.getMaxStamp());
  }
  if (in.isSetBatchSize()) {
    out.setBatch(in.getBatchSize());
  }
  if (in.isSetFilterString()) {
    ParseFilter parseFilter = new ParseFilter();
    out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
  }
  if (in.isSetAttributes()) {
    addAttributes(out, in.getAttributes());
  }
  if (in.isSetAuthorizations()) {
    out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
  }
  if (in.isSetReversed()) {
    out.setReversed(in.isReversed());
  }
  if (in.isSetCacheBlocks()) {
    out.setCacheBlocks(in.isCacheBlocks());
  }
  if (in.isSetColFamTimeRangeMap()) {
    Map<ByteBuffer, TTimeRange> colFamTimeRangeMap = in.getColFamTimeRangeMap();
    if (MapUtils.isNotEmpty(colFamTimeRangeMap)) {
      for (Map.Entry<ByteBuffer, TTimeRange> entry : colFamTimeRangeMap.entrySet()) {
        out.setColumnFamilyTimeRange(Bytes.toBytes(entry.getKey()),
            entry.getValue().getMinStamp(), entry.getValue().getMaxStamp());
      }
    }
  }
  return out;
}
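scanFromThrift only copies fields the client actually set: each Thrift field carries an isSet flag, so an empty TScan converts to a Scan with all defaults. A small sketch exercising the conversion; the concrete field values are arbitrary illustrations:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.thrift2.ThriftUtilities;
import org.apache.hadoop.hbase.thrift2.generated.TScan;
import org.apache.hadoop.hbase.thrift2.generated.TTimeRange;

public class ScanFromThriftSketch {
  public static void main(String[] args) throws Exception {
    TScan tScan = new TScan();
    tScan.setStartRow("row0".getBytes());
    tScan.setStopRow("row9".getBytes());
    tScan.setCaching(100);
    tScan.setMaxVersions(3);
    // TTimeRange is a half-open interval: [minStamp, maxStamp)
    tScan.setTimeRange(new TTimeRange(0L, System.currentTimeMillis()));
    Scan scan = ThriftUtilities.scanFromThrift(tScan);
    // Unset fields (columns, filterString, attributes, ...) keep the Scan defaults.
    System.out.println(scan);
  }
}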