Use of org.apache.hadoop.hbase.thrift2.generated.TTimeRange in project hbase by apache.
In the class TestThriftHBaseServiceHandler, the method testScanWithColumnFamilyTimeRange:
@Test
public void testScanWithColumnFamilyTimeRange() throws Exception {
  ThriftHBaseServiceHandler handler = createHandler();
  ByteBuffer table = wrap(tableAname);
  // insert data
  TColumnValue familyAColumnValue =
    new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
  TColumnValue familyBColumnValue =
    new TColumnValue(wrap(familyBname), wrap(qualifierBname), wrap(valueBname));
  long minTimestamp = System.currentTimeMillis();
  for (int i = 0; i < 10; i++) {
    familyAColumnValue.setTimestamp(minTimestamp + i);
    familyBColumnValue.setTimestamp(minTimestamp + i);
    List<TColumnValue> columnValues = new ArrayList<>(2);
    columnValues.add(familyAColumnValue);
    columnValues.add(familyBColumnValue);
    TPut put = new TPut(wrap(("testScanWithColumnFamilyTimeRange" + i).getBytes()), columnValues);
    handler.put(table, put);
  }
  // create scan instance with column family time range
  TScan scan = new TScan();
  Map<ByteBuffer, TTimeRange> colFamTimeRangeMap = new HashMap<>(2);
  colFamTimeRangeMap.put(wrap(familyAname), new TTimeRange(minTimestamp + 3, minTimestamp + 5));
  colFamTimeRangeMap.put(wrap(familyBname), new TTimeRange(minTimestamp + 6, minTimestamp + 9));
  scan.setColFamTimeRangeMap(colFamTimeRangeMap);
  // get scanner and rows
  int scanId = handler.openScanner(table, scan);
  List<TResult> results = handler.getScannerRows(scanId, 5);
  assertEquals(5, results.size());
  int familyACount = 0;
  int familyBCount = 0;
  for (TResult result : results) {
    List<TColumnValue> columnValues = result.getColumnValues();
    if (CollectionUtils.isNotEmpty(columnValues)) {
      if (Bytes.equals(familyAname, columnValues.get(0).getFamily())) {
        familyACount++;
      } else if (Bytes.equals(familyBname, columnValues.get(0).getFamily())) {
        familyBCount++;
      }
    }
  }
  assertEquals(2, familyACount);
  assertEquals(3, familyBCount);
  // check that we are at the end of the scan
  results = handler.getScannerRows(scanId, 1);
  assertEquals(0, results.size());
  // close scanner and check that it was indeed closed
  handler.closeScanner(scanId);
  try {
    handler.getScannerRows(scanId, 1);
    fail("Scanner id should be invalid");
  } catch (TIllegalArgument e) {
    // expected: the scanner was closed above
  }
}
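The asserted counts follow from HBase's half-open time-range semantics: minStamp is inclusive and maxStamp is exclusive, so [minTimestamp + 3, minTimestamp + 5) admits the two puts at +3 and +4, while [minTimestamp + 6, minTimestamp + 9) admits the three at +6, +7 and +8. A minimal sketch of that check, using a hypothetical contains() helper rather than HBase's own TimeRange class:

// Hypothetical helper mirroring the [minStamp, maxStamp) semantics that the
// server applies to TTimeRange; not part of the HBase API.
static boolean contains(long minStamp, long maxStamp, long ts) {
  return minStamp <= ts && ts < maxStamp; // maxStamp is exclusive
}

// For the ten puts at minTimestamp + 0 .. minTimestamp + 9:
//   contains(minTimestamp + 3, minTimestamp + 5, ts) holds for ts = +3, +4       -> familyACount == 2
//   contains(minTimestamp + 6, minTimestamp + 9, ts) holds for ts = +6, +7, +8   -> familyBCount == 3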
Use of org.apache.hadoop.hbase.thrift2.generated.TTimeRange in project hbase by apache.
In the class ThriftUtilities, the method scanFromThrift:
public static Scan scanFromThrift(TScan in) throws IOException {
  Scan out = new Scan();
  if (in.isSetStartRow()) {
    out.setStartRow(in.getStartRow());
  }
  if (in.isSetStopRow()) {
    out.setStopRow(in.getStopRow());
  }
  if (in.isSetCaching()) {
    out.setCaching(in.getCaching());
  }
  if (in.isSetMaxVersions()) {
    out.setMaxVersions(in.getMaxVersions());
  }
  if (in.isSetColumns()) {
    for (TColumn column : in.getColumns()) {
      if (column.isSetQualifier()) {
        out.addColumn(column.getFamily(), column.getQualifier());
      } else {
        out.addFamily(column.getFamily());
      }
    }
  }
  TTimeRange timeRange = in.getTimeRange();
  if (timeRange != null && timeRange.isSetMinStamp() && timeRange.isSetMaxStamp()) {
    out.setTimeRange(timeRange.getMinStamp(), timeRange.getMaxStamp());
  }
  if (in.isSetBatchSize()) {
    out.setBatch(in.getBatchSize());
  }
  if (in.isSetFilterString()) {
    ParseFilter parseFilter = new ParseFilter();
    out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
  }
  if (in.isSetAttributes()) {
    addAttributes(out, in.getAttributes());
  }
  if (in.isSetAuthorizations()) {
    out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
  }
  if (in.isSetReversed()) {
    out.setReversed(in.isReversed());
  }
  if (in.isSetCacheBlocks()) {
    out.setCacheBlocks(in.isCacheBlocks());
  }
  if (in.isSetColFamTimeRangeMap()) {
    Map<ByteBuffer, TTimeRange> colFamTimeRangeMap = in.getColFamTimeRangeMap();
    if (MapUtils.isNotEmpty(colFamTimeRangeMap)) {
      for (Map.Entry<ByteBuffer, TTimeRange> entry : colFamTimeRangeMap.entrySet()) {
        out.setColumnFamilyTimeRange(Bytes.toBytes(entry.getKey()),
          entry.getValue().getMinStamp(), entry.getValue().getMaxStamp());
      }
    }
  }
  return out;
}
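A minimal usage sketch (not from the source) of driving scanFromThrift directly: build a Thrift TScan carrying a per-family time range and confirm the converted native Scan carries it too. It assumes the same imports as the snippets above plus org.apache.hadoop.hbase.io.TimeRange; the "cf" family name and the 100/200 stamps are arbitrary:

// Build a TScan whose only setting is a per-family time range.
TScan tScan = new TScan();
Map<ByteBuffer, TTimeRange> ranges = new HashMap<>();
ranges.put(ByteBuffer.wrap(Bytes.toBytes("cf")), new TTimeRange(100L, 200L));
tScan.setColFamTimeRangeMap(ranges);

// Convert and read the range back; Scan inherits getColumnFamilyTimeRange()
// from Query, which returns the per-family map set above.
Scan scan = ThriftUtilities.scanFromThrift(tScan);
for (Map.Entry<byte[], TimeRange> e : scan.getColumnFamilyTimeRange().entrySet()) {
  System.out.println(Bytes.toString(e.getKey()) + " -> [" + e.getValue().getMin()
    + ", " + e.getValue().getMax() + ")"); // prints "cf -> [100, 200)"
}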