Use of org.apache.hadoop.hbase.filter.ColumnRangeFilter in the project hbase by apache.
The class TestScannersFromClientSide, method testGetRowOffset.
/**
 * Test from client side for get with rowOffset: verifies that
 * {@link Get#setRowOffsetPerColumnFamily(int)} skips the first N columns of each
 * requested column family, alone and in combination with per-CF max results,
 * a ColumnRangeFilter, and multiple column families.
 *
 * @throws Exception on any cluster/RPC failure
 */
@Test
public void testGetRowOffset() throws Exception {
final TableName tableName = TableName.valueOf(name.getMethodName());
byte[][] FAMILIES = HTestConst.makeNAscii(FAMILY, 3);
byte[][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 20);
Table ht = TEST_UTIL.createTable(tableName, FAMILIES);
Get get;
Put put;
Result result;
boolean toLog = true;
List<Cell> kvListExp;
// Insert one CF for row
kvListExp = new ArrayList<>();
put = new Put(ROW);
for (int i = 0; i < 10; i++) {
KeyValue kv = new KeyValue(ROW, FAMILIES[0], QUALIFIERS[i], 1, VALUE);
put.add(kv);
// skipping first two kvs
if (i < 2)
continue;
kvListExp.add(kv);
}
ht.put(put);
//setting offset to 2
// With offset 2, the server should drop q0/q1 and return q2..q9 — exactly kvListExp.
get = new Get(ROW);
get.setRowOffsetPerColumnFamily(2);
result = ht.get(get);
verifyResult(result, kvListExp, toLog, "Testing basic setRowOffset");
//setting offset to 20
// Offset larger than the number of stored columns (10) must yield an empty result.
get = new Get(ROW);
get.setRowOffsetPerColumnFamily(20);
result = ht.get(get);
kvListExp = new ArrayList<>();
verifyResult(result, kvListExp, toLog, "Testing offset > #kvs");
//offset + maxResultPerCF
// Skip 4 columns, then cap at 5: expect q4..q8.
get = new Get(ROW);
get.setRowOffsetPerColumnFamily(4);
get.setMaxResultsPerColumnFamily(5);
result = ht.get(get);
kvListExp = new ArrayList<>();
for (int i = 4; i < 9; i++) {
kvListExp.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[i], 1, VALUE));
}
verifyResult(result, kvListExp, toLog, "Testing offset + setMaxResultsPerCF");
// Filters: ColumnRangeFilter
// The offset is applied to the filtered column stream: range [q2, q5] minus
// an offset of 1 leaves q3, q4, q5.
get = new Get(ROW);
get.setRowOffsetPerColumnFamily(1);
get.setFilter(new ColumnRangeFilter(QUALIFIERS[2], true, QUALIFIERS[5], true));
result = ht.get(get);
kvListExp = new ArrayList<>();
kvListExp.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[3], 1, VALUE));
kvListExp.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[4], 1, VALUE));
kvListExp.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[5], 1, VALUE));
verifyResult(result, kvListExp, toLog, "Testing offset with CRF");
// 10 columns for CF2, 10 columns for CF1
for (int j = 2; j > 0; j--) {
put = new Put(ROW);
for (int i = 0; i < 10; i++) {
KeyValue kv = new KeyValue(ROW, FAMILIES[j], QUALIFIERS[i], 1, VALUE);
put.add(kv);
}
ht.put(put);
}
// Offset and max-results are applied independently per column family:
// each of CF1/CF2 skips q0..q3 and then returns at most 2 columns.
get = new Get(ROW);
get.setRowOffsetPerColumnFamily(4);
get.setMaxResultsPerColumnFamily(2);
get.addFamily(FAMILIES[1]);
get.addFamily(FAMILIES[2]);
result = ht.get(get);
kvListExp = new ArrayList<>();
//Exp: CF1:q4, q5, CF2: q4, q5
kvListExp.add(new KeyValue(ROW, FAMILIES[1], QUALIFIERS[4], 1, VALUE));
kvListExp.add(new KeyValue(ROW, FAMILIES[1], QUALIFIERS[5], 1, VALUE));
kvListExp.add(new KeyValue(ROW, FAMILIES[2], QUALIFIERS[4], 1, VALUE));
kvListExp.add(new KeyValue(ROW, FAMILIES[2], QUALIFIERS[5], 1, VALUE));
verifyResult(result, kvListExp, toLog, "Testing offset + multiple CFs + maxResults");
}
Use of org.apache.hadoop.hbase.filter.ColumnRangeFilter in the project janusgraph by JanusGraph.
The class HBaseKeyColumnValueStore, method getFilter.
/**
 * Builds the HBase filter equivalent of a Titan/JanusGraph slice query: a
 * column range [sliceStart, sliceEnd) — empty buffers mean unbounded — and,
 * when the query carries a limit, a pagination cap on the number of columns.
 */
public static Filter getFilter(SliceQuery query) {
    byte[] rangeStart = null;
    if (query.getSliceStart().length() > 0) {
        rangeStart = query.getSliceStart().as(StaticBuffer.ARRAY_FACTORY);
    }
    byte[] rangeEnd = null;
    if (query.getSliceEnd().length() > 0) {
        rangeEnd = query.getSliceEnd().as(StaticBuffer.ARRAY_FACTORY);
    }
    // Start bound inclusive, end bound exclusive.
    Filter filter = new ColumnRangeFilter(rangeStart, true, rangeEnd, false);
    if (query.hasLimit()) {
        // Both conditions must hold: in range AND within the first `limit` columns.
        ColumnPaginationFilter limitFilter = new ColumnPaginationFilter(query.getLimit(), 0);
        filter = new FilterList(FilterList.Operator.MUST_PASS_ALL, filter, limitFilter);
    }
    logger.debug("Generated HBase Filter {}", filter);
    return filter;
}
Use of org.apache.hadoop.hbase.filter.ColumnRangeFilter in the project hbase by apache.
The class TestScannersFromClientSide, method testGetMaxResults.
/**
 * Test from client side for get with maxResultPerCF set: verifies that
 * {@link Get#setMaxResultsPerColumnFamily(int)} caps the number of columns
 * returned from each column family, alone and combined with a
 * ColumnRangeFilter, a ColumnPrefixFilter, and multiple column families.
 *
 * @throws Exception on any cluster/RPC failure
 */
@Test
public void testGetMaxResults() throws Exception {
final TableName tableName = TableName.valueOf(name.getMethodName());
byte[][] FAMILIES = HTestConst.makeNAscii(FAMILY, 3);
byte[][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 20);
Table ht = TEST_UTIL.createTable(tableName, FAMILIES);
Get get;
Put put;
Result result;
boolean toLog = true;
List<Cell> kvListExp;
kvListExp = new ArrayList<>();
// Insert one CF for row[0]
put = new Put(ROW);
for (int i = 0; i < 10; i++) {
KeyValue kv = new KeyValue(ROW, FAMILIES[0], QUALIFIERS[i], 1, VALUE);
put.add(kv);
kvListExp.add(kv);
}
ht.put(put);
// Baseline: no maxResults set, all 10 columns come back.
get = new Get(ROW);
result = ht.get(get);
verifyResult(result, kvListExp, toLog, "Testing without setting maxResults");
// Cap at 2: only the first two columns (q0, q1) are returned.
get = new Get(ROW);
get.setMaxResultsPerColumnFamily(2);
result = ht.get(get);
kvListExp = new ArrayList<>();
kvListExp.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[0], 1, VALUE));
kvListExp.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[1], 1, VALUE));
verifyResult(result, kvListExp, toLog, "Testing basic setMaxResults");
// Filters: ColumnRangeFilter
// Range [q2, q5] yields 4 columns, below the cap of 5 — all four expected.
get = new Get(ROW);
get.setMaxResultsPerColumnFamily(5);
get.setFilter(new ColumnRangeFilter(QUALIFIERS[2], true, QUALIFIERS[5], true));
result = ht.get(get);
kvListExp = new ArrayList<>();
kvListExp.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[2], 1, VALUE));
kvListExp.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[3], 1, VALUE));
kvListExp.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[4], 1, VALUE));
kvListExp.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[5], 1, VALUE));
verifyResult(result, kvListExp, toLog, "Testing single CF with CRF");
// Insert two more CF for row[0]
// 20 columns for CF2, 10 columns for CF1
put = new Put(ROW);
for (int i = 0; i < QUALIFIERS.length; i++) {
KeyValue kv = new KeyValue(ROW, FAMILIES[2], QUALIFIERS[i], 1, VALUE);
put.add(kv);
}
ht.put(put);
put = new Put(ROW);
for (int i = 0; i < 10; i++) {
KeyValue kv = new KeyValue(ROW, FAMILIES[1], QUALIFIERS[i], 1, VALUE);
put.add(kv);
}
ht.put(put);
// Cap of 12 applies per CF: CF1 has 10 (all returned); CF2 has 20, so only
// the first 12 in lexicographic qualifier order (q0, q1, q10..q19).
get = new Get(ROW);
get.setMaxResultsPerColumnFamily(12);
get.addFamily(FAMILIES[1]);
get.addFamily(FAMILIES[2]);
result = ht.get(get);
kvListExp = new ArrayList<>();
//Exp: CF1:q0, ..., q9, CF2: q0, q1, q10, q11, ..., q19
for (int i = 0; i < 10; i++) {
kvListExp.add(new KeyValue(ROW, FAMILIES[1], QUALIFIERS[i], 1, VALUE));
}
for (int i = 0; i < 2; i++) {
kvListExp.add(new KeyValue(ROW, FAMILIES[2], QUALIFIERS[i], 1, VALUE));
}
for (int i = 10; i < 20; i++) {
kvListExp.add(new KeyValue(ROW, FAMILIES[2], QUALIFIERS[i], 1, VALUE));
}
verifyResult(result, kvListExp, toLog, "Testing multiple CFs");
// Filters: ColumnRangeFilter and ColumnPrefixFilter
// Open-ended range starting at q2, capped at 3 per CF: q2..q4 from each family.
get = new Get(ROW);
get.setMaxResultsPerColumnFamily(3);
get.setFilter(new ColumnRangeFilter(QUALIFIERS[2], true, null, true));
result = ht.get(get);
kvListExp = new ArrayList<>();
for (int i = 2; i < 5; i++) {
kvListExp.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[i], 1, VALUE));
}
for (int i = 2; i < 5; i++) {
kvListExp.add(new KeyValue(ROW, FAMILIES[1], QUALIFIERS[i], 1, VALUE));
}
for (int i = 2; i < 5; i++) {
kvListExp.add(new KeyValue(ROW, FAMILIES[2], QUALIFIERS[i], 1, VALUE));
}
verifyResult(result, kvListExp, toLog, "Testing multiple CFs + CRF");
// Prefix "q1" matches q1 and q10..q19; with a cap of 7 per CF, CF0/CF1
// contribute only q1 while CF2 contributes q1 plus q10..q15.
get = new Get(ROW);
get.setMaxResultsPerColumnFamily(7);
get.setFilter(new ColumnPrefixFilter(QUALIFIERS[1]));
result = ht.get(get);
kvListExp = new ArrayList<>();
kvListExp.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[1], 1, VALUE));
kvListExp.add(new KeyValue(ROW, FAMILIES[1], QUALIFIERS[1], 1, VALUE));
kvListExp.add(new KeyValue(ROW, FAMILIES[2], QUALIFIERS[1], 1, VALUE));
for (int i = 10; i < 16; i++) {
kvListExp.add(new KeyValue(ROW, FAMILIES[2], QUALIFIERS[i], 1, VALUE));
}
verifyResult(result, kvListExp, toLog, "Testing multiple CFs + PFF");
}
Use of org.apache.hadoop.hbase.filter.ColumnRangeFilter in the project hbase by apache.
The class TestPartialResultsFromClientSide, method testPartialResultsWithColumnFilter.
/**
 * Test partial Result re-assembly in the presence of different filters. The Results from the
 * partial scanner should match the Results returned from a scanner that receives all of the
 * results in one RPC to the server. The partial scanner is tested with a variety of different
 * result sizes (all of which are less than the size necessary to fetch an entire row)
 * @throws Exception on any cluster/RPC failure
 */
@Test
public void testPartialResultsWithColumnFilter() throws Exception {
    testPartialResultsWithColumnFilter(new FirstKeyOnlyFilter());
    testPartialResultsWithColumnFilter(new ColumnPrefixFilter(Bytes.toBytes("testQualifier5")));
    // Fix: lower bound was misspelled "testQualifer1" (missing 'i'), which sorts
    // before every "testQualifierN" qualifier and silently made the range
    // start-unbounded instead of starting at qualifier 1.
    testPartialResultsWithColumnFilter(
        new ColumnRangeFilter(Bytes.toBytes("testQualifier1"), true, Bytes.toBytes("testQualifier7"), true));
    Set<byte[]> qualifiers = new LinkedHashSet<>();
    qualifiers.add(Bytes.toBytes("testQualifier5"));
    testPartialResultsWithColumnFilter(new FirstKeyValueMatchingQualifiersFilter(qualifiers));
}
Use of org.apache.hadoop.hbase.filter.ColumnRangeFilter in the project titan by thinkaurelius.
The class HBaseKeyColumnValueStore, method getFilter.
/**
 * Builds the HBase filter equivalent of a Titan slice query: a column range
 * [sliceStart, sliceEnd) — empty buffers mean unbounded — and, when the query
 * carries a limit, a pagination cap on the number of columns returned.
 */
public static Filter getFilter(SliceQuery query) {
    // Fix: colStartBytes previously tested getSliceEnd().length() to decide
    // whether a start bound exists, mismatching the start buffer it converts.
    // Each bound must be guarded by its own buffer's emptiness.
    byte[] colStartBytes = query.getSliceStart().length() > 0 ? query.getSliceStart().as(StaticBuffer.ARRAY_FACTORY) : null;
    byte[] colEndBytes = query.getSliceEnd().length() > 0 ? query.getSliceEnd().as(StaticBuffer.ARRAY_FACTORY) : null;
    // Start bound inclusive, end bound exclusive.
    Filter filter = new ColumnRangeFilter(colStartBytes, true, colEndBytes, false);
    if (query.hasLimit()) {
        // Both conditions must hold: in range AND within the first `limit` columns.
        filter = new FilterList(FilterList.Operator.MUST_PASS_ALL, filter, new ColumnPaginationFilter(query.getLimit(), 0));
    }
    logger.debug("Generated HBase Filter {}", filter);
    return filter;
}
Aggregations