Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
The class TestMultipleColumnPrefixFilter, method testMultipleColumnPrefixFilter.
@Test
public void testMultipleColumnPrefixFilter() throws IOException {
  String family = "Family";
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  HColumnDescriptor hcd = new HColumnDescriptor(family);
  hcd.setMaxVersions(3);
  htd.addFamily(hcd);
  // HRegionInfo info = new HRegionInfo(htd, null, null, false);
  HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
  HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(),
      TEST_UTIL.getConfiguration(), htd);
  List<String> rows = generateRandomWords(100, "row");
  List<String> columns = generateRandomWords(10000, "column");
  long maxTimestamp = 2;
  List<Cell> kvList = new ArrayList<>();
  Map<String, List<Cell>> prefixMap = new HashMap<>();
  prefixMap.put("p", new ArrayList<>());
  prefixMap.put("q", new ArrayList<>());
  prefixMap.put("s", new ArrayList<>());
  String valueString = "ValueString";
  for (String row : rows) {
    Put p = new Put(Bytes.toBytes(row));
    p.setDurability(Durability.SKIP_WAL);
    for (String column : columns) {
      for (long timestamp = 1; timestamp <= maxTimestamp; timestamp++) {
        KeyValue kv = KeyValueTestUtil.create(row, family, column, timestamp, valueString);
        p.add(kv);
        kvList.add(kv);
        // Track which cells fall under each prefix so the expected count can be computed.
        for (String s : prefixMap.keySet()) {
          if (column.startsWith(s)) {
            prefixMap.get(s).add(kv);
          }
        }
      }
    }
    region.put(p);
  }
  MultipleColumnPrefixFilter filter;
  Scan scan = new Scan();
  scan.setMaxVersions();
  byte[][] filter_prefix = new byte[2][];
  filter_prefix[0] = new byte[] { 'p' };
  filter_prefix[1] = new byte[] { 'q' };
  filter = new MultipleColumnPrefixFilter(filter_prefix);
  scan.setFilter(filter);
  List<Cell> results = new ArrayList<>();
  InternalScanner scanner = region.getScanner(scan);
  // Drain the scanner; next() appends each row's matching cells to results.
  while (scanner.next(results));
  assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size());
  HBaseTestingUtility.closeRegionAndWAL(region);
}
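
For comparison, the same filtering works through the public client API, which returns whole rows as Result objects instead of draining cells through an InternalScanner. A minimal sketch, assuming a reachable cluster and an existing table named "testTable" (both the table name and its contents are hypothetical):

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class PrefixScanSketch {
  public static void main(String[] args) throws IOException {
    try (Connection conn = ConnectionFactory.createConnection();
         Table table = conn.getTable(TableName.valueOf("testTable"))) {
      Scan scan = new Scan();
      // Same prefixes as the test above: keep only columns starting with 'p' or 'q'.
      scan.setFilter(new MultipleColumnPrefixFilter(
          new byte[][] { Bytes.toBytes("p"), Bytes.toBytes("q") }));
      try (ResultScanner rs = table.getScanner(scan)) {
        for (Result result : rs) {
          System.out.println(result);
        }
      }
    }
  }
}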
Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
The class TestMultipleColumnPrefixFilter, method testMultipleColumnPrefixFilterWithColumnPrefixFilter.
@Test
public void testMultipleColumnPrefixFilterWithColumnPrefixFilter() throws IOException {
  String family = "Family";
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  htd.addFamily(new HColumnDescriptor(family));
  HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
  HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(),
      TEST_UTIL.getConfiguration(), htd);
  List<String> rows = generateRandomWords(100, "row");
  List<String> columns = generateRandomWords(10000, "column");
  long maxTimestamp = 2;
  String valueString = "ValueString";
  for (String row : rows) {
    Put p = new Put(Bytes.toBytes(row));
    p.setDurability(Durability.SKIP_WAL);
    for (String column : columns) {
      for (long timestamp = 1; timestamp <= maxTimestamp; timestamp++) {
        KeyValue kv = KeyValueTestUtil.create(row, family, column, timestamp, valueString);
        p.add(kv);
      }
    }
    region.put(p);
  }
  MultipleColumnPrefixFilter multiplePrefixFilter;
  Scan scan1 = new Scan();
  scan1.setMaxVersions();
  byte[][] filter_prefix = new byte[1][];
  filter_prefix[0] = new byte[] { 'p' };
  multiplePrefixFilter = new MultipleColumnPrefixFilter(filter_prefix);
  scan1.setFilter(multiplePrefixFilter);
  List<Cell> results1 = new ArrayList<>();
  InternalScanner scanner1 = region.getScanner(scan1);
  while (scanner1.next(results1));
  ColumnPrefixFilter singlePrefixFilter;
  Scan scan2 = new Scan();
  scan2.setMaxVersions();
  singlePrefixFilter = new ColumnPrefixFilter(Bytes.toBytes("p"));
  scan2.setFilter(singlePrefixFilter);
  List<Cell> results2 = new ArrayList<>();
  InternalScanner scanner2 = region.getScanner(scan2);
  while (scanner2.next(results2));
  assertEquals(results1.size(), results2.size());
  HBaseTestingUtility.closeRegionAndWAL(region);
}
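
The single-prefix case above is the degenerate one; with several prefixes, the closest construction from ColumnPrefixFilter alone is a FilterList with MUST_PASS_ONE (OR) semantics. A hedged sketch of the two equivalent scans (filters only; table setup as in the tests):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class PrefixFilterEquivalenceSketch {
  // ORing ColumnPrefixFilters selects the same cells as MultipleColumnPrefixFilter;
  // the dedicated filter can additionally seek ahead between prefixes.
  static Scan viaFilterList() {
    Scan scan = new Scan();
    scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ONE,
        new ColumnPrefixFilter(Bytes.toBytes("p")),
        new ColumnPrefixFilter(Bytes.toBytes("q"))));
    return scan;
  }

  static Scan viaMultiplePrefixes() {
    Scan scan = new Scan();
    scan.setFilter(new MultipleColumnPrefixFilter(
        new byte[][] { Bytes.toBytes("p"), Bytes.toBytes("q") }));
    return scan;
  }
}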
Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
The class TestMultipleColumnPrefixFilter, method testMultipleColumnPrefixFilterWithManyFamilies.
@Test
public void testMultipleColumnPrefixFilterWithManyFamilies() throws IOException {
  String family1 = "Family1";
  String family2 = "Family2";
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  HColumnDescriptor hcd1 = new HColumnDescriptor(family1);
  hcd1.setMaxVersions(3);
  htd.addFamily(hcd1);
  HColumnDescriptor hcd2 = new HColumnDescriptor(family2);
  hcd2.setMaxVersions(3);
  htd.addFamily(hcd2);
  HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
  HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(),
      TEST_UTIL.getConfiguration(), htd);
  List<String> rows = generateRandomWords(100, "row");
  List<String> columns = generateRandomWords(10000, "column");
  long maxTimestamp = 3;
  List<Cell> kvList = new ArrayList<>();
  Map<String, List<Cell>> prefixMap = new HashMap<>();
  prefixMap.put("p", new ArrayList<>());
  prefixMap.put("q", new ArrayList<>());
  prefixMap.put("s", new ArrayList<>());
  String valueString = "ValueString";
  for (String row : rows) {
    Put p = new Put(Bytes.toBytes(row));
    p.setDurability(Durability.SKIP_WAL);
    for (String column : columns) {
      for (long timestamp = 1; timestamp <= maxTimestamp; timestamp++) {
        double rand = Math.random();
        Cell kv;
        if (rand < 0.5) {
          kv = KeyValueTestUtil.create(row, family1, column, timestamp, valueString);
        } else {
          kv = KeyValueTestUtil.create(row, family2, column, timestamp, valueString);
        }
        p.add(kv);
        kvList.add(kv);
        for (String s : prefixMap.keySet()) {
          if (column.startsWith(s)) {
            prefixMap.get(s).add(kv);
          }
        }
      }
    }
    region.put(p);
  }
  MultipleColumnPrefixFilter filter;
  Scan scan = new Scan();
  scan.setMaxVersions();
  byte[][] filter_prefix = new byte[2][];
  filter_prefix[0] = new byte[] { 'p' };
  filter_prefix[1] = new byte[] { 'q' };
  filter = new MultipleColumnPrefixFilter(filter_prefix);
  scan.setFilter(filter);
  List<Cell> results = new ArrayList<>();
  InternalScanner scanner = region.getScanner(scan);
  while (scanner.next(results));
  assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size());
  HBaseTestingUtility.closeRegionAndWAL(region);
}
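
As this test shows, the prefix filters match qualifiers regardless of which family a cell belongs to; restricting output to one family is done on the Scan itself. A minimal sketch of that addition (family and prefix names as in the test):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class SingleFamilyPrefixScanSketch {
  static Scan build() {
    Scan scan = new Scan();
    // Only Family1 cells are scanned; the filter then keeps the 'p'/'q' prefixes.
    scan.addFamily(Bytes.toBytes("Family1"));
    scan.setFilter(new MultipleColumnPrefixFilter(
        new byte[][] { Bytes.toBytes("p"), Bytes.toBytes("q") }));
    return scan;
  }
}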
Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
The class TestInvocationRecordFilter, method verifyInvocationResults.
public void verifyInvocationResults(Integer[] selectQualifiers, Integer[] expectedQualifiers)
    throws Exception {
  Get get = new Get(ROW_BYTES);
  for (int i = 0; i < selectQualifiers.length; i++) {
    get.addColumn(FAMILY_NAME_BYTES, Bytes.toBytes(QUALIFIER_PREFIX + selectQualifiers[i]));
  }
  get.setFilter(new InvocationRecordFilter());
  List<KeyValue> expectedValues = new ArrayList<>();
  for (int i = 0; i < expectedQualifiers.length; i++) {
    expectedValues.add(new KeyValue(ROW_BYTES, FAMILY_NAME_BYTES,
        Bytes.toBytes(QUALIFIER_PREFIX + expectedQualifiers[i]), expectedQualifiers[i],
        Bytes.toBytes(VALUE_PREFIX + expectedQualifiers[i])));
  }
  Scan scan = new Scan(get);
  List<Cell> actualValues = new ArrayList<>();
  List<Cell> temp = new ArrayList<>();
  InternalScanner scanner = this.region.getScanner(scan);
  while (scanner.next(temp)) {
    actualValues.addAll(temp);
    temp.clear();
  }
  actualValues.addAll(temp);
  Assert.assertTrue("Actual values " + actualValues + " differ from the expected values:"
      + expectedValues, expectedValues.equals(actualValues));
}
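
The copy-then-clear loop above is the general batch-wise drain idiom for InternalScanner: next(List) fills the list with one row's cells and returns true while more rows remain. A small sketch of the same idiom as a reusable helper (not part of HBase):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.regionserver.InternalScanner;

final class ScannerDrainSketch {
  // Hypothetical helper: collects every cell the scanner returns, row batch by
  // row batch, and closes the scanner even if next() throws.
  static List<Cell> drain(InternalScanner scanner) throws IOException {
    List<Cell> all = new ArrayList<>();
    List<Cell> batch = new ArrayList<>();
    try {
      boolean more;
      do {
        batch.clear();
        more = scanner.next(batch);
        all.addAll(batch);
      } while (more);
    } finally {
      scanner.close();
    }
    return all;
  }
}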
Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
The class ColumnAggregationEndpointNullResponse, method sum.
@Override
public void sum(RpcController controller, ColumnAggregationNullResponseSumRequest request,
    RpcCallback<ColumnAggregationNullResponseSumResponse> done) {
  // aggregate at each region
  Scan scan = new Scan();
  // Family is required in pb. Qualifier is not.
  byte[] family = request.getFamily().toByteArray();
  byte[] qualifier = request.hasQualifier() ? request.getQualifier().toByteArray() : null;
  if (request.hasQualifier()) {
    scan.addColumn(family, qualifier);
  } else {
    scan.addFamily(family);
  }
  int sumResult = 0;
  InternalScanner scanner = null;
  try {
    Region region = this.env.getRegion();
    // for the last region in the table, return null to test null handling
    if (Bytes.equals(region.getRegionInfo().getEndKey(), HConstants.EMPTY_END_ROW)) {
      done.run(null);
      return;
    }
    scanner = region.getScanner(scan);
    List<Cell> curVals = new ArrayList<>();
    boolean hasMore = false;
    do {
      curVals.clear();
      hasMore = scanner.next(curVals);
      for (Cell kv : curVals) {
        if (CellUtil.matchingQualifier(kv, qualifier)) {
          sumResult += Bytes.toInt(kv.getValueArray(), kv.getValueOffset());
        }
      }
    } while (hasMore);
  } catch (IOException e) {
    CoprocessorRpcUtils.setControllerException(controller, e);
    // Set result to -1 to indicate error.
    sumResult = -1;
    LOG.info("Setting sum result to -1 to indicate error", e);
  } finally {
    if (scanner != null) {
      try {
        scanner.close();
      } catch (IOException e) {
        CoprocessorRpcUtils.setControllerException(controller, e);
        sumResult = -1;
        LOG.info("Setting sum result to -1 to indicate error", e);
      }
    }
  }
  done.run(ColumnAggregationNullResponseSumResponse.newBuilder().setSum(sumResult).build());
  LOG.info("Returning sum " + sumResult + " for region "
      + Bytes.toStringBinary(env.getRegion().getRegionInfo().getRegionName()));
}
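
A client reaches an endpoint like this through Table.coprocessorService, which fans the call out to every region in the given key range and collects one response per region. A hedged sketch; the generated stub and message names below mirror the test's protobuf definitions and are assumptions, as are the already-open Table and the "Family"/"qualifier" names:

import java.util.Map;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.protobuf.ByteString;

public class NullResponseSumClientSketch {
  static long sumAcrossRegions(Table table) throws Throwable {
    // The endpoint only sums cells whose qualifier matches, so set one here.
    ColumnAggregationNullResponseSumRequest request =
        ColumnAggregationNullResponseSumRequest.newBuilder()
            .setFamily(ByteString.copyFrom(Bytes.toBytes("Family")))
            .setQualifier(ByteString.copyFrom(Bytes.toBytes("qualifier")))
            .build();
    // null start/end keys cover the whole table.
    Map<byte[], ColumnAggregationNullResponseSumResponse> perRegion =
        table.coprocessorService(ColumnAggregationServiceNullResponse.class, null, null,
            instance -> {
              CoprocessorRpcUtils.BlockingRpcCallback<ColumnAggregationNullResponseSumResponse> cb =
                  new CoprocessorRpcUtils.BlockingRpcCallback<>();
              instance.sum(null, request, cb);
              return cb.get();
            });
    long total = 0;
    for (ColumnAggregationNullResponseSumResponse response : perRegion.values()) {
      if (response != null) { // the last region deliberately answers null
        total += response.getSum();
      }
    }
    return total;
  }
}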