Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
From the class TestMultipleColumnPrefixFilter, method testMultipleColumnPrefixFilterWithManyFamilies.
@Test
public void testMultipleColumnPrefixFilterWithManyFamilies() throws IOException {
  String family1 = "Family1";
  String family2 = "Family2";
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  HColumnDescriptor hcd1 = new HColumnDescriptor(family1);
  hcd1.setMaxVersions(3);
  htd.addFamily(hcd1);
  HColumnDescriptor hcd2 = new HColumnDescriptor(family2);
  hcd2.setMaxVersions(3);
  htd.addFamily(hcd2);
  HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
  HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(),
      TEST_UTIL.getConfiguration(), htd);
  List<String> rows = generateRandomWords(100, "row");
  List<String> columns = generateRandomWords(10000, "column");
  long maxTimestamp = 3;
  List<Cell> kvList = new ArrayList<>();
  // Track which of the generated cells fall under each column prefix of interest.
  Map<String, List<Cell>> prefixMap = new HashMap<>();
  prefixMap.put("p", new ArrayList<>());
  prefixMap.put("q", new ArrayList<>());
  prefixMap.put("s", new ArrayList<>());
  String valueString = "ValueString";
  for (String row : rows) {
    Put p = new Put(Bytes.toBytes(row));
    p.setDurability(Durability.SKIP_WAL);
    for (String column : columns) {
      for (long timestamp = 1; timestamp <= maxTimestamp; timestamp++) {
        // Assign each cell to one of the two families at random.
        double rand = Math.random();
        Cell kv;
        if (rand < 0.5) {
          kv = KeyValueTestUtil.create(row, family1, column, timestamp, valueString);
        } else {
          kv = KeyValueTestUtil.create(row, family2, column, timestamp, valueString);
        }
        p.add(kv);
        kvList.add(kv);
        for (String s : prefixMap.keySet()) {
          if (column.startsWith(s)) {
            prefixMap.get(s).add(kv);
          }
        }
      }
    }
    region.put(p);
  }
  MultipleColumnPrefixFilter filter;
  Scan scan = new Scan();
  scan.setMaxVersions();
  byte[][] filter_prefix = new byte[2][];
  filter_prefix[0] = new byte[] { 'p' };
  filter_prefix[1] = new byte[] { 'q' };
  filter = new MultipleColumnPrefixFilter(filter_prefix);
  scan.setFilter(filter);
  List<Cell> results = new ArrayList<>();
  InternalScanner scanner = region.getScanner(scan);
  // Drain the scanner; every matching cell accumulates in results because
  // the list is never cleared between calls.
  while (scanner.next(results)) {
    // nothing to do per batch
  }
  assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size());
  HBaseTestingUtility.closeRegionAndWAL(region);
}
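The drain loop above relies on InternalScanner.next(List<Cell>) returning true while more rows remain. When the caller needs to handle rows batch by batch, the same idiom is usually written with an explicit clear and cleanup; a minimal sketch, assuming a region and scan like the ones above and a hypothetical per-cell handler:

List<Cell> batch = new ArrayList<>();
InternalScanner scanner = region.getScanner(scan);
try {
  boolean moreRows;
  do {
    // next() appends the current row's cells to the list and
    // returns true while more rows remain.
    moreRows = scanner.next(batch);
    for (Cell cell : batch) {
      handleCell(cell); // hypothetical handler, not part of the test
    }
    batch.clear(); // avoid re-processing cells from earlier rows
  } while (moreRows);
} finally {
  scanner.close();
}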
Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
From the class TestInvocationRecordFilter, method verifyInvocationResults.
public void verifyInvocationResults(Integer[] selectQualifiers, Integer[] expectedQualifiers)
    throws Exception {
  Get get = new Get(ROW_BYTES);
  for (int i = 0; i < selectQualifiers.length; i++) {
    get.addColumn(FAMILY_NAME_BYTES, Bytes.toBytes(QUALIFIER_PREFIX + selectQualifiers[i]));
  }
  get.setFilter(new InvocationRecordFilter());
  List<KeyValue> expectedValues = new ArrayList<>();
  for (int i = 0; i < expectedQualifiers.length; i++) {
    expectedValues.add(new KeyValue(ROW_BYTES, FAMILY_NAME_BYTES,
        Bytes.toBytes(QUALIFIER_PREFIX + expectedQualifiers[i]), expectedQualifiers[i],
        Bytes.toBytes(VALUE_PREFIX + expectedQualifiers[i])));
  }
  Scan scan = new Scan(get);
  List<Cell> actualValues = new ArrayList<>();
  List<Cell> temp = new ArrayList<>();
  InternalScanner scanner = this.region.getScanner(scan);
  while (scanner.next(temp)) {
    actualValues.addAll(temp);
    temp.clear();
  }
  // next() returns false on the final batch but may still have filled temp,
  // so collect whatever remains after the loop exits.
  actualValues.addAll(temp);
  Assert.assertTrue("Actual values " + actualValues + " differ from the expected values: "
      + expectedValues, expectedValues.equals(actualValues));
}
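InvocationRecordFilter is a helper defined in this test class that records which cells the scan passes to it. As a rough sketch of a recording filter of that kind against the 1.x-era Filter API used throughout these examples (the class and field names are illustrative, not the actual test code):

public class RecordingFilter extends FilterBase {
  // Cells the scan framework has offered to this filter so far.
  private final List<Cell> visited = new ArrayList<>();

  @Override
  public ReturnCode filterKeyValue(Cell cell) {
    visited.add(cell); // record the invocation
    return ReturnCode.INCLUDE; // observe only; keep every cell
  }
}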
Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
From the class TestDateTieredCompactor, method createCompactor.
private DateTieredCompactor createCompactor(StoreFileWritersCapture writers,
    final KeyValue[] input, List<StoreFile> storefiles) throws Exception {
  Configuration conf = HBaseConfiguration.create();
  conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
  final Scanner scanner = new Scanner(input);
  // Create a store mock that is satisfactory for the compactor.
  HColumnDescriptor col = new HColumnDescriptor(NAME_OF_THINGS);
  ScanInfo si = new ScanInfo(conf, col, Long.MAX_VALUE, 0, CellComparator.COMPARATOR);
  final Store store = mock(Store.class);
  when(store.getStorefiles()).thenReturn(storefiles);
  when(store.getFamily()).thenReturn(col);
  when(store.getScanInfo()).thenReturn(si);
  when(store.areWritesEnabled()).thenReturn(true);
  when(store.getFileSystem()).thenReturn(mock(FileSystem.class));
  when(store.getRegionInfo()).thenReturn(new HRegionInfo(TABLE_NAME));
  when(store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(),
      anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers);
  when(store.getComparator()).thenReturn(CellComparator.COMPARATOR);
  long maxSequenceId = StoreFile.getMaxSequenceIdInList(storefiles);
  when(store.getMaxSequenceId()).thenReturn(maxSequenceId);
  return new DateTieredCompactor(conf, store) {
    @Override
    protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
        long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow,
        byte[] dropDeletesToRow) throws IOException {
      return scanner;
    }

    @Override
    protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
        ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
      return scanner;
    }
  };
}
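Scanner here is a test helper that replays the fixed KeyValue input through the InternalScanner interface, so the compactor never touches real store files. A minimal stand-in for such a stub, assuming the 1.x-era interface with its two next() overloads (the class name and the one-cell-per-call batching are simplifications):

final class FixedCellScanner implements InternalScanner {
  private final KeyValue[] cells;
  private int pos = 0;

  FixedCellScanner(KeyValue... cells) {
    this.cells = cells;
  }

  @Override
  public boolean next(List<Cell> results) throws IOException {
    if (pos < cells.length) {
      results.add(cells[pos++]); // hand out one cell per call, for simplicity
    }
    return pos < cells.length; // true while more cells remain
  }

  @Override
  public boolean next(List<Cell> results, ScannerContext scannerContext) throws IOException {
    return next(results); // this stub ignores the scanner context
  }

  @Override
  public void close() throws IOException {
    // nothing to release; the cells live in memory
  }
}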
Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
From the class TestStripeCompactionPolicy, method createCompactor.
private StripeCompactor createCompactor() throws Exception {
  HColumnDescriptor col = new HColumnDescriptor(Bytes.toBytes("foo"));
  StoreFileWritersCapture writers = new StoreFileWritersCapture();
  Store store = mock(Store.class);
  HRegionInfo info = mock(HRegionInfo.class);
  when(info.getRegionNameAsString()).thenReturn("testRegion");
  when(store.getFamily()).thenReturn(col);
  when(store.getRegionInfo()).thenReturn(info);
  when(store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(),
      anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers);
  Configuration conf = HBaseConfiguration.create();
  conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
  final Scanner scanner = new Scanner();
  return new StripeCompactor(conf, store) {
    @Override
    protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
        long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow,
        byte[] dropDeletesToRow) throws IOException {
      return scanner;
    }

    @Override
    protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
        ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
      return scanner;
    }
  };
}
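In both compactor examples the anonymous subclass overrides both createScanner variants, the drop-deletes form and the ScanType form, so whichever path the compaction takes, the compactor reads from the canned scanner rather than from real StoreFileScanners; the StoreFileWritersCapture answer then records what the compactor writes, letting assertions focus on the output.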