Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
The class AggregateImplementation, method getMedian.
/**
 * Gives a List containing sum of values and sum of weights.
 * It is computed for the combination of column
 * family and column qualifier(s) in the given row range as defined in the
 * Scan object. In its current implementation, it takes one column family and
 * two column qualifiers. The first qualifier is for values column and
 * the second qualifier (optional) is for weight column.
 */
@Override
public void getMedian(RpcController controller, AggregateRequest request, RpcCallback<AggregateResponse> done) {
  AggregateResponse response = null;
  InternalScanner scanner = null;
  try {
    ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
    S sumVal = null, sumWeights = null, tempVal = null, tempWeight = null;
    Scan scan = ProtobufUtil.toScan(request.getScan());
    scanner = env.getRegion().getScanner(scan);
    byte[] colFamily = scan.getFamilies()[0];
    NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
    byte[] valQualifier = null, weightQualifier = null;
    if (qualifiers != null && !qualifiers.isEmpty()) {
      valQualifier = qualifiers.pollFirst();
      // if weighted median is requested, get qualifier for the weight column
      weightQualifier = qualifiers.pollLast();
    }
    List<Cell> results = new ArrayList<>();
    boolean hasMoreRows = false;
    do {
      tempVal = null;
      tempWeight = null;
      hasMoreRows = scanner.next(results);
      int listSize = results.size();
      for (int i = 0; i < listSize; i++) {
        Cell kv = results.get(i);
        tempVal = ci.add(tempVal, ci.castToReturnType(ci.getValue(colFamily, valQualifier, kv)));
        if (weightQualifier != null) {
          tempWeight = ci.add(tempWeight, ci.castToReturnType(ci.getValue(colFamily, weightQualifier, kv)));
        }
      }
      results.clear();
      sumVal = ci.add(sumVal, tempVal);
      sumWeights = ci.add(sumWeights, tempWeight);
    } while (hasMoreRows);
    ByteString first_sumVal = ci.getProtoForPromotedType(sumVal).toByteString();
    S s = sumWeights == null ? ci.castToReturnType(ci.getMinValue()) : sumWeights;
    ByteString first_sumWeights = ci.getProtoForPromotedType(s).toByteString();
    AggregateResponse.Builder pair = AggregateResponse.newBuilder();
    pair.addFirstPart(first_sumVal);
    pair.addFirstPart(first_sumWeights);
    response = pair.build();
  } catch (IOException e) {
    CoprocessorRpcUtils.setControllerException(controller, e);
  } finally {
    if (scanner != null) {
      try {
        scanner.close();
      } catch (IOException ignored) {
      }
    }
  }
  done.run(response);
}
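For context, the matching client-side call usually goes through AggregationClient, which drives this endpoint and then computes the actual median in a second pass. The sketch below is illustrative only: it assumes AggregateImplementation is loaded as a coprocessor on the target table, and the table name and column names ("mytable", "cf", "value") are placeholders, not taken from the code above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
import org.apache.hadoop.hbase.util.Bytes;

public class MedianSketch {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();
    AggregationClient aggregationClient = new AggregationClient(conf);
    Scan scan = new Scan();
    // one value column and no weight column, i.e. an unweighted median
    scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("value"));
    Long median = aggregationClient.median(TableName.valueOf("mytable"),
        new LongColumnInterpreter(), scan);
    System.out.println("median = " + median);
  }
}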
Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
The class ColumnAggregationEndpoint, method sum.
@Override
public void sum(RpcController controller, SumRequest request, RpcCallback<SumResponse> done) {
  // aggregate at each region
  Scan scan = new Scan();
  // Family is required in pb. Qualifier is not.
  byte[] family = request.getFamily().toByteArray();
  byte[] qualifier = request.hasQualifier() ? request.getQualifier().toByteArray() : null;
  if (request.hasQualifier()) {
    scan.addColumn(family, qualifier);
  } else {
    scan.addFamily(family);
  }
  int sumResult = 0;
  InternalScanner scanner = null;
  try {
    scanner = this.env.getRegion().getScanner(scan);
    List<Cell> curVals = new ArrayList<>();
    boolean hasMore = false;
    do {
      curVals.clear();
      hasMore = scanner.next(curVals);
      for (Cell kv : curVals) {
        if (CellUtil.matchingQualifier(kv, qualifier)) {
          sumResult += Bytes.toInt(kv.getValueArray(), kv.getValueOffset());
        }
      }
    } while (hasMore);
  } catch (IOException e) {
    CoprocessorRpcUtils.setControllerException(controller, e);
    // Set result to -1 to indicate error.
    sumResult = -1;
    LOG.info("Setting sum result to -1 to indicate error", e);
  } finally {
    if (scanner != null) {
      try {
        scanner.close();
      } catch (IOException e) {
        CoprocessorRpcUtils.setControllerException(controller, e);
        sumResult = -1;
        LOG.info("Setting sum result to -1 to indicate error", e);
      }
    }
  }
  LOG.info("Returning result " + sumResult);
  done.run(SumResponse.newBuilder().setSum(sumResult).build());
}
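On the client side, this endpoint is normally reached through Table.coprocessorService, one RPC per region. The fragment below is a hedged sketch in the spirit of the standard HBase endpoint examples: ColumnAggregationService, SumRequest and SumResponse are the test-only ColumnAggregationProtos messages this endpoint implements, while table, FAMILY and QUALIFIER are assumed to be in scope, and the enclosing method would need to declare throws Throwable.

final SumRequest request = SumRequest.newBuilder()
    .setFamily(ByteString.copyFrom(FAMILY))
    .setQualifier(ByteString.copyFrom(QUALIFIER))
    .build();
// one call per region; results come back keyed by region start key
Map<byte[], Long> perRegion = table.coprocessorService(ColumnAggregationService.class,
    null, null, new Batch.Call<ColumnAggregationService, Long>() {
      @Override
      public Long call(ColumnAggregationService instance) throws IOException {
        ServerRpcController controller = new ServerRpcController();
        CoprocessorRpcUtils.BlockingRpcCallback<SumResponse> callback =
            new CoprocessorRpcUtils.BlockingRpcCallback<>();
        instance.sum(controller, request, callback);
        SumResponse response = callback.get();
        if (controller.failedOnException()) {
          throw controller.getFailedOn();
        }
        return response.getSum();
      }
    });
long total = 0;
for (long regionSum : perRegion.values()) {
  total += regionSum;
}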
Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
The class TestStripeCompactor, method createCompactor.
private StripeCompactor createCompactor(StoreFileWritersCapture writers, KeyValue[] input) throws Exception {
  Configuration conf = HBaseConfiguration.create();
  conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
  final Scanner scanner = new Scanner(input);
  // Create store mock that is satisfactory for compactor.
  HColumnDescriptor col = new HColumnDescriptor(NAME_OF_THINGS);
  ScanInfo si = new ScanInfo(conf, col, Long.MAX_VALUE, 0, CellComparator.COMPARATOR);
  Store store = mock(Store.class);
  when(store.getFamily()).thenReturn(col);
  when(store.getScanInfo()).thenReturn(si);
  when(store.areWritesEnabled()).thenReturn(true);
  when(store.getFileSystem()).thenReturn(mock(FileSystem.class));
  when(store.getRegionInfo()).thenReturn(new HRegionInfo(TABLE_NAME));
  when(store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(), anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers);
  when(store.getComparator()).thenReturn(CellComparator.COMPARATOR);
  return new StripeCompactor(conf, store) {
    @Override
    protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners, long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow, byte[] dropDeletesToRow) throws IOException {
      return scanner;
    }

    @Override
    protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners, ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
      return scanner;
    }
  };
}
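The Scanner passed to createScanner above is a small test double defined elsewhere in the test class; it replays the supplied KeyValue array instead of reading real store files. A minimal sketch of such an InternalScanner stub (an assumption for illustration, not the actual helper) could look like this:

// Replays a fixed set of cells once, then reports that no rows remain.
private static final class FixedCellScanner implements InternalScanner {
  private final List<Cell> cells;
  private boolean drained = false;

  FixedCellScanner(KeyValue... kvs) {
    this.cells = new ArrayList<>(Arrays.asList(kvs));
  }

  @Override
  public boolean next(List<Cell> result) throws IOException {
    if (!drained) {
      result.addAll(cells);
      drained = true;
    }
    // false tells the caller there is nothing left to scan
    return false;
  }

  @Override
  public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
    return next(result);
  }

  @Override
  public void close() throws IOException {
    // nothing to release for an in-memory scanner
  }
}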
Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
The class TestScannerFromBucketCache, method testBasicScanWithOffheapBucketCacheWithMBB.
@Test
public void testBasicScanWithOffheapBucketCacheWithMBB() throws IOException {
  setUp(true, true);
  byte[] row1 = Bytes.toBytes("row1offheap");
  byte[] qf1 = Bytes.toBytes("qualifier1");
  byte[] qf2 = Bytes.toBytes("qualifier2");
  byte[] fam1 = Bytes.toBytes("famoffheap");
  // System.currentTimeMillis();
  long ts1 = 1;
  long ts2 = ts1 + 1;
  long ts3 = ts1 + 2;
  // Setting up region
  String method = this.getName();
  this.region = initHRegion(tableName, method, conf, test_util, fam1);
  try {
    List<Cell> expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, true);
    List<Cell> actual = performScan(row1, fam1);
    // Verify result
    for (int i = 0; i < expected.size(); i++) {
      assertFalse(actual.get(i) instanceof ByteBufferKeyValue);
      assertTrue(CellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
    }
    // Wait for the bucket cache threads to move the data to offheap
    Thread.sleep(500);
    // Do the scan again and verify. This time it should come from the bucket cache in offheap
    // mode, but one of the cells will be copied due to the asSubByteBuff call
    Scan scan = new Scan(row1);
    scan.addFamily(fam1);
    scan.setMaxVersions(10);
    actual = new ArrayList<>();
    InternalScanner scanner = region.getScanner(scan);
    boolean hasNext = scanner.next(actual);
    assertEquals(false, hasNext);
    // Verify result
    for (int i = 0; i < expected.size(); i++) {
      if (i != 5) {
        // the last cell fetched will be of type shareable but not offheap because
        // the MBB is copied to form a single cell
        assertTrue(actual.get(i) instanceof ByteBufferKeyValue);
      }
    }
  } catch (InterruptedException e) {
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
  }
}
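The setUp(true, true) call at the top configures the block cache; its body is not shown here. As a rough sketch of the kind of settings involved (assumed, not copied from the test), an offheap bucket cache is typically enabled like this:

Configuration conf = HBaseConfiguration.create();
// share of the heap used for the on-heap (L1) LRU block cache
conf.setFloat("hfile.block.cache.size", 0.2f);
// back the L2 block cache with offheap memory, sized in MB
conf.set("hbase.bucketcache.ioengine", "offheap");
conf.setInt("hbase.bucketcache.size", 64);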
Use of org.apache.hadoop.hbase.regionserver.InternalScanner in project hbase by apache.
The class TestFilter, method testFilterListWithPrefixFilter.
// HBASE-9747
@Test
public void testFilterListWithPrefixFilter() throws IOException {
  byte[] family = Bytes.toBytes("f1");
  byte[] qualifier = Bytes.toBytes("q1");
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  htd.addFamily(new HColumnDescriptor(family));
  HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
  Region testRegion = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), TEST_UTIL.getConfiguration(), htd);
  for (int i = 0; i < 5; i++) {
    Put p = new Put(Bytes.toBytes((char) ('a' + i) + "row"));
    p.setDurability(Durability.SKIP_WAL);
    p.addColumn(family, qualifier, Bytes.toBytes(String.valueOf(111 + i)));
    testRegion.put(p);
  }
  testRegion.flush(true);
  // rows starting with "b"
  PrefixFilter pf = new PrefixFilter(new byte[] { 'b' });
  // rows with value of column 'q1' set to '113'
  SingleColumnValueFilter scvf = new SingleColumnValueFilter(family, qualifier, CompareOp.EQUAL, Bytes.toBytes("113"));
  // combine these two with OR in a FilterList
  FilterList filterList = new FilterList(Operator.MUST_PASS_ONE, pf, scvf);
  Scan s1 = new Scan();
  s1.setFilter(filterList);
  InternalScanner scanner = testRegion.getScanner(s1);
  List<Cell> results = new ArrayList<>();
  int resultCount = 0;
  while (scanner.next(results)) {
    resultCount++;
    byte[] row = CellUtil.cloneRow(results.get(0));
    LOG.debug("Found row: " + Bytes.toStringBinary(row));
    assertTrue(Bytes.equals(row, Bytes.toBytes("brow")) || Bytes.equals(row, Bytes.toBytes("crow")));
    results.clear();
  }
  assertEquals(2, resultCount);
  scanner.close();
  WAL wal = ((HRegion) testRegion).getWAL();
  ((HRegion) testRegion).close();
  wal.close();
}