use of org.apache.hadoop.hbase.client.metrics.ScanMetrics in project hbase by apache.
the class ScanPerformanceEvaluation method testScan.
public void testScan() throws IOException {
  Stopwatch tableOpenTimer = Stopwatch.createUnstarted();
  Stopwatch scanOpenTimer = Stopwatch.createUnstarted();
  Stopwatch scanTimer = Stopwatch.createUnstarted();

  // time how long it takes to open the connection and the table
  tableOpenTimer.start();
  Connection connection = ConnectionFactory.createConnection(getConf());
  Table table = connection.getTable(TableName.valueOf(tablename));
  tableOpenTimer.stop();

  // time how long it takes to open the scanner
  Scan scan = getScan();
  scanOpenTimer.start();
  ResultScanner scanner = table.getScanner(scan);
  scanOpenTimer.stop();

  // drain the scanner, counting rows and cells as we go
  long numRows = 0;
  long numCells = 0;
  scanTimer.start();
  while (true) {
    Result result = scanner.next();
    if (result == null) {
      break;
    }
    numRows++;
    numCells += result.rawCells().length;
  }
  scanTimer.stop();
  scanner.close();
  table.close();
  connection.close();

  // scan metrics remain readable from the scanner after it has been closed
  ScanMetrics metrics = scanner.getScanMetrics();
  long totalBytes = metrics.countOfBytesInResults.get();
  double throughput = (double) totalBytes / scanTimer.elapsed(TimeUnit.SECONDS);
  double throughputRows = (double) numRows / scanTimer.elapsed(TimeUnit.SECONDS);
  double throughputCells = (double) numCells / scanTimer.elapsed(TimeUnit.SECONDS);

  System.out.println("HBase scan: ");
  System.out.println("total time to open table: " + tableOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
  System.out.println("total time to open scanner: " + scanOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
  System.out.println("total time to scan: " + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
  System.out.println("Scan metrics:\n" + metrics.getMetricsMap());
  System.out.println("total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
  System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
  System.out.println("total rows : " + numRows);
  System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
  System.out.println("total cells : " + numCells);
  System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
}
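The metrics read at the end of this method are only populated if the Scan returned by getScan() has metrics collection enabled. A minimal sketch of how such a scan could be built (this is not the original getScan() implementation; the caching and block-cache settings are illustrative):

  // Sketch only: a Scan configured for a metrics-collecting benchmark scan.
  Scan scan = new Scan();
  scan.setCaching(1000);              // illustrative batch size per RPC
  scan.setCacheBlocks(false);         // typical choice for full-table benchmark scans
  scan.setScanMetricsEnabled(true);   // without this, scanner.getScanMetrics() returns null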
use of org.apache.hadoop.hbase.client.metrics.ScanMetrics in project hbase by apache.
the class TestFromClientSide method getScanMetrics.
private ScanMetrics getScanMetrics(Scan scan) throws Exception {
  byte[] serializedMetrics = scan.getAttribute(Scan.SCAN_ATTRIBUTES_METRICS_DATA);
  assertTrue("Serialized metrics were not found.", serializedMetrics != null);
  ScanMetrics scanMetrics = ProtobufUtil.toScanMetrics(serializedMetrics);
  return scanMetrics;
}
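For this attribute-based helper to return anything, the scan must have metrics enabled and must have completed, so that the client has serialized the counters back onto the Scan object under Scan.SCAN_ATTRIBUTES_METRICS_DATA (which is what this test class relies on). A minimal usage sketch, assuming an open Table named ht and the usual HBase client imports:

  // Sketch only: drive a scan to completion, then decode the metrics the
  // client attached to the Scan object. 'ht' is an assumed open Table.
  Scan scan = new Scan();
  scan.setScanMetricsEnabled(true);
  try (ResultScanner scanner = ht.getScanner(scan)) {
    for (Result r : scanner) {
      // drain all rows
    }
  }
  ScanMetrics metrics = getScanMetrics(scan);
  System.out.println("regions visited: " + metrics.countOfRegions.get());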
use of org.apache.hadoop.hbase.client.metrics.ScanMetrics in project hbase by apache.
the class TestServerSideScanMetricsFromClientSide method testMetric.
/**
 * Run the scan to completion and check the metric against the specified value.
 * @param scan The scan instance to use to record metrics
 * @param metricKey The metric key name
 * @param expectedValue The expected value of metric
 * @throws Exception on unexpected failure
 */
private void testMetric(Scan scan, String metricKey, long expectedValue) throws Exception {
  assertTrue("Scan should be configured to record metrics", scan.isScanMetricsEnabled());
  ResultScanner scanner = TABLE.getScanner(scan);
  // Iterate through all the results
  while (scanner.next() != null) {
    continue;
  }
  scanner.close();
  ScanMetrics metrics = scanner.getScanMetrics();
  assertTrue("Metrics are null", metrics != null);
  assertTrue("Metric : " + metricKey + " does not exist", metrics.hasCounter(metricKey));
  final long actualMetricValue = metrics.getCounter(metricKey).get();
  assertEquals("Metric: " + metricKey + " Expected: " + expectedValue + " Actual: " + actualMetricValue,
    expectedValue, actualMetricValue);
}
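A sketch of how this helper might be invoked. The metric key constant and the expected row count are assumptions based on the surrounding test class, not taken from this snippet:

  // Sketch only: verify the server-side "rows scanned" counter for a full scan.
  // ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME and NUM_ROWS are
  // assumed to be available in the test class.
  Scan scan = new Scan();
  scan.setScanMetricsEnabled(true);
  testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, NUM_ROWS);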
use of org.apache.hadoop.hbase.client.metrics.ScanMetrics in project hbase by apache.
the class TestAsyncTableScanMetrics method testScanMetrics.
@Test
public void testScanMetrics() throws Exception {
  Pair<List<Result>, ScanMetrics> pair = method.scan(new Scan().setScanMetricsEnabled(true));
  List<Result> results = pair.getFirst();
  assertEquals(3, results.size());
  long bytes = results.stream()
    .flatMap(r -> Arrays.asList(r.rawCells()).stream())
    .mapToLong(c -> PrivateCellUtil.estimatedSerializedSizeOf(c))
    .sum();
  ScanMetrics scanMetrics = pair.getSecond();
  assertEquals(NUM_REGIONS, scanMetrics.countOfRegions.get());
  assertEquals(bytes, scanMetrics.countOfBytesInResults.get());
  assertEquals(NUM_REGIONS, scanMetrics.countOfRPCcalls.get());
  // also assert a server side metric to ensure that we have published them into the client side
  // metrics.
  assertEquals(3, scanMetrics.countOfRowsScanned.get());
}
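The test delegates the actual scan to method.scan, whose implementation is not shown here. One way to obtain both the results and the ScanMetrics through the async client is sketched below; conf, tableName and executor are assumed to be supplied by the caller, and the usual client imports are assumed:

  // Sketch only: scan through the async client and read the scan metrics from
  // the ResultScanner it returns. Not the test's actual 'method.scan'.
  try (AsyncConnection conn = ConnectionFactory.createAsyncConnection(conf).get()) {
    AsyncTable<ScanResultConsumer> asyncTable = conn.getTable(tableName, executor);
    List<Result> results = new ArrayList<>();
    try (ResultScanner scanner = asyncTable.getScanner(new Scan().setScanMetricsEnabled(true))) {
      for (Result r : scanner) {
        results.add(r);
      }
      ScanMetrics scanMetrics = scanner.getScanMetrics();
      System.out.println("RPC calls: " + scanMetrics.countOfRPCcalls.get());
    }
  }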
use of org.apache.hadoop.hbase.client.metrics.ScanMetrics in project hbase by apache.
the class TestFromClientSide5 method testScanMetrics.
/**
 * Test ScanMetrics
 */
@Test
@SuppressWarnings({ "unused", "checkstyle:EmptyBlock" })
public void testScanMetrics() throws Exception {
  final TableName tableName = name.getTableName();
  // Create table:
  try (Table ht = TEST_UTIL.createMultiRegionTable(tableName, FAMILY)) {
    int numOfRegions;
    try (RegionLocator r = TEST_UTIL.getConnection().getRegionLocator(tableName)) {
      numOfRegions = r.getStartKeys().length;
    }
    // Create 3 rows in the table, with rowkeys starting with "zzz*" so that
    // scans are forced to hit all the regions.
    Put put1 = new Put(Bytes.toBytes("zzz1"));
    put1.addColumn(FAMILY, QUALIFIER, VALUE);
    Put put2 = new Put(Bytes.toBytes("zzz2"));
    put2.addColumn(FAMILY, QUALIFIER, VALUE);
    Put put3 = new Put(Bytes.toBytes("zzz3"));
    put3.addColumn(FAMILY, QUALIFIER, VALUE);
    ht.put(Arrays.asList(put1, put2, put3));
    Scan scan1 = new Scan();
    int numRecords = 0;
    try (ResultScanner scanner = ht.getScanner(scan1)) {
      for (Result result : scanner) {
        numRecords++;
      }
      LOG.info("test data has {} records.", numRecords);
      // by default, scan metrics collection is turned off
      assertNull(scanner.getScanMetrics());
    }

    // turn on scan metrics
    Scan scan2 = new Scan();
    scan2.setScanMetricsEnabled(true);
    scan2.setCaching(numRecords + 1);
    try (ResultScanner scanner = ht.getScanner(scan2)) {
      for (Result result : scanner.next(numRecords - 1)) {
      }
      assertNotNull(scanner.getScanMetrics());
    }
    // set caching to 1, because metrics are collected in each roundtrip only
    scan2 = new Scan();
    scan2.setScanMetricsEnabled(true);
    scan2.setCaching(1);
    try (ResultScanner scanner = ht.getScanner(scan2)) {
      // metrics should still be collected even if the scan does not run all the way to
      // the end of the scanner. So this is asking for 2 of the 3 rows we inserted.
      for (Result result : scanner.next(numRecords - 1)) {
      }
      ScanMetrics scanMetrics = scanner.getScanMetrics();
      assertEquals("Did not access all the regions in the table", numOfRegions,
        scanMetrics.countOfRegions.get());
    }
    // check byte counters
    scan2 = new Scan();
    scan2.setScanMetricsEnabled(true);
    scan2.setCaching(1);
    try (ResultScanner scanner = ht.getScanner(scan2)) {
      int numBytes = 0;
      for (Result result : scanner) {
        for (Cell cell : result.listCells()) {
          numBytes += PrivateCellUtil.estimatedSerializedSizeOf(cell);
        }
      }
      ScanMetrics scanMetrics = scanner.getScanMetrics();
      assertEquals("Did not count the result bytes", numBytes,
        scanMetrics.countOfBytesInResults.get());
    }

    // check byte counters on a small scan
    scan2 = new Scan();
    scan2.setScanMetricsEnabled(true);
    scan2.setCaching(1);
    scan2.setReadType(ReadType.PREAD);
    try (ResultScanner scanner = ht.getScanner(scan2)) {
      int numBytes = 0;
      for (Result result : scanner) {
        for (Cell cell : result.listCells()) {
          numBytes += PrivateCellUtil.estimatedSerializedSizeOf(cell);
        }
      }
      ScanMetrics scanMetrics = scanner.getScanMetrics();
      assertEquals("Did not count the result bytes", numBytes,
        scanMetrics.countOfBytesInResults.get());
    }
    // now, test that the metrics are still collected even if you don't call close, but do
    // run past the end of all the records
    /**
     * There seems to be a timing issue here. Comment out for now. Fix when time.
     * Scan scanWithoutClose = new Scan();
     * scanWithoutClose.setCaching(1);
     * scanWithoutClose.setScanMetricsEnabled(true);
     * ResultScanner scannerWithoutClose = ht.getScanner(scanWithoutClose);
     * for (Result result : scannerWithoutClose.next(numRecords + 1)) {
     * }
     * ScanMetrics scanMetricsWithoutClose = getScanMetrics(scanWithoutClose);
     * assertEquals("Did not access all the regions in the table", numOfRegions,
     * scanMetricsWithoutClose.countOfRegions.get());
     */

    // finally,
    // test that the metrics are collected correctly if you both run past all the records,
    // AND close the scanner
    Scan scanWithClose = new Scan();
    // make sure we can set caching up to the number of scanned values
    scanWithClose.setCaching(numRecords);
    scanWithClose.setScanMetricsEnabled(true);
    try (ResultScanner scannerWithClose = ht.getScanner(scanWithClose)) {
      for (Result result : scannerWithClose.next(numRecords + 1)) {
      }
      scannerWithClose.close();
      ScanMetrics scanMetricsWithClose = scannerWithClose.getScanMetrics();
      assertEquals("Did not access all the regions in the table", numOfRegions,
        scanMetricsWithClose.countOfRegions.get());
    }
  } finally {
    TEST_UTIL.deleteTable(tableName);
  }
}
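The tests above read individual counters from fields such as countOfRegions and countOfBytesInResults; the full counter set can also be dumped by name via getMetricsMap(), which is how the ScanPerformanceEvaluation snippet at the top of this page prints it. A minimal sketch, assuming an open Table named table and a java.util.Map import:

  // Sketch only: enable metrics, drain the scan, then dump every counter by name.
  Scan scan = new Scan().setScanMetricsEnabled(true);
  try (ResultScanner scanner = table.getScanner(scan)) {
    for (Result r : scanner) {
      // consume all rows so every region contributes to the metrics
    }
    Map<String, Long> counters = scanner.getScanMetrics().getMetricsMap();
    counters.forEach((name, value) -> System.out.println(name + " = " + value));
  }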