Usage example of org.apache.hadoop.hbase.thrift2.generated.TResult in the Apache HBase project.
From class TestThriftHBaseServiceHandler, method testScanWithBatchSize.
@Test
public void testScanWithBatchSize() throws Exception {
  ThriftHBaseServiceHandler handler = createHandler();
  ByteBuffer table = wrap(tableAname);

  // Load a single row with 100 cells: qualifiers col000..col099, values val000..val099.
  List<TColumnValue> cells = new ArrayList<>(100);
  for (int col = 0; col < 100; col++) {
    String suffix = pad(col, (byte) 3);
    cells.add(new TColumnValue(wrap(familyAname), wrap(Bytes.toBytes("col" + suffix)),
        wrap(Bytes.toBytes("val" + suffix))));
  }
  handler.put(table, new TPut(wrap(Bytes.toBytes("testScanWithBatchSize")), cells));

  // Scan just that row, limiting each scanner call to 10 columns per batch.
  TScan scan = new TScan();
  List<TColumn> scanColumns = new ArrayList<>(1);
  TColumn familyColumn = new TColumn();
  familyColumn.setFamily(familyAname);
  scanColumns.add(familyColumn);
  scan.setColumns(scanColumns);
  scan.setStartRow(Bytes.toBytes("testScanWithBatchSize"));
  scan.setStopRow(Bytes.toBytes("testScanWithBatchSize\uffff"));
  scan.setBatchSize(10);

  int scannerId = handler.openScanner(table, scan);
  // 100 cells at batch size 10 should come back as 10 partial "rows" of 10 columns each.
  for (int batchNo = 0; batchNo < 10; batchNo++) {
    List<TResult> batch = handler.getScannerRows(scannerId, 1);
    assertEquals(1, batch.size());
    List<TColumnValue> returned = batch.get(0).getColumnValues();
    assertEquals(10, returned.size());
    // Columns must be returned in qualifier order across batches.
    for (int idx = 0; idx < 10; idx++) {
      String expected = "col" + pad(batchNo * 10 + idx, (byte) 3);
      assertArrayEquals(Bytes.toBytes(expected), returned.get(idx).getQualifier());
    }
  }

  // The scanner is now exhausted.
  assertEquals(0, handler.getScannerRows(scannerId, 1).size());

  // After closing, the scanner id must be rejected.
  handler.closeScanner(scannerId);
  try {
    handler.getScannerRows(scannerId, 1);
    fail("Scanner id should be invalid");
  } catch (TIllegalArgument expected) {
    // expected: the scanner id is no longer valid after closeScanner
  }
}
Usage example of org.apache.hadoop.hbase.thrift2.generated.TResult in the Apache HBase project.
From class TestThriftHBaseServiceHandler, method testGetScannerResults.
@Test
public void testGetScannerResults() throws Exception {
  ThriftHBaseServiceHandler handler = createHandler();
  ByteBuffer table = wrap(tableAname);

  // Insert 20 single-cell rows: testGetScannerResults00 .. testGetScannerResults19.
  TColumnValue columnValue =
      new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
  List<TColumnValue> columnValues = new ArrayList<>(1);
  columnValues.add(columnValue);
  for (int i = 0; i < 20; i++) {
    handler.put(table,
        new TPut(wrap(Bytes.toBytes("testGetScannerResults" + pad(i, (byte) 2))), columnValues));
  }

  // Scan over the single test column, starting at the common row prefix.
  TScan scan = new TScan();
  List<TColumn> columns = new ArrayList<>(1);
  TColumn column = new TColumn();
  column.setFamily(familyAname);
  column.setQualifier(qualifierAname);
  columns.add(column);
  scan.setColumns(columns);
  scan.setStartRow(Bytes.toBytes("testGetScannerResults"));

  // Forward scans with increasing stop rows: 5, 10, then all 20 rows, in ascending order.
  scan.setStopRow(Bytes.toBytes("testGetScannerResults05"));
  assertScannerRows(handler.getScannerResults(table, scan, 5), 5, 0, 1);
  scan.setStopRow(Bytes.toBytes("testGetScannerResults10"));
  assertScannerRows(handler.getScannerResults(table, scan, 10), 10, 0, 1);
  scan.setStopRow(Bytes.toBytes("testGetScannerResults20"));
  assertScannerRows(handler.getScannerResults(table, scan, 20), 20, 0, 1);

  // Reverse scan: all 20 rows must come back from 19 down to 0.
  scan = new TScan();
  scan.setColumns(columns);
  scan.setReversed(true);
  scan.setStartRow(Bytes.toBytes("testGetScannerResults20"));
  scan.setStopRow(Bytes.toBytes("testGetScannerResults"));
  assertScannerRows(handler.getScannerResults(table, scan, 20), 20, 19, -1);
}

/**
 * Asserts that {@code results} holds exactly {@code expectedCount} rows and that row i's key
 * is {@code "testGetScannerResults" + pad(firstRow + i * step, 2)}.
 *
 * @param results rows returned by {@code getScannerResults}
 * @param expectedCount number of rows expected
 * @param firstRow numeric suffix expected for the first returned row
 * @param step +1 for a forward scan, -1 for a reverse scan
 */
private void assertScannerRows(List<TResult> results, int expectedCount, int firstRow, int step) {
  assertEquals(expectedCount, results.size());
  for (int i = 0; i < expectedCount; i++) {
    String expectedRow = "testGetScannerResults" + pad(firstRow + i * step, (byte) 2);
    assertArrayEquals(Bytes.toBytes(expectedRow), results.get(i).getRow());
  }
}
Usage example of org.apache.hadoop.hbase.thrift2.generated.TResult in the Apache HBase project.
From class TestThriftHBaseServiceHandler, method testMetricsPrecision.
/**
 * Verifies that latency metrics are recorded with full long precision.
 *
 * <p>See HBASE-17611: latency metrics were capped at ~2 seconds because an int variable was
 * used to capture the duration.
 */
@Test
public void testMetricsPrecision() throws Exception {
  byte[] rowkey = Bytes.toBytes("row1");
  byte[] family = Bytes.toBytes("f");
  byte[] col = Bytes.toBytes("c");
  // Create a table whose DelayingRegionObserver coprocessor slows down requests, so the
  // observed get latency exceeds the old int-capped ~2s ceiling.
  TableName tableName = TableName.valueOf("testMetricsPrecision");
  TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
      .setCoprocessor(DelayingRegionObserver.class.getName())
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build();
  Table table = null;
  try {
    table = UTIL.createTable(tableDescriptor, null);
    table.put(new Put(rowkey).addColumn(family, col, Bytes.toBytes("val1")));
    ThriftHBaseServiceHandler hbaseHandler = createHandler();
    ThriftMetrics metrics = getMetrics(UTIL.getConfiguration());
    // Wrap the handler in the metrics proxy so the get below is timed and recorded.
    THBaseService.Iface handler = HbaseHandlerMetricsProxy.newInstance(hbaseHandler, metrics, null);
    ByteBuffer tTableName = wrap(tableName.getName());
    // Perform a successful get and verify the returned row and cell contents.
    TGet tGet = new TGet(wrap(rowkey));
    TResult tResult = handler.get(tTableName, tGet);
    List<TColumnValue> expectedColumnValues = Lists.newArrayList(
        new TColumnValue(wrap(family), wrap(col), wrap(Bytes.toBytes("val1"))));
    assertArrayEquals(rowkey, tResult.getRow());
    List<TColumnValue> returnedColumnValues = tResult.getColumnValues();
    assertTColumnValuesEqual(expectedColumnValues, returnedColumnValues);
    // The recorded max get latency must exceed 3000 — presumably milliseconds, past the old
    // int-based cap. NOTE(review): units and the coprocessor's delay duration are assumed;
    // confirm against DelayingRegionObserver and ThriftMetrics.
    metricsHelper.assertGaugeGt("get_max", 3000L, metrics.getSource());
  } finally {
    if (table != null) {
      try {
        table.close();
      } catch (IOException ignored) {
        // best-effort close; table deletion below still proceeds
      }
      UTIL.deleteTable(tableName);
    }
  }
}
Usage example of org.apache.hadoop.hbase.thrift2.generated.TResult in the Apache HBase project.
From class TestThriftHBaseServiceHandler, method testScanWithFilter.
@Test
public void testScanWithFilter() throws Exception {
  ThriftHBaseServiceHandler handler = createHandler();
  ByteBuffer table = wrap(tableAname);

  // Write ten single-cell rows: testScanWithFilter0 .. testScanWithFilter9.
  List<TColumnValue> cells = new ArrayList<>(1);
  cells.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname)));
  for (int row = 0; row < 10; row++) {
    handler.put(table, new TPut(wrap(Bytes.toBytes("testScanWithFilter" + row)), cells));
  }

  // Scan the whole key range with a KeyOnlyFilter, which strips values from results.
  TColumn scanColumn = new TColumn();
  scanColumn.setFamily(familyAname);
  scanColumn.setQualifier(qualifierAname);
  List<TColumn> scanColumns = new ArrayList<>(1);
  scanColumns.add(scanColumn);
  TScan scan = new TScan();
  scan.setColumns(scanColumns);
  scan.setStartRow(Bytes.toBytes("testScanWithFilter"));
  scan.setStopRow(Bytes.toBytes("testScanWithFilter\uffff"));
  scan.setFilterString(wrap(Bytes.toBytes("KeyOnlyFilter()")));

  int scannerId = handler.openScanner(table, scan);
  List<TResult> rows = handler.getScannerRows(scannerId, 10);
  assertEquals(10, rows.size());
  for (int i = 0; i < 10; i++) {
    TResult row = rows.get(i);
    // Rows must come back in order, and the filter must have emptied every value.
    assertArrayEquals(Bytes.toBytes("testScanWithFilter" + i), row.getRow());
    assertEquals(0, row.getColumnValues().get(0).getValue().length);
  }

  // The scanner is exhausted; after closing, its id must be rejected.
  assertEquals(0, handler.getScannerRows(scannerId, 10).size());
  handler.closeScanner(scannerId);
  try {
    handler.getScannerRows(scannerId, 10);
    fail("Scanner id should be invalid");
  } catch (TIllegalArgument expected) {
    // expected: the scanner id is no longer valid after closeScanner
  }
}
Usage example of org.apache.hadoop.hbase.thrift2.generated.TResult in the Apache HBase project.
From class TestThriftHBaseServiceHandler, method testDeleteFamily.
@Test
public void testDeleteFamily() throws Exception {
  ThriftHBaseServiceHandler handler = createHandler();
  byte[] rowName = Bytes.toBytes("testDeleteFamily");
  ByteBuffer table = wrap(tableAname);

  long timestamp1 = EnvironmentEdgeManager.currentTime() - 10;
  long timestamp2 = EnvironmentEdgeManager.currentTime();

  // Write the same cell twice with distinct timestamps to create two versions.
  List<TColumnValue> columnValues = new ArrayList<>();
  TColumnValue columnValueA =
      new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
  columnValueA.setTimestamp(timestamp1);
  columnValues.add(columnValueA);
  // The TPut constructor already attaches columnValues; the former redundant
  // put.setColumnValues(columnValues) call was removed.
  TPut put = new TPut(wrap(rowName), columnValues);
  handler.put(table, put);
  columnValueA.setTimestamp(timestamp2);
  handler.put(table, put);

  // Both versions should now be visible.
  TGet get = new TGet(wrap(rowName));
  get.setMaxVersions(2);
  TResult result = handler.get(table, get);
  assertEquals(2, result.getColumnValuesSize());

  // DELETE_FAMILY removes every cell of the family, all versions included.
  TDelete delete = new TDelete(wrap(rowName));
  List<TColumn> deleteColumns = new ArrayList<>();
  deleteColumns.add(new TColumn(wrap(familyAname)));
  delete.setColumns(deleteColumns);
  delete.setDeleteType(TDeleteType.DELETE_FAMILY);
  handler.deleteSingle(table, delete);

  // The row should be completely gone: no row key, no cells.
  get = new TGet(wrap(rowName));
  result = handler.get(table, get);
  assertArrayEquals(null, result.getRow());
  assertEquals(0, result.getColumnValuesSize());
}
Aggregations