Example usage of org.apache.hadoop.hbase.client.ResultScanner in the Apache HBase project:
class TestLoadIncrementalHFilesSplitRecovery, method assertExpectedTable(Connection, TableName, int, int).
/**
 * Checks that all columns have the expected value and that there is the
 * expected number of rows.
 * @param connection connection used to look up the table
 * @param table table to scan
 * @param count expected number of rows in the table
 * @param value seed passed to {@code value(int)} to compute the expected cell value
 * @throws IOException if listing tables via the admin fails
 */
void assertExpectedTable(final Connection connection, TableName table, int count, int value) throws IOException {
  HTableDescriptor[] htds = util.getAdmin().listTables(table.getNameAsString());
  // assertEquals takes (expected, actual); the original had the arguments
  // swapped, which produces a misleading failure message.
  assertEquals(1, htds.length);
  // try-with-resources guarantees both the table and the scanner are closed
  // even when an assertion fails (the scanner was never closed before).
  try (Table t = connection.getTable(table);
       ResultScanner sr = t.getScanner(new Scan())) {
    int i = 0;
    for (Result r : sr) {
      i++;
      for (NavigableMap<byte[], byte[]> nm : r.getNoVersionMap().values()) {
        for (byte[] val : nm.values()) {
          assertTrue(Bytes.equals(val, value(value)));
        }
      }
    }
    assertEquals(count, i);
  } catch (IOException e) {
    // Surface the cause instead of silently swallowing it.
    fail("Failed due to exception: " + e.getMessage());
  }
}
Example usage of org.apache.hadoop.hbase.client.ResultScanner in the Apache HBase project:
class TestLoadIncrementalHFilesSplitRecovery, method assertExpectedTable(TableName, int, int).
/**
 * Checks that all columns have the expected value and that there is the
 * expected number of rows.
 * @param table table to scan (resolved through the test utility's connection)
 * @param count expected number of rows in the table
 * @param value seed passed to {@code value(int)} to compute the expected cell value
 * @throws IOException if listing tables via the admin fails
 */
void assertExpectedTable(TableName table, int count, int value) throws IOException {
  HTableDescriptor[] htds = util.getAdmin().listTables(table.getNameAsString());
  // assertEquals takes (expected, actual); the original had the arguments
  // swapped, which produces a misleading failure message.
  assertEquals(1, htds.length);
  // try-with-resources guarantees both the table and the scanner are closed
  // even when an assertion fails (the scanner was never closed before).
  try (Table t = util.getConnection().getTable(table);
       ResultScanner sr = t.getScanner(new Scan())) {
    int i = 0;
    for (Result r : sr) {
      i++;
      for (NavigableMap<byte[], byte[]> nm : r.getNoVersionMap().values()) {
        for (byte[] val : nm.values()) {
          assertTrue(Bytes.equals(val, value(value)));
        }
      }
    }
    assertEquals(count, i);
  } catch (IOException e) {
    // Surface the cause instead of silently swallowing it.
    fail("Failed due to exception: " + e.getMessage());
  }
}
Example usage of org.apache.hadoop.hbase.client.ResultScanner in the Apache HBase project:
class TestImportExport, method testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily.
/**
 * Exports a row carrying two delete-family markers (raw scan) and verifies the
 * imported table reproduces the exported data exactly.
 */
@Test
public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Exception {
  final TableName exportTable = TableName.valueOf(name.getMethodName());
  HTableDescriptor desc = new HTableDescriptor(exportTable);
  // Keep deleted cells so the raw export can see the delete markers.
  desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5).setKeepDeletedCells(KeepDeletedCells.TRUE));
  UTIL.getAdmin().createTable(desc);
  Table exportT = UTIL.getConnection().getTable(exportTable);
  //Add first version of QUAL
  Put p = new Put(ROW1);
  p.addColumn(FAMILYA, QUAL, now, QUAL);
  exportT.put(p);
  //Add Delete family marker
  Delete d = new Delete(ROW1, now + 3);
  exportT.delete(d);
  //Add second version of QUAL
  p = new Put(ROW1);
  // Bytes.toBytes encodes as UTF-8; String.getBytes() used the
  // platform-default charset, which is non-portable.
  p.addColumn(FAMILYA, QUAL, now + 5, Bytes.toBytes("s"));
  exportT.put(p);
  //Add second Delete family marker
  d = new Delete(ROW1, now + 7);
  exportT.delete(d);
  String[] args = new String[] { "-D" + Export.RAW_SCAN + "=true", exportTable.getNameAsString(), FQ_OUTPUT_DIR, // max number of key versions per key to export
  "1000" };
  assertTrue(runExport(args));
  final String importTable = name.getMethodName() + "import";
  desc = new HTableDescriptor(TableName.valueOf(importTable));
  desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5).setKeepDeletedCells(KeepDeletedCells.TRUE));
  UTIL.getAdmin().createTable(desc);
  Table importT = UTIL.getConnection().getTable(TableName.valueOf(importTable));
  args = new String[] { importTable, FQ_OUTPUT_DIR };
  assertTrue(runImport(args));
  Scan s = new Scan();
  s.setMaxVersions();
  s.setRaw(true);
  // Open the scanners inside the try so any failure still reaches the
  // finally that closes the tables; the scanners themselves are closed by
  // try-with-resources (they previously leaked).
  try (ResultScanner importedTScanner = importT.getScanner(s);
       ResultScanner exportedTScanner = exportT.getScanner(s)) {
    Result importedTResult = importedTScanner.next();
    Result exportedTResult = exportedTScanner.next();
    Result.compareResults(exportedTResult, importedTResult);
  } catch (Exception e) {
    fail("Original and imported tables data comparision failed with error:" + e.getMessage());
  } finally {
    exportT.close();
    importT.close();
  }
}
Example usage of org.apache.hadoop.hbase.client.ResultScanner in the Apache HBase project:
class TestImportTSVWithVisibilityLabels, method issueDeleteAndVerifyData.
/**
 * Issues a visibility-labelled family delete for row "KEY" and verifies, with
 * retries, that an authorized scan of the family returns no rows.
 * @param tableName table to delete from and validate
 * @throws IOException if table access fails outside the retried section
 */
private void issueDeleteAndVerifyData(TableName tableName) throws IOException {
  LOG.debug("Validating table after delete.");
  boolean verified = false;
  long pause = conf.getLong("hbase.client.pause", 5 * 1000);
  int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
  // try-with-resources closes the table even if an assertion fails; the
  // original only closed it on the success path.
  try (Table table = util.getConnection().getTable(tableName)) {
    for (int i = 0; i < numRetries; i++) {
      try {
        Delete d = new Delete(Bytes.toBytes("KEY"));
        d.addFamily(Bytes.toBytes(FAMILY));
        d.setCellVisibility(new CellVisibility("private&secret"));
        table.delete(d);
        Scan scan = new Scan();
        // Scan entire family.
        scan.addFamily(Bytes.toBytes(FAMILY));
        scan.setAuthorizations(new Authorizations("secret", "private"));
        // Close the scanner (it previously leaked on every iteration).
        try (ResultScanner resScanner = table.getScanner(scan)) {
          Result[] next = resScanner.next(5);
          assertEquals(0, next.length);
        }
        verified = true;
        break;
      } catch (NullPointerException e) {
        // If here, a cell was empty. Presume its because updates came in
        // after the scanner had been opened. Wait a while and retry.
      }
      try {
        Thread.sleep(pause);
      } catch (InterruptedException e) {
        // Restore the interrupt status instead of swallowing it.
        Thread.currentThread().interrupt();
      }
    }
  }
  assertTrue(verified);
}
Example usage of org.apache.hadoop.hbase.client.ResultScanner in the Apache HBase project:
class TestImportTsv, method validateTable.
/**
 * Confirm ImportTsv via data in online table.
 * Scans the given family with retries and asserts exactly one row with the two
 * expected KEY/VALUE cells (or zero rows for a dry run).
 * @param conf cluster configuration (also supplies pause/retry settings)
 * @param tableName table written by ImportTsv
 * @param family column family to scan
 * @param valueMultiplier multiplier encoded into the expected cell values
 * @param isDryRun true if ImportTsv ran in dry-run mode, so no rows are expected
 * @throws IOException if the connection cannot be established or closed
 */
private static void validateTable(Configuration conf, TableName tableName, String family, int valueMultiplier, boolean isDryRun) throws IOException {
  LOG.debug("Validating table.");
  boolean verified = false;
  long pause = conf.getLong("hbase.client.pause", 5 * 1000);
  int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
  // try-with-resources closes connection and table even when an assertion
  // fails; the original only closed them on the success path.
  try (Connection connection = ConnectionFactory.createConnection(conf);
       Table table = connection.getTable(tableName)) {
    for (int i = 0; i < numRetries; i++) {
      try {
        Scan scan = new Scan();
        // Scan entire family.
        scan.addFamily(Bytes.toBytes(family));
        // Close the scanner (it previously leaked on every iteration).
        try (ResultScanner resScanner = table.getScanner(scan)) {
          int numRows = 0;
          for (Result res : resScanner) {
            numRows++;
            assertEquals(2, res.size());
            List<Cell> kvs = res.listCells();
            assertTrue(CellUtil.matchingRow(kvs.get(0), Bytes.toBytes("KEY")));
            assertTrue(CellUtil.matchingRow(kvs.get(1), Bytes.toBytes("KEY")));
            assertTrue(CellUtil.matchingValue(kvs.get(0), Bytes.toBytes("VALUE" + valueMultiplier)));
            assertTrue(CellUtil.matchingValue(kvs.get(1), Bytes.toBytes("VALUE" + 2 * valueMultiplier)));
            // Only one result set is expected, so let it loop.
          }
          if (isDryRun) {
            assertEquals(0, numRows);
          } else {
            assertEquals(1, numRows);
          }
        }
        verified = true;
        break;
      } catch (NullPointerException e) {
        // If here, a cell was empty. Presume its because updates came in
        // after the scanner had been opened. Wait a while and retry.
      }
      try {
        Thread.sleep(pause);
      } catch (InterruptedException e) {
        // Restore the interrupt status instead of swallowing it.
        Thread.currentThread().interrupt();
      }
    }
  }
  assertTrue(verified);
}
Aggregations