Usage example of org.apache.hadoop.hbase.filter.BinaryComparator from the Apache HBase project:
class TestHRegion, method testCheckAndMutate_WithWrongValue.
/**
 * Verifies that checkAndMutate rejects both a Put and a Delete when the
 * comparator value does not match the stored cell value.
 *
 * <p>Bug fix: the original test created the {@code Delete} but then passed
 * {@code put} to the second checkAndMutate call, so the checkAndDelete path
 * was never actually exercised. The delete is now passed as intended.
 */
@Test
public void testCheckAndMutate_WithWrongValue() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] qf1 = Bytes.toBytes("qualifier");
  byte[] val1 = Bytes.toBytes("value1");
  byte[] val2 = Bytes.toBytes("value2");
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, fam1);
  try {
    // Putting data in key: fam1:qualifier = value1
    Put put = new Put(row1);
    put.addColumn(fam1, qf1, val1);
    region.put(put);
    // checkAndPut with wrong value: comparator expects val2, stored is val1,
    // so the mutation must not be applied.
    boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL,
        new BinaryComparator(val2), put, true);
    assertEquals(false, res);
    // checkAndDelete with wrong value: same mismatched comparator, so the
    // delete must not be applied either.
    Delete delete = new Delete(row1);
    delete.addFamily(fam1);
    res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL,
        new BinaryComparator(val2), delete, true);
    assertEquals(false, res);
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
  }
}
Usage example of org.apache.hadoop.hbase.filter.BinaryComparator from the Apache HBase project:
class TestHRegion, method testCheckAndPut_ThatPutWasWritten.
/**
 * Verifies that a checkAndPut whose comparator matches the stored value
 * actually applies the conditional Put, by reading the written cell back.
 */
@Test
public void testCheckAndPut_ThatPutWasWritten() throws IOException {
  byte[] row = Bytes.toBytes("row1");
  byte[] family1 = Bytes.toBytes("fam1");
  byte[] family2 = Bytes.toBytes("fam2");
  byte[] qualifier = Bytes.toBytes("qualifier");
  byte[] value1 = Bytes.toBytes("value1");
  byte[] value2 = Bytes.toBytes("value2");
  byte[][] families = { family1, family2 };
  // Set up a region holding both column families.
  this.region = initHRegion(tableName, method, CONF, families);
  try {
    // Seed the cell that the check will inspect: fam1:qualifier = value1.
    Put seed = new Put(row);
    seed.addColumn(family1, qualifier, value1);
    region.put(seed);
    // Build the Put that should be applied once the check passes.
    long now = System.currentTimeMillis();
    KeyValue expectedKv = new KeyValue(row, family2, qualifier, now, KeyValue.Type.Put, value2);
    Put conditionalPut = new Put(row);
    conditionalPut.add(expectedKv);
    // checkAndPut with the matching value: the mutation must be applied.
    // (The original comment said "wrong value", but value1 matches the seed.)
    boolean applied = region.checkAndMutate(row, family1, qualifier, CompareOp.EQUAL,
        new BinaryComparator(value1), conditionalPut, true);
    assertEquals(true, applied);
    // Read back fam2:qualifier and confirm exactly the KeyValue we wrote landed.
    Get get = new Get(row);
    get.addColumn(family2, qualifier);
    Cell[] actual = region.get(get).rawCells();
    Cell[] expected = { expectedKv };
    assertEquals(expected.length, actual.length);
    for (int i = 0; i < actual.length; i++) {
      assertEquals(expected[i], actual[i]);
    }
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
  }
}
Usage example of org.apache.hadoop.hbase.filter.BinaryComparator from the Apache HBase project:
class TestHRegion, method testCheckAndMutate_WithEmptyRowValue.
// ////////////////////////////////////////////////////////////////////////////
// checkAndMutate tests
// ////////////////////////////////////////////////////////////////////////////
// ////////////////////////////////////////////////////////////////////////////
// checkAndMutate tests
// ////////////////////////////////////////////////////////////////////////////
/**
 * Exercises checkAndMutate against empty and missing cell values: an empty
 * comparator value matches both an empty stored value and a missing cell,
 * while a NullComparator matches only a missing cell.
 */
@Test
public void testCheckAndMutate_WithEmptyRowValue() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
byte[] qf1 = Bytes.toBytes("qualifier");
byte[] emptyVal = new byte[] {};
byte[] val1 = Bytes.toBytes("value1");
byte[] val2 = Bytes.toBytes("value2");
// Setting up region
this.region = initHRegion(tableName, method, CONF, fam1);
try {
// Putting empty data in key (the put is applied by the checkAndMutate below,
// not by a direct region.put)
Put put = new Put(row1);
put.addColumn(fam1, qf1, emptyVal);
// checkAndPut with empty value: the cell does not exist yet, and an empty
// comparator value matches a missing cell, so the put succeeds.
boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal), put, true);
assertTrue(res);
// Putting data in key
put = new Put(row1);
put.addColumn(fam1, qf1, val1);
// checkAndPut with correct value: the stored value is still empty, so the
// empty comparator matches and val1 is written.
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal), put, true);
assertTrue(res);
// not empty anymore: the cell now holds val1, so the empty comparator fails.
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal), put, true);
assertFalse(res);
// checkAndDelete with the empty comparator must also fail for the same reason.
Delete delete = new Delete(row1);
delete.addColumn(fam1, qf1);
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal), delete, true);
assertFalse(res);
put = new Put(row1);
put.addColumn(fam1, qf1, val2);
// checkAndPut with correct value: stored val1 matches, so val2 is written
// as a newer version of the same cell.
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val1), put, true);
assertTrue(res);
// checkAndDelete with correct value
delete = new Delete(row1);
delete.addColumn(fam1, qf1);
// NOTE(review): addColumn is called twice — presumably one marker per stored
// version (val1 and val2) since addColumn only deletes the latest version;
// TODO confirm intent.
delete.addColumn(fam1, qf1);
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val2), delete, true);
assertTrue(res);
// Whole-row delete guarded by the empty comparator: the previous delete
// removed the visible value, so the empty comparison succeeds again.
delete = new Delete(row1);
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal), delete, true);
assertTrue(res);
// checkAndPut looking for a null value: NullComparator matches a missing
// cell, so this put is applied.
put = new Put(row1);
put.addColumn(fam1, qf1, val1);
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new NullComparator(), put, true);
assertTrue(res);
} finally {
HBaseTestingUtility.closeRegionAndWAL(this.region);
this.region = null;
}
}
Usage example of org.apache.hadoop.hbase.filter.BinaryComparator from the Apache HBase project:
class TestHRegion, method testFlushCacheWhileScanning.
/**
 * Flushes the cache in a thread while scanning. The tests verify that the
 * scan is coherent - e.g. the returned results are always of the same or
 * later update as the previous results.
 *
 * <p>Fixes: the per-iteration {@code InternalScanner} was never closed
 * (resource leak), the assertion message was missing a separator before
 * {@code i=}, and the interrupt status is now restored when the join with
 * the flush thread is interrupted.
 *
 * @throws IOException
 *           scan / compact
 * @throws InterruptedException
 *           thread join
 */
@Test
public void testFlushCacheWhileScanning() throws IOException, InterruptedException {
  byte[] family = Bytes.toBytes("family");
  int numRows = 1000;
  int flushAndScanInterval = 10;
  int compactInterval = 10 * flushAndScanInterval;
  this.region = initHRegion(tableName, method, CONF, family);
  FlushThread flushThread = new FlushThread();
  try {
    flushThread.start();
    // Scan matches only rows whose qual1 value equals 5L (i.e. i % 10 == 5).
    Scan scan = new Scan();
    scan.addFamily(family);
    scan.setFilter(new SingleColumnValueFilter(family, qual1, CompareOp.EQUAL,
        new BinaryComparator(Bytes.toBytes(5L))));
    int expectedCount = 0;
    List<Cell> res = new ArrayList<>();
    // Alternate flushing before vs. after the scan drains, to hit both orders.
    boolean toggle = true;
    for (long i = 0; i < numRows; i++) {
      Put put = new Put(Bytes.toBytes(i));
      put.setDurability(Durability.SKIP_WAL);
      put.addColumn(family, qual1, Bytes.toBytes(i % 10));
      region.put(put);
      if (i != 0 && i % compactInterval == 0) {
        LOG.debug("iteration = " + i + " ts=" + System.currentTimeMillis());
        region.compact(true);
      }
      if (i % 10 == 5L) {
        expectedCount++;
      }
      if (i != 0 && i % flushAndScanInterval == 0) {
        res.clear();
        InternalScanner scanner = region.getScanner(scan);
        try {
          if (toggle) {
            flushThread.flush();
          }
          // Drain the scanner completely into res.
          while (scanner.next(res)) {
            continue;
          }
        } finally {
          // Close the scanner every iteration to avoid leaking scanner resources.
          scanner.close();
        }
        if (!toggle) {
          flushThread.flush();
        }
        assertEquals("toggle=" + toggle + " i=" + i + " ts=" + System.currentTimeMillis(),
            expectedCount, res.size());
        toggle = !toggle;
      }
    }
  } finally {
    try {
      flushThread.done();
      flushThread.join();
      flushThread.checkNoError();
    } catch (InterruptedException ie) {
      LOG.warn("Caught exception when joining with flushThread", ie);
      // Preserve the interrupt status for callers up the stack.
      Thread.currentThread().interrupt();
    }
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
  }
}
Usage example of org.apache.hadoop.hbase.filter.BinaryComparator from the Apache HBase project:
class TestHRegion, method testIndexesScanWithOneDeletedRow.
/**
 * Verifies that a range-filtered scan over a region with one deleted row
 * returns only the surviving row.
 *
 * <p>Fix: the {@code InternalScanner} is now closed after use; the original
 * leaked it.
 */
@Test
public void testIndexesScanWithOneDeletedRow() throws IOException {
  byte[] family = Bytes.toBytes("family");
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, family);
  try {
    // Row 1 is written and flushed, then deleted at the same timestamp.
    Put put = new Put(Bytes.toBytes(1L));
    put.addColumn(family, qual1, 1L, Bytes.toBytes(1L));
    region.put(put);
    region.flush(true);
    Delete delete = new Delete(Bytes.toBytes(1L), 1L);
    region.delete(delete);
    // Row 2 survives.
    put = new Put(Bytes.toBytes(2L));
    put.addColumn(family, qual1, 2L, Bytes.toBytes(2L));
    region.put(put);
    // Range filter 0L <= qual1 <= 3L covers both rows; only row 2 should match
    // because row 1 was deleted.
    Scan idxScan = new Scan();
    idxScan.addFamily(family);
    idxScan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL, Arrays.<Filter>asList(
        new SingleColumnValueFilter(family, qual1, CompareOp.GREATER_OR_EQUAL,
            new BinaryComparator(Bytes.toBytes(0L))),
        new SingleColumnValueFilter(family, qual1, CompareOp.LESS_OR_EQUAL,
            new BinaryComparator(Bytes.toBytes(3L))))));
    InternalScanner scanner = region.getScanner(idxScan);
    try {
      List<Cell> res = new ArrayList<>();
      // Drain the scanner completely into res.
      while (scanner.next(res)) {
        continue;
      }
      assertEquals(1L, res.size());
    } finally {
      // Close the scanner to release its resources.
      scanner.close();
    }
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
  }
}
Aggregations