Use of org.apache.hadoop.hbase.filter.KeyOnlyFilter in project hbase by apache.
From the class TestFromClientSide, method testKeyOnlyFilterWithReverseScan:
@Test
public void testKeyOnlyFilterWithReverseScan() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  Table ht = TEST_UTIL.createTable(tableName, FAMILY);
  byte[][] ROWS = makeN(ROW, 10);
  byte[][] QUALIFIERS =
      { Bytes.toBytes("col0-<d2v1>-<d3v2>"), Bytes.toBytes("col1-<d2v1>-<d3v2>"),
        Bytes.toBytes("col2-<d2v1>-<d3v2>"), Bytes.toBytes("col3-<d2v1>-<d3v2>"),
        Bytes.toBytes("col4-<d2v1>-<d3v2>"), Bytes.toBytes("col5-<d2v1>-<d3v2>"),
        Bytes.toBytes("col6-<d2v1>-<d3v2>"), Bytes.toBytes("col7-<d2v1>-<d3v2>"),
        Bytes.toBytes("col8-<d2v1>-<d3v2>"), Bytes.toBytes("col9-<d2v1>-<d3v2>") };
  for (int i = 0; i < 10; i++) {
    Put put = new Put(ROWS[i]);
    put.addColumn(FAMILY, QUALIFIERS[i], VALUE);
    ht.put(put);
  }
  Scan scan = new Scan();
  scan.setReversed(true);
  scan.addFamily(FAMILY);
  Filter filter = new KeyOnlyFilter(true);
  scan.setFilter(filter);
  // Iterate the single scanner so it can be closed below.
  ResultScanner scanner = ht.getScanner(scan);
  int count = 0;
  for (Result result : scanner) {
    assertEquals(1, result.size());
    // With lenAsVal=true, the value is replaced by a 4-byte int holding
    // the length of the original value.
    assertEquals(Bytes.SIZEOF_INT, result.rawCells()[0].getValueLength());
    assertEquals(VALUE.length, Bytes.toInt(CellUtil.cloneValue(result.rawCells()[0])));
    count++;
  }
  assertEquals(10, count);
  scanner.close();
  ht.close();
}
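Both length assertions hinge on the constructor argument: KeyOnlyFilter(true) (lenAsVal) makes the server replace each cell's value with a 4-byte int holding the original value's length, while row keys, qualifiers, and timestamps pass through unchanged. A minimal standalone sketch of the same idea; printValueLengths is a hypothetical helper name, not part of the test above:

import java.io.IOException;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical helper: reports the length of every value in `table` without
// transferring the values themselves, since KeyOnlyFilter(true) makes the
// server send back a 4-byte length in place of each value.
static void printValueLengths(Table table) throws IOException {
  Scan scan = new Scan();
  scan.setFilter(new KeyOnlyFilter(true));
  try (ResultScanner scanner = table.getScanner(scan)) {
    for (Result r : scanner) {
      int originalLength = Bytes.toInt(CellUtil.cloneValue(r.rawCells()[0]));
      System.out.println(Bytes.toStringBinary(r.getRow()) + " -> " + originalLength);
    }
  }
}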
Use of org.apache.hadoop.hbase.filter.KeyOnlyFilter in project hbase by apache.
From the class TestGet, method testDynamicFilter:
@Test
public void testDynamicFilter() throws Exception {
  Configuration conf = HBaseConfiguration.create();
  String localPath = conf.get("hbase.local.dir") + File.separator + "jars" + File.separator;
  File jarFile = new File(localPath, "MockFilter.jar");
  jarFile.delete();
  assertFalse("Should be deleted: " + jarFile.getPath(), jarFile.exists());
  ClientProtos.Get getProto1 = ClientProtos.Get.parseFrom(Base64.decode(PB_GET));
  ClientProtos.Get getProto2 = ClientProtos.Get.parseFrom(Base64.decode(PB_GET_WITH_FILTER_LIST));
  // Without MockFilter.jar on the dynamic classpath, both conversions must fail.
  try {
    ProtobufUtil.toGet(getProto1);
    fail("Should not be able to load the filter class");
  } catch (IOException ioe) {
    assertTrue(ioe.getCause() instanceof ClassNotFoundException);
  }
  try {
    ProtobufUtil.toGet(getProto2);
    fail("Should not be able to load the filter class");
  } catch (IOException ioe) {
    assertTrue(ioe.getCause() instanceof InvocationTargetException);
    InvocationTargetException ite = (InvocationTargetException) ioe.getCause();
    assertTrue(ite.getTargetException() instanceof DeserializationException);
  }
  // Drop the jar into place; the dynamic class loader can now resolve the filters.
  FileOutputStream fos = new FileOutputStream(jarFile);
  fos.write(Base64.decode(MOCK_FILTER_JAR));
  fos.close();
  Get get1 = ProtobufUtil.toGet(getProto1);
  assertEquals("test.MockFilter", get1.getFilter().getClass().getName());
  Get get2 = ProtobufUtil.toGet(getProto2);
  assertTrue(get2.getFilter() instanceof FilterList);
  List<Filter> filters = ((FilterList) get2.getFilter()).getFilters();
  assertEquals(3, filters.size());
  assertEquals("test.MockFilter", filters.get(0).getClass().getName());
  assertEquals("my.MockFilter", filters.get(1).getClass().getName());
  assertTrue(filters.get(2) instanceof KeyOnlyFilter);
}
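The test exercises HBase's dynamic filter loading: ProtobufUtil.toGet resolves filter classes by name, and test.MockFilter only resolves once MockFilter.jar lands under hbase.local.dir/jars, where the dynamic class loader picks it up. A built-in filter such as KeyOnlyFilter needs no extra jar. Below is a minimal sketch of its protobuf round trip, assuming the two ProtobufUtil.toFilter conversion overloads (the package of ProtobufUtil varies across HBase versions):

import java.io.IOException;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;

// Built-in filters round-trip through protobuf with nothing on the dynamic
// classpath: serialize the filter, then deserialize it by class name.
static void roundTripBuiltInFilter() throws IOException {
  Filter original = new FilterList(new FirstKeyOnlyFilter(), new KeyOnlyFilter(true));
  Filter restored = ProtobufUtil.toFilter(ProtobufUtil.toFilter(original));
  assert restored instanceof FilterList;
}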
Use of org.apache.hadoop.hbase.filter.KeyOnlyFilter in project hbase by apache.
From the class TestFromClientSide, method testKeyOnlyFilter:
@Test
public void testKeyOnlyFilter() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  Table ht = TEST_UTIL.createTable(tableName, FAMILY);
  byte[][] ROWS = makeN(ROW, 10);
  byte[][] QUALIFIERS =
      { Bytes.toBytes("col0-<d2v1>-<d3v2>"), Bytes.toBytes("col1-<d2v1>-<d3v2>"),
        Bytes.toBytes("col2-<d2v1>-<d3v2>"), Bytes.toBytes("col3-<d2v1>-<d3v2>"),
        Bytes.toBytes("col4-<d2v1>-<d3v2>"), Bytes.toBytes("col5-<d2v1>-<d3v2>"),
        Bytes.toBytes("col6-<d2v1>-<d3v2>"), Bytes.toBytes("col7-<d2v1>-<d3v2>"),
        Bytes.toBytes("col8-<d2v1>-<d3v2>"), Bytes.toBytes("col9-<d2v1>-<d3v2>") };
  for (int i = 0; i < 10; i++) {
    Put put = new Put(ROWS[i]);
    put.setDurability(Durability.SKIP_WAL);
    put.addColumn(FAMILY, QUALIFIERS[i], VALUE);
    ht.put(put);
  }
  Scan scan = new Scan();
  scan.addFamily(FAMILY);
  Filter filter = new KeyOnlyFilter(true);
  scan.setFilter(filter);
  // Iterate the single scanner so it can be closed below.
  ResultScanner scanner = ht.getScanner(scan);
  int count = 0;
  for (Result result : scanner) {
    assertEquals(1, result.size());
    assertEquals(Bytes.SIZEOF_INT, result.rawCells()[0].getValueLength());
    assertEquals(VALUE.length, Bytes.toInt(CellUtil.cloneValue(result.rawCells()[0])));
    count++;
  }
  assertEquals(10, count);
  scanner.close();
}
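Compared with the reverse-scan variant above, this test writes with Durability.SKIP_WAL (skipping the write-ahead log, a common test speed-up) and scans in the default forward direction. Note that KeyOnlyFilter's no-argument constructor behaves differently from new KeyOnlyFilter(true): it strips values entirely instead of replacing them with their lengths. A minimal sketch for contrast; keysOnlyScan is a hypothetical name:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;

// Default form: values come back empty rather than as 4-byte lengths, so
// result.rawCells()[0].getValueLength() would be 0 for every returned cell.
static Scan keysOnlyScan() {
  Scan scan = new Scan();
  scan.setFilter(new KeyOnlyFilter());
  return scan;
}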
Use of org.apache.hadoop.hbase.filter.KeyOnlyFilter in project hive by apache.
From the class HiveHBaseInputFormatUtil, method getScan:
/**
* Parse {@code jobConf} to create a {@link Scan} instance.
*/
public static Scan getScan(JobConf jobConf) throws IOException {
  String hbaseColumnsMapping = jobConf.get(HBaseSerDe.HBASE_COLUMNS_MAPPING);
  boolean doColumnRegexMatching =
      jobConf.getBoolean(HBaseSerDe.HBASE_COLUMNS_REGEX_MATCHING, true);
  List<Integer> readColIDs = ColumnProjectionUtils.getReadColumnIDs(jobConf);
  ColumnMappings columnMappings;
  try {
    columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping, doColumnRegexMatching);
  } catch (SerDeException e) {
    throw new IOException(e);
  }
  if (columnMappings.size() < readColIDs.size()) {
    throw new IOException("Cannot read more columns than the given table contains.");
  }
  boolean readAllColumns = ColumnProjectionUtils.isReadAllColumns(jobConf);
  Scan scan = new Scan();
  boolean empty = true;
  // The list of families that have been added to the scan
  List<String> addedFamilies = new ArrayList<String>();
  if (!readAllColumns) {
    ColumnMapping[] columnsMapping = columnMappings.getColumnsMapping();
    for (int i : readColIDs) {
      ColumnMapping colMap = columnsMapping[i];
      if (colMap.hbaseRowKey || colMap.hbaseTimestamp) {
        continue;
      }
      if (colMap.qualifierName == null) {
        scan.addFamily(colMap.familyNameBytes);
        addedFamilies.add(colMap.familyName);
      } else {
        // Add the column only if its whole family has not already been added.
        if (!addedFamilies.contains(colMap.familyName)) {
          scan.addColumn(colMap.familyNameBytes, colMap.qualifierNameBytes);
        }
      }
      empty = false;
    }
  }
  // No columns were projected: either read everything, or count on the keys only.
  if (empty) {
    if (readAllColumns) {
      for (ColumnMapping colMap : columnMappings) {
        if (colMap.hbaseRowKey || colMap.hbaseTimestamp) {
          continue;
        }
        if (colMap.qualifierName == null) {
          scan.addFamily(colMap.familyNameBytes);
        } else {
          scan.addColumn(colMap.familyNameBytes, colMap.qualifierNameBytes);
        }
      }
    } else {
      // Add a filter to just do a scan on the keys so that we pick up everything
      scan.setFilter(new FilterList(new FirstKeyOnlyFilter(), new KeyOnlyFilter()));
    }
  }
  String scanCache = jobConf.get(HBaseSerDe.HBASE_SCAN_CACHE);
  if (scanCache != null) {
    scan.setCaching(Integer.parseInt(scanCache));
  }
  String scanCacheBlocks = jobConf.get(HBaseSerDe.HBASE_SCAN_CACHEBLOCKS);
  if (scanCacheBlocks != null) {
    scan.setCacheBlocks(Boolean.parseBoolean(scanCacheBlocks));
  }
  String scanBatch = jobConf.get(HBaseSerDe.HBASE_SCAN_BATCH);
  if (scanBatch != null) {
    scan.setBatch(Integer.parseInt(scanBatch));
  }
  return scan;
}
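The FilterList in the empty-projection branch pairs FirstKeyOnlyFilter (return only the first cell of each row) with KeyOnlyFilter (strip that cell's value), which is the cheapest way to enumerate row keys for queries like count(*). A hypothetical caller sketch follows; the mapping and cache values are made up, and which branch of getScan runs depends on the projection that Hive's planner records in the JobConf via ColumnProjectionUtils:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.mapred.JobConf;

// Hypothetical driver code, assumed to live alongside HiveHBaseInputFormatUtil:
// with an empty projection and read-all-columns disabled, the returned Scan
// carries the FirstKeyOnlyFilter + KeyOnlyFilter pair built above.
static Scan buildScanForJob() throws IOException {
  JobConf jobConf = new JobConf();
  jobConf.set(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cf:col0"); // illustrative mapping
  jobConf.set(HBaseSerDe.HBASE_SCAN_CACHE, "500");               // illustrative cache size
  return HiveHBaseInputFormatUtil.getScan(jobConf);
}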