Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project gradoop by dbs-leipzig.
Example from the class HBaseLabelInTest, method testToHBaseFilter.
/**
 * Verifies that {@link HBaseLabelIn#toHBaseFilter(boolean)} builds a disjunctive
 * (MUST_PASS_ONE) filter list containing one equality filter per label.
 */
@Test
public void testToHBaseFilter() {
  HBaseLabelIn<EPGMEdge> edgeFilter = new HBaseLabelIn<>("test1", "test2");

  FilterList expected = new FilterList(FilterList.Operator.MUST_PASS_ONE);
  // NOTE(review): the expected order is the reverse of the constructor arguments —
  // this mirrors how the filter implementation emits them.
  for (String label : new String[] {"test2", "test1"}) {
    expected.addFilter(new SingleColumnValueFilter(
      Bytes.toBytesBinary(CF_META),
      Bytes.toBytesBinary(COL_LABEL),
      CompareFilter.CompareOp.EQUAL,
      Bytes.toBytesBinary(label)));
  }

  // String comparison is used because HBase filters lack a usable equals().
  assertEquals(edgeFilter.toHBaseFilter(false).toString(), expected.toString());
}
Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project gradoop by dbs-leipzig.
Example from the class HBasePropEqualsTest, method testToHBaseFilter.
/**
 * Verifies that {@link HBasePropEquals#toHBaseFilter(boolean)} builds a conjunctive
 * (MUST_PASS_ALL) filter list matching both the raw property value and its type byte.
 *
 * @param propertyKey the property key under test
 * @param value the property value under test
 */
@Test(dataProvider = "property values")
public void testToHBaseFilter(String propertyKey, Object value) {
  PropertyValue propertyValue = PropertyValue.create(value);
  HBasePropEquals<EPGMVertex> vertexFilter = new HBasePropEquals<>(propertyKey, propertyValue);

  byte[] qualifier = Bytes.toBytesBinary(propertyKey);

  SingleColumnValueFilter valueFilter = new SingleColumnValueFilter(
    Bytes.toBytesBinary(CF_PROPERTY_VALUE),
    qualifier,
    CompareFilter.CompareOp.EQUAL,
    PropertyValueUtils.BytesUtils.getRawBytesWithoutType(propertyValue));
  // Skip the entire row if the value column is absent.
  valueFilter.setFilterIfMissing(true);

  SingleColumnValueFilter typeFilter = new SingleColumnValueFilter(
    Bytes.toBytesBinary(CF_PROPERTY_TYPE),
    qualifier,
    CompareFilter.CompareOp.EQUAL,
    PropertyValueUtils.BytesUtils.getTypeByte(propertyValue));
  // Skip the entire row if the type column is absent.
  typeFilter.setFilterIfMissing(true);

  FilterList expectedFilter = new FilterList(FilterList.Operator.MUST_PASS_ALL);
  expectedFilter.addFilter(valueFilter);
  expectedFilter.addFilter(typeFilter);

  // String comparison is used because HBase filters lack a usable equals().
  assertEquals(vertexFilter.toHBaseFilter(false).toString(), expectedFilter.toString(),
    "Failed during filter comparison for type [" + propertyValue.getType() + "].");
}
Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project gradoop by dbs-leipzig.
Example from the class HBasePropRegTest, method testToHBaseFilter.
/**
 * Verifies that {@link HBasePropReg#toHBaseFilter(boolean)} builds a conjunctive
 * (MUST_PASS_ALL) filter list pairing a regex match on the property value with an
 * equality check on the STRING type byte.
 */
@Test
public void testToHBaseFilter() {
  String key = "key";
  Pattern pattern = Pattern.compile("^FooBar.*$");
  HBasePropReg<EPGMVertex> vertexFilter = new HBasePropReg<>(key, pattern);

  byte[] qualifier = Bytes.toBytesBinary(key);

  SingleColumnValueFilter valueFilter = new SingleColumnValueFilter(
    Bytes.toBytesBinary(CF_PROPERTY_VALUE),
    qualifier,
    CompareFilter.CompareOp.EQUAL,
    new RegexStringComparator(pattern.pattern()));
  // Skip the entire row if the value column is absent.
  valueFilter.setFilterIfMissing(true);

  SingleColumnValueFilter typeFilter = new SingleColumnValueFilter(
    Bytes.toBytesBinary(CF_PROPERTY_TYPE),
    qualifier,
    CompareFilter.CompareOp.EQUAL,
    new byte[] { Type.STRING.getTypeByte() });
  // Skip the entire row if the type column is absent.
  typeFilter.setFilterIfMissing(true);

  // Type filter first: the expected order mirrors the implementation's output.
  FilterList expectedFilter = new FilterList(FilterList.Operator.MUST_PASS_ALL);
  expectedFilter.addFilter(typeFilter);
  expectedFilter.addFilter(valueFilter);

  // String comparison is used because HBase filters lack a usable equals().
  assertEquals(vertexFilter.toHBaseFilter(false).toString(), expectedFilter.toString(),
    "Failed during filter comparison for key [" + key + "].");
}
Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project gradoop by dbs-leipzig.
Example from the class HBaseFilterUtils, method getPropRegFilter.
/**
 * Creates a HBase Filter object representation of propReg predicate
 *
 * @param key the property key to filter for
 * @param reg the pattern to search for
 * @param negate flag to define if this filter should be negated
 * @return the HBase filter representation
 */
public static Filter getPropRegFilter(@Nonnull String key, @Nonnull Pattern reg, boolean negate) {
  // Negation flips both the per-column comparison and, per De Morgan, the
  // list combinator (ALL-of becomes ANY-of).
  final CompareFilter.CompareOp compareOp;
  final FilterList.Operator listOperator;
  if (negate) {
    compareOp = CompareFilter.CompareOp.NOT_EQUAL;
    listOperator = FilterList.Operator.MUST_PASS_ONE;
  } else {
    compareOp = CompareFilter.CompareOp.EQUAL;
    listOperator = FilterList.Operator.MUST_PASS_ALL;
  }

  byte[] qualifier = Bytes.toBytesBinary(key);

  // Regex match against the stored property value.
  SingleColumnValueFilter valueFilter = new SingleColumnValueFilter(
    CF_PROPERTY_VALUE_BYTES, qualifier, compareOp,
    new RegexStringComparator(reg.pattern()));
  // Skip the entire row if the value column is absent.
  valueFilter.setFilterIfMissing(true);

  // Restrict matches to properties stored with the STRING type byte.
  SingleColumnValueFilter typeFilter = new SingleColumnValueFilter(
    CF_PROPERTY_TYPE_BYTES, qualifier, compareOp,
    new byte[] { Type.STRING.getTypeByte() });
  // Skip the entire row if the type column is absent.
  typeFilter.setFilterIfMissing(true);

  FilterList filterList = new FilterList(listOperator);
  filterList.addFilter(typeFilter);
  filterList.addFilter(valueFilter);
  return filterList;
}
Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project java-demo by xiaofu.
Example from the class ClientOp, method scanerReturnRow.
/**
 * Scans the given table's "main" column family and returns up to 50000 row keys.
 * <p>
 * Fixes over the previous version: the {@code finally} block now guards against a
 * {@code null} table (the old code threw NPE when the {@code HTable} constructor
 * failed), the {@link ResultScanner} is closed on all paths, and the dead filter
 * list — built with an unconfigured no-arg {@code new SingleColumnValueFilter()}
 * (a deserialization-only constructor) but never applied because
 * {@code s.setFilter(...)} was commented out — has been removed along with the
 * unused {@code KeyValue[]} local.
 *
 * @param tablename name of the HBase table to scan
 * @return list of row keys as strings (at most 50000); may be empty if the scan fails,
 *         since IOExceptions during scanning are logged and swallowed
 * @throws IOException if closing the table fails
 */
public static List<String> scanerReturnRow(String tablename) throws IOException {
  List<String> lists = Lists.newArrayList();
  HTable table = null;
  try {
    table = new HTable(conf, tablename);
    Scan s = new Scan();
    s.addFamily(Bytes.toBytes("main"));
    // Fetch 1000 rows per RPC and skip the block cache for this full scan.
    s.setCaching(1000);
    s.setCacheBlocks(false);
    ResultScanner rs = table.getScanner(s);
    try {
      int count = 0;
      final int len = 50000;
      for (Result r : rs) {
        // NOTE(review): uses the platform default charset; row keys are
        // presumably ASCII/UTF-8 — confirm before changing.
        lists.add(new String(r.getRow()));
        if (++count == len) {
          break;
        }
      }
    } finally {
      // Close the scanner even when iteration throws.
      rs.close();
    }
  } catch (IOException e) {
    // Preserves original best-effort behavior: log and return what we have.
    e.printStackTrace();
  } finally {
    // Guard: table is null if the HTable constructor threw.
    if (table != null) {
      table.close();
    }
  }
  return lists;
}
Aggregations