Use of org.apache.accumulo.core.data.Key in project Gaffer by gchq: class Gaffer1BloomElementFunctorTest, method shouldTransformRangeEntity.
@Test
public void shouldTransformRangeEntity() throws AccumuloElementConversionException {
    // Create a Range formed from one entity and check the transform
    final Entity entity1 = new Entity(TestGroups.ENTITY);
    entity1.setVertex(1);
    final Key key1 = elementConverter.getKeyFromEntity(entity1);
    final Range range1 = new Range(key1, true, key1, true);
    final org.apache.hadoop.util.bloom.Key expectedBloomKey1 = new org.apache.hadoop.util.bloom.Key(Arrays.copyOf(key1.getRowData().getBackingArray(), key1.getRowData().getBackingArray().length));
    assertEquals(expectedBloomKey1, elementFunctor.transform(range1));
    // Create a Range formed from two entities and check the transform - should get null
    final Entity entity2 = new Entity(TestGroups.ENTITY);
    entity2.setVertex(2);
    final Key key2 = elementConverter.getKeyFromEntity(entity2);
    final Range range2 = new Range(key1, true, key2, true);
    assertNull(elementFunctor.transform(range2));
}
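The assertions above pin down the contract of transform(Range): a Range whose start and end rows identify the same element maps to a single Bloom filter key, and anything wider maps to null. Below is a minimal sketch of that rule, assuming the comparison is done on the raw row bytes; transformSketch is a hypothetical name and this is not the actual Gaffer1BloomElementFunctor implementation.

// Sketch only: mirrors the behaviour asserted above, not Gaffer's real code.
org.apache.hadoop.util.bloom.Key transformSketch(final Range range) {
    if (range.getStartKey() == null || range.getEndKey() == null) {
        // An unbounded range cannot be reduced to a single vertex
        return null;
    }
    final byte[] startRow = range.getStartKey().getRowData().getBackingArray();
    final byte[] endRow = range.getEndKey().getRowData().getBackingArray();
    if (!Arrays.equals(startRow, endRow)) {
        // The range spans more than one row, so there is no single Bloom key
        return null;
    }
    return new org.apache.hadoop.util.bloom.Key(Arrays.copyOf(startRow, startRow.length));
}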
Use of org.apache.accumulo.core.data.Key in project Gaffer by gchq: class Gaffer1BloomElementFunctorTest, method shouldTransformKeyEntity.
@Test
public void shouldTransformKeyEntity() throws AccumuloElementConversionException {
    // Create a Key formed from an entity and check the transform
    final Entity entity1 = new Entity(TestGroups.ENTITY);
    entity1.setVertex(1);
    final Key key1 = elementConverter.getKeyFromEntity(entity1);
    final org.apache.hadoop.util.bloom.Key expectedBloomKey1 = new org.apache.hadoop.util.bloom.Key(elementFunctor.getVertexFromRangeKey(key1.getRowData().getBackingArray()));
    assertEquals(expectedBloomKey1, elementFunctor.transform(key1));
}
Use of org.apache.accumulo.core.data.Key in project Gaffer by gchq: class Gaffer1BloomElementFunctorTest, method shouldTransformRangeEdge.
@Test
public void shouldTransformRangeEdge() throws AccumuloElementConversionException {
    // Create a Range formed from one edge and check the transform
    final Edge edge1 = new Edge(TestGroups.EDGE);
    edge1.setSource(1);
    edge1.setDestination(2);
    final Pair<Key> keys = elementConverter.getKeysFromEdge(edge1);
    final Range range1 = new Range(keys.getFirst().getRow(), true, keys.getFirst().getRow(), true);
    final org.apache.hadoop.util.bloom.Key expectedBloomKey1 = new org.apache.hadoop.util.bloom.Key(elementFunctor.getVertexFromRangeKey(keys.getFirst().getRowData().getBackingArray()));
    assertEquals(expectedBloomKey1, elementFunctor.transform(range1));
    final Range range2 = new Range(keys.getSecond().getRow(), true, keys.getSecond().getRow(), true);
    final org.apache.hadoop.util.bloom.Key expectedBloomKey2 = new org.apache.hadoop.util.bloom.Key(elementFunctor.getVertexFromRangeKey(keys.getSecond().getRowData().getBackingArray()));
    assertEquals(expectedBloomKey2, elementFunctor.transform(range2));
    // Create a Range formed from two different keys and check the transform - should get null
    final Range range3 = new Range(keys.getFirst().getRow(), true, keys.getSecond().getRow(), true);
    assertNull(elementFunctor.transform(range3));
}
Use of org.apache.accumulo.core.data.Key in project Gaffer by gchq: class Gaffer1BloomElementFunctorTest, method shouldTransformRangeWhenRangeHasUnspecifiedStartOrEndKey.
@Test
public void shouldTransformRangeWhenRangeHasUnspecifiedStartOrEndKey() {
    try {
        // Create a Range with an unspecified start key and check the transform - should get null
        final Edge edge1 = new Edge(TestGroups.EDGE);
        edge1.setSource("3");
        edge1.setDestination("4");
        final Pair<Key> keys = elementConverter.getKeysFromEdge(edge1);
        final Range range1 = new Range(null, true, keys.getFirst().getRow(), true);
        assertNull(elementFunctor.transform(range1));
        // Create a Range with an unspecified end key and check the transform - should get null
        final Range range2 = new Range(keys.getFirst().getRow(), true, null, true);
        assertNull(elementFunctor.transform(range2));
    } catch (AccumuloElementConversionException e) {
        fail("ConversionException " + e);
    }
}
Use of org.apache.accumulo.core.data.Key in project presto by prestodb: class IndexLookup, method getIndexRanges.
private List<Range> getIndexRanges(String indexTable, Multimap<AccumuloColumnConstraint, Range> constraintRanges, Collection<Range> rowIDRanges, Authorizations auths) throws TableNotFoundException {
    Set<Range> finalRanges = null;
    // For each column/constraint pair
    for (Entry<AccumuloColumnConstraint, Collection<Range>> constraintEntry : constraintRanges.asMap().entrySet()) {
        // Create a batch scanner against the index table, setting the ranges
        BatchScanner scanner = connector.createBatchScanner(indexTable, auths, 10);
        scanner.setRanges(constraintEntry.getValue());
        // Fetch the column family for this specific column
        Text family = new Text(Indexer.getIndexColumnFamily(constraintEntry.getKey().getFamily().getBytes(UTF_8), constraintEntry.getKey().getQualifier().getBytes(UTF_8)).array());
        scanner.fetchColumnFamily(family);
        // For each entry in the scanner
        Text tmpQualifier = new Text();
        Set<Range> columnRanges = new HashSet<>();
        for (Entry<Key, Value> entry : scanner) {
            entry.getKey().getColumnQualifier(tmpQualifier);
            // Add to our column ranges if it is in one of the row ID ranges
            if (inRange(tmpQualifier, rowIDRanges)) {
                columnRanges.add(new Range(tmpQualifier));
            }
        }
        LOG.debug("Retrieved %d ranges for column %s", columnRanges.size(), constraintEntry.getKey().getName());
        // If finalRanges is null, we have not yet added any column ranges
        if (finalRanges == null) {
            finalRanges = new HashSet<>();
            finalRanges.addAll(columnRanges);
        } else {
            // Retain only the row IDs for this column that have already been added
            // This is your set intersection operation!
            finalRanges.retainAll(columnRanges);
        }
        // Close the scanner
        scanner.close();
    }
    // Return the final ranges for all constraint pairs
    if (finalRanges != null) {
        return ImmutableList.copyOf(finalRanges);
    } else {
        return ImmutableList.of();
    }
}
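getIndexRanges intersects the candidate row-ID ranges found for each column constraint, so a row survives only if its index entries matched every constraint. As a hedged illustration of how the result is typically consumed (the table names, the process call, and the call site are assumptions, not the presto connector's actual code), the returned ranges can be passed directly to a batch scan of the data table:

// Illustrative only: scan a hypothetical data table with the intersected ranges.
List<Range> indexRanges = getIndexRanges("example_index", constraintRanges, rowIDRanges, auths);
if (!indexRanges.isEmpty()) {
    BatchScanner dataScanner = connector.createBatchScanner("example_data", auths, 10);
    try {
        dataScanner.setRanges(indexRanges);
        for (Entry<Key, Value> entry : dataScanner) {
            // Every entry belongs to a row that satisfied all column constraints
            process(entry); // 'process' is a placeholder, not a method defined here
        }
    } finally {
        dataScanner.close();
    }
}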