Use of com.baidu.hugegraph.structure.HugeEdge in project incubator-hugegraph by apache.
The class CachedGraphTransaction, method queryEdgesFromBackend:
@Override
@Watched(prefix = "graphcache")
protected final Iterator<HugeEdge> queryEdgesFromBackend(Query query) {
    RamTable ramtable = this.params().ramtable();
    if (ramtable != null && ramtable.matched(query)) {
        return ramtable.query(query);
    }

    if (!this.enableCacheEdge() || query.empty() ||
        query.paging() || query.bigCapacity()) {
        // Don't cache when the edge cache is disabled, or the query is
        // empty, paged or has big capacity
        return super.queryEdgesFromBackend(query);
    }

    Id cacheKey = new QueryId(query);
    Object value = this.edgesCache.get(cacheKey);
    @SuppressWarnings("unchecked")
    Collection<HugeEdge> edges = (Collection<HugeEdge>) value;
    if (value != null) {
        for (HugeEdge edge : edges) {
            if (edge.expired()) {
                this.edgesCache.invalidate(cacheKey);
                value = null;
            }
        }
    }

    if (value != null) {
        // Cache hit with no expired edge, return the cached edges directly
        return edges.iterator();
    }

    Iterator<HugeEdge> rs = super.queryEdgesFromBackend(query);

    /*
     * An iterator can't be cached, so cache a list instead. A query may
     * hit a super node with too many edges, so fetch a few of the head
     * results first to decide whether to cache them.
     */
    final int tryMax = 1 + MAX_CACHE_EDGES_PER_QUERY;
    assert tryMax > MAX_CACHE_EDGES_PER_QUERY;
    edges = new ArrayList<>(tryMax);
    for (int i = 0; rs.hasNext() && i < tryMax; i++) {
        edges.add(rs.next());
    }

    if (edges.size() == 0) {
        this.edgesCache.update(cacheKey, Collections.emptyList());
    } else if (edges.size() <= MAX_CACHE_EDGES_PER_QUERY) {
        this.edgesCache.update(cacheKey, edges);
    }

    return new ExtendableIterator<>(edges.iterator(), rs);
}
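The noteworthy detail above is that an Iterator can't be cached directly, so the method buffers at most MAX_CACHE_EDGES_PER_QUERY + 1 head results and only updates the cache when the result stays within that bound. Below is a minimal standalone sketch of the same idea, assuming a plain HashMap as the cache and a hand-rolled iterator concatenation in place of ExtendableIterator; the class and parameter names are illustrative and the per-edge expiry check is omitted.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

public class BoundedPrefixCache<K, V> {

    private final int maxCachedPerQuery;
    private final Map<K, List<V>> cache = new HashMap<>();

    public BoundedPrefixCache(int maxCachedPerQuery) {
        this.maxCachedPerQuery = maxCachedPerQuery;
    }

    /** Return cached results, or fetch from the backend and cache a bounded prefix. */
    public Iterator<V> query(K key, Iterator<V> backend) {
        List<V> cached = this.cache.get(key);
        if (cached != null) {
            return cached.iterator();
        }
        // Fetch one element more than the bound to detect oversized results
        int tryMax = this.maxCachedPerQuery + 1;
        List<V> head = new ArrayList<>(tryMax);
        for (int i = 0; backend.hasNext() && i < tryMax; i++) {
            head.add(backend.next());
        }
        if (head.size() <= this.maxCachedPerQuery) {
            // The whole result fits (possibly empty), so it is safe to cache
            this.cache.put(key, List.copyOf(head));
        }
        // Hand back the buffered head followed by whatever the backend still holds
        return concat(head.iterator(), backend);
    }

    private static <V> Iterator<V> concat(Iterator<V> first, Iterator<V> second) {
        return new Iterator<V>() {
            @Override
            public boolean hasNext() {
                return first.hasNext() || second.hasNext();
            }

            @Override
            public V next() {
                return first.hasNext() ? first.next() : second.next();
            }
        };
    }
}

For example, with maxCachedPerQuery = 2, querying a backend iterator of three elements returns all three results but skips caching, mirroring the edges.size() <= MAX_CACHE_EDGES_PER_QUERY branch above.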
Use of com.baidu.hugegraph.structure.HugeEdge in project incubator-hugegraph by apache.
The class CustomizePathsTraverser, method customizedPaths:
public List<Path> customizedPaths(Iterator<Vertex> vertices,
                                  List<WeightedEdgeStep> steps,
                                  boolean sorted, long capacity, long limit) {
    E.checkArgument(vertices.hasNext(),
                    "The source vertices can't be empty");
    E.checkArgument(!steps.isEmpty(), "The steps can't be empty");
    checkCapacity(capacity);
    checkLimit(limit);

    MultivaluedMap<Id, Node> sources = newMultivalueMap();
    while (vertices.hasNext()) {
        HugeVertex vertex = (HugeVertex) vertices.next();
        Node node = sorted ?
                    new WeightNode(vertex.id(), null, 0) :
                    new Node(vertex.id(), null);
        sources.add(vertex.id(), node);
    }
    int stepNum = steps.size();
    int pathCount = 0;
    long access = 0;
    MultivaluedMap<Id, Node> newVertices = null;
    root: for (WeightedEdgeStep step : steps) {
        stepNum--;
        newVertices = newMultivalueMap();
        Iterator<Edge> edges;
        // Traverse the vertices of the previous level
        for (Map.Entry<Id, List<Node>> entry : sources.entrySet()) {
            List<Node> adjacency = newList();
            edges = this.edgesOfVertex(entry.getKey(), step.step());
            while (edges.hasNext()) {
                HugeEdge edge = (HugeEdge) edges.next();
                Id target = edge.id().otherVertexId();
                for (Node n : entry.getValue()) {
                    // Skip the target if it would form a loop in the path
                    if (n.contains(target)) {
                        continue;
                    }
                    Node newNode;
                    if (sorted) {
                        double w = step.weightBy() != null ?
                                   edge.value(step.weightBy().name()) :
                                   step.defaultWeight();
                        newNode = new WeightNode(target, n, w);
                    } else {
                        newNode = new Node(target, n);
                    }
                    adjacency.add(newNode);

                    checkCapacity(capacity, ++access, "customized paths");
                }
            }

            if (step.sample() > 0) {
                // Sample the current node's adjacent nodes
                adjacency = sample(adjacency, step.sample());
            }

            // Add the current node's adjacent nodes
            for (Node node : adjacency) {
                newVertices.add(node.id(), node);
                // Avoid exceeding the limit
                if (stepNum == 0) {
                    if (limit != NO_LIMIT && !sorted &&
                        ++pathCount >= limit) {
                        break root;
                    }
                }
            }
        }
        // Re-init sources
        sources = newVertices;
    }
    if (stepNum != 0) {
        return ImmutableList.of();
    }
    List<Path> paths = newList();
    for (List<Node> nodes : newVertices.values()) {
        for (Node n : nodes) {
            if (sorted) {
                WeightNode wn = (WeightNode) n;
                paths.add(new WeightPath(wn.path(), wn.weights()));
            } else {
                paths.add(new Path(n.path()));
            }
        }
    }
    return paths;
}
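Note that each layer only stores (id, parent) pairs: n.contains(target) walks the parent chain to detect loops, and full paths are rebuilt at the end via n.path(). The sketch below shows that back-pointer structure in isolation, using a hypothetical PathNode class that follows the same idea but is not HugeGraph's Node.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

final class PathNode {

    final String id;
    final PathNode parent;          // null for a source vertex

    PathNode(String id, PathNode parent) {
        this.id = id;
        this.parent = parent;
    }

    /** Loop check: does this partial path already contain the given id? */
    boolean contains(String target) {
        for (PathNode n = this; n != null; n = n.parent) {
            if (n.id.equals(target)) {
                return true;
            }
        }
        return false;
    }

    /** Rebuild the path from the source vertex to this node. */
    List<String> path() {
        Deque<String> path = new ArrayDeque<>();
        for (PathNode n = this; n != null; n = n.parent) {
            path.addFirst(n.id);
        }
        return new ArrayList<>(path);
    }

    public static void main(String[] args) {
        PathNode a = new PathNode("A", null);
        PathNode b = new PathNode("B", a);
        PathNode c = new PathNode("C", b);
        System.out.println(c.path());         // [A, B, C]
        System.out.println(c.contains("B"));  // true
    }
}

Running main prints [A, B, C] and true: an edge from C back to B would be skipped, just like the loop check in the traversal above.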
Use of com.baidu.hugegraph.structure.HugeEdge in project incubator-hugegraph by apache.
The class CountTraverser, method count:
public long count(Id source, List<EdgeStep> steps,
                  boolean containsTraversed, long dedupSize) {
    E.checkNotNull(source, "source vertex id");
    this.checkVertexExist(source, "source vertex");
    E.checkArgument(steps != null && !steps.isEmpty(),
                    "The steps can't be empty");
    checkDedupSize(dedupSize);

    this.containsTraversed = containsTraversed;
    this.dedupSize = dedupSize;
    if (this.containsTraversed) {
        this.count.increment();
    }

    int stepNum = steps.size();
    EdgeStep firstStep = steps.get(0);

    if (stepNum == 1) {
        // Just one step, query the count and return
        long edgesCount = this.edgesCount(source, firstStep);
        this.count.add(edgesCount);
        return this.count.longValue();
    }

    // Multiple steps, turn the first step into an edge iterator
    Iterator<Edge> edges = this.edgesOfVertexWithCount(source, firstStep);
    // Wrap every step except the last one into a lazy iterator
    for (int i = 1; i < stepNum - 1; i++) {
        EdgeStep currentStep = steps.get(i);
        edges = new FlatMapperIterator<>(edges, (edge) -> {
            Id target = ((HugeEdge) edge).id().otherVertexId();
            return this.edgesOfVertexWithCount(target, currentStep);
        });
    }

    // For the last step, just query the count
    EdgeStep lastStep = steps.get(stepNum - 1);
    while (edges.hasNext()) {
        Id target = ((HugeEdge) edges.next()).id().otherVertexId();
        if (this.dedup(target)) {
            continue;
        }
        // Count the last-layer vertices (without dedup size)
        long edgesCount = this.edgesCount(target, lastStep);
        this.count.add(edgesCount);
    }

    return this.count.longValue();
}
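The intermediate steps are chained lazily: each wrapped iterator expands one edge into the next vertex's edge iterator, so no full level is materialized and only the last layer issues count queries. Below is a simplified stand-in for that flat-mapping pattern; it is a generic sketch, not HugeGraph's FlatMapperIterator.

import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.function.Function;

final class FlatMapIterator<T, R> implements Iterator<R> {

    private final Iterator<T> source;
    private final Function<T, Iterator<R>> mapper;
    private Iterator<R> current = Collections.emptyIterator();

    FlatMapIterator(Iterator<T> source, Function<T, Iterator<R>> mapper) {
        this.source = source;
        this.mapper = mapper;
    }

    @Override
    public boolean hasNext() {
        // Advance to the next non-empty sub-iterator on demand
        while (!this.current.hasNext() && this.source.hasNext()) {
            this.current = this.mapper.apply(this.source.next());
        }
        return this.current.hasNext();
    }

    @Override
    public R next() {
        if (!this.hasNext()) {
            throw new NoSuchElementException();
        }
        return this.current.next();
    }

    public static void main(String[] args) {
        // Expand "vertices" level by level without materializing any level
        Map<String, List<String>> adjacency = Map.of(
                "A", List.of("B", "C"),
                "B", List.of("D"),
                "C", List.of());
        Iterator<String> level1 = List.of("A").iterator();
        Iterator<String> level2 = new FlatMapIterator<>(level1,
                v -> adjacency.getOrDefault(v, List.of()).iterator());
        Iterator<String> level3 = new FlatMapIterator<>(level2,
                v -> adjacency.getOrDefault(v, List.of()).iterator());
        level3.forEachRemaining(System.out::println); // prints D
    }
}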
Use of com.baidu.hugegraph.structure.HugeEdge in project incubator-hugegraph by apache.
The class KoutTraverser, method customizedKout:
public KoutRecords customizedKout(Id source, EdgeStep step, int maxDepth,
                                  boolean nearest, long capacity, long limit) {
    E.checkNotNull(source, "source vertex id");
    this.checkVertexExist(source, "source vertex");
    checkPositive(maxDepth, "k-out max_depth");
    checkCapacity(capacity);
    checkLimit(limit);

    long[] depth = new long[1];
    depth[0] = maxDepth;
    boolean concurrent = maxDepth >= this.concurrentDepth();

    KoutRecords records = new KoutRecords(concurrent, source, nearest);

    Consumer<Id> consumer = v -> {
        if (this.reachLimit(limit, depth[0], records.size())) {
            return;
        }
        Iterator<Edge> edges = edgesOfVertex(v, step);
        while (!this.reachLimit(limit, depth[0], records.size()) &&
               edges.hasNext()) {
            Id target = ((HugeEdge) edges.next()).id().otherVertexId();
            records.addPath(v, target);
            this.checkCapacity(capacity, records.accessed(), depth[0]);
        }
    };

    while (depth[0]-- > 0) {
        records.startOneLayer(true);
        this.traverseIds(records.keys(), consumer, concurrent);
        records.finishOneLayer();
    }
    return records;
}
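One small Java detail worth noting: the remaining depth is kept in a one-element long[] because the Consumer lambda may only capture effectively final locals, yet it needs to observe the value changing as the outer loop peels off layers. A minimal sketch of that idiom, unrelated to HugeGraph's own classes:

import java.util.List;
import java.util.function.Consumer;

public class MutableCaptureDemo {

    public static void main(String[] args) {
        // Lambdas may only capture effectively final locals, so the remaining
        // depth is wrapped in a one-element array that the loop can mutate
        long[] depth = new long[1];
        depth[0] = 3;

        Consumer<String> consumer = v ->
                System.out.println("visit " + v + ", remaining depth " + depth[0]);

        // The lambda observes the decremented value on every layer
        while (depth[0]-- > 0) {
            List.of("A", "B").forEach(consumer);
        }
    }
}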
Use of com.baidu.hugegraph.structure.HugeEdge in project incubator-hugegraph by apache.
The class NeighborRankTraverser, method neighborRank:
public List<Map<Id, Double>> neighborRank(Id source, List<Step> steps) {
    E.checkNotNull(source, "source vertex id");
    this.checkVertexExist(source, "source vertex");
    E.checkArgument(!steps.isEmpty(), "The steps can't be empty");

    MultivaluedMap<Id, Node> sources = newMultivalueMap();
    sources.add(source, new Node(source, null));

    boolean sameLayerTransfer = true;
    long access = 0;
    // Results: the ranks of each layer
    List<Ranks> ranks = newList();
    ranks.add(Ranks.of(source, 1.0));

    for (Step step : steps) {
        Ranks lastLayerRanks = ranks.get(ranks.size() - 1);
        Map<Id, Double> sameLayerIncrRanks = newMap();
        List<Adjacencies> adjacencies = newList();
        MultivaluedMap<Id, Node> newVertices = newMultivalueMap();
        // Traverse the vertices of the previous level
        for (Map.Entry<Id, List<Node>> entry : sources.entrySet()) {
            Id vertex = entry.getKey();
            Iterator<Edge> edges = this.edgesOfVertex(vertex, step.edgeStep);

            Adjacencies adjacenciesV = new Adjacencies(vertex);
            Set<Id> sameLayerNodesV = newIdSet();
            Map<Integer, Set<Id>> prevLayerNodesV = newMap();
            while (edges.hasNext()) {
                HugeEdge edge = (HugeEdge) edges.next();
                Id target = edge.id().otherVertexId();
                // Determine whether the target belongs to the same layer
                if (this.belongToSameLayer(sources.keySet(), target,
                                           sameLayerNodesV)) {
                    continue;
                }
                /*
                 * Determine whether the target belongs to previous layers;
                 * if so, update its weight but don't pass it on any further
                 */
                if (this.belongToPrevLayers(ranks, target,
                                            prevLayerNodesV)) {
                    continue;
                }
                for (Node n : entry.getValue()) {
                    // Skip the target if it would form a loop in the path
                    if (n.contains(target)) {
                        continue;
                    }
                    Node newNode = new Node(target, n);
                    adjacenciesV.add(newNode);
                    // Add the adjacent node to the sources of the next step
                    newVertices.add(target, newNode);

                    checkCapacity(this.capacity, ++access, "neighbor rank");
                }
            }
            long degree = sameLayerNodesV.size() + prevLayerNodesV.size() +
                          adjacenciesV.nodes().size();
            if (degree == 0L) {
                continue;
            }
            adjacenciesV.degree(degree);
            adjacencies.add(adjacenciesV);

            double incr = lastLayerRanks.getOrDefault(vertex, 0.0) *
                          this.alpha / degree;
            // Merge the increments of the same-layer nodes
            this.mergeSameLayerIncrRanks(sameLayerNodesV, incr,
                                         sameLayerIncrRanks);
            // Add contributions to the previous layers
            this.contributePrevLayers(ranks, incr, prevLayerNodesV);
        }

        Ranks newLayerRanks;
        if (sameLayerTransfer) {
            // First contribute to the last layer, then pass to the new layer
            this.contributeLastLayer(sameLayerIncrRanks, lastLayerRanks);
            newLayerRanks = this.contributeNewLayer(adjacencies,
                                                    lastLayerRanks,
                                                    step.capacity);
        } else {
            // First pass to the new layer, then contribute to the last layer
            newLayerRanks = this.contributeNewLayer(adjacencies,
                                                    lastLayerRanks,
                                                    step.capacity);
            this.contributeLastLayer(sameLayerIncrRanks, lastLayerRanks);
        }
        ranks.add(newLayerRanks);

        // Re-init sources
        sources = newVertices;
    }
    return this.topRanks(ranks, steps);
}
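Each visited vertex spreads lastLayerRanks.get(vertex) * alpha / degree to the nodes it reaches, and those per-layer increments are then merged into Ranks. Below is a tiny standalone sketch of a single propagation step under that formula, with an illustrative alpha and adjacency map; the real method additionally folds contributions back into same-layer and previous-layer nodes, which the sketch omits.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RankSpreadDemo {

    public static void main(String[] args) {
        // One propagation step in the spirit of the snippet: every source
        // vertex spreads lastRank(vertex) * alpha / degree to its neighbors
        double alpha = 0.85;
        Map<String, Double> lastLayerRanks = Map.of("A", 1.0);
        Map<String, List<String>> adjacency = Map.of("A", List.of("B", "C"));

        Map<String, Double> newLayerRanks = new HashMap<>();
        for (Map.Entry<String, List<String>> e : adjacency.entrySet()) {
            String vertex = e.getKey();
            List<String> neighbors = e.getValue();
            long degree = neighbors.size();
            if (degree == 0L) {
                continue;
            }
            double incr = lastLayerRanks.getOrDefault(vertex, 0.0) *
                          alpha / degree;
            for (String target : neighbors) {
                // Merge increments when several sources reach the same target
                newLayerRanks.merge(target, incr, Double::sum);
            }
        }
        System.out.println(newLayerRanks); // e.g. {B=0.425, C=0.425}
    }
}

With alpha = 0.85 and A pointing to B and C, each neighbor receives 1.0 * 0.85 / 2 = 0.425.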