Use of org.neo4j.procedure.Description in project neo4j-apoc-procedures by neo4j-contrib.
The class Cliques, method cliquesWithNode.
@Deprecated
@Procedure
@Description("apoc.algo.cliquesWithNode(startNode, minSize) YIELD cliques - search the graph and return all maximal cliques that "
        + "are at least as large than the minimum size argument and contain this node ")
public Stream<CliqueResult> cliquesWithNode(@Name("startNode") Node startNode, @Name("minSize") Number size) {
    HashMap<Long, Node> nodesToSearchFrom = new HashMap<>();
    nodesToSearchFrom.put(startNode.getId(), startNode);
    for (Relationship relationship : startNode.getRelationships()) {
        Node otherNode = relationship.getOtherNode(startNode);
        nodesToSearchFrom.put(otherNode.getId(), otherNode);
    }
    return find_clique(new HashMap<>(), nodesToSearchFrom, new HashMap<>())
            .stream()
            .filter(cliqueResult -> cliqueResult.clique.size() >= size.intValue());
}
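A minimal usage sketch, not part of the project source, showing how the deprecated procedure above could be called from an embedded Neo4j 3.x GraphDatabaseService. The Person label, the name property, and the minimum clique size are illustrative assumptions; the yielded column name clique is inferred from the CliqueResult.clique field used in the snippet.

import java.util.HashMap;
import java.util.Map;

import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Result;
import org.neo4j.graphdb.Transaction;

public class CliquesWithNodeUsage {

    // Prints every maximal clique of size >= minSize that contains the matched node.
    // Assumptions: a Person label with a name property, and a "clique" result column.
    public static void printCliques(GraphDatabaseService db, String personName, long minSize) {
        Map<String, Object> params = new HashMap<>();
        params.put("name", personName);
        params.put("minSize", minSize);
        String query = "MATCH (n:Person {name: $name}) "
                + "CALL apoc.algo.cliquesWithNode(n, $minSize) YIELD clique "
                + "RETURN clique";
        try (Transaction tx = db.beginTx();
             Result result = db.execute(query, params)) {
            while (result.hasNext()) {
                System.out.println(result.next().get("clique")); // one maximal clique per row
            }
            tx.success();
        }
    }
}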
Use of org.neo4j.procedure.Description in project neo4j-apoc-procedures by neo4j-contrib.
The class WeaklyConnectedComponents, method wcc.
@Deprecated
@Procedure("apoc.algo.wcc")
@Description("CALL apoc.algo.wcc() YIELD number of weakly connected components")
public Stream<CCResult> wcc() {
    List<List<CCVar>> results = new LinkedList<List<CCVar>>();
    ResourceIterator<Node> nodes = db.getAllNodes().iterator();
    PrimitiveLongSet allNodes = Primitive.longSet(0);
    while (nodes.hasNext()) {
        Node node = nodes.next();
        if (node.getDegree() == 0) {
            List<CCVar> result = new LinkedList<CCVar>();
            result.add(new CCVar(node.getId() + "", node.getLabels().iterator().next().name()));
            results.add(result);
        } else {
            allNodes.add(node.getId());
        }
    }
    nodes.close();
    PrimitiveLongIterator it = allNodes.iterator();
    while (it.hasNext()) {
        try {
            long n = it.next();
            List<CCVar> result = new LinkedList<CCVar>();
            PrimitiveLongIterator reachableIDs = go(db.getNodeById(n), Direction.BOTH, result).iterator();
            while (reachableIDs.hasNext()) {
                long id = (long) reachableIDs.next();
                allNodes.remove(id);
            }
            results.add(result);
        } catch (NoSuchElementException e) {
            break;
        }
        it = allNodes.iterator();
    }
    allNodes.close();
    return results.stream().map((x) -> new CCResult(
            x.stream().map((z) -> new Long(z.getId())).collect(Collectors.toList()),
            x.stream().collect(Collectors.groupingBy(CCVar::getType)).entrySet().stream()
                    .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue().size()))));
}
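For reference, a hedged sketch of consuming apoc.algo.wcc() from embedded Java. Since the snippet does not show the column names of CCResult, the sketch prints each result row as a raw map rather than assuming specific YIELD names.

import java.util.Map;

import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Result;
import org.neo4j.graphdb.Transaction;

public class WccUsage {

    // Streams one row per weakly connected component and prints it as a map,
    // without assuming the CCResult column names.
    public static void printComponents(GraphDatabaseService db) {
        try (Transaction tx = db.beginTx();
             Result result = db.execute("CALL apoc.algo.wcc()")) {
            while (result.hasNext()) {
                Map<String, Object> row = result.next();
                System.out.println(row);
            }
            tx.success();
        }
    }
}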
Use of org.neo4j.procedure.Description in project neo4j-apoc-procedures by neo4j-contrib.
The class Json, method toTree.
@Procedure("apoc.convert.toTree")
@Description("apoc.convert.toTree([paths],[lowerCaseRels=true]) creates a stream of nested documents representing the at least one root of these paths")
public // todo optinally provide root node
Stream<MapResult> toTree(@Name("paths") List<Path> paths, @Name(value = "lowerCaseRels", defaultValue = "true") boolean lowerCaseRels) {
if (paths.isEmpty())
return Stream.of(new MapResult(Collections.emptyMap()));
Map<Long, Map<String, Object>> maps = new HashMap<>(paths.size() * 100);
for (Path path : paths) {
Iterator<PropertyContainer> it = path.iterator();
while (it.hasNext()) {
Node n = (Node) it.next();
Map<String, Object> nMap = maps.computeIfAbsent(n.getId(), (id) -> toMap(n));
if (it.hasNext()) {
Relationship r = (Relationship) it.next();
Node m = r.getOtherNode(n);
Map<String, Object> mMap = maps.computeIfAbsent(m.getId(), (id) -> toMap(m));
String typeName = r.getType().name();
if (lowerCaseRels)
typeName = typeName.toLowerCase();
mMap = addRelProperties(mMap, typeName, r);
// parent-[:HAS_CHILD]->(child) vs. (parent)<-[:PARENT_OF]-(child)
if (!nMap.containsKey(typeName))
nMap.put(typeName, new ArrayList<>(16));
List list = (List) nMap.get(typeName);
if (!list.contains(mMap))
// todo performance, use set instead and convert to map at the end?
list.add(mMap);
}
}
}
return paths.stream().map(Path::startNode).distinct().map(n -> maps.remove(n.getId())).map(m -> m == null ? Collections.<String, Object>emptyMap() : m).map(MapResult::new);
}
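A minimal usage sketch under stated assumptions: a Category label, a HAS_CHILD relationship type, and a value result column (the field name conventionally used by APOC's MapResult). It collects the paths of a subtree and converts them into one nested document per distinct root.

import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Result;
import org.neo4j.graphdb.Transaction;

public class ToTreeUsage {

    // Collects all HAS_CHILD paths under root categories and returns one nested
    // document per distinct root node; relationship keys are lower-cased by default.
    // Assumptions: Category label, HAS_CHILD type, and a "value" result column.
    public static void printTrees(GraphDatabaseService db) {
        String query = "MATCH path = (root:Category)-[:HAS_CHILD*]->(leaf) "
                + "WITH collect(path) AS paths "
                + "CALL apoc.convert.toTree(paths) YIELD value "
                + "RETURN value";
        try (Transaction tx = db.beginTx();
             Result result = db.execute(query)) {
            while (result.hasNext()) {
                System.out.println(result.next().get("value"));
            }
            tx.success();
        }
    }
}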
Use of org.neo4j.procedure.Description in project neo4j-apoc-procedures by neo4j-contrib.
The class SchemaIndex, method related.
@Procedure
@Description("apoc.index.relatedNodes([nodes],label,key,'<TYPE'/'TYPE>'/'TYPE',limit) yield node - schema range scan which keeps index order and adds limit and checks opposite node of relationship against the given set of nodes")
public Stream<NodeResult> related(@Name("nodes") List<Node> nodes, @Name("label") String label, @Name("key") String key, @Name("relationship") String relationship, @Name("limit") long limit)
        throws SchemaRuleNotFoundException, IndexNotFoundKernelException, IOException, DuplicateSchemaRuleException, IndexNotApplicableKernelException {
    Set<Node> nodeSet = new HashSet<>(nodes);
    Pair<RelationshipType, Direction> relTypeDirection = parse(relationship).get(0);
    RelationshipType type = relTypeDirection.first();
    Direction dir = relTypeDirection.other();
    return queryForRange(label, key, Long.MIN_VALUE, Long.MAX_VALUE, 0).filter((node) -> {
        for (Relationship rel : node.getRelationships(dir, type)) {
            Node other = rel.getOtherNode(node);
            if (nodeSet.contains(other)) {
                return true;
            }
        }
        return false;
    }).map(NodeResult::new).limit(limit);
}
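A hedged usage sketch. The procedure name below follows the @Description string (apoc.index.relatedNodes); because the @Procedure annotation above carries no explicit name, the registered name could instead derive from the method name. The User label, age key, KNOWS relationship type, and the node result column (the usual NodeResult field) are all illustrative assumptions.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Result;
import org.neo4j.graphdb.Transaction;

public class RelatedNodesUsage {

    // Returns up to `limit` User nodes, in User(age) index order, that have an
    // outgoing KNOWS relationship to one of the given nodes.
    // Assumptions: procedure name, User label, age key, KNOWS type, "node" column.
    public static void printRelated(GraphDatabaseService db, List<Node> sourceNodes, long limit) {
        Map<String, Object> params = new HashMap<>();
        params.put("nodes", sourceNodes);
        params.put("limit", limit);
        String query = "CALL apoc.index.relatedNodes($nodes, 'User', 'age', 'KNOWS>', $limit) YIELD node "
                + "RETURN node";
        try (Transaction tx = db.beginTx();
             Result result = db.execute(query, params)) {
            while (result.hasNext()) {
                System.out.println(result.next().get("node"));
            }
            tx.success();
        }
    }
}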
Use of org.neo4j.procedure.Description in project neo4j-apoc-procedures by neo4j-contrib.
The class Gephi, method add.
// http://127.0.0.1:8080/workspace0?operation=updateGraph
// TODO configure property-filters or transfer all properties
@Procedure
@Description("apoc.gephi.add(url-or-key, workspace, data, weightproperty, ['exportproperty']) | streams passed in data to Gephi")
public Stream<ProgressInfo> add(@Name("urlOrKey") String keyOrUrl, @Name("workspace") String workspace, @Name("data") Object data, @Name(value = "weightproperty", defaultValue = "null") String weightproperty, @Name(value = "exportproperties", defaultValue = "[]") List<String> exportproperties) {
    if (workspace == null)
        workspace = "workspace0";
    String url = getGephiUrl(keyOrUrl) + "/" + Util.encodeUrlComponent(workspace) + "?operation=updateGraph";
    long start = System.currentTimeMillis();
    HashSet<Node> nodes = new HashSet<>(1000);
    HashSet<Relationship> rels = new HashSet<>(10000);
    List<String> propertyNames = new ArrayList<>(exportproperties);
    propertyNames.removeAll(RESERVED);
    if (Graphs.extract(data, nodes, rels)) {
        String payload = toGephiStreaming(nodes, rels, weightproperty, propertyNames.toArray(new String[propertyNames.size()]));
        JsonUtil.loadJson(url, map("method", "POST", "Content-Type", "application/json; charset=utf-8"), payload).count();
        return Stream.of(new ProgressInfo(url, "graph", "gephi").update(nodes.size(), rels.size(), nodes.size()).done(start));
    }
    return Stream.empty();
}
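A minimal sketch of streaming a subgraph to a Gephi streaming endpoint via this procedure. The Person label, KNOWS relationship type, workspace name, and the yielded nodes/relationships/time columns (taken from the usual ProgressInfo fields) are assumptions, as is the behavior that a null url-or-key falls back to the default local Gephi URL resolved by getGephiUrl.

import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Result;
import org.neo4j.graphdb.Transaction;

public class GephiAddUsage {

    // Collects KNOWS paths and streams them to the Gephi workspace "workspace0".
    // Assumptions: null url-or-key resolves to the default local endpoint, and
    // ProgressInfo exposes nodes, relationships and time columns.
    public static void streamToGephi(GraphDatabaseService db) {
        String query = "MATCH path = (:Person)-[:KNOWS]->(:Person) "
                + "WITH collect(path) AS paths "
                + "CALL apoc.gephi.add(null, 'workspace0', paths) YIELD nodes, relationships, time "
                + "RETURN nodes, relationships, time";
        try (Transaction tx = db.beginTx();
             Result result = db.execute(query)) {
            while (result.hasNext()) {
                System.out.println(result.next()); // e.g. {nodes=..., relationships=..., time=...}
            }
            tx.success();
        }
    }
}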