Use of com.tinkerpop.gremlin.java.GremlinPipeline in project orientdb by orientechnologies.
The class TestDirtyTrackingTreeRidBag, method testConcurrentEdges.
@Test
public void testConcurrentEdges() {
    final int max = OGlobalConfiguration.RID_BAG_EMBEDDED_TO_SBTREEBONSAI_THRESHOLD.getValueAsInteger() * 2;
    final OrientBaseGraph graph = factory.getTx();
    graph.executeOutsideTx(new OCallable<Object, OrientBaseGraph>() {
        @Override
        public Object call(OrientBaseGraph iArgument) {
            graph.createVertexType("Vertex1");
            graph.createVertexType("Vertex2");
            graph.createEdgeType("Edge1");
            return null;
        }
    });
    Vertex rootVertex = graph.addVertex("Vertex1", (String) null);
    graph.commit();
    final Object rootId = rootVertex.getId();
    Runnable tableRun = new Runnable() {
        @Override
        public void run() {
            OrientBaseGraph innerGraph = TestDirtyTrackingTreeRidBag.this.factory.getTx();
            Vertex innerRoot = innerGraph.getVertex(rootId);
            for (int i = 0; i < max; i++) {
                Vertex v = innerGraph.addVertex("Vertex2", (String) null);
                innerGraph.commit();
                innerGraph.addEdge(null, innerRoot, v, "Edge1"); // use the thread-local graph, not the outer `graph`
                innerGraph.commit();
            }
        }
    };
    Thread tableT = new Thread(tableRun);
    tableT.start();
    try {
        tableT.join();
    } catch (InterruptedException e) {
        System.out.println("Join interrupted " + e);
    }
    graph.getRawGraph().getLocalCache().clear();
    rootVertex = graph.getVertex(rootId);
    assertEquals(new GremlinPipeline<Vertex, Long>().start(rootVertex).out("Edge1").count(), max);
    graph.shutdown();
}
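The assertion at the end is a plain TinkerPop 2 GremlinPipeline traversal. A minimal standalone sketch of the same counting step; the helper class and method names are invented purely for illustration, while start(), out() and count() are the GremlinPipeline calls used above:

import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.gremlin.java.GremlinPipeline;

final class EdgeCountSketch {
    // Counts the outgoing edges with the given label by piping the start
    // vertex through out(label) and asking the pipeline for its size.
    static long countOut(Vertex start, String label) {
        return new GremlinPipeline<Vertex, Vertex>().start(start).out(label).count();
    }
}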
Use of com.tinkerpop.gremlin.java.GremlinPipeline in project incubator-atlas by apache.
The class BaseQuery, method buildPipeline.
protected GremlinPipeline buildPipeline() {
    GremlinPipeline pipeline = getRootVertexPipeline();
    Pipe queryPipe = getQueryPipe();
    if (queryPipe != null) {
        pipeline.add(queryPipe);
    }
    pipeline.add(getNotDeletedPipe());
    return pipeline;
}
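buildPipeline only assembles the traversal; nothing runs until the pipeline is iterated. A sketch of a caller draining it (the helper name and the Vertex element type are assumptions; iterating the pipeline or calling toList() are the standard GremlinPipeline ways to materialize results):

import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.gremlin.java.GremlinPipeline;

// Illustration only: drain the pipeline assembled by buildPipeline() and
// print the ids of the vertices that pass the query and not-deleted pipes.
@SuppressWarnings("unchecked")
protected void printMatches() {
    GremlinPipeline<Object, Vertex> pipeline = buildPipeline();
    for (Vertex v : pipeline) {
        System.out.println(v.getId());
    }
}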
Use of com.tinkerpop.gremlin.java.GremlinPipeline in project incubator-atlas by apache.
The class AtlasEntityQueryTest, method testExecute_Collection.
//todo: add tests for instance query and getInitialPipeline()
@Test
public void testExecute_Collection() throws Exception {
    AtlasGraph graph = createStrictMock(AtlasGraph.class);
    QueryExpression expression = createStrictMock(QueryExpression.class);
    ResourceDefinition resourceDefinition = createStrictMock(ResourceDefinition.class);
    Request request = createStrictMock(Request.class);
    GremlinPipeline initialPipeline = createStrictMock(GremlinPipeline.class);
    Pipe queryPipe = createStrictMock(Pipe.class);
    Pipe expressionPipe = createStrictMock(Pipe.class);
    Pipe notDeletedPipe = createStrictMock(Pipe.class);
    GremlinPipeline rootPipeline = createStrictMock(GremlinPipeline.class);
    GremlinPipeline queryPipeline = createStrictMock(GremlinPipeline.class);
    GremlinPipeline expressionPipeline = createStrictMock(GremlinPipeline.class);
    GremlinPipeline notDeletedPipeline = createStrictMock(GremlinPipeline.class);
    Vertex vertex1 = createStrictMock(Vertex.class);
    VertexWrapper vertex1Wrapper = createStrictMock(VertexWrapper.class);

    List<Vertex> results = new ArrayList<>();
    results.add(vertex1);

    Map<String, Object> vertex1PropertyMap = new HashMap<>();
    vertex1PropertyMap.put("prop1", "prop1.value1");
    vertex1PropertyMap.put("prop2", "prop2.value1");

    Map<String, Object> filteredVertex1PropertyMap = new HashMap<>();
    filteredVertex1PropertyMap.put("prop1", "prop1.value1");

    // mock expectations
    expect(initialPipeline.add(queryPipe)).andReturn(queryPipeline);
    expect(initialPipeline.add(notDeletedPipe)).andReturn(notDeletedPipeline);
    expect(initialPipeline.as("root")).andReturn(rootPipeline);
    expect(expression.asPipe()).andReturn(expressionPipe);
    expect(rootPipeline.add(expressionPipe)).andReturn(expressionPipeline);
    expect(expressionPipeline.back("root")).andReturn(rootPipeline);
    expect(rootPipeline.toList()).andReturn(results);
    graph.commit();
    expect(vertex1Wrapper.getPropertyMap()).andReturn(vertex1PropertyMap);
    expect(resourceDefinition.filterProperties(request, vertex1PropertyMap)).andReturn(filteredVertex1PropertyMap);
    expect(resourceDefinition.resolveHref(filteredVertex1PropertyMap)).andReturn("/foo/bar");
    expect(request.getCardinality()).andReturn(Request.Cardinality.COLLECTION);
    replay(graph, expression, resourceDefinition, request, initialPipeline, queryPipe, expressionPipe,
        notDeletedPipe, rootPipeline, queryPipeline, expressionPipeline, notDeletedPipeline, vertex1, vertex1Wrapper);
    // end mock expectations

    AtlasEntityQuery query = new TestAtlasEntityQuery(expression, resourceDefinition, request, initialPipeline,
        queryPipe, notDeletedPipe, graph, vertex1Wrapper);

    // invoke method being tested
    Collection<Map<String, Object>> queryResults = query.execute();

    assertEquals(queryResults.size(), 1);
    Map<String, Object> queryResultMap = queryResults.iterator().next();
    assertEquals(queryResultMap.size(), 2);
    assertEquals(queryResultMap.get("prop1"), "prop1.value1");
    assertEquals(queryResultMap.get("href"), "/foo/bar");

    verify(graph, expression, resourceDefinition, request, initialPipeline, queryPipe, expressionPipe,
        notDeletedPipe, rootPipeline, queryPipeline, expressionPipeline, notDeletedPipeline, vertex1, vertex1Wrapper);
}
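The strict mock expectations encode the pipeline call sequence the production execute() is expected to make. Read back as real GremlinPipeline calls, that sequence is roughly the sketch below; the accessor names come from this test family (getInitialPipeline, getQueryPipe, getNotDeletedPipe) and the fragment is an illustration, not a copy of the AtlasEntityQuery source:

// Sketch of the traversal the mocks verify: the initial pipeline receives the
// query pipe and the not-deleted pipe, is labeled "root", the expression pipe
// is applied, back("root") returns to the root vertices, and toList()
// materializes them before the graph transaction is committed.
GremlinPipeline pipeline = getInitialPipeline();
pipeline.add(getQueryPipe());
pipeline.add(getNotDeletedPipe());
GremlinPipeline rootPipeline = pipeline.as("root");
Pipe expressionPipe = expression.asPipe();
List<Vertex> vertices = rootPipeline.add(expressionPipe).back("root").toList();
graph.commit();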