Use of com.baidu.hugegraph.structure.gremlin.ResultSet in the project incubator-hugegraph-toolchain by apache.
In the class GremlinQueryService, the method executeQuery:
public GremlinResult executeQuery(int connId, GremlinQuery query) {
    HugeClient client = this.getClient(connId);
    log.debug("The original gremlin ==> {}", query.getContent());
    String gremlin = this.optimize(query.getContent());
    log.debug("The optimized gremlin ==> {}", gremlin);
    // Execute gremlin query
    ResultSet resultSet = this.executeGremlin(gremlin, client);
    // Scan data, vote the result type
    TypedResult typedResult = this.parseResults(resultSet);
    // Build json view
    JsonView jsonView = new JsonView(typedResult.getData());
    // Build table view
    TableView tableView = this.buildTableView(typedResult);
    // Build graph view
    GraphView graphView = this.buildGraphView(typedResult, client);
    return GremlinResult.builder()
                        .type(typedResult.getType())
                        .jsonView(jsonView)
                        .tableView(tableView)
                        .graphView(graphView)
                        .build();
}
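For context, a minimal sketch of what the executeGremlin helper could look like, assuming it simply delegates to the client's Gremlin API used in the other examples on this page (the actual service may add timeout and error handling):

// Hypothetical sketch: delegate the optimized gremlin string to the client
private ResultSet executeGremlin(String gremlin, HugeClient client) {
    return client.gremlin().gremlin(gremlin).execute();
}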
Use of com.baidu.hugegraph.structure.gremlin.ResultSet in the project incubator-hugegraph-toolchain by apache.
In the class EdgeTest, the method testQueryByPagingAndFiltering:
@Test
public void testQueryByPagingAndFiltering() {
    SchemaManager schema = schema();
    schema.propertyKey("no").asText().create();
    schema.propertyKey("location").asText().create();
    schema.propertyKey("callType").asText().create();
    schema.propertyKey("calltime").asDate().create();
    schema.propertyKey("duration").asInt().create();
    schema.vertexLabel("phone")
          .properties("no")
          .primaryKeys("no")
          .enableLabelIndex(false)
          .create();
    schema.edgeLabel("call")
          .multiTimes()
          .properties("location", "callType", "duration", "calltime")
          .sourceLabel("phone")
          .targetLabel("phone")
          .sortKeys("location", "callType", "duration", "calltime")
          .create();

    Vertex v1 = graph().addVertex(T.label, "phone", "no", "13812345678");
    Vertex v2 = graph().addVertex(T.label, "phone", "no", "13866668888");
    Vertex v10086 = graph().addVertex(T.label, "phone", "no", "10086");

    v1.addEdge("call", v2, "location", "Beijing", "callType", "work",
               "duration", 3, "calltime", "2017-5-1 23:00:00");
    v1.addEdge("call", v2, "location", "Beijing", "callType", "work",
               "duration", 3, "calltime", "2017-5-2 12:00:01");
    v1.addEdge("call", v2, "location", "Beijing", "callType", "work",
               "duration", 3, "calltime", "2017-5-3 12:08:02");
    v1.addEdge("call", v2, "location", "Beijing", "callType", "work",
               "duration", 8, "calltime", "2017-5-3 22:22:03");
    v1.addEdge("call", v2, "location", "Beijing", "callType", "fun",
               "duration", 10, "calltime", "2017-5-4 20:33:04");
    v1.addEdge("call", v10086, "location", "Nanjing", "callType", "work",
               "duration", 12, "calltime", "2017-5-2 15:30:05");
    v1.addEdge("call", v10086, "location", "Nanjing", "callType", "work",
               "duration", 14, "calltime", "2017-5-3 14:56:06");
    v2.addEdge("call", v10086, "location", "Nanjing", "callType", "fun",
               "duration", 15, "calltime", "2017-5-3 17:28:07");

    ResultSet resultSet = gremlin().gremlin("g.V(vid).outE('call')" +
                                            ".has('location', 'Beijing')" +
                                            ".has('callType', 'work')" +
                                            ".has('duration', 3)" +
                                            ".has('calltime', " +
                                            "P.between('2017-5-2', '2017-5-4'))" +
                                            ".toList()")
                                   .binding("vid", v1.id())
                                   .execute();
    Iterator<Result> results = resultSet.iterator();
    Assert.assertEquals(2, Iterators.size(results));

    Assert.assertThrows(ServerException.class, () -> {
        // no location
        gremlin().gremlin("g.V(vid).outE('call').has('callType', 'work')" +
                          ".has('duration', 3).has('calltime', " +
                          "P.between('2017-5-2', '2017-5-4'))" +
                          ".has('~page', '')")
                 .binding("vid", v1.id())
                 .execute();
    }, e -> {
        Assert.assertContains("Can't query by paging and filtering",
                              e.getMessage());
    });
}
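The negative case above shows that the '~page' paging token cannot be combined with property filters. As a hedged counterpoint, the same paged traversal with the filters dropped would be expected to succeed; a sketch reusing only the calls already shown above:

// Hedged sketch: paging without property filters, which the server should accept
ResultSet paged = gremlin().gremlin("g.V(vid).outE('call')" +
                                    ".has('~page', '')")
                           .binding("vid", v1.id())
                           .execute();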
Use of com.baidu.hugegraph.structure.gremlin.ResultSet in the project incubator-hugegraph-toolchain by apache.
In the class GraphsApiTest, the method testCloneAndDropGraph:
@Test
public void testCloneAndDropGraph() {
    int initialGraphNumber = graphsAPI.list().size();

    // Clone a new graph from an existing graph dynamically
    String config;
    try {
        config = FileUtils.readFileToString(new File(CONFIG3_PATH),
                                            StandardCharsets.UTF_8);
    } catch (IOException e) {
        throw new ClientException("Failed to read config file: %s", CONFIG3_PATH);
    }
    Map<String, String> result = graphsAPI.create(GRAPH3, "hugegraph", config);
    Assert.assertEquals(2, result.size());
    Assert.assertEquals(GRAPH3, result.get("name"));
    Assert.assertEquals("rocksdb", result.get("backend"));
    Assert.assertEquals(initialGraphNumber + 1, graphsAPI.list().size());

    HugeClient client = new HugeClient(baseClient(), GRAPH3);

    // Insert graph schema and data
    initPropertyKey(client);
    initVertexLabel(client);
    initEdgeLabel(client);
    List<Vertex> vertices = new ArrayList<>(100);
    for (int i = 0; i < 100; i++) {
        Vertex vertex = new Vertex("person").property("name", "person" + i)
                                            .property("city", "Beijing")
                                            .property("age", 19);
        vertices.add(vertex);
    }
    vertices = client.graph().addVertices(vertices);
    List<Edge> edges = new ArrayList<>(100);
    for (int i = 0; i < 100; i++) {
        Edge edge = new Edge("knows").source(vertices.get(i))
                                     .target(vertices.get((i + 1) % 100))
                                     .property("date", "2016-01-10");
        edges.add(edge);
    }
    client.graph().addEdges(edges, false);

    // Query vertex and edge counts from the newly created graph
    ResultSet resultSet = client.gremlin().gremlin("g.V().count()").execute();
    Assert.assertEquals(100, resultSet.iterator().next().getInt());
    resultSet = client.gremlin().gremlin("g.E().count()").execute();
    Assert.assertEquals(100, resultSet.iterator().next().getInt());

    // Clear graph schema and data of the newly created graph
    graphsAPI.clear(GRAPH3, "I'm sure to delete all data");
    resultSet = client.gremlin().gremlin("g.V().count()").execute();
    Assert.assertEquals(0, resultSet.iterator().next().getInt());
    resultSet = client.gremlin().gremlin("g.E().count()").execute();
    Assert.assertEquals(0, resultSet.iterator().next().getInt());
    Assert.assertTrue(client.schema().getPropertyKeys().isEmpty());
    Assert.assertEquals(initialGraphNumber + 1, graphsAPI.list().size());

    // Remove the newly created graph dynamically
    graphsAPI.drop(GRAPH3, "I'm sure to drop the graph");
    Assert.assertEquals(initialGraphNumber, graphsAPI.list().size());
}
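The single-scalar read resultSet.iterator().next().getInt() repeats throughout this test and the variant below; a small hypothetical helper (countOf is not part of the test class, just an illustration built from the same calls) would keep the assertions compact:

// Hypothetical helper wrapping the "single scalar result" pattern used above
private static int countOf(HugeClient client, String gremlin) {
    ResultSet resultSet = client.gremlin().gremlin(gremlin).execute();
    return resultSet.iterator().next().getInt();
}

// Example usage: Assert.assertEquals(100, countOf(client, "g.V().count()"));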
Use of com.baidu.hugegraph.structure.gremlin.ResultSet in the project incubator-hugegraph-toolchain by apache.
In the class GraphsApiTest, the method testCloneAndDropGraphWithoutConfig:
@Test
public void testCloneAndDropGraphWithoutConfig() {
    int initialGraphNumber = graphsAPI.list().size();

    // Clone a new graph from an existing graph dynamically
    String config = null;
    Map<String, String> result = graphsAPI.create(GRAPH3, "hugegraph", config);
    Assert.assertEquals(2, result.size());
    Assert.assertEquals(GRAPH3, result.get("name"));
    Assert.assertEquals("rocksdb", result.get("backend"));
    Assert.assertEquals(initialGraphNumber + 1, graphsAPI.list().size());

    HugeClient client = new HugeClient(baseClient(), GRAPH3);

    // Insert graph schema and data
    initPropertyKey(client);
    initVertexLabel(client);
    initEdgeLabel(client);
    List<Vertex> vertices = new ArrayList<>(100);
    for (int i = 0; i < 100; i++) {
        Vertex vertex = new Vertex("person").property("name", "person" + i)
                                            .property("city", "Beijing")
                                            .property("age", 19);
        vertices.add(vertex);
    }
    vertices = client.graph().addVertices(vertices);
    List<Edge> edges = new ArrayList<>(100);
    for (int i = 0; i < 100; i++) {
        Edge edge = new Edge("knows").source(vertices.get(i))
                                     .target(vertices.get((i + 1) % 100))
                                     .property("date", "2016-01-10");
        edges.add(edge);
    }
    client.graph().addEdges(edges, false);

    // Query vertex and edge counts from the newly created graph
    ResultSet resultSet = client.gremlin().gremlin("g.V().count()").execute();
    Assert.assertEquals(100, resultSet.iterator().next().getInt());
    resultSet = client.gremlin().gremlin("g.E().count()").execute();
    Assert.assertEquals(100, resultSet.iterator().next().getInt());

    // Clear graph schema and data of the newly created graph
    graphsAPI.clear(GRAPH3, "I'm sure to delete all data");
    resultSet = client.gremlin().gremlin("g.V().count()").execute();
    Assert.assertEquals(0, resultSet.iterator().next().getInt());
    resultSet = client.gremlin().gremlin("g.E().count()").execute();
    Assert.assertEquals(0, resultSet.iterator().next().getInt());
    Assert.assertTrue(client.schema().getPropertyKeys().isEmpty());
    Assert.assertEquals(initialGraphNumber + 1, graphsAPI.list().size());

    // Remove the newly created graph dynamically
    graphsAPI.drop(GRAPH3, "I'm sure to drop the graph");
    Assert.assertEquals(initialGraphNumber, graphsAPI.list().size());
}
Use of com.baidu.hugegraph.structure.gremlin.ResultSet in the project incubator-hugegraph-toolchain by apache.
In the class TaskApiTest, the method testCancel:
@Test
public void testCancel() {
    schema().vertexLabel("man").useAutomaticId().ifNotExist().create();

    String groovy = "for (int i = 0; i < 10; i++) {" +
                    "hugegraph.addVertex(T.label, 'man');" +
                    "hugegraph.tx().commit();" +
                    "}";
    // Insert 10 records in sync mode
    GremlinRequest request = new GremlinRequest(groovy);
    gremlin().execute(request);

    // Verify the insertion took effect
    groovy = "g.V()";
    request = new GremlinRequest(groovy);
    ResultSet resultSet = gremlin().execute(request);
    Assert.assertEquals(10, resultSet.size());

    // Delete to prepare for insertion in async mode
    groovy = "g.V().drop()";
    request = new GremlinRequest(groovy);
    gremlin().execute(request);

    /*
     * The async task script needs to be able to handle interrupts,
     * otherwise it cannot be cancelled
     */
    groovy = "for (int i = 0; i < 10; i++) {" +
             "hugegraph.addVertex(T.label, 'man');" +
             "hugegraph.tx().commit();" +
             "try {" +
             "sleep(1000);" +
             "} catch (InterruptedException e) {" +
             "break;" +
             "}" +
             "}";
    request = new GremlinRequest(groovy);
    long taskId = gremlin().executeAsTask(request);

    groovy = "g.V()";
    request = new GremlinRequest(groovy);
    // Wait until the async task is running
    while (true) {
        resultSet = gremlin().execute(request);
        if (resultSet.size() > 0) {
            break;
        } else {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException ignored) {
            }
        }
    }

    // Cancel the async task
    Task task = taskAPI.cancel(taskId);
    Assert.assertTrue(task.cancelling());
    try {
        Thread.sleep(1000L);
    } catch (InterruptedException e) {
        // ignored
    }
    task = taskAPI.get(taskId);
    Assert.assertTrue(task.cancelled());

    resultSet = gremlin().execute(request);
    Assert.assertTrue(resultSet.size() < 10);
}
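The test sleeps a fixed second before re-reading the task, which can race with cancellation. A hedged alternative is to poll the task status until the cancellation is reflected; the sketch below uses only taskAPI.get() and Task.cancelled() as seen above, with a hypothetical timeout:

// Hedged sketch: poll until the task reports cancelled, instead of one fixed sleep
long deadline = System.currentTimeMillis() + 30_000L; // hypothetical 30s timeout
Task task = taskAPI.get(taskId);
while (!task.cancelled() && System.currentTimeMillis() < deadline) {
    try {
        Thread.sleep(200L);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        break;
    }
    task = taskAPI.get(taskId);
}
Assert.assertTrue(task.cancelled());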