Usage of com.baidu.hugegraph.api.filter.CompressInterceptor.Compress in the Apache incubator-hugegraph project.
Class VertexAPI, method list:
@GET
@Timed
@Compress
@Produces(APPLICATION_JSON_WITH_CHARSET)
@RolesAllowed({ "admin", "$owner=$graph $action=vertex_read" })
public String list(@Context GraphManager manager,
                   @PathParam("graph") String graph,
                   @QueryParam("label") String label,
                   @QueryParam("properties") String properties,
                   @QueryParam("keep_start_p")
                   @DefaultValue("false") boolean keepStartP,
                   @QueryParam("offset") @DefaultValue("0") long offset,
                   @QueryParam("page") String page,
                   @QueryParam("limit") @DefaultValue("100") long limit) {
    LOG.debug("Graph [{}] query vertices by label: {}, properties: {}, " +
              "offset: {}, page: {}, limit: {}",
              graph, label, properties, offset, page, limit);

    Map<String, Object> props = parseProperties(properties);
    // Page-based and offset-based pagination are mutually exclusive
    if (page != null) {
        E.checkArgument(offset == 0,
                        "Not support querying vertices based on paging " +
                        "and offset together");
    }

    HugeGraph g = graph(manager, graph);

    GraphTraversal<Vertex, Vertex> traversal = g.traversal().V();
    if (label != null) {
        traversal = traversal.hasLabel(label);
    }

    /*
     * Single pass over the property filters (the original code iterated
     * twice, mutating the map in between): convert relational-operator
     * strings like "P.gt(10)" into predicates — unless keep_start_p asks
     * to treat them as literal values — then apply each as a has() step.
     */
    for (Map.Entry<String, Object> entry : props.entrySet()) {
        Object value = entry.getValue();
        if (!keepStartP && value instanceof String &&
            ((String) value).startsWith(TraversalUtil.P_CALL)) {
            value = TraversalUtil.parsePredicate((String) value);
        }
        traversal = traversal.has(entry.getKey(), value);
    }

    if (page == null) {
        // Offset/limit pagination: fetch the [offset, offset + limit) window
        traversal = traversal.range(offset, offset + limit);
    } else {
        // Cursor pagination: resume from the given page token
        traversal = traversal.has(QueryHolder.SYSPROP_PAGE, page).limit(limit);
    }

    try {
        return manager.serializer(g).writeVertices(traversal, page != null);
    } finally {
        // Always release the implicit read transaction opened by the traversal
        if (g.tx().isOpen()) {
            g.tx().close();
        }
    }
}
Usage of com.baidu.hugegraph.api.filter.CompressInterceptor.Compress in the Apache incubator-hugegraph project.
Class GremlinAPI, method post:
@POST
@Timed
@Compress
@Consumes(APPLICATION_JSON)
@Produces(APPLICATION_JSON_WITH_CHARSET)
public Response post(@Context HugeConfig conf,
                     @Context HttpHeaders headers,
                     String request) {
    /* The following code is reserved for forwarding request */
    // context.getRequestDispatcher(location).forward(request, response);
    // return Response.seeOther(UriBuilder.fromUri(location).build())
    //                .build();
    // Response.temporaryRedirect(UriBuilder.fromUri(location).build())
    //                .build();

    // Forward the raw gremlin script to the backing gremlin server,
    // passing through the caller's Authorization header.
    String authorization = headers.getHeaderString(HttpHeaders.AUTHORIZATION);
    Response result = this.client().doPostRequest(authorization, request);

    // Record request/response sizes for the gremlin metrics histograms
    GREMLIN_INPUT_HISTOGRAM.update(request.length());
    GREMLIN_OUTPUT_HISTOGRAM.update(result.getLength());

    return transformResponseIfNeeded(result);
}
Usage of com.baidu.hugegraph.api.filter.CompressInterceptor.Compress in the Apache incubator-hugegraph project.
Class EdgesAPI, method scan:
@GET
@Timed
@Path("scan")
@Compress
@Produces(APPLICATION_JSON_WITH_CHARSET)
public String scan(@Context GraphManager manager,
                   @PathParam("graph") String graph,
                   @QueryParam("start") String start,
                   @QueryParam("end") String end,
                   @QueryParam("page") String page,
                   @QueryParam("page_limit")
                   @DefaultValue(DEFAULT_PAGE_LIMIT) long pageLimit) {
    LOG.debug("Graph [{}] query edges by shard(start: {}, end: {}, " +
              "page: {}) ", graph, start, end, page);

    HugeGraph hugegraph = graph(manager, graph);

    // Build a shard-scan query over out-edges for the [start, end) token range
    ConditionQuery query = new ConditionQuery(HugeType.EDGE_OUT);
    query.scan(start, end);
    query.page(page);
    if (query.paging()) {
        // Cap each page at the requested page_limit when paging is enabled
        query.limit(pageLimit);
    }

    Iterator<Edge> edges = hugegraph.edges(query);
    return manager.serializer(hugegraph).writeEdges(edges, query.paging());
}
Usage of com.baidu.hugegraph.api.filter.CompressInterceptor.Compress in the Apache incubator-hugegraph project.
Class EdgesAPI, method list:
@GET
@Timed
@Compress
@Produces(APPLICATION_JSON_WITH_CHARSET)
public String list(@Context GraphManager manager,
                   @PathParam("graph") String graph,
                   @QueryParam("ids") List<String> stringIds) {
    LOG.debug("Graph [{}] get edges by ids: {}", graph, stringIds);
    E.checkArgument(stringIds != null && !stringIds.isEmpty(),
                    "The ids parameter can't be null or empty");

    // Parse each raw id string into an edge Id (no id-format tolerance)
    int size = stringIds.size();
    Object[] ids = new Id[size];
    for (int index = 0; index < size; index++) {
        ids[index] = HugeEdge.getIdValue(stringIds.get(index), false);
    }

    HugeGraph hugegraph = graph(manager, graph);
    Iterator<Edge> edges = hugegraph.edges(ids);
    return manager.serializer(hugegraph).writeEdges(edges, false);
}
Usage of com.baidu.hugegraph.api.filter.CompressInterceptor.Compress in the Apache incubator-hugegraph project.
Class VerticesAPI, method list:
@GET
@Timed
@Compress
@Produces(APPLICATION_JSON_WITH_CHARSET)
public String list(@Context GraphManager manager,
                   @PathParam("graph") String graph,
                   @QueryParam("ids") List<String> stringIds) {
    LOG.debug("Graph [{}] get vertices by ids: {}", graph, stringIds);
    E.checkArgument(stringIds != null && !stringIds.isEmpty(),
                    "The ids parameter can't be null or empty");

    // Validate and parse each raw id string into a vertex Id
    int size = stringIds.size();
    Object[] ids = new Id[size];
    for (int index = 0; index < size; index++) {
        ids[index] = VertexAPI.checkAndParseVertexId(stringIds.get(index));
    }

    HugeGraph hugegraph = graph(manager, graph);
    Iterator<Vertex> vertices = hugegraph.vertices(ids);
    return manager.serializer(hugegraph).writeVertices(vertices, false);
}
Aggregations