Usage of com.baidu.hugegraph.api.filter.CompressInterceptor.Compress in the project incubator-hugegraph by Apache.
From the class EdgesAPI, method shards().
@GET
@Timed
@Path("shards")
@Compress
@Produces(APPLICATION_JSON_WITH_CHARSET)
public String shards(@Context GraphManager manager,
                     @PathParam("graph") String graph,
                     @QueryParam("split_size") long splitSize) {
    /*
     * Splits the graph's edge data into shards of approximately
     * `splitSize` for parallel scanning; only OUT-direction edge copies
     * (HugeType.EDGE_OUT) are counted so each edge appears in one shard.
     */
    // Fixed copy-paste defect: this is the *edge* shards endpoint, but the
    // log message previously said "get vertex shards".
    LOG.debug("Graph [{}] get edge shards with split size '{}'",
              graph, splitSize);

    HugeGraph g = graph(manager, graph);
    List<Shard> shards = g.metadata(HugeType.EDGE_OUT, "splits", splitSize);
    return manager.serializer(g).writeList("shards", shards);
}
Usage of com.baidu.hugegraph.api.filter.CompressInterceptor.Compress in the project incubator-hugegraph by Apache.
From the class VerticesAPI, method shards().
@GET
@Timed
@Path("shards")
@Compress
@Produces(APPLICATION_JSON_WITH_CHARSET)
public String shards(@Context GraphManager manager,
                     @PathParam("graph") String graph,
                     @QueryParam("split_size") long splitSize) {
    /*
     * Splits the graph's vertex data into shards of approximately
     * `splitSize` so callers can scan vertices in parallel.
     */
    LOG.debug("Graph [{}] get vertex shards with split size '{}'",
              graph, splitSize);

    HugeGraph hugegraph = graph(manager, graph);
    List<Shard> splits = hugegraph.metadata(HugeType.VERTEX, "splits",
                                            splitSize);
    return manager.serializer(hugegraph).writeList("shards", splits);
}
Usage of com.baidu.hugegraph.api.filter.CompressInterceptor.Compress in the project incubator-hugegraph by Apache.
From the class VerticesAPI, method scan().
@GET
@Timed
@Path("scan")
@Compress
@Produces(APPLICATION_JSON_WITH_CHARSET)
public String scan(@Context GraphManager manager,
                   @PathParam("graph") String graph,
                   @QueryParam("start") String start,
                   @QueryParam("end") String end,
                   @QueryParam("page") String page,
                   @QueryParam("page_limit")
                   @DefaultValue(DEFAULT_PAGE_LIMIT) long pageLimit) {
    /*
     * Scans vertices inside the shard delimited by [start, end],
     * optionally resuming from a page token with a per-page limit.
     */
    LOG.debug("Graph [{}] query vertices by shard(start: {}, end: {}, page: {}) ",
              graph, start, end, page);

    HugeGraph hugegraph = graph(manager, graph);

    ConditionQuery scanQuery = new ConditionQuery(HugeType.VERTEX);
    scanQuery.scan(start, end);
    scanQuery.page(page);
    // The page_limit only applies when a page token was supplied
    if (scanQuery.paging()) {
        scanQuery.limit(pageLimit);
    }

    Iterator<Vertex> iter = hugegraph.vertices(scanQuery);
    return manager.serializer(hugegraph)
                  .writeVertices(iter, scanQuery.paging());
}
Usage of com.baidu.hugegraph.api.filter.CompressInterceptor.Compress in the project incubator-hugegraph by Apache.
From the class GremlinAPI, method get().
@GET
@Timed
@Compress(buffer = (1024 * 40))
@Produces(APPLICATION_JSON_WITH_CHARSET)
public Response get(@Context HugeConfig conf,
                    @Context HttpHeaders headers,
                    @Context UriInfo uriInfo) {
    /*
     * Proxies a GET Gremlin request to the backing Gremlin server,
     * forwarding the Authorization header and the query parameters,
     * and records request/response sizes in histograms.
     */
    String auth = headers.getHeaderString(HttpHeaders.AUTHORIZATION);
    String query = uriInfo.getRequestUri().getRawQuery();
    MultivaluedMap<String, String> params = uriInfo.getQueryParameters();

    Response response = this.client().doGetRequest(auth, params);

    // URI.getRawQuery() returns null when the request URI has no query
    // component; guard so the metric update cannot throw an NPE.
    GREMLIN_INPUT_HISTOGRAM.update(query == null ? 0 : query.length());
    GREMLIN_OUTPUT_HISTOGRAM.update(response.getLength());

    return transformResponseIfNeeded(response);
}
Usage of com.baidu.hugegraph.api.filter.CompressInterceptor.Compress in the project incubator-hugegraph by Apache.
From the class EdgeAPI, method list().
@GET
@Timed
@Compress
@Produces(APPLICATION_JSON_WITH_CHARSET)
@RolesAllowed({ "admin", "$owner=$graph $action=edge_read" })
public String list(@Context GraphManager manager,
                   @PathParam("graph") String graph,
                   @QueryParam("vertex_id") String vertexId,
                   @QueryParam("direction") String direction,
                   @QueryParam("label") String label,
                   @QueryParam("properties") String properties,
                   @QueryParam("keep_start_p")
                   @DefaultValue("false") boolean keepStartP,
                   @QueryParam("offset") @DefaultValue("0") long offset,
                   @QueryParam("page") String page,
                   @QueryParam("limit") @DefaultValue("100") long limit) {
    /*
     * Lists edges filtered by source/target vertex, direction, label and
     * property conditions, with either offset+limit or page-token paging
     * (the two paging modes are mutually exclusive).
     */
    LOG.debug("Graph [{}] query edges by vertex: {}, direction: {}, " +
              "label: {}, properties: {}, offset: {}, page: {}, limit: {}",
              graph, vertexId, direction, label, properties, offset, page,
              limit);

    Map<String, Object> props = parseProperties(properties);
    if (page != null) {
        // Paging and offset can't be combined
        E.checkArgument(offset == 0,
                        "Not support querying edges based on paging " +
                        "and offset together");
    }

    Id sourceVertex = VertexAPI.checkAndParseVertexId(vertexId);
    Direction dir = parseDirection(direction);
    HugeGraph g = graph(manager, graph);

    // Anchor the traversal either on the given vertex or on all edges
    GraphTraversal<?, Edge> traversal;
    if (sourceVertex == null) {
        traversal = label == null
                  ? g.traversal().E()
                  : g.traversal().E().hasLabel(label);
    } else {
        traversal = label == null
                  ? g.traversal().V(sourceVertex).toE(dir)
                  : g.traversal().V(sourceVertex).toE(dir, label);
    }

    // Turn textual predicates such as "P.gt(10)" into real predicates,
    // unless the caller asked to keep them verbatim via keep_start_p
    for (Map.Entry<String, Object> prop : props.entrySet()) {
        Object raw = prop.getValue();
        boolean looksLikePredicate =
                raw instanceof String &&
                ((String) raw).startsWith(TraversalUtil.P_CALL);
        if (!keepStartP && looksLikePredicate) {
            prop.setValue(TraversalUtil.parsePredicate((String) raw));
        }
    }

    // Apply every property condition to the traversal
    for (Map.Entry<String, Object> condition : props.entrySet()) {
        traversal = traversal.has(condition.getKey(), condition.getValue());
    }

    traversal = page == null
              ? traversal.range(offset, offset + limit)
              : traversal.has(QueryHolder.SYSPROP_PAGE, page).limit(limit);

    try {
        return manager.serializer(g).writeEdges(traversal, page != null);
    } finally {
        // Serialization may leave the read transaction open; always close
        if (g.tx().isOpen()) {
            g.tx().close();
        }
    }
}
Aggregations