use of org.graylog.shaded.elasticsearch7.org.elasticsearch.index.reindex.BulkByScrollResponse in project pancm_project by xuwujing.
the class IpHandler method updateByQuery.
/**
 * @return Map with the elapsed time ("time") and the number of processed documents ("total")
 * @Author pancm
 * @Description Update documents matching the given query conditions
 * @Date 2019/3/21
 * @Param [index, type, queryBuilders]
 */
public static Map<String, Object> updateByQuery(String index, String type, QueryBuilder... queryBuilders) throws IOException {
    if (index == null || type == null) {
        return null;
    }
    Map<String, Object> map = new HashMap<>();
    try {
        UpdateByQueryRequest request = new UpdateByQueryRequest();
        request.indices(index);
        request.setDocTypes(type);
        if (queryBuilders != null) {
            for (QueryBuilder queryBuilder : queryBuilders) {
                // Note: setQuery replaces the previously set query, so only the last builder takes effect.
                request.setQuery(queryBuilder);
            }
        }
        // Execute synchronously
        BulkByScrollResponse bulkResponse = client.updateByQuery(request, RequestOptions.DEFAULT);
        // Collect the response data
        map.put("time", bulkResponse.getTook().getMillis());
        map.put("total", bulkResponse.getTotal());
    } finally {
        if (isAutoClose) {
            close();
        }
    }
    return map;
}
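Because request.setQuery(...) replaces any previously set query, passing several QueryBuilders applies only the last one. A minimal sketch of a hypothetical helper that merges all conditions into a single BoolQueryBuilder before calling setQuery (the class and method names are illustrative, not part of the project):

import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

// Hypothetical helper (not part of IpHandler): merge several conditions into one
// bool query so that all of them apply, instead of only the last setQuery() call.
public final class QueryCombiner {

    private QueryCombiner() {
    }

    public static QueryBuilder combine(QueryBuilder... queryBuilders) {
        BoolQueryBuilder bool = QueryBuilders.boolQuery();
        for (QueryBuilder queryBuilder : queryBuilders) {
            // Each condition becomes a mandatory clause of the combined query.
            bool.must(queryBuilder);
        }
        return bool;
    }
}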
use of org.graylog.shaded.elasticsearch7.org.elasticsearch.index.reindex.BulkByScrollResponse in project pancm_project by xuwujing.
the class IpHandler method deleteByQuery.
/**
 * @return Map with the elapsed time ("time") and the number of processed documents ("total")
 * @Author pancm
 * @Description Delete documents matching the given query conditions
 * @Date 2019/3/21
 * @Param [index, type, queryBuilders]
 */
public static Map<String, Object> deleteByQuery(String index, String type, QueryBuilder[] queryBuilders) throws IOException {
    if (index == null || type == null || queryBuilders == null) {
        return null;
    }
    Map<String, Object> map = new HashMap<>();
    try {
        DeleteByQueryRequest request = new DeleteByQueryRequest(index, type);
        if (queryBuilders != null) {
            for (QueryBuilder queryBuilder : queryBuilders) {
                // Note: setQuery replaces the previously set query, so only the last builder takes effect.
                request.setQuery(queryBuilder);
            }
        }
        // Execute synchronously
        BulkByScrollResponse bulkResponse = client.deleteByQuery(request, RequestOptions.DEFAULT);
        // Collect the response data
        map.put("time", bulkResponse.getTook().getMillis());
        map.put("total", bulkResponse.getTotal());
    } finally {
        if (isAutoClose) {
            close();
        }
    }
    return map;
}
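A minimal call-site sketch, assuming IpHandler's static client is already initialized; the index, type, field, and value are placeholders:

import java.io.IOException;
import java.util.Map;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

// Hypothetical caller (names are placeholders): delete every document in
// "user_index" whose "status" field equals "inactive".
public class DeleteByQueryExample {
    public static void main(String[] args) throws IOException {
        QueryBuilder[] conditions = { QueryBuilders.termQuery("status", "inactive") };
        Map<String, Object> result = IpHandler.deleteByQuery("user_index", "_doc", conditions);
        System.out.println("took(ms)=" + result.get("time") + ", total=" + result.get("total"));
    }
}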
use of org.graylog.shaded.elasticsearch7.org.elasticsearch.index.reindex.BulkByScrollResponse in project elasticsearch-indexing-proxy by codelibs.
the class ProxyActionFilter method getExecutor.
@SuppressWarnings("unchecked")
private <Request extends ActionRequest, Response extends ActionResponse> Supplier<Response> getExecutor(final Task task, final String action, final Request request) {
    if (BulkAction.NAME.equals(action)) {
        final long startTime = System.nanoTime();
        int count = 0;
        final BulkRequest req = (BulkRequest) request;
        for (final DocWriteRequest<?> subReq : req.requests()) {
            if (indexingProxyService.isTargetIndex(subReq.index())) {
                count++;
            }
        }
        if (count == 0) {
            return null;
        } else if (count != req.requests().size()) {
            // A bulk request must not mix proxied and non-proxied indices.
            throw new ElasticsearchException("Mixed target requests. ({} != {})", count, req.requests().size());
        }
        return () -> {
            // Fabricate a successful per-item response for every sub-request,
            // since the local cluster does not execute the proxied writes.
            final List<BulkItemResponse> responseList = new ArrayList<>(req.requests().size());
            for (int i = 0; i < req.requests().size(); i++) {
                final DocWriteRequest<?> dwr = req.requests().get(i);
                if (dwr instanceof IndexRequest) {
                    final IndexRequest r = (IndexRequest) dwr;
                    final String id = r.id() == null ? INDEX_UUID : r.id();
                    final IndexResponse response = new IndexResponse(new ShardId(new Index(r.index(), INDEX_UUID), 0), r.type(), id, r.version(), true);
                    responseList.add(new BulkItemResponse(i, r.opType(), response));
                } else if (dwr instanceof UpdateRequest) {
                    final UpdateRequest r = (UpdateRequest) dwr;
                    final String id = r.id() == null ? INDEX_UUID : r.id();
                    final UpdateResponse response = new UpdateResponse(new ShardId(new Index(r.index(), INDEX_UUID), 0), r.type(), id, r.version(), Result.CREATED);
                    responseList.add(new BulkItemResponse(i, r.opType(), response));
                } else if (dwr instanceof DeleteRequest) {
                    final DeleteRequest r = (DeleteRequest) dwr;
                    final String id = r.id() == null ? INDEX_UUID : r.id();
                    final DeleteResponse response = new DeleteResponse(new ShardId(new Index(r.index(), INDEX_UUID), 0), r.type(), id, r.version(), true);
                    response.setShardInfo(new ReplicationResponse.ShardInfo(1, 1, ReplicationResponse.EMPTY));
                    responseList.add(new BulkItemResponse(i, r.opType(), response));
                } else {
                    responseList.add(new BulkItemResponse(i, dwr.opType(), new BulkItemResponse.Failure(dwr.index(), dwr.type(), dwr.id(), new ElasticsearchException("Unknown request: " + dwr))));
                }
            }
            return (Response) new BulkResponse(responseList.toArray(new BulkItemResponse[responseList.size()]), (System.nanoTime() - startTime) / 1000000);
        };
    } else if (DeleteAction.NAME.equals(action)) {
        final DeleteRequest req = (DeleteRequest) request;
        if (!indexingProxyService.isTargetIndex(req.index())) {
            return null;
        }
        return () -> {
            final String id = req.id() == null ? INDEX_UUID : req.id();
            final DeleteResponse res = new DeleteResponse(new ShardId(new Index(req.index(), INDEX_UUID), 0), req.type(), id, req.version(), true);
            res.setShardInfo(new ReplicationResponse.ShardInfo(1, 1, ReplicationResponse.EMPTY));
            return (Response) res;
        };
    } else if (DeleteByQueryAction.NAME.equals(action)) {
        final long startTime = System.nanoTime();
        int count = 0;
        final DeleteByQueryRequest req = (DeleteByQueryRequest) request;
        for (final String index : req.indices()) {
            if (indexingProxyService.isTargetIndex(index)) {
                count++;
            }
        }
        if (count == 0) {
            return null;
        } else if (count != req.indices().length) {
            throw new ElasticsearchException("Mixed target requests. ({} != {})", count, req.indices().length);
        }
        // Return an empty BulkByScrollResponse with only the elapsed time filled in.
        return () -> {
            return (Response) new BulkByScrollResponse(TimeValue.timeValueNanos(System.nanoTime() - startTime), new BulkByScrollTask.Status(null, 0, 0, 0, 0, 0, 0, 0, 0, 0, TimeValue.ZERO, 0, null, TimeValue.ZERO), Collections.emptyList(), Collections.emptyList(), false);
        };
    } else if (IndexAction.NAME.equals(action)) {
        final IndexRequest req = (IndexRequest) request;
        if (!indexingProxyService.isTargetIndex(req.index())) {
            return null;
        }
        return () -> {
            final String id = req.id() == null ? INDEX_UUID : req.id();
            return (Response) new IndexResponse(new ShardId(new Index(req.index(), INDEX_UUID), 0), req.type(), id, req.version(), true);
        };
    } else if (UpdateAction.NAME.equals(action)) {
        final UpdateRequest req = (UpdateRequest) request;
        if (!indexingProxyService.isTargetIndex(req.index())) {
            return null;
        }
        return () -> {
            final String id = req.id() == null ? INDEX_UUID : req.id();
            return (Response) new UpdateResponse(new ShardId(new Index(req.index(), INDEX_UUID), 0), req.type(), id, req.version(), Result.CREATED);
        };
    } else if (UpdateByQueryAction.NAME.equals(action)) {
        final long startTime = System.nanoTime();
        int count = 0;
        final UpdateByQueryRequest req = (UpdateByQueryRequest) request;
        for (final String index : req.indices()) {
            if (indexingProxyService.isTargetIndex(index)) {
                count++;
            }
        }
        if (count == 0) {
            return null;
        } else if (count != req.indices().length) {
            throw new ElasticsearchException("Mixed target requests. ({} != {})", count, req.indices().length);
        }
        // Return an empty BulkByScrollResponse with only the elapsed time filled in.
        return () -> {
            return (Response) new BulkByScrollResponse(TimeValue.timeValueNanos(System.nanoTime() - startTime), new BulkByScrollTask.Status(null, 0, 0, 0, 0, 0, 0, 0, 0, 0, TimeValue.ZERO, 0, null, TimeValue.ZERO), Collections.emptyList(), Collections.emptyList(), false);
        };
    }
    return null;
}
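getExecutor only builds the synthetic response; the plugin's ActionFilter still has to decide whether to short-circuit the call. A minimal sketch of how the returned Supplier could be consumed in ActionFilter.apply, assuming it sits inside the same ProxyActionFilter class; the real plugin also forwards the intercepted request to the indexing proxy service, which is omitted here:

@Override
public <Request extends ActionRequest, Response extends ActionResponse> void apply(final Task task, final String action, final Request request, final ActionListener<Response> listener, final ActionFilterChain<Request, Response> chain) {
    final Supplier<Response> executor = getExecutor(task, action, request);
    if (executor == null) {
        // The request does not touch a proxied index: let the cluster handle it normally.
        chain.proceed(task, action, request, listener);
    } else {
        // Proxied index: reply with the fabricated response instead of executing the write locally.
        // (The actual plugin persists/forwards the request to the proxy before responding.)
        listener.onResponse(executor.get());
    }
}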
use of org.graylog.shaded.elasticsearch7.org.elasticsearch.index.reindex.BulkByScrollResponse in project incubator-skywalking by apache.
the class SegmentDurationEsPersistenceDAO method deleteHistory.
@Override
public void deleteHistory(Long startTimestamp, Long endTimestamp) {
    long startTimeBucket = TimeBucketUtils.INSTANCE.getMinuteTimeBucket(startTimestamp);
    long endTimeBucket = TimeBucketUtils.INSTANCE.getMinuteTimeBucket(endTimestamp);
    BulkByScrollResponse response = getClient().prepareDelete(
        QueryBuilders.rangeQuery(SegmentDurationTable.COLUMN_TIME_BUCKET).gte(startTimeBucket).lte(endTimeBucket),
        SegmentDurationTable.TABLE).get();
    long deleted = response.getDeleted();
    logger.info("Deleted {} rows of history from the {} index.", deleted, SegmentDurationTable.TABLE);
}
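getClient().prepareDelete(...) is SkyWalking's own wrapper around the Elasticsearch client. A hedged sketch of what such a helper could look like with the 5.x/6.x reindex module and a TransportClient field named client (the wrapper's actual implementation in SkyWalking may differ):

// Hypothetical helper on the ES client wrapper; assumes a TransportClient field named "client".
public DeleteByQueryRequestBuilder prepareDelete(QueryBuilder queryBuilder, String indexName) {
    return DeleteByQueryAction.INSTANCE.newRequestBuilder(client)
        .filter(queryBuilder)   // delete every document matching the query
        .source(indexName);     // limit the operation to the given index
}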
use of org.graylog.shaded.elasticsearch7.org.elasticsearch.index.reindex.BulkByScrollResponse in project incubator-skywalking by apache.
the class ServiceAlarmListEsPersistenceDAO method deleteHistory.
@Override
public void deleteHistory(Long startTimestamp, Long endTimestamp) {
    long startTimeBucket = TimeBucketUtils.INSTANCE.getMinuteTimeBucket(startTimestamp);
    long endTimeBucket = TimeBucketUtils.INSTANCE.getMinuteTimeBucket(endTimestamp);
    BulkByScrollResponse response = getClient().prepareDelete(
        QueryBuilders.rangeQuery(ServiceAlarmListTable.COLUMN_TIME_BUCKET).gte(startTimeBucket).lte(endTimeBucket),
        ServiceAlarmListTable.TABLE).get();
    long deleted = response.getDeleted();
    logger.info("Deleted {} rows of history from the {} index.", deleted, ServiceAlarmListTable.TABLE);
}
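Both DAOs follow the same pattern, and a deletion like this is typically driven by a data-retention job. A minimal caller sketch with an illustrative time window (the window arithmetic is an example, not SkyWalking's actual scheduler logic):

import java.util.concurrent.TimeUnit;

// Hypothetical retention job: purge rows whose minute time bucket falls inside a past window.
public void purgeOldRows(SegmentDurationEsPersistenceDAO dao) {
    long endTimestamp = System.currentTimeMillis() - TimeUnit.DAYS.toMillis(7); // keep the last 7 days
    long startTimestamp = endTimestamp - TimeUnit.DAYS.toMillis(1);             // purge one day per run
    dao.deleteHistory(startTimestamp, endTimestamp);
}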