Use of org.opensearch.common.unit.TimeValue in project bw-calendar-engine by Bedework.
In class BwIndexEsImpl, method fetchEntities:
private <T> List<T> fetchEntities(final String docType,
                                  final BuildEntity<T> be,
                                  final QueryBuilder filter,
                                  final int count) throws CalFacadeException {
  requireDocType(docType);

  int tries = 0;
  final int ourCount;

  if (count < 0) {
    ourCount = maxFetchCount;
  } else {
    ourCount = Math.min(maxFetchCount, count);
  }

  final SearchSourceBuilder ssb = new SearchSourceBuilder()
          .size(ourCount)
          .query(filter);

  final SearchRequest sr = new SearchRequest(targetIndex)
          .source(ssb)
          .scroll(new TimeValue(60000));

  if (debug()) {
    debug("fetchEntities: " + sr);
  }

  final List<T> res = new ArrayList<>();

  try {
    SearchResponse scrollResp = getClient().search(sr, RequestOptions.DEFAULT);

    if (scrollResp.status() != RestStatus.OK) {
      if (debug()) {
        debug("Search returned status " + scrollResp.status());
      }
    }

    for (;;) {
      if (tries > absoluteMaxTries) {
        // huge count or we screwed up
        warn("Indexer: too many tries");
        break;
      }

      if (scrollResp.status() != RestStatus.OK) {
        if (debug()) {
          debug("Search returned status " + scrollResp.status());
        }
      }

      final SearchHits hits = scrollResp.getHits();

      // Break condition: No hits are returned
      if (hits.getHits().length == 0) {
        break;
      }

      for (final SearchHit hit : hits) {
        // Handle the hit...
        final T ent = be.make(getEntityBuilder(hit.getSourceAsMap()),
                              hit.getId());
        if (ent == null) {
          // No access
          continue;
        }

        res.add(ent);
        // ourPos++;
      }

      tries++;

      final SearchScrollRequest scrollRequest =
              new SearchScrollRequest(scrollResp.getScrollId());
      scrollRequest.scroll(new TimeValue(60000));
      scrollResp = getClient().scroll(scrollRequest, RequestOptions.DEFAULT);
    }
  } catch (final Throwable t) {
    throw new CalFacadeException(t);
  }

  return res;
}
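
In fetchEntities the TimeValue passed to scroll() - here new TimeValue(60000), i.e. 60 seconds - is the keep-alive for the scroll context, and it is renewed on every SearchScrollRequest. Below is a minimal, self-contained sketch of the same pattern with the OpenSearch high-level REST client; client, indexName and the match-all query are illustrative placeholders, and unlike the method above (which leaves the scroll context to expire on its own) the sketch also releases it with a ClearScrollRequest.

import java.util.ArrayList;
import java.util.List;

import org.opensearch.action.search.ClearScrollRequest;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.action.search.SearchScrollRequest;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.search.SearchHit;
import org.opensearch.search.builder.SearchSourceBuilder;

public class ScrollFetchSketch {
  // Keep-alive for each scroll page; equivalent to new TimeValue(60000).
  private static final TimeValue KEEP_ALIVE = TimeValue.timeValueMinutes(1L);

  public static List<String> fetchAllIds(final RestHighLevelClient client,
                                         final String indexName) throws Exception {
    final SearchRequest sr = new SearchRequest(indexName)
            .source(new SearchSourceBuilder()
                    .size(100)
                    .query(QueryBuilders.matchAllQuery()))
            .scroll(KEEP_ALIVE);

    final List<String> ids = new ArrayList<>();
    SearchResponse resp = client.search(sr, RequestOptions.DEFAULT);

    try {
      while (resp.getHits().getHits().length > 0) {
        for (final SearchHit hit : resp.getHits()) {
          ids.add(hit.getId());
        }

        // Each continuation renews the keep-alive for another minute.
        final SearchScrollRequest next = new SearchScrollRequest(resp.getScrollId());
        next.scroll(KEEP_ALIVE);
        resp = client.scroll(next, RequestOptions.DEFAULT);
      }
    } finally {
      // Release the server-side scroll context when done.
      final ClearScrollRequest clear = new ClearScrollRequest();
      clear.addScrollId(resp.getScrollId());
      client.clearScroll(clear, RequestOptions.DEFAULT);
    }

    return ids;
  }
}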
Use of org.opensearch.common.unit.TimeValue in project bw-calendar-engine by Bedework.
In class BwIndexEsImpl, method reindex:
@Override
public ReindexResponse reindex() {
  if (currentReindexing == null) {
    currentReindexing = new ReindexResponse(docType);
  }

  final ReindexResponse resp = currentReindexing;

  if (resp.getStatus() == processing) {
    return resp;
  }

  // Create a new index.
  final String indexName;

  try {
    indexName = newIndex();
  } catch (final Throwable t) {
    return Response.error(resp, t);
  }

  // Only retrieve masters - we'll query for the overrides
  final QueryBuilder qb = getFilters(RecurringRetrievalMode.entityOnly)
          .getAllForReindex(docType);

  // 1 minute
  final int timeoutMillis = 60000;
  final TimeValue tv = new TimeValue(timeoutMillis);
  final int batchSize = 100;

  final var clResp = sch.getClient();

  if (!clResp.isOk()) {
    return Response.fromResponse(resp, clResp);
  }

  final var cl = clResp.getEntity();

  final BulkListener listener = new BulkListener();
  final BulkProcessor.Builder builder = BulkProcessor.builder(
          (request, bulkListener) ->
                  cl.bulkAsync(request, RequestOptions.DEFAULT, bulkListener),
          listener);

  final BulkProcessor bulkProcessor = builder
          .setBulkActions(batchSize)
          .setConcurrentRequests(3)
          .setFlushInterval(tv)
          .build();

  /*
  SearchResponse scrollResp = cl.prepareSearch(targetIndex)
          .setSearchType(SearchType.SCAN)
          .setScroll(tv)
          .setQuery(qb)
          .setSize(batchSize)
          .execute()
          .actionGet(); //100 hits per shard will be returned for each scroll
  */

  final SearchSourceBuilder ssb = new SearchSourceBuilder()
          .size(batchSize)
          .query(qb);

  final SearchRequest sr = new SearchRequest(targetIndex)
          .source(ssb)
          .scroll(tv);

  // Switch to new index
  targetIndex = indexName;

  try {
    SearchResponse scrollResp = cl.search(sr, RequestOptions.DEFAULT);

    if (scrollResp.status() != RestStatus.OK) {
      if (debug()) {
        debug("Search returned status " + scrollResp.status());
      }
    }

    // Scroll until no hits are returned
    while (true) {
      for (final SearchHit hit : scrollResp.getHits().getHits()) {
        resp.incProcessed();

        if ((resp.getProcessed() % 250) == 0) {
          info("processed " + docType + ": " + resp.getProcessed());
        }

        resp.getStats().inc(docToType.getOrDefault(docType, unreachableEntities));

        final ReindexResponse.Failure hitResp = new ReindexResponse.Failure();
        final Object entity = makeEntity(hitResp, hit, null);

        if (entity == null) {
          warn("Unable to build entity " + hit.getSourceAsString());
          resp.incTotalFailed();

          if (resp.getTotalFailed() < 50) {
            resp.addFailure(hitResp);
          }

          continue;
        }

        if (entity instanceof BwShareableDbentity) {
          final BwShareableDbentity<?> ent = (BwShareableDbentity<?>) entity;
          principalHref = ent.getOwnerHref();
        }

        if (entity instanceof EventInfo) {
          // This might be a single event or a recurring event.
          final EventInfo ei = (EventInfo) entity;
          final BwEvent ev = ei.getEvent();

          if (ev.getRecurring()) {
            resp.incRecurring();
          }

          if (!reindexEvent(hitResp, indexName, hit, ei, bulkProcessor)) {
            warn("Unable to index event " + hit.getSourceAsString());
            resp.incTotalFailed();

            if (resp.getTotalFailed() < 50) {
              resp.addFailure(hitResp);
            }
          }
        } else {
          final EsDocInfo doc = makeDoc(resp, entity);

          if (doc == null) {
            if (resp.getStatus() != ok) {
              resp.addFailure(hitResp);
            }

            continue;
          }

          final IndexRequest request = new IndexRequest(indexName);

          request.id(doc.getId());
          request.source(doc.getSource());
          bulkProcessor.add(request);

          if (entity instanceof BwEventProperty) {
            caches.put((BwEventProperty<?>) entity);
          }
        }
      }

      final SearchScrollRequest scrollRequest =
              new SearchScrollRequest(scrollResp.getScrollId());
      scrollRequest.scroll(tv);
      scrollResp = getClient().scroll(scrollRequest, RequestOptions.DEFAULT);

      // Break condition: No hits are returned
      if (scrollResp.getHits().getHits().length == 0) {
        break;
      }
    }

    try {
      bulkProcessor.awaitClose(10, TimeUnit.MINUTES);
    } catch (final InterruptedException e) {
      errorReturn(resp,
                  "Final bulk close was interrupted. Records may be missing",
                  failed);
    }
  } catch (final Throwable t) {
    errorReturn(resp, t);
  }

  return resp;
}
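
In reindex the same TimeValue tv does double duty: it is the scroll keep-alive on the source search and the flush interval of the BulkProcessor that writes into the new index. The following is a hedged sketch of just the BulkProcessor wiring; client, indexName, docs and the logging-only listener are placeholders rather than Bedework's BulkListener.

import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.opensearch.action.bulk.BulkProcessor;
import org.opensearch.action.bulk.BulkRequest;
import org.opensearch.action.bulk.BulkResponse;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.common.unit.TimeValue;

public class BulkReindexSketch {
  public static void reindexDocs(final RestHighLevelClient client,
                                 final String indexName,
                                 final Iterable<Map<String, Object>> docs) throws Exception {
    final BulkProcessor.Listener listener = new BulkProcessor.Listener() {
      @Override
      public void beforeBulk(final long executionId, final BulkRequest request) {
      }

      @Override
      public void afterBulk(final long executionId, final BulkRequest request,
                            final BulkResponse response) {
        if (response.hasFailures()) {
          System.err.println(response.buildFailureMessage());
        }
      }

      @Override
      public void afterBulk(final long executionId, final BulkRequest request,
                            final Throwable failure) {
        failure.printStackTrace();
      }
    };

    final BulkProcessor bulkProcessor = BulkProcessor.builder(
            (request, bulkListener) ->
                    client.bulkAsync(request, RequestOptions.DEFAULT, bulkListener),
            listener)
            .setBulkActions(100)                              // flush after 100 actions...
            .setConcurrentRequests(3)
            .setFlushInterval(TimeValue.timeValueSeconds(60)) // ...or after 60s, whichever comes first
            .build();

    for (final Map<String, Object> doc : docs) {
      bulkProcessor.add(new IndexRequest(indexName).source(doc));
    }

    // Flush anything still buffered and wait for in-flight requests.
    bulkProcessor.awaitClose(10, TimeUnit.MINUTES);
  }
}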
Use of org.opensearch.common.unit.TimeValue in project bw-calendar-engine by Bedework.
In class BwIndexEsImpl, method multiFetch:
/*
private SearchHits multiColFetch(final List<String> hrefs)
        throws CalFacadeException {
  final int batchSize = hrefs.size();

  final SearchRequestBuilder srb = getClient()
          .prepareSearch(searchIndexes);

  final TermsQueryBuilder tqb =
          new TermsQueryBuilder(
                  ESQueryFilter.getJname(PropertyInfoIndex.HREF),
                  hrefs);

  srb.setSearchType(SearchType.QUERY_THEN_FETCH)
     .setQuery(tqb);
  srb.setFrom(0);
  srb.setSize(batchSize);

  if (debug()) {
    debug("MultiColFetch: targetIndex=" + targetIndex +
                  "; srb=" + srb);
  }

  final SearchResponse resp = srb.execute().actionGet();

  if (resp.status() != RestStatus.OK) {
    if (debug()) {
      debug("Search returned status " + resp.status());
    }

    return null;
  }

  final SearchHits hits = resp.getHits();

  if ((hits.getHits() == null) ||
      (hits.getHits().length == 0)) {
    return null;
  }

  //Break condition: No hits are returned
  if (hits.getHits().length == 0) {
    return null;
  }

  return hits;
}
*/
private List<SearchHit> multiFetch(final SearchHits hits,
                                   final RecurringRetrievalMode rmode)
        throws CalFacadeException {
  // Make an ored filter from keys

  // Dedup
  final Set<String> hrefs = new TreeSet<>();

  for (final SearchHit hit : hits) {
    final String kval = hit.getId();

    if (kval == null) {
      throw new CalFacadeException("org.bedework.index.noitemkey");
    }

    final Map<String, Object> map = hit.getSourceAsMap();
    final Object field = map.get(ESQueryFilter.hrefJname);

    if (field == null) {
      warn("Unable to get field " + ESQueryFilter.hrefJname +
                   " from " + map);
    } else {
      hrefs.add(field.toString());
    }
  }

  final int batchSize = 1000;
  final List<SearchHit> res = new ArrayList<>();

  final SearchSourceBuilder ssb = new SearchSourceBuilder()
          .size(batchSize)
          .query(getFilters(null).multiHref(hrefs, rmode));

  final SearchRequest req = new SearchRequest(searchIndexes)
          .searchType(SearchType.QUERY_THEN_FETCH)
          .source(ssb)
          .scroll(TimeValue.timeValueMinutes(1L));

  if (debug()) {
    debug("MultiFetch: targetIndex=" + targetIndex + "; ssb=" + ssb);
  }

  try {
    SearchResponse resp = getClient().search(req, RequestOptions.DEFAULT);
    int tries = 0;

    for (;;) {
      if (tries > absoluteMaxTries) {
        // huge count or we screwed up
        warn("Indexer: too many tries");
        break;
      }

      if (resp.status() != RestStatus.OK) {
        if (debug()) {
          debug("Search returned status " + resp.status());
        }

        return null;
      }

      final SearchHit[] hits2 = resp.getHits().getHits();

      if ((hits2 == null) || (hits2.length == 0)) {
        // No more data - we're done
        break;
      }

      res.addAll(Arrays.asList(hits2));

      tries++;

      final SearchScrollRequest scrollRequest =
              new SearchScrollRequest(resp.getScrollId());
      scrollRequest.scroll(new TimeValue(60000));
      resp = getClient().scroll(scrollRequest, RequestOptions.DEFAULT);
    }

    return res;
  } catch (final Throwable t) {
    throw new CalFacadeException(t);
  }
}
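
multiFetch mixes two spellings of the same 60-second keep-alive: TimeValue.timeValueMinutes(1L) on the initial request and new TimeValue(60000) on each scroll continuation. The small sketch below shows the equivalent ways of constructing that value; the class name is made up for the example.

import java.util.concurrent.TimeUnit;

import org.opensearch.common.unit.TimeValue;

public class TimeValueForms {
  public static void main(final String[] args) {
    final TimeValue fromMillis = new TimeValue(60000);              // millisecond constructor
    final TimeValue fromMinutes = TimeValue.timeValueMinutes(1L);   // static factory
    final TimeValue fromUnit = new TimeValue(60, TimeUnit.SECONDS); // duration + unit

    // All three represent 60 000 ms.
    System.out.println(fromMillis.millis());   // 60000
    System.out.println(fromMinutes.millis());  // 60000
    System.out.println(fromUnit.millis());     // 60000
  }
}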
Use of org.opensearch.common.unit.TimeValue in project fess-crawler by codelibs.
In class EsUrlQueueService, method updateSessionId:
@Override
public void updateSessionId(final String oldSessionId, final String newSessionId) {
    SearchResponse response = getClient().get(c -> c.prepareSearch(index)
            .setScroll(new TimeValue(scrollTimeout))
            .setQuery(QueryBuilders.boolQuery()
                    .filter(QueryBuilders.termQuery(SESSION_ID, oldSessionId)))
            .setSize(scrollSize)
            .execute());
    String scrollId = response.getScrollId();
    try {
        while (scrollId != null) {
            final SearchHits searchHits = response.getHits();
            if (searchHits.getHits().length == 0) {
                break;
            }
            final BulkResponse bulkResponse = getClient().get(c -> {
                final BulkRequestBuilder builder = c.prepareBulk();
                for (final SearchHit searchHit : searchHits) {
                    final UpdateRequestBuilder updateRequest = c.prepareUpdate()
                            .setIndex(index)
                            .setId(searchHit.getId())
                            .setDoc(SESSION_ID, newSessionId);
                    builder.add(updateRequest);
                }
                return builder.execute();
            });
            if (bulkResponse.hasFailures()) {
                throw new EsAccessException(bulkResponse.buildFailureMessage());
            }
            final String sid = scrollId;
            response = getClient().get(c -> c.prepareSearchScroll(sid)
                    .setScroll(new TimeValue(scrollTimeout))
                    .execute());
            if (!scrollId.equals(response.getScrollId())) {
                getClient().clearScroll(scrollId);
            }
            scrollId = response.getScrollId();
        }
    } finally {
        getClient().clearScroll(scrollId);
    }
}
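
Here the keep-alive is built with new TimeValue(scrollTimeout), which treats scrollTimeout as a raw millisecond count. As a side note, and not part of fess-crawler, a timeout configured as a human-readable string can be turned into a TimeValue with the parseTimeValue factory; the setting name and class below are invented for the example.

import org.opensearch.common.unit.TimeValue;

public class ScrollTimeoutConfig {
  public static void main(final String[] args) {
    // e.g. "1m", "30s", "60000ms"
    final String configured = "1m";

    final TimeValue scrollTimeout =
            TimeValue.parseTimeValue(configured, "crawler.scroll.timeout");

    System.out.println(scrollTimeout.millis());       // 60000
    System.out.println(scrollTimeout.getStringRep()); // 1m
  }
}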
Use of org.opensearch.common.unit.TimeValue in project OpenSearch by opensearch-project.
In class SnapshotLifecycleStats, method toXContent:
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    builder.startObject();
    builder.field(RETENTION_RUNS.getPreferredName(), this.retentionRunCount);
    builder.field(RETENTION_FAILED.getPreferredName(), this.retentionFailedCount);
    builder.field(RETENTION_TIMED_OUT.getPreferredName(), this.retentionTimedOut);

    TimeValue retentionTime = TimeValue.timeValueMillis(this.retentionTimeMs);
    builder.field(RETENTION_TIME.getPreferredName(), retentionTime);
    builder.field(RETENTION_TIME_MILLIS.getPreferredName(), retentionTime.millis());

    Map<String, SnapshotPolicyStats> metrics = getMetrics();
    long totalTaken = metrics.values().stream().mapToLong(s -> s.snapshotsTaken).sum();
    long totalFailed = metrics.values().stream().mapToLong(s -> s.snapshotsFailed).sum();
    long totalDeleted = metrics.values().stream().mapToLong(s -> s.snapshotsDeleted).sum();
    long totalDeleteFailures = metrics.values().stream().mapToLong(s -> s.snapshotDeleteFailures).sum();
    builder.field(TOTAL_TAKEN.getPreferredName(), totalTaken);
    builder.field(TOTAL_FAILED.getPreferredName(), totalFailed);
    builder.field(TOTAL_DELETIONS.getPreferredName(), totalDeleted);
    builder.field(TOTAL_DELETION_FAILURES.getPreferredName(), totalDeleteFailures);

    builder.startObject(POLICY_STATS.getPreferredName());
    for (Map.Entry<String, SnapshotPolicyStats> policy : metrics.entrySet()) {
        SnapshotPolicyStats perPolicyMetrics = policy.getValue();
        builder.startObject(perPolicyMetrics.policyId);
        perPolicyMetrics.toXContent(builder, params);
        builder.endObject();
    }
    builder.endObject();
    builder.endObject();
    return builder;
}
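
The stats writer records the retention time twice: once as a TimeValue, which renders in a human-readable unit, and once as raw milliseconds via millis(). The conversion behind that pair of fields is just the following; the sample value and class name are invented for illustration.

import org.opensearch.common.unit.TimeValue;

public class RetentionTimeFields {
  public static void main(final String[] args) {
    final long retentionTimeMs = 90_500L; // example value

    final TimeValue retentionTime = TimeValue.timeValueMillis(retentionTimeMs);

    System.out.println(retentionTime);          // e.g. "1.5m" - toString() picks a readable unit
    System.out.println(retentionTime.millis()); // 90500
  }
}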