Use of org.elasticsearch.action.bulk.BulkResponse in project fess by codelibs.

The class SearchService, method bulkUpdate.

public boolean bulkUpdate(final Consumer<BulkRequestBuilder> consumer) {
    final BulkRequestBuilder builder = fessEsClient.prepareBulk();
    consumer.accept(builder);
    try {
        final BulkResponse response = builder.execute().get();
        if (response.hasFailures()) {
            throw new FessEsClientException(response.buildFailureMessage());
        } else {
            return true;
        }
    } catch (InterruptedException | ExecutionException e) {
        throw new FessEsClientException("Failed to update bulk data.", e);
    }
}
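
A minimal usage sketch, assuming a SearchService instance named searchService is available; the index name, type, and document ids are hypothetical. The consumer receives the prepared BulkRequestBuilder and may add any mix of requests (here org.elasticsearch.action.index.IndexRequest and org.elasticsearch.action.delete.DeleteRequest) before the bulk call executes:

// Hypothetical caller; index name, type, and ids are illustrative only.
public boolean updateSampleDocs() {
    return searchService.bulkUpdate(builder -> {
        builder.add(new IndexRequest("fess.search", "doc", "doc-1")
                .source(Collections.singletonMap("title", "Example Title")));
        builder.add(new DeleteRequest("fess.search", "doc", "doc-2"));
    });
}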

Use of org.elasticsearch.action.bulk.BulkResponse in project fess by codelibs.

The class FessEsClient, method insertBulkData.

protected void insertBulkData(final FessConfig fessConfig, final String configIndex, final String configType, final String dataPath) {
    try {
        final BulkRequestBuilder builder = client.prepareBulk();
        final ObjectMapper mapper = new ObjectMapper();
        // The bulk file alternates action lines and source lines; reduce() carries the
        // action line forward as "prev" so it can be paired with the following source line.
        Arrays.stream(FileUtil.readUTF8(dataPath).split("\n")).reduce((prev, line) -> {
            try {
                if (StringUtil.isBlank(prev)) {
                    final Map<String, Map<String, String>> result = mapper.readValue(line, new TypeReference<Map<String, Map<String, String>>>() {
                    });
                    if (result.keySet().contains("index")) {
                        return line;
                    } else if (result.keySet().contains("update")) {
                        return line;
                    } else if (result.keySet().contains("delete")) {
                        return StringUtil.EMPTY;
                    }
                } else {
                    final Map<String, Map<String, String>> result = mapper.readValue(prev, new TypeReference<Map<String, Map<String, String>>>() {
                    });
                    if (result.keySet().contains("index")) {
                        final IndexRequestBuilder requestBuilder = client.prepareIndex(configIndex, configType, result.get("index").get("_id")).setSource(line, XContentFactory.xContentType(line));
                        builder.add(requestBuilder);
                    }
                }
            } catch (final Exception e) {
                logger.warn("Failed to parse " + dataPath, e);
            }
            return StringUtil.EMPTY;
        });
        final BulkResponse response = builder.execute().actionGet(fessConfig.getIndexBulkTimeout());
        if (response.hasFailures()) {
            logger.warn("Failed to register " + dataPath + ": " + response.buildFailureMessage());
        }
    } catch (final Exception e) {
        logger.warn("Failed to insert bulk data from " + dataPath + " into " + configIndex + "/" + configType + ".", e);
    }
}
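
For reference, a sketch of the bulk data file this method parses, assuming the standard Elasticsearch bulk NDJSON layout: each action line carries the target _id, and the following line holds the document source (the reduce above pairs them up; delete actions are skipped). The ids and fields below are hypothetical:

{"index":{"_id":"1"}}
{"title":"Example Title","content":"Example body text"}
{"index":{"_id":"2"}}
{"title":"Another Title","content":"More body text"}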

Use of org.elasticsearch.action.bulk.BulkResponse in project fess by codelibs.

The class FessEsClient, method addAll.

public void addAll(final String index, final String type, final List<Map<String, Object>> docList) {
    final FessConfig fessConfig = ComponentUtil.getFessConfig();
    final BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
    for (final Map<String, Object> doc : docList) {
        final Object id = doc.remove(fessConfig.getIndexFieldId());
        bulkRequestBuilder.add(client.prepareIndex(index, type, id.toString()).setSource(new DocMap(doc)));
    }
    final BulkResponse response = bulkRequestBuilder.execute().actionGet(fessConfig.getIndexBulkTimeout());
    if (response.hasFailures()) {
        if (logger.isDebugEnabled()) {
            @SuppressWarnings("rawtypes")
            final List<DocWriteRequest> requests = bulkRequestBuilder.request().requests();
            final BulkItemResponse[] items = response.getItems();
            if (requests.size() == items.length) {
                for (int i = 0; i < requests.size(); i++) {
                    final BulkItemResponse resp = items[i];
                    if (resp.isFailed() && resp.getFailure() != null) {
                        final DocWriteRequest<?> req = requests.get(i);
                        final Failure failure = resp.getFailure();
                        logger.debug("Failed Request: " + req + "\n=>" + failure.getMessage());
                    }
                }
            }
        }
        throw new FessEsClientException(response.buildFailureMessage());
    }
}
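
A minimal usage sketch, assuming a FessEsClient instance named fessEsClient; the index name, type, and field values are hypothetical. Each document map must contain the configured id field (fessConfig.getIndexFieldId()), which addAll removes and uses as the Elasticsearch document id:

// Hypothetical caller; index name, type, and field values are illustrative only.
public void indexSampleDoc() {
    final FessConfig fessConfig = ComponentUtil.getFessConfig();
    final Map<String, Object> doc = new HashMap<>();
    doc.put(fessConfig.getIndexFieldId(), "doc-0001"); // consumed by addAll as the document id
    doc.put("title", "Example Title");
    doc.put("content", "Example body text");
    fessEsClient.addAll("fess.search", "doc", Collections.singletonList(doc));
}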

Use of org.elasticsearch.action.bulk.BulkResponse in project fess by codelibs.

The class EsDataStoreImpl, method processData.

protected void processData(final DataConfig dataConfig, final IndexUpdateCallback callback, final Map<String, String> paramMap, final Map<String, String> scriptMap, final Map<String, Object> defaultDataMap, final long readInterval, final Client client) {
    final boolean deleteProcessedDoc = paramMap.getOrDefault("delete.processed.doc", Constants.FALSE).equalsIgnoreCase(Constants.TRUE);
    final String[] indices;
    if (paramMap.containsKey(INDEX)) {
        indices = paramMap.get(INDEX).trim().split(",");
    } else {
        indices = new String[] { "_all" };
    }
    final String scroll = paramMap.containsKey(SCROLL) ? paramMap.get(SCROLL).trim() : "1m";
    final String timeout = paramMap.containsKey(TIMEOUT) ? paramMap.get(TIMEOUT).trim() : "1m";
    final SearchRequestBuilder builder = client.prepareSearch(indices);
    if (paramMap.containsKey(TYPE)) {
        builder.setTypes(paramMap.get(TYPE).trim().split(","));
    }
    if (paramMap.containsKey(SIZE)) {
        builder.setSize(Integer.parseInt(paramMap.get(SIZE)));
    }
    if (paramMap.containsKey(FIELDS)) {
        builder.setFetchSource(paramMap.get(FIELDS).trim().split(","), null);
    }
    builder.setQuery(QueryBuilders.wrapperQuery(paramMap.containsKey(QUERY) ? paramMap.get(QUERY).trim() : "{\"match_all\":{}}"));
    builder.setScroll(scroll);
    builder.setPreference(paramMap.containsKey(PREFERENCE) ? paramMap.get(PREFERENCE).trim() : Constants.SEARCH_PREFERENCE_PRIMARY);
    try {
        SearchResponse response = builder.execute().actionGet(timeout);
        String scrollId = response.getScrollId();
        // Iterate over the result set with the scroll API until no more hits are returned.
        while (scrollId != null) {
            final SearchHits searchHits = response.getHits();
            final SearchHit[] hits = searchHits.getHits();
            if (hits.length == 0) {
                scrollId = null;
                break;
            }
            boolean loop = true;
            final BulkRequestBuilder bulkRequest = deleteProcessedDoc ? client.prepareBulk() : null;
            for (final SearchHit hit : hits) {
                if (!alive || !loop) {
                    break;
                }
                final Map<String, Object> dataMap = new HashMap<>();
                dataMap.putAll(defaultDataMap);
                final Map<String, Object> resultMap = new LinkedHashMap<>();
                resultMap.putAll(paramMap);
                resultMap.put("index", hit.getIndex());
                resultMap.put("type", hit.getType());
                resultMap.put("id", hit.getId());
                resultMap.put("version", Long.valueOf(hit.getVersion()));
                resultMap.put("hit", hit);
                resultMap.put("source", hit.getSource());
                resultMap.put("crawlingConfig", dataConfig);
                if (logger.isDebugEnabled()) {
                    for (final Map.Entry<String, Object> entry : resultMap.entrySet()) {
                        logger.debug(entry.getKey() + "=" + entry.getValue());
                    }
                }
                final Map<String, Object> crawlingContext = new HashMap<>();
                crawlingContext.put("doc", dataMap);
                resultMap.put("crawlingContext", crawlingContext);
                for (final Map.Entry<String, String> entry : scriptMap.entrySet()) {
                    final Object convertValue = convertValue(entry.getValue(), resultMap);
                    if (convertValue != null) {
                        dataMap.put(entry.getKey(), convertValue);
                    }
                }
                if (logger.isDebugEnabled()) {
                    for (final Map.Entry<String, Object> entry : dataMap.entrySet()) {
                        logger.debug(entry.getKey() + "=" + entry.getValue());
                    }
                }
                try {
                    callback.store(paramMap, dataMap);
                } catch (final CrawlingAccessException e) {
                    logger.warn("Crawling Access Exception at: " + dataMap, e);
                    Throwable target = e;
                    if (target instanceof MultipleCrawlingAccessException) {
                        final Throwable[] causes = ((MultipleCrawlingAccessException) target).getCauses();
                        if (causes.length > 0) {
                            target = causes[causes.length - 1];
                        }
                    }
                    String errorName;
                    final Throwable cause = target.getCause();
                    if (cause != null) {
                        errorName = cause.getClass().getCanonicalName();
                    } else {
                        errorName = target.getClass().getCanonicalName();
                    }
                    String url;
                    if (target instanceof DataStoreCrawlingException) {
                        final DataStoreCrawlingException dce = (DataStoreCrawlingException) target;
                        url = dce.getUrl();
                        if (dce.aborted()) {
                            loop = false;
                        }
                    } else {
                        url = hit.getIndex() + "/" + hit.getType() + "/" + hit.getId();
                    }
                    final FailureUrlService failureUrlService = ComponentUtil.getComponent(FailureUrlService.class);
                    failureUrlService.store(dataConfig, errorName, url, target);
                } catch (final Throwable t) {
                    logger.warn("Crawling Access Exception at: " + dataMap, t);
                    final String url = hit.getIndex() + "/" + hit.getType() + "/" + hit.getId();
                    final FailureUrlService failureUrlService = ComponentUtil.getComponent(FailureUrlService.class);
                    failureUrlService.store(dataConfig, t.getClass().getCanonicalName(), url, t);
                }
                if (bulkRequest != null) {
                    bulkRequest.add(client.prepareDelete(hit.getIndex(), hit.getType(), hit.getId()));
                }
                if (readInterval > 0) {
                    sleep(readInterval);
                }
            }
            // Delete processed documents in a single bulk request if requested.
            if (bulkRequest != null && bulkRequest.numberOfActions() > 0) {
                final BulkResponse bulkResponse = bulkRequest.execute().actionGet(timeout);
                if (bulkResponse.hasFailures()) {
                    logger.warn(bulkResponse.buildFailureMessage());
                }
            }
            if (!alive) {
                break;
            }
            // Fetch the next batch of hits for the current scroll context.
            response = client.prepareSearchScroll(scrollId).setScroll(scroll).execute().actionGet(timeout);
            scrollId = response.getScrollId();
        }
    } catch (final Exception e) {
        throw new DataStoreException("Failed to crawl data when accessing elasticsearch.", e);
    }
}
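
A sketch of the parameter map this data store consumes; the "delete.processed.doc" key is taken from the code above, while the remaining key names assume the INDEX, TYPE, QUERY, SIZE, and SCROLL constants resolve to the lowercase strings shown (an assumption). Keys left unset fall back to _all indices, a match_all query, and 1m scroll/timeout values:

// Hypothetical configuration; key names other than "delete.processed.doc" are assumptions
// about the values of the INDEX, TYPE, QUERY, SIZE, and SCROLL constants.
protected Map<String, String> buildParamMap() {
    final Map<String, String> paramMap = new HashMap<>();
    paramMap.put("index", "source_index");
    paramMap.put("type", "doc");
    paramMap.put("query", "{\"term\":{\"status\":\"published\"}}");
    paramMap.put("size", "100");
    paramMap.put("scroll", "5m");
    paramMap.put("delete.processed.doc", "true"); // bulk-delete source documents after processing
    return paramMap;
}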

Use of org.elasticsearch.action.bulk.BulkResponse in project metron by apache.

The class ElasticsearchDao, method batchUpdate.

@Override
public void batchUpdate(Map<Document, Optional<String>> updates) throws IOException {
    String indexPostfix = ElasticsearchUtils.getIndexFormat(accessConfig.getGlobalConfigSupplier().get()).format(new Date());
    BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
    // Get the indices we'll actually be using for each Document.
    for (Map.Entry<Document, Optional<String>> updateEntry : updates.entrySet()) {
        Document update = updateEntry.getKey();
        String sensorType = update.getSensorType();
        String indexName = getIndexName(update, updateEntry.getValue(), indexPostfix);
        IndexRequest indexRequest = buildIndexRequest(update, sensorType, indexName);
        bulkRequestBuilder.add(indexRequest);
    }
    BulkResponse bulkResponse = bulkRequestBuilder.get();
    if (bulkResponse.hasFailures()) {
        LOG.error("Bulk Request has failures: {}", bulkResponse.buildFailureMessage());
        throw new IOException("ElasticsearchDao upsert failed: " + bulkResponse.buildFailureMessage());
    }
}
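
A minimal usage sketch; the Document constructor arguments (message map, GUID, sensor type, timestamp) and the field values are assumptions rather than confirmed Metron API details. Passing Optional.empty() lets getIndexName derive the target index from the sensor type and index postfix:

// Hypothetical caller; the Document constructor signature and values are assumptions.
public void upsertSampleAlert(ElasticsearchDao dao) throws IOException {
    Map<String, Object> message = new HashMap<>();
    message.put("guid", "alert-123");
    message.put("ip_src_addr", "10.0.0.1");
    Document update = new Document(message, "alert-123", "bro", System.currentTimeMillis());
    Map<Document, Optional<String>> updates = new HashMap<>();
    updates.put(update, Optional.empty()); // index resolved from sensor type + postfix
    dao.batchUpdate(updates);
}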