Use of org.elasticsearch.common.document.DocumentField in project datashare by ICIJ.
The class ElasticsearchIndexer, method get.
@Override
public <T extends Entity> T get(String indexName, String id, String root) {
    String type = null;
    try {
        final GetRequest req = new GetRequest(indexName, id).routing(root);
        final GetResponse resp = client.get(req, RequestOptions.DEFAULT);
        if (resp.isExists()) {
            Map<String, Object> sourceAsMap = resp.getSourceAsMap();
            // Expose the routing key as "rootDocument"; a document without an
            // explicit _routing field stands in as its own root.
            sourceAsMap.put("rootDocument",
                    ofNullable(resp.getFields().get("_routing"))
                            .orElse(new DocumentField("_routing", Collections.singletonList(id)))
                            .getValues().get(0));
            type = (String) sourceAsMap.get(esCfg.docTypeField);
            Class<T> tClass = (Class<T>) Class.forName("org.icij.datashare.text." + type);
            return JsonObjectMapper.getObject(id, sourceAsMap, tClass);
        }
    } catch (IOException e) {
        LOGGER.error("Failed to get entity " + id + " in index " + indexName, e);
    } catch (ClassNotFoundException e) {
        LOGGER.error("no entity for type " + type);
    }
    return null;
}
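The interesting part is the ofNullable(...).orElse(...) fallback: a child document carries its root's id in the _routing metadata field, while a root document has no _routing at all and is treated as its own root. A minimal sketch of that pattern in isolation (rootOf is a hypothetical helper, not part of datashare):

import java.util.Collections;
import org.elasticsearch.common.document.DocumentField;

static String rootOf(String docId, DocumentField routingField) {
    // Fall back to a synthetic _routing field holding the document's own id.
    DocumentField routing = routingField != null
            ? routingField
            : new DocumentField("_routing", Collections.singletonList(docId));
    return routing.getValues().get(0).toString();
}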
Use of org.elasticsearch.common.document.DocumentField in project sonarqube by SonarSource.
The class BulkIndexer, method addDeletion.
public void addDeletion(SearchRequest searchRequest) {
    // TODO to be replaced by delete_by_query, which is back in ES 5
    searchRequest.scroll(TimeValue.timeValueMinutes(5))
        .source()
        .sort("_doc", SortOrder.ASC)
        .size(100)
        .fetchSource(false);
    // This search is synchronous. An optimization would be to make it non-blocking,
    // but that would require tracking pending requests in close().
    // The same semaphore can't be reused because of a potential deadlock (it would
    // require acquiring two locks).
    SearchResponse searchResponse = esClient.search(searchRequest);
    while (true) {
        SearchHit[] hits = searchResponse.getHits().getHits();
        for (SearchHit hit : hits) {
            DocumentField routing = hit.field("_routing");
            DeleteRequest deleteRequest = new DeleteRequest(hit.getIndex(), hit.getType(), hit.getId());
            if (routing != null) {
                // Keep the original routing so the delete reaches the same shard.
                deleteRequest.routing(routing.getValue());
            }
            add(deleteRequest);
        }
        String scrollId = searchResponse.getScrollId();
        if (scrollId == null) {
            break;
        }
        searchResponse = esClient.scroll(new SearchScrollRequest(scrollId).scroll(TimeValue.timeValueMinutes(5)));
        if (hits.length == 0) {
            // The previous page was empty: release the scroll context and stop.
            ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
            clearScrollRequest.addScrollId(scrollId);
            esClient.clearScroll(clearScrollRequest);
            break;
        }
    }
}
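Because delete-by-query is emulated here with scroll plus bulk deletes, each DeleteRequest must carry the routing the document was indexed with; otherwise the delete is sent to the wrong shard and silently misses. A distilled sketch of that hand-off (toDeleteRequest is a hypothetical helper mirroring the loop body above, not SonarQube API):

import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.search.SearchHit;

static DeleteRequest toDeleteRequest(SearchHit hit) {
    DeleteRequest request = new DeleteRequest(hit.getIndex(), hit.getType(), hit.getId());
    DocumentField routing = hit.field("_routing");
    if (routing != null) {
        request.routing(routing.getValue()); // _routing is a single-valued metadata field
    }
    return request;
}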
Use of org.elasticsearch.common.document.DocumentField in project spring-data-elasticsearch by spring-projects.
The class DocumentAdaptersUnitTests, method searchResponseShouldReturnContainsKey.
// DATAES-628
@Test
public void searchResponseShouldReturnContainsKey() {
    Map<String, DocumentField> fields = new LinkedHashMap<>();
    fields.put("string", new DocumentField("string", Collections.singletonList("value")));
    fields.put("bool", new DocumentField("bool", Arrays.asList(true, true, false)));
    SearchHit searchHit = new SearchHit(123, "my-id", new Text("type"), fields, null);
    SearchDocument document = DocumentAdapters.from(searchHit);
    assertThat(document.containsKey("string")).isTrue();
    assertThat(document.containsKey("not-set")).isFalse();
}
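The test builds a hit whose values live only in DocumentField entries, with no _source. Worth noting when reading it: DocumentField.getValue() returns just the first element, while getValues() returns the whole list. A quick sketch, using only the DocumentField API shown above:

import java.util.Arrays;
import org.elasticsearch.common.document.DocumentField;

DocumentField bool = new DocumentField("bool", Arrays.asList(true, true, false));
Boolean first = bool.getValue();     // true, the first of the three values
int count = bool.getValues().size(); // 3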
Use of org.elasticsearch.common.document.DocumentField in project spring-data-elasticsearch by spring-projects.
The class DocumentAdapters, method from.
/**
 * Create a {@link SearchDocument} from a {@link SearchHit}.
 * <p>
 * Returns a {@link SearchDocument} using the source if available.
 *
 * @param source the source {@link SearchHit}.
 * @return the adapted {@link SearchDocument}.
 */
public static SearchDocument from(SearchHit source) {
    Assert.notNull(source, "SearchHit must not be null");
    Map<String, List<String>> highlightFields = new HashMap<>(source.getHighlightFields().entrySet().stream()
            .collect(Collectors.toMap(Map.Entry::getKey,
                    entry -> Arrays.stream(entry.getValue().getFragments())
                            .map(Text::string)
                            .collect(Collectors.toList()))));
    Map<String, SearchDocumentResponse> innerHits = new LinkedHashMap<>();
    Map<String, SearchHits> sourceInnerHits = source.getInnerHits();
    if (sourceInnerHits != null) {
        sourceInnerHits.forEach((name, searchHits) ->
                innerHits.put(name, SearchDocumentResponse.from(searchHits, null, null, null, searchDocument -> null)));
    }
    NestedMetaData nestedMetaData = from(source.getNestedIdentity());
    Explanation explanation = from(source.getExplanation());
    List<String> matchedQueries = from(source.getMatchedQueries());
    BytesReference sourceRef = source.getSourceRef();
    Map<String, DocumentField> sourceFields = source.getFields();
    if (sourceRef == null || sourceRef.length() == 0) {
        // No _source on the hit: build the document from the returned fields instead.
        return new SearchDocumentAdapter(
                fromDocumentFields(source, source.getIndex(), source.getId(), source.getVersion(),
                        source.getSeqNo(), source.getPrimaryTerm()),
                source.getScore(), source.getSortValues(), sourceFields, highlightFields, innerHits,
                nestedMetaData, explanation, matchedQueries);
    }
    Document document = Document.from(source.getSourceAsMap());
    document.setIndex(source.getIndex());
    document.setId(source.getId());
    if (source.getVersion() >= 0) {
        document.setVersion(source.getVersion());
    }
    document.setSeqNo(source.getSeqNo());
    document.setPrimaryTerm(source.getPrimaryTerm());
    return new SearchDocumentAdapter(document, source.getScore(), source.getSortValues(), sourceFields,
            highlightFields, innerHits, nestedMetaData, explanation, matchedQueries);
}
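In practice this means the adapter transparently covers queries that disable _source: when sourceRef is empty, the document is reconstructed from the returned DocumentFields via fromDocumentFields. A hedged usage sketch, where response is an assumed SearchResponse from a query built with fetchSource(false):

SearchHit hit = response.getHits().getAt(0);
SearchDocument document = DocumentAdapters.from(hit);
// Backed by DocumentField values rather than _source:
Object value = document.get("string");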
Use of org.elasticsearch.common.document.DocumentField in project urban-eureka by errir503.
The class TimestampDecoder, method decode.
@Override
public void decode(SearchHit hit, Supplier<Object> getter, BlockBuilder output) {
    DocumentField documentField = hit.getFields().get(path);
    Object value = null;
    if (documentField != null) {
        if (documentField.getValues().size() > 1) {
            throw new PrestoException(ELASTICSEARCH_TYPE_MISMATCH,
                    format("Expected single value for column '%s', found: %s", path, documentField.getValues().size()));
        }
        value = documentField.getValue();
    } else {
        value = getter.get();
    }
    if (value == null) {
        output.appendNull();
    } else {
        LocalDateTime timestamp;
        if (value instanceof String) {
            timestamp = ISO_DATE_TIME.parse((String) value, LocalDateTime::from);
        } else if (value instanceof Number) {
            timestamp = LocalDateTime.ofInstant(Instant.ofEpochMilli(((Number) value).longValue()), ZULU);
        } else {
            throw new PrestoException(NOT_SUPPORTED,
                    format("Unsupported representation for field '%s' of type TIMESTAMP: %s [%s]",
                            path, value.getClass().getSimpleName(), value));
        }
        long epochMillis = timestamp.atZone(zoneId).toInstant().toEpochMilli();
        TIMESTAMP.writeLong(output, epochMillis);
    }
}
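The decoder accepts two wire representations of a timestamp: an ISO-8601 string and an epoch-millisecond number. A standalone sketch of the two parses, assuming ZULU is ZoneId.of("Z") as the decoder's usage suggests:

import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import static java.time.format.DateTimeFormatter.ISO_DATE_TIME;

ZoneId zulu = ZoneId.of("Z");
// "2020-01-01T00:00:00" and 1577836800000L name the same instant in UTC.
LocalDateTime fromString = ISO_DATE_TIME.parse("2020-01-01T00:00:00", LocalDateTime::from);
LocalDateTime fromMillis = LocalDateTime.ofInstant(Instant.ofEpochMilli(1577836800000L), zulu);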