Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentParser in project elasticsearch by elastic.
In the class IndexResponseTests, method testToAndFromXContent:
public void testToAndFromXContent() throws IOException {
    final Tuple<IndexResponse, IndexResponse> tuple = randomIndexResponse();
    IndexResponse indexResponse = tuple.v1();
    IndexResponse expectedIndexResponse = tuple.v2();
    boolean humanReadable = randomBoolean();
    XContentType xContentType = randomFrom(XContentType.values());
    BytesReference indexResponseBytes = toXContent(indexResponse, xContentType, humanReadable);
    // Shuffle the XContent fields
    if (randomBoolean()) {
        try (XContentParser parser = createParser(xContentType.xContent(), indexResponseBytes)) {
            indexResponseBytes = shuffleXContent(parser, randomBoolean()).bytes();
        }
    }
    // Parse the XContent bytes to obtain a parsed IndexResponse
    IndexResponse parsedIndexResponse;
    try (XContentParser parser = createParser(xContentType.xContent(), indexResponseBytes)) {
        parsedIndexResponse = IndexResponse.fromXContent(parser);
        assertNull(parser.nextToken());
    }
    // We can't use equals() to compare the original and the parsed index response
    // because the random index response can contain shard failures with exceptions,
    // and those exceptions are not parsed back with the same types.
    assertDocWriteResponse(expectedIndexResponse, parsedIndexResponse);
}
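The helpers used above (createParser, shuffleXContent, toXContent) come from the Elasticsearch test classes; outside of them, the same parse step can be done with the public XContent factories. The sketch below is a minimal, hypothetical helper written under two assumptions: the shaded classes mirror the upstream Elasticsearch 7 xcontent API (where createParser also takes a DeprecationHandler; older versions omit it), and the JSON string is an index response body obtained elsewhere.

import java.io.IOException;
import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.index.IndexResponse;
import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.DeprecationHandler;
import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentParser;
import org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentType;

// Hypothetical helper (not part of the test above): parse a JSON index response body
// back into an IndexResponse using the public parser factory.
static IndexResponse parseIndexResponse(String json) throws IOException {
    try (XContentParser parser = XContentType.JSON.xContent()
            .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
        // IndexResponse.fromXContent advances the parser itself, as in the test above.
        return IndexResponse.fromXContent(parser);
    }
}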
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentParser in project elasticsearch by elastic.
In the class ShardSearchFailureTests, method testFromXContent:
public void testFromXContent() throws IOException {
    ShardSearchFailure response = createTestItem();
    XContentType xContentType = randomFrom(XContentType.values());
    boolean humanReadable = randomBoolean();
    BytesReference originalBytes = toXContent(response, xContentType, humanReadable);
    ShardSearchFailure parsed;
    try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
        assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
        parsed = ShardSearchFailure.fromXContent(parser);
        assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
        assertNull(parser.nextToken());
    }
    assertEquals(response.index(), parsed.index());
    assertEquals(response.shard().getNodeId(), parsed.shard().getNodeId());
    assertEquals(response.shardId(), parsed.shardId());
    // We cannot compare the cause directly, because it is wrapped in an outer ElasticsearchException
    // when parsed back. Best effort: check that the original message appears in the parsed cause's message.
    String originalMsg = response.getCause().getMessage();
    assertEquals(parsed.getCause().getMessage(), "Elasticsearch exception [type=parsing_exception, reason=" + originalMsg + "]");
    String nestedMsg = response.getCause().getCause().getMessage();
    assertEquals(parsed.getCause().getCause().getMessage(), "Elasticsearch exception [type=illegal_argument_exception, reason=" + nestedMsg + "]");
}
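The comment above is the key detail: ElasticsearchException.fromXContent cannot reconstruct the original exception class, so the parsed cause carries a rendered "Elasticsearch exception [type=..., reason=...]" message instead. A small hedged sketch with a hand-written failure body (values are illustrative; imports are the shaded equivalents used in the previous sketch, plus ShardSearchFailure):

// Illustrative shard failure JSON in the shape ShardSearchFailure renders.
String failureJson = "{\"shard\":1,\"index\":\"twitter\",\"node\":\"node_1\","
        + "\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"some parse error\"}}";
try (XContentParser parser = XContentType.JSON.xContent()
        .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, failureJson)) {
    parser.nextToken();    // fromXContent expects the parser to sit on START_OBJECT, as in the test
    ShardSearchFailure failure = ShardSearchFailure.fromXContent(parser);
    // failure.getCause().getMessage() now reads:
    // "Elasticsearch exception [type=parsing_exception, reason=some parse error]"
}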
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentParser in project elasticsearch by elastic.
In the class IndexMetaDataTests, method testIndexMetaDataSerialization:
public void testIndexMetaDataSerialization() throws IOException {
    Integer numShard = randomFrom(1, 2, 4, 8, 16);
    int numberOfReplicas = randomIntBetween(0, 10);
    IndexMetaData metaData = IndexMetaData.builder("foo")
        .settings(Settings.builder()
            .put("index.version.created", 1)
            .put("index.number_of_shards", numShard)
            .put("index.number_of_replicas", numberOfReplicas)
            .build())
        .creationDate(randomLong())
        .primaryTerm(0, 2)
        .setRoutingNumShards(32)
        .build();
    final XContentBuilder builder = JsonXContent.contentBuilder();
    builder.startObject();
    metaData.toXContent(builder, ToXContent.EMPTY_PARAMS);
    builder.endObject();
    XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
    final IndexMetaData fromXContentMeta = IndexMetaData.fromXContent(parser);
    assertEquals(metaData, fromXContentMeta);
    assertEquals(metaData.hashCode(), fromXContentMeta.hashCode());
    assertEquals(metaData.getNumberOfReplicas(), fromXContentMeta.getNumberOfReplicas());
    assertEquals(metaData.getNumberOfShards(), fromXContentMeta.getNumberOfShards());
    assertEquals(metaData.getCreationVersion(), fromXContentMeta.getCreationVersion());
    assertEquals(metaData.getRoutingNumShards(), fromXContentMeta.getRoutingNumShards());
    assertEquals(metaData.getCreationDate(), fromXContentMeta.getCreationDate());
    assertEquals(metaData.getRoutingFactor(), fromXContentMeta.getRoutingFactor());
    assertEquals(metaData.primaryTerm(0), fromXContentMeta.primaryTerm(0));
    final BytesStreamOutput out = new BytesStreamOutput();
    metaData.writeTo(out);
    IndexMetaData deserialized = IndexMetaData.readFrom(out.bytes().streamInput());
    assertEquals(metaData, deserialized);
    assertEquals(metaData.hashCode(), deserialized.hashCode());
    assertEquals(metaData.getNumberOfReplicas(), deserialized.getNumberOfReplicas());
    assertEquals(metaData.getNumberOfShards(), deserialized.getNumberOfShards());
    assertEquals(metaData.getCreationVersion(), deserialized.getCreationVersion());
    assertEquals(metaData.getRoutingNumShards(), deserialized.getRoutingNumShards());
    assertEquals(metaData.getCreationDate(), deserialized.getCreationDate());
    assertEquals(metaData.getRoutingFactor(), deserialized.getRoutingFactor());
    assertEquals(metaData.primaryTerm(0), deserialized.primaryTerm(0));
}
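Note that builder.bytes() and createParser(XContent, BytesReference) above are older-style accessors and test-framework helpers; in later versions (6.3 onward) the equivalents are BytesReference.bytes(builder) and Strings.toString(builder). A hedged, hypothetical helper (not part of the test) that renders an IndexMetaData to JSON for inspection with the same startObject/toXContent/endObject wrapping, assuming a version where Strings.toString(builder) is available:

// Hypothetical helper: pretty-print an IndexMetaData as a JSON string for inspection.
static String indexMetaDataToJson(IndexMetaData metaData) throws IOException {
    XContentBuilder builder = JsonXContent.contentBuilder();
    builder.startObject();
    metaData.toXContent(builder, ToXContent.EMPTY_PARAMS);
    builder.endObject();
    // Strings.toString(builder) replaces the removed builder.string()/builder.bytes() accessors.
    return Strings.toString(builder);
}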
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentParser in project elasticsearch by elastic.
In the class Request, method bulk:
static Request bulk(BulkRequest bulkRequest) throws IOException {
    Params parameters = Params.builder();
    parameters.withTimeout(bulkRequest.timeout());
    parameters.withRefreshPolicy(bulkRequest.getRefreshPolicy());
    // Bulk API only supports newline delimited JSON or Smile. Before executing
    // the bulk, we need to check that all requests have the same content-type
    // and this content-type is supported by the Bulk API.
    XContentType bulkContentType = null;
    for (int i = 0; i < bulkRequest.numberOfActions(); i++) {
        DocWriteRequest<?> request = bulkRequest.requests().get(i);
        DocWriteRequest.OpType opType = request.opType();
        if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
            bulkContentType = enforceSameContentType((IndexRequest) request, bulkContentType);
        } else if (opType == DocWriteRequest.OpType.UPDATE) {
            UpdateRequest updateRequest = (UpdateRequest) request;
            if (updateRequest.doc() != null) {
                bulkContentType = enforceSameContentType(updateRequest.doc(), bulkContentType);
            }
            if (updateRequest.upsertRequest() != null) {
                bulkContentType = enforceSameContentType(updateRequest.upsertRequest(), bulkContentType);
            }
        }
    }
    if (bulkContentType == null) {
        bulkContentType = XContentType.JSON;
    }
    byte separator = bulkContentType.xContent().streamSeparator();
    ContentType requestContentType = ContentType.create(bulkContentType.mediaType());
    ByteArrayOutputStream content = new ByteArrayOutputStream();
    for (DocWriteRequest<?> request : bulkRequest.requests()) {
        DocWriteRequest.OpType opType = request.opType();
        try (XContentBuilder metadata = XContentBuilder.builder(bulkContentType.xContent())) {
            metadata.startObject();
            {
                metadata.startObject(opType.getLowercase());
                if (Strings.hasLength(request.index())) {
                    metadata.field("_index", request.index());
                }
                if (Strings.hasLength(request.type())) {
                    metadata.field("_type", request.type());
                }
                if (Strings.hasLength(request.id())) {
                    metadata.field("_id", request.id());
                }
                if (Strings.hasLength(request.routing())) {
                    metadata.field("_routing", request.routing());
                }
                if (Strings.hasLength(request.parent())) {
                    metadata.field("_parent", request.parent());
                }
                if (request.version() != Versions.MATCH_ANY) {
                    metadata.field("_version", request.version());
                }
                VersionType versionType = request.versionType();
                if (versionType != VersionType.INTERNAL) {
                    if (versionType == VersionType.EXTERNAL) {
                        metadata.field("_version_type", "external");
                    } else if (versionType == VersionType.EXTERNAL_GTE) {
                        metadata.field("_version_type", "external_gte");
                    } else if (versionType == VersionType.FORCE) {
                        metadata.field("_version_type", "force");
                    }
                }
                if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
                    IndexRequest indexRequest = (IndexRequest) request;
                    if (Strings.hasLength(indexRequest.getPipeline())) {
                        metadata.field("pipeline", indexRequest.getPipeline());
                    }
                } else if (opType == DocWriteRequest.OpType.UPDATE) {
                    UpdateRequest updateRequest = (UpdateRequest) request;
                    if (updateRequest.retryOnConflict() > 0) {
                        metadata.field("_retry_on_conflict", updateRequest.retryOnConflict());
                    }
                    if (updateRequest.fetchSource() != null) {
                        metadata.field("_source", updateRequest.fetchSource());
                    }
                }
                metadata.endObject();
            }
            metadata.endObject();
            BytesRef metadataSource = metadata.bytes().toBytesRef();
            content.write(metadataSource.bytes, metadataSource.offset, metadataSource.length);
            content.write(separator);
        }
        BytesRef source = null;
        if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
            IndexRequest indexRequest = (IndexRequest) request;
            BytesReference indexSource = indexRequest.source();
            XContentType indexXContentType = indexRequest.getContentType();
            try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, indexSource, indexXContentType)) {
                try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) {
                    builder.copyCurrentStructure(parser);
                    source = builder.bytes().toBytesRef();
                }
            }
        } else if (opType == DocWriteRequest.OpType.UPDATE) {
            source = XContentHelper.toXContent((UpdateRequest) request, bulkContentType, false).toBytesRef();
        }
        if (source != null) {
            content.write(source.bytes, source.offset, source.length);
            content.write(separator);
        }
    }
    HttpEntity entity = new ByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType);
    return new Request(HttpPost.METHOD_NAME, "/_bulk", parameters.getParams(), entity);
}
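A hedged usage sketch of the factory above. The calling code is hypothetical: bulk() is package-private, so a real caller would live in the same client package or go through the high-level client's bulk API. The trailing comment shows roughly the newline-delimited body that the loop above produces for these two actions (the stream separator for JSON is '\n').

BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(new IndexRequest("index", "type", "1")
        .source(XContentType.JSON, "field", "value"));
bulkRequest.add(new UpdateRequest("index", "type", "2")
        .doc(XContentType.JSON, "field", "updated"));
Request request = Request.bulk(bulkRequest);
// The entity body is newline-delimited JSON, roughly:
//   {"index":{"_index":"index","_type":"type","_id":"1"}}
//   {"field":"value"}
//   {"update":{"_index":"index","_type":"type","_id":"2"}}
//   {"doc":{"field":"updated"}}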
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentParser in project elasticsearch by elastic.
In the class BulkItemResponse, method fromXContent:
/**
* Reads a {@link BulkItemResponse} from a {@link XContentParser}.
*
* @param parser the {@link XContentParser}
* @param id the id to assign to the parsed {@link BulkItemResponse}. It is usually the index of
* the item in the {@link BulkResponse#getItems} array.
*/
public static BulkItemResponse fromXContent(XContentParser parser, int id) throws IOException {
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
    XContentParser.Token token = parser.nextToken();
    ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
    String currentFieldName = parser.currentName();
    token = parser.nextToken();
    final OpType opType = OpType.fromString(currentFieldName);
    ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
    DocWriteResponse.Builder builder = null;
    CheckedConsumer<XContentParser, IOException> itemParser = null;
    if (opType == OpType.INDEX || opType == OpType.CREATE) {
        final IndexResponse.Builder indexResponseBuilder = new IndexResponse.Builder();
        builder = indexResponseBuilder;
        itemParser = (indexParser) -> IndexResponse.parseXContentFields(indexParser, indexResponseBuilder);
    } else if (opType == OpType.UPDATE) {
        final UpdateResponse.Builder updateResponseBuilder = new UpdateResponse.Builder();
        builder = updateResponseBuilder;
        itemParser = (updateParser) -> UpdateResponse.parseXContentFields(updateParser, updateResponseBuilder);
    } else if (opType == OpType.DELETE) {
        final DeleteResponse.Builder deleteResponseBuilder = new DeleteResponse.Builder();
        builder = deleteResponseBuilder;
        itemParser = (deleteParser) -> DeleteResponse.parseXContentFields(deleteParser, deleteResponseBuilder);
    } else {
        throwUnknownField(currentFieldName, parser.getTokenLocation());
    }
    RestStatus status = null;
    ElasticsearchException exception = null;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        }
        if (ERROR.equals(currentFieldName)) {
            if (token == XContentParser.Token.START_OBJECT) {
                exception = ElasticsearchException.fromXContent(parser);
            }
        } else if (STATUS.equals(currentFieldName)) {
            if (token == XContentParser.Token.VALUE_NUMBER) {
                status = RestStatus.fromCode(parser.intValue());
            }
        } else {
            itemParser.accept(parser);
        }
    }
    ensureExpectedToken(XContentParser.Token.END_OBJECT, token, parser::getTokenLocation);
    token = parser.nextToken();
    ensureExpectedToken(XContentParser.Token.END_OBJECT, token, parser::getTokenLocation);
    BulkItemResponse bulkItemResponse;
    if (exception != null) {
        Failure failure = new Failure(builder.getShardId().getIndexName(), builder.getType(), builder.getId(), exception, status);
        bulkItemResponse = new BulkItemResponse(id, opType, failure);
    } else {
        bulkItemResponse = new BulkItemResponse(id, opType, builder.build());
    }
    return bulkItemResponse;
}
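A hedged usage sketch for the parser above: a hand-written single bulk item in the documented response shape (values are illustrative; imports are the shaded equivalents used in the first sketch, plus BulkItemResponse). The caller advances to START_OBJECT first, matching the ensureExpectedToken check at the top of fromXContent.

String item = "{\"index\":{\"_index\":\"twitter\",\"_type\":\"doc\",\"_id\":\"1\",\"_version\":1,"
        + "\"result\":\"created\",\"_shards\":{\"total\":2,\"successful\":1,\"failed\":0},"
        + "\"_seq_no\":0,\"_primary_term\":1,\"status\":201}}";
try (XContentParser parser = XContentType.JSON.xContent()
        .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, item)) {
    parser.nextToken();    // position the parser on START_OBJECT before calling fromXContent
    BulkItemResponse itemResponse = BulkItemResponse.fromXContent(parser, 0);
    IndexResponse indexResponse = itemResponse.getResponse();    // opType "index" yields an IndexResponse
}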