Example usage of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentType in the Elasticsearch project by Elastic.
From the class BulkResponseTests, method testToAndFromXContent:
/**
 * Round-trips a randomly built {@code BulkResponse} through XContent and verifies
 * that the parsed copy preserves took/ingestTook timings and every bulk item.
 * Failed items are compared against separately-built expected items because a
 * cause exception does not round-trip exactly (it is parsed back as an
 * {@code ElasticsearchException} wrapper).
 *
 * @throws IOException on serialization/parsing failure
 */
public void testToAndFromXContent() throws IOException {
    XContentType xContentType = randomFrom(XContentType.values());
    boolean humanReadable = randomBoolean();
    // -1L exercises the "took not reported" case; NO_INGEST_TOOK the "no ingest pipeline" case.
    long took = randomFrom(randomNonNegativeLong(), -1L);
    long ingestTook = randomFrom(randomNonNegativeLong(), NO_INGEST_TOOK);
    int nbBulkItems = randomIntBetween(1, 10);
    BulkItemResponse[] bulkItems = new BulkItemResponse[nbBulkItems];
    BulkItemResponse[] expectedBulkItems = new BulkItemResponse[nbBulkItems];
    for (int i = 0; i < nbBulkItems; i++) {
        DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values());
        if (frequently()) {
            // Successful item: pick a random response matching the operation type.
            // v1() is the response to serialize, v2() the expected parsed form.
            Tuple<? extends DocWriteResponse, ? extends DocWriteResponse> randomDocWriteResponses = null;
            if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
                randomDocWriteResponses = IndexResponseTests.randomIndexResponse();
            } else if (opType == DocWriteRequest.OpType.DELETE) {
                randomDocWriteResponses = DeleteResponseTests.randomDeleteResponse();
            } else if (opType == DocWriteRequest.OpType.UPDATE) {
                randomDocWriteResponses = UpdateResponseTests.randomUpdateResponse(xContentType);
            } else {
                fail("Test does not support opType [" + opType + "]");
            }
            bulkItems[i] = new BulkItemResponse(i, opType, randomDocWriteResponses.v1());
            expectedBulkItems[i] = new BulkItemResponse(i, opType, randomDocWriteResponses.v2());
        } else {
            // Failed item: the serialized failure carries the raw cause, while the
            // parsed failure carries the wrapped exception plus the derived status.
            String index = randomAsciiOfLength(5);
            String type = randomAsciiOfLength(5);
            String id = randomAsciiOfLength(5);
            Tuple<Throwable, ElasticsearchException> failures = randomExceptions();
            Exception bulkItemCause = (Exception) failures.v1();
            bulkItems[i] = new BulkItemResponse(i, opType,
                    new BulkItemResponse.Failure(index, type, id, bulkItemCause));
            expectedBulkItems[i] = new BulkItemResponse(i, opType,
                    new BulkItemResponse.Failure(index, type, id, failures.v2(), ExceptionsHelper.status(bulkItemCause)));
        }
    }
    BulkResponse bulkResponse = new BulkResponse(bulkItems, took, ingestTook);
    BytesReference originalBytes = toXContent(bulkResponse, xContentType, humanReadable);
    // Optionally shuffle field order so parsing is verified to be order-insensitive.
    if (randomBoolean()) {
        try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
            originalBytes = shuffleXContent(parser, randomBoolean()).bytes();
        }
    }
    BulkResponse parsedBulkResponse;
    try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
        parsedBulkResponse = BulkResponse.fromXContent(parser);
        assertNull(parser.nextToken()); // the whole document must have been consumed
    }
    assertEquals(took, parsedBulkResponse.getTookInMillis());
    assertEquals(ingestTook, parsedBulkResponse.getIngestTookInMillis());
    assertEquals(expectedBulkItems.length, parsedBulkResponse.getItems().length);
    for (int i = 0; i < expectedBulkItems.length; i++) {
        assertBulkItemResponse(expectedBulkItems[i], parsedBulkResponse.getItems()[i]);
    }
    // BUG FIX: the original compared two serializations of the same parsed object
    // (finalBytes and expectedFinalBytes were both produced from parsedBulkResponse),
    // making the assertion vacuously true. Compare the re-serialized parsed response
    // against the bytes it was parsed from instead, closing the round-trip.
    BytesReference finalBytes = toXContent(parsedBulkResponse, xContentType, humanReadable);
    assertToXContentEquivalent(originalBytes, finalBytes, xContentType);
}
Example usage of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentType in the Elasticsearch project by Elastic.
From the class MetaDataStateFormatTests, method testLoadState:
// Verifies that MetaDataStateFormat.loadLatestState finds the newest readable
// state across multiple data directories, tolerating legacy-format files,
// 0-byte files, and corruption of non-latest generations — and that it fails
// with a CorruptStateException once every copy of the latest generation is corrupt.
public void testLoadState() throws IOException {
final Path[] dirs = new Path[randomIntBetween(1, 5)];
int numStates = randomIntBetween(1, 5);
// The first numLegacy generations are written in the legacy (no .st suffix) format.
int numLegacy = randomIntBetween(0, numStates);
List<MetaData> meta = new ArrayList<>();
for (int i = 0; i < numStates; i++) {
meta.add(randomMeta());
}
// Tracks files corrupted up-front so the failure phase below does not corrupt them twice.
Set<Path> corruptedFiles = new HashSet<>();
MetaDataStateFormat<MetaData> format = metaDataFormat(randomFrom(XContentType.values()));
for (int i = 0; i < dirs.length; i++) {
dirs[i] = createTempDir();
Files.createDirectories(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME));
// Phase 1: write the legacy generations ("global-<j>", no .st suffix).
for (int j = 0; j < numLegacy; j++) {
XContentType type = format.format();
// Guard: only drop a file when it is not the sole copy of the latest generation.
// NOTE(review): dirs.length > 0 is always true here (length is 1..5) — the
// effective condition is (j < numStates - 1 || i != 0); confirm intent upstream.
if (randomBoolean() && (j < numStates - 1 || dirs.length > 0 && i != 0)) {
Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j);
// randomly create 0-byte files -- there is extra logic to skip them
Files.createFile(file);
} else {
try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(type, Files.newOutputStream(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j)))) {
xcontentBuilder.startObject();
MetaData.Builder.toXContent(meta.get(j), xcontentBuilder, ToXContent.EMPTY_PARAMS);
xcontentBuilder.endObject();
}
}
}
// Phase 2: write the remaining generations in the current format and randomly
// corrupt copies that are not required for loading the latest state.
for (int j = numLegacy; j < numStates; j++) {
format.write(meta.get(j), dirs[i]);
// NOTE(review): same always-true dirs.length > 0 subcondition as above.
if (randomBoolean() && (j < numStates - 1 || dirs.length > 0 && i != 0)) {
// corrupt a file that we do not necessarily need here....
Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j + ".st");
corruptedFiles.add(file);
MetaDataStateFormatTests.corruptFile(file, logger);
}
}
}
// Shuffle directory order to verify loading is independent of scan order.
List<Path> dirList = Arrays.asList(dirs);
Collections.shuffle(dirList, random());
MetaData loadedMetaData = format.loadLatestState(logger, xContentRegistry(), dirList.toArray(new Path[0]));
MetaData latestMetaData = meta.get(numStates - 1);
// "_na_" is the placeholder for an unset cluster UUID; a real one must be present.
assertThat(loadedMetaData.clusterUUID(), not(equalTo("_na_")));
assertThat(loadedMetaData.clusterUUID(), equalTo(latestMetaData.clusterUUID()));
ImmutableOpenMap<String, IndexMetaData> indices = loadedMetaData.indices();
assertThat(indices.size(), equalTo(latestMetaData.indices().size()));
// Spot-check per-index fields of the round-tripped metadata.
for (IndexMetaData original : latestMetaData) {
IndexMetaData deserialized = indices.get(original.getIndex().getName());
assertThat(deserialized, notNullValue());
assertThat(deserialized.getVersion(), equalTo(original.getVersion()));
assertThat(deserialized.getNumberOfReplicas(), equalTo(original.getNumberOfReplicas()));
assertThat(deserialized.getNumberOfShards(), equalTo(original.getNumberOfShards()));
}
// make sure the index tombstones are the same too
assertThat(loadedMetaData.indexGraveyard(), equalTo(latestMetaData.indexGraveyard()));
// now corrupt all the latest ones and make sure we fail to load the state
// (only applicable when the latest generation was written in the current format)
if (numStates > numLegacy) {
for (int i = 0; i < dirs.length; i++) {
Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + (numStates - 1) + ".st");
if (corruptedFiles.contains(file)) {
continue;
}
MetaDataStateFormatTests.corruptFile(file, logger);
}
try {
format.loadLatestState(logger, xContentRegistry(), dirList.toArray(new Path[0]));
fail("latest version can not be read");
} catch (ElasticsearchException ex) {
// The corruption must surface as a CorruptStateException somewhere in the cause chain.
assertThat(ExceptionsHelper.unwrap(ex, CorruptStateException.class), notNullValue());
}
}
}
Example usage of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentType in the Elasticsearch project by Elastic.
From the class PipelineConfigurationTests, method testParser:
/**
 * Verifies that {@code PipelineConfiguration.getParser()} round-trips a pipeline
 * configuration: the parsed copy must preserve the id, the stored config bytes,
 * and the XContent type the config was originally stored in (JSON here), even
 * when the outer document is serialized in a different random XContent type.
 *
 * @throws IOException on serialization/parsing failure
 */
public void testParser() throws IOException {
    ContextParser<Void, PipelineConfiguration> parser = PipelineConfiguration.getParser();
    XContentType xContentType = randomFrom(XContentType.values());
    final BytesReference bytes;
    try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
        // Minimal pipeline: id "1", empty "{}" config stored as JSON.
        new PipelineConfiguration("1", new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), XContentType.JSON)
                .toXContent(builder, ToXContent.EMPTY_PARAMS);
        bytes = builder.bytes();
    }
    // BUG FIX: the parser was never closed; XContentParser is Closeable, so manage
    // it with try-with-resources like the other tests in this file.
    try (XContentParser xContentParser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, bytes)) {
        PipelineConfiguration parsed = parser.parse(xContentParser, null);
        assertEquals(xContentType, parsed.getXContentType());
        assertEquals("{}", XContentHelper.convertToJson(parsed.getConfig(), false, parsed.getXContentType()));
        assertEquals("1", parsed.getId());
    }
}
Example usage of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentType in the Elasticsearch project by Elastic.
From the class QueryProfileShardResultTests, method testFromXContent:
/**
 * Round-trips a random {@code QueryProfileShardResult} through XContent and
 * checks the re-serialized parsed copy is equivalent to the original bytes.
 *
 * @throws IOException on serialization/parsing failure
 */
public void testFromXContent() throws IOException {
    QueryProfileShardResult profileResult = createTestItem();
    XContentType xContentType = randomFrom(XContentType.values());
    boolean humanReadable = randomBoolean();
    BytesReference originalBytes = toXContent(profileResult, xContentType, humanReadable);
    QueryProfileShardResult parsed;
    try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
        // CONSISTENCY FIX: ensureExpectedToken takes (expected, actual, location);
        // the arguments were swapped here relative to the sibling ProfileResultTests.
        // The equality check is symmetric, but a failure would have reported
        // expected/actual reversed in its message.
        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
        parsed = QueryProfileShardResult.fromXContent(parser);
        // fromXContent must stop on the closing brace and consume the whole document.
        assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
        assertNull(parser.nextToken());
    }
    assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
}
Example usage of org.graylog.shaded.elasticsearch7.org.elasticsearch.common.xcontent.XContentType in the Elasticsearch project by Elastic.
From the class ProfileResultTests, method testFromXContent:
/**
 * Serializes a random {@code ProfileResult} to a random XContent type, parses it
 * back, and checks both the timing value and full XContent equivalence.
 *
 * @throws IOException on serialization/parsing failure
 */
public void testFromXContent() throws IOException {
    ProfileResult original = createTestItem(2);
    XContentType type = randomFrom(XContentType.values());
    boolean human = randomBoolean();
    BytesReference serialized = toXContent(original, type, human);
    ProfileResult roundTripped;
    try (XContentParser parser = createParser(type.xContent(), serialized)) {
        // The document must open with an object; fromXContent consumes it fully.
        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
        roundTripped = ProfileResult.fromXContent(parser);
        assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
        assertNull(parser.nextToken());
    }
    assertEquals(original.getTime(), roundTripped.getTime());
    BytesReference reserialized = toXContent(roundTripped, type, human);
    assertToXContentEquivalent(serialized, reserialized, type);
}
Aggregations