Use of org.opensearch.common.xcontent.XContentParser.Token in project OpenSearch by opensearch-project.
From the class GetAliasesResponse, method parseAliases.
private static Set<AliasMetadata> parseAliases(XContentParser parser) throws IOException {
    Set<AliasMetadata> aliases = new HashSet<>();
    Token token;
    String currentFieldName = null;
    while ((token = parser.nextToken()) != Token.END_OBJECT) {
        if (token == Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == Token.START_OBJECT) {
            if ("aliases".equals(currentFieldName)) {
                while (parser.nextToken() != Token.END_OBJECT) {
                    AliasMetadata fromXContent = AliasMetadata.Builder.fromXContent(parser);
                    aliases.add(fromXContent);
                }
            } else {
                parser.skipChildren();
            }
        } else if (token == Token.START_ARRAY) {
            parser.skipChildren();
        }
    }
    return aliases;
}
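A minimal, hypothetical driver for the same token loop (not part of GetAliasesResponse): it creates a JSON parser over a single index entry and collects the alias names under "aliases". The createParser overload and package names are assumed to match the OpenSearch 1.x/2.x layout referenced above.

import java.util.ArrayList;
import java.util.List;
import org.opensearch.common.xcontent.DeprecationHandler;
import org.opensearch.common.xcontent.NamedXContentRegistry;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentParser.Token;
import org.opensearch.common.xcontent.XContentType;

String json = "{\"aliases\":{\"alias-1\":{},\"alias-2\":{\"is_write_index\":true}}}";
try (XContentParser parser = XContentType.JSON.xContent()
        .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
    List<String> aliasNames = new ArrayList<>();
    parser.nextToken();                                       // START_OBJECT of the index entry
    Token token;
    String currentFieldName = null;
    while ((token = parser.nextToken()) != Token.END_OBJECT) {
        if (token == Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == Token.START_OBJECT && "aliases".equals(currentFieldName)) {
            while (parser.nextToken() != Token.END_OBJECT) {   // one FIELD_NAME plus object per alias
                aliasNames.add(parser.currentName());
                parser.nextToken();                            // move onto the alias object
                parser.skipChildren();                         // and skip its contents
            }
        } else if (token == Token.START_OBJECT || token == Token.START_ARRAY) {
            parser.skipChildren();
        }
    }
    System.out.println(aliasNames);                            // [alias-1, alias-2]
}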
Use of org.opensearch.common.xcontent.XContentParser.Token in project OpenSearch by opensearch-project.
From the class XContentParserUtils, method parseFieldsValue.
/**
 * Parse the current token depending on its token type. The following token types will be
 * parsed by the corresponding parser methods:
 * <ul>
 * <li>{@link Token#VALUE_STRING}: {@link XContentParser#text()}</li>
 * <li>{@link Token#VALUE_NUMBER}: {@link XContentParser#numberValue()}</li>
 * <li>{@link Token#VALUE_BOOLEAN}: {@link XContentParser#booleanValue()}</li>
 * <li>{@link Token#VALUE_EMBEDDED_OBJECT}: {@link XContentParser#binaryValue()}</li>
 * <li>{@link Token#VALUE_NULL}: returns null</li>
 * <li>{@link Token#START_OBJECT}: {@link XContentParser#mapOrdered()}</li>
 * <li>{@link Token#START_ARRAY}: {@link XContentParser#listOrderedMap()}</li>
 * </ul>
 *
 * @throws ParsingException if the token is none of the allowed values
 */
public static Object parseFieldsValue(XContentParser parser) throws IOException {
    Token token = parser.currentToken();
    Object value = null;
    if (token == Token.VALUE_STRING) {
        // binary values will be parsed back and returned as base64 strings when reading from json and yaml
        value = parser.text();
    } else if (token == Token.VALUE_NUMBER) {
        value = parser.numberValue();
    } else if (token == Token.VALUE_BOOLEAN) {
        value = parser.booleanValue();
    } else if (token == Token.VALUE_EMBEDDED_OBJECT) {
        // binary values will be parsed back and returned as BytesArray when reading from cbor and smile
        value = new BytesArray(parser.binaryValue());
    } else if (token == Token.VALUE_NULL) {
        value = null;
    } else if (token == Token.START_OBJECT) {
        value = parser.mapOrdered();
    } else if (token == Token.START_ARRAY) {
        value = parser.listOrderedMap();
    } else {
        throwUnknownToken(token, parser.getTokenLocation());
    }
    return value;
}
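A short, hypothetical caller (not part of XContentParserUtils) that positions the parser on each value token and delegates to parseFieldsValue; the createParser overload and package names are assumed as in the earlier sketch.

// imports as in the previous sketch, plus org.opensearch.common.xcontent.XContentParserUtils
String json = "{\"s\":\"text\",\"n\":42,\"b\":true,\"missing\":null,\"o\":{\"k\":\"v\"},\"a\":[1,2,3]}";
try (XContentParser parser = XContentType.JSON.xContent()
        .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
    parser.nextToken();                                        // START_OBJECT
    while (parser.nextToken() == Token.FIELD_NAME) {
        String field = parser.currentName();
        parser.nextToken();                                    // advance onto the value token
        Object value = XContentParserUtils.parseFieldsValue(parser);
        System.out.println(field + " -> " + value);            // e.g. n -> 42, o -> {k=v}, a -> [1, 2, 3]
    }
}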
Use of org.opensearch.common.xcontent.XContentParser.Token in project OpenSearch by opensearch-project.
From the class RankFeaturesFieldMapper, method parse.
@Override
public void parse(ParseContext context) throws IOException {
    if (context.externalValueSet()) {
        throw new IllegalArgumentException("[rank_features] fields can't be used in multi-fields");
    }
    if (context.parser().currentToken() != Token.START_OBJECT) {
        throw new IllegalArgumentException(
            "[rank_features] fields must be json objects, expected a START_OBJECT but got: " + context.parser().currentToken()
        );
    }
    String feature = null;
    for (Token token = context.parser().nextToken(); token != Token.END_OBJECT; token = context.parser().nextToken()) {
        if (token == Token.FIELD_NAME) {
            feature = context.parser().currentName();
        } else if (token == Token.VALUE_NULL) {
            // ignore feature, this is consistent with numeric fields
        } else if (token == Token.VALUE_NUMBER || token == Token.VALUE_STRING) {
            final String key = name() + "." + feature;
            float value = context.parser().floatValue(true);
            if (context.doc().getByKey(key) != null) {
                throw new IllegalArgumentException(
                    "[rank_features] fields do not support indexing multiple values for the same rank feature [" + key + "] in the same document"
                );
            }
            context.doc().addWithKey(key, new FeatureField(name(), feature, value));
        } else {
            throw new IllegalArgumentException(
                "[rank_features] fields take hashes that map a feature to a strictly positive float, but got unexpected token " + token
            );
        }
    }
}
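The loop above only runs inside a ParseContext during indexing. The standalone sketch below (hypothetical, not part of the mapper) walks the same kind of rank_features object and collects feature name to float value, which is essentially what the mapper turns into FeatureField instances.

// imports as in the first sketch, plus java.util.LinkedHashMap and java.util.Map
String json = "{\"politics\": 20.0, \"economics\": 3.8}";
Map<String, Float> features = new LinkedHashMap<>();
try (XContentParser parser = XContentType.JSON.xContent()
        .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
    if (parser.nextToken() != Token.START_OBJECT) {
        throw new IllegalArgumentException("expected START_OBJECT");
    }
    String feature = null;
    for (Token token = parser.nextToken(); token != Token.END_OBJECT; token = parser.nextToken()) {
        if (token == Token.FIELD_NAME) {
            feature = parser.currentName();
        } else if (token == Token.VALUE_NUMBER || token == Token.VALUE_STRING) {
            features.put(feature, parser.floatValue(true));    // coerce numeric strings like "3.8" to float
        }
    }
}
System.out.println(features);                                  // {politics=20.0, economics=3.8}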
Use of org.opensearch.common.xcontent.XContentParser.Token in project OpenSearch by opensearch-project.
From the class MultiSearchResponse, method itemFromXContent.
private static MultiSearchResponse.Item itemFromXContent(XContentParser parser) throws IOException {
    // This parsing logic is a bit tricky here, because the multi search response itself is tricky:
    // 1) The json objects inside the responses array are either a search response or a serialized exception
    // 2) Each response json object gets a status field injected that OpenSearchException.failureFromXContent(...) does not parse,
    //    but SearchResponse.innerFromXContent(...) parses and then ignores. The status field is not needed to parse
    //    the response item. However in both cases this method does need to parse the 'status' field otherwise the parsing of
    //    the response item in the next json array element will fail due to parsing errors.
    Item item = null;
    String fieldName = null;
    Token token = parser.nextToken();
    assert token == Token.FIELD_NAME;
    outer: for (; token != Token.END_OBJECT; token = parser.nextToken()) {
        switch (token) {
            case FIELD_NAME:
                fieldName = parser.currentName();
                if ("error".equals(fieldName)) {
                    item = new Item(null, OpenSearchException.failureFromXContent(parser));
                } else if ("status".equals(fieldName) == false) {
                    item = new Item(SearchResponse.innerFromXContent(parser), null);
                    break outer;
                }
                break;
            case VALUE_NUMBER:
                if ("status".equals(fieldName)) {
                    // Ignore the status value
                }
                break;
        }
    }
    assert parser.currentToken() == Token.END_OBJECT;
    return item;
}
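For context, a sketch of the enclosing loop that would feed itemFromXContent. It assumes the parser is already positioned on the START_ARRAY of the "responses" field and that the code can reach the private helper above; ensureExpectedToken is the checking helper from XContentParserUtils, and the real MultiSearchResponse code may differ in detail.

// Sketch of the enclosing "responses" array loop (hypothetical).
List<MultiSearchResponse.Item> items = new ArrayList<>();
XContentParserUtils.ensureExpectedToken(Token.START_ARRAY, parser.currentToken(), parser);
while (parser.nextToken() != Token.END_ARRAY) {
    XContentParserUtils.ensureExpectedToken(Token.START_OBJECT, parser.currentToken(), parser);
    items.add(itemFromXContent(parser));    // consumes the item through its END_OBJECT
}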
Use of org.opensearch.common.xcontent.XContentParser.Token in project OpenSearch by opensearch-project.
From the class MultiGetRequest, method parseDocuments.
private static void parseDocuments(
    XContentParser parser,
    List<Item> items,
    @Nullable String defaultIndex,
    @Nullable String[] defaultFields,
    @Nullable FetchSourceContext defaultFetchSource,
    @Nullable String defaultRouting,
    boolean allowExplicitIndex
) throws IOException {
    String currentFieldName = null;
    Token token;
    while ((token = parser.nextToken()) != Token.END_ARRAY) {
        if (token != Token.START_OBJECT) {
            throw new IllegalArgumentException("docs array element should include an object");
        }
        String index = defaultIndex;
        String id = null;
        String routing = defaultRouting;
        List<String> storedFields = null;
        long version = Versions.MATCH_ANY;
        VersionType versionType = VersionType.INTERNAL;
        FetchSourceContext fetchSourceContext = FetchSourceContext.FETCH_SOURCE;
        while ((token = parser.nextToken()) != Token.END_OBJECT) {
            if (token == Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue()) {
                if (INDEX.match(currentFieldName, parser.getDeprecationHandler())) {
                    if (!allowExplicitIndex) {
                        throw new IllegalArgumentException("explicit index in multi get is not allowed");
                    }
                    index = parser.text();
                } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) {
                    id = parser.text();
                } else if (ROUTING.match(currentFieldName, parser.getDeprecationHandler())) {
                    routing = parser.text();
                } else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
                    throw new ParsingException(parser.getTokenLocation(), "Unsupported field [fields] used, expected [stored_fields] instead");
                } else if (STORED_FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
                    storedFields = new ArrayList<>();
                    storedFields.add(parser.text());
                } else if (VERSION.match(currentFieldName, parser.getDeprecationHandler())) {
                    version = parser.longValue();
                } else if (VERSION_TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
                    versionType = VersionType.fromString(parser.text());
                } else if (SOURCE.match(currentFieldName, parser.getDeprecationHandler())) {
                    if (parser.isBooleanValue()) {
                        fetchSourceContext = new FetchSourceContext(parser.booleanValue(), fetchSourceContext.includes(), fetchSourceContext.excludes());
                    } else if (token == Token.VALUE_STRING) {
                        fetchSourceContext = new FetchSourceContext(fetchSourceContext.fetchSource(), new String[] { parser.text() }, fetchSourceContext.excludes());
                    } else {
                        throw new OpenSearchParseException("illegal type for _source: [{}]", token);
                    }
                } else {
                    throw new OpenSearchParseException("failed to parse multi get request. unknown field [{}]", currentFieldName);
                }
            } else if (token == Token.START_ARRAY) {
                if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
                    throw new ParsingException(parser.getTokenLocation(), "Unsupported field [fields] used, expected [stored_fields] instead");
                } else if (STORED_FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
                    storedFields = new ArrayList<>();
                    while ((token = parser.nextToken()) != Token.END_ARRAY) {
                        storedFields.add(parser.text());
                    }
                } else if (SOURCE.match(currentFieldName, parser.getDeprecationHandler())) {
                    ArrayList<String> includes = new ArrayList<>();
                    while ((token = parser.nextToken()) != Token.END_ARRAY) {
                        includes.add(parser.text());
                    }
                    fetchSourceContext = new FetchSourceContext(fetchSourceContext.fetchSource(), includes.toArray(Strings.EMPTY_ARRAY), fetchSourceContext.excludes());
                }
            } else if (token == Token.START_OBJECT) {
                if (SOURCE.match(currentFieldName, parser.getDeprecationHandler())) {
                    List<String> currentList = null, includes = null, excludes = null;
                    while ((token = parser.nextToken()) != Token.END_OBJECT) {
                        if (token == Token.FIELD_NAME) {
                            currentFieldName = parser.currentName();
                            if ("includes".equals(currentFieldName) || "include".equals(currentFieldName)) {
                                currentList = includes != null ? includes : (includes = new ArrayList<>(2));
                            } else if ("excludes".equals(currentFieldName) || "exclude".equals(currentFieldName)) {
                                currentList = excludes != null ? excludes : (excludes = new ArrayList<>(2));
                            } else {
                                throw new OpenSearchParseException("source definition may not contain [{}]", parser.text());
                            }
                        } else if (token == Token.START_ARRAY) {
                            while ((token = parser.nextToken()) != Token.END_ARRAY) {
                                currentList.add(parser.text());
                            }
                        } else if (token.isValue()) {
                            currentList.add(parser.text());
                        } else {
                            throw new OpenSearchParseException("unexpected token while parsing source settings");
                        }
                    }
                    fetchSourceContext = new FetchSourceContext(
                        fetchSourceContext.fetchSource(),
                        includes == null ? Strings.EMPTY_ARRAY : includes.toArray(new String[includes.size()]),
                        excludes == null ? Strings.EMPTY_ARRAY : excludes.toArray(new String[excludes.size()])
                    );
                }
            }
        }
        String[] aFields;
        if (storedFields != null) {
            aFields = storedFields.toArray(new String[storedFields.size()]);
        } else {
            aFields = defaultFields;
        }
        items.add(
            new Item(index, id).routing(routing)
                .storedFields(aFields)
                .version(version)
                .versionType(versionType)
                .fetchSourceContext(fetchSourceContext == FetchSourceContext.FETCH_SOURCE ? defaultFetchSource : fetchSourceContext)
        );
    }
}
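The body this method parses is the "docs" array of an _mget request. For comparison, a hypothetical equivalent built through the public API, assuming the typeless MultiGetRequest.Item(String index, String id) constructor available in recent OpenSearch versions:

import org.opensearch.action.get.MultiGetRequest;
import org.opensearch.search.fetch.subphase.FetchSourceContext;

MultiGetRequest request = new MultiGetRequest();
// Roughly the same effect as a docs entry
// {"_index":"my-index","_id":"1","routing":"user-7","stored_fields":["title","tags"],"_source":["title"]}
request.add(new MultiGetRequest.Item("my-index", "1")
    .routing("user-7")
    .storedFields("title", "tags")
    .fetchSourceContext(new FetchSourceContext(true, new String[] { "title" }, new String[0])));
// Minimal entry, equivalent to {"_index":"my-index","_id":"2"}
request.add(new MultiGetRequest.Item("my-index", "2"));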