Use of org.elasticsearch.common.xcontent.XContentParser in project elasticsearch by elastic.
Class MultiMatchQueryBuilder, method fromXContent:
public static MultiMatchQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
    XContentParser parser = parseContext.parser();
    Object value = null;
    Map<String, Float> fieldsBoosts = new HashMap<>();
    MultiMatchQueryBuilder.Type type = DEFAULT_TYPE;
    String analyzer = null;
    int slop = DEFAULT_PHRASE_SLOP;
    Fuzziness fuzziness = null;
    int prefixLength = DEFAULT_PREFIX_LENGTH;
    int maxExpansions = DEFAULT_MAX_EXPANSIONS;
    Operator operator = DEFAULT_OPERATOR;
    String minimumShouldMatch = null;
    String fuzzyRewrite = null;
    Boolean useDisMax = null;
    Float tieBreaker = null;
    Float cutoffFrequency = null;
    boolean lenient = DEFAULT_LENIENCY;
    MatchQuery.ZeroTermsQuery zeroTermsQuery = DEFAULT_ZERO_TERMS_QUERY;
    float boost = AbstractQueryBuilder.DEFAULT_BOOST;
    String queryName = null;
    XContentParser.Token token;
    String currentFieldName = null;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (FIELDS_FIELD.match(currentFieldName)) {
            // "fields" may be given as a single value or as an array; each entry may carry a boost.
            if (token == XContentParser.Token.START_ARRAY) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    parseFieldAndBoost(parser, fieldsBoosts);
                }
            } else if (token.isValue()) {
                parseFieldAndBoost(parser, fieldsBoosts);
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                    "[" + NAME + "] query does not support [" + currentFieldName + "]");
            }
        } else if (token.isValue()) {
            if (QUERY_FIELD.match(currentFieldName)) {
                value = parser.objectText();
            } else if (TYPE_FIELD.match(currentFieldName)) {
                type = MultiMatchQueryBuilder.Type.parse(parser.text());
            } else if (ANALYZER_FIELD.match(currentFieldName)) {
                analyzer = parser.text();
            } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName)) {
                boost = parser.floatValue();
            } else if (SLOP_FIELD.match(currentFieldName)) {
                slop = parser.intValue();
            } else if (Fuzziness.FIELD.match(currentFieldName)) {
                fuzziness = Fuzziness.parse(parser);
            } else if (PREFIX_LENGTH_FIELD.match(currentFieldName)) {
                prefixLength = parser.intValue();
            } else if (MAX_EXPANSIONS_FIELD.match(currentFieldName)) {
                maxExpansions = parser.intValue();
            } else if (OPERATOR_FIELD.match(currentFieldName)) {
                operator = Operator.fromString(parser.text());
            } else if (MINIMUM_SHOULD_MATCH_FIELD.match(currentFieldName)) {
                minimumShouldMatch = parser.textOrNull();
            } else if (FUZZY_REWRITE_FIELD.match(currentFieldName)) {
                fuzzyRewrite = parser.textOrNull();
            } else if (USE_DIS_MAX_FIELD.match(currentFieldName)) {
                useDisMax = parser.booleanValue();
            } else if (TIE_BREAKER_FIELD.match(currentFieldName)) {
                tieBreaker = parser.floatValue();
            } else if (CUTOFF_FREQUENCY_FIELD.match(currentFieldName)) {
                cutoffFrequency = parser.floatValue();
            } else if (LENIENT_FIELD.match(currentFieldName)) {
                lenient = parser.booleanValue();
            } else if (ZERO_TERMS_QUERY_FIELD.match(currentFieldName)) {
                String zeroTermsDocs = parser.text();
                if ("none".equalsIgnoreCase(zeroTermsDocs)) {
                    zeroTermsQuery = MatchQuery.ZeroTermsQuery.NONE;
                } else if ("all".equalsIgnoreCase(zeroTermsDocs)) {
                    zeroTermsQuery = MatchQuery.ZeroTermsQuery.ALL;
                } else {
                    throw new ParsingException(parser.getTokenLocation(),
                        "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]");
                }
            } else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName)) {
                queryName = parser.text();
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                    "[" + NAME + "] query does not support [" + currentFieldName + "]");
            }
        } else {
            throw new ParsingException(parser.getTokenLocation(),
                "[" + NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
        }
    }
    if (value == null) {
        throw new ParsingException(parser.getTokenLocation(), "No text specified for multi_match query");
    }
    if (fieldsBoosts.isEmpty()) {
        throw new ParsingException(parser.getTokenLocation(), "No fields specified for multi_match query");
    }
    if (fuzziness != null && (type == Type.CROSS_FIELDS || type == Type.PHRASE || type == Type.PHRASE_PREFIX)) {
        throw new ParsingException(parser.getTokenLocation(),
            "Fuzziness not allowed for type [" + type.parseField.getPreferredName() + "]");
    }
    return new MultiMatchQueryBuilder(value)
        .fields(fieldsBoosts)
        .type(type)
        .analyzer(analyzer)
        .cutoffFrequency(cutoffFrequency)
        .fuzziness(fuzziness)
        .fuzzyRewrite(fuzzyRewrite)
        .useDisMax(useDisMax)
        .lenient(lenient)
        .maxExpansions(maxExpansions)
        .minimumShouldMatch(minimumShouldMatch)
        .operator(operator)
        .prefixLength(prefixLength)
        .slop(slop)
        .tieBreaker(tieBreaker)
        .zeroTermsQuery(zeroTermsQuery)
        .boost(boost)
        .queryName(queryName);
}
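The chained setters at the end mirror the builder's public API, so a body parsed here can also be constructed programmatically. A minimal sketch, assuming Elasticsearch 5.x package locations; the field names and values are purely illustrative:

import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.search.MatchQuery;

class MultiMatchExample {
    // Programmatic counterpart of a typical multi_match body (field names and values are placeholders).
    static MultiMatchQueryBuilder example() {
        return new MultiMatchQueryBuilder("quick brown fox")
                .field("title", 2.0f)                           // "fields" : ["title^2", "body"]
                .field("body")
                .type(MultiMatchQueryBuilder.Type.BEST_FIELDS)  // "type" : "best_fields"
                .operator(Operator.AND)                         // "operator" : "and"
                .minimumShouldMatch("75%")
                .zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL); // "zero_terms_query" : "all"
    }
}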
Use of org.elasticsearch.common.xcontent.XContentParser in project elasticsearch by elastic.
Class DecayFunctionBuilder, method parseGeoVariable:
private AbstractDistanceScoreFunction parseGeoVariable(XContentParser parser, QueryShardContext context, MappedFieldType fieldType, MultiValueMode mode) throws IOException {
    XContentParser.Token token;
    String parameterName = null;
    GeoPoint origin = new GeoPoint();
    String scaleString = null;
    String offsetString = "0km";
    double decay = 0.5;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            parameterName = parser.currentName();
        } else if (DecayFunctionBuilder.SCALE.equals(parameterName)) {
            scaleString = parser.text();
        } else if (DecayFunctionBuilder.ORIGIN.equals(parameterName)) {
            origin = GeoUtils.parseGeoPoint(parser);
        } else if (DecayFunctionBuilder.DECAY.equals(parameterName)) {
            decay = parser.doubleValue();
        } else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) {
            offsetString = parser.text();
        } else {
            throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
        }
    }
    if (origin == null || scaleString == null) {
        throw new ElasticsearchParseException("[{}] and [{}] must be set for geo fields.",
            DecayFunctionBuilder.ORIGIN, DecayFunctionBuilder.SCALE);
    }
    double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT);
    double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT);
    IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
    return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode);
}
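The geo variant expects an object holding origin and scale, with offset and decay optional. A sketch of a matching request fragment built with XContentBuilder; the decay type "gauss", the field name "location", and all values are only examples:

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

class GeoDecayBodyExample {
    // Builds the kind of object parseGeoVariable consumes (values are placeholders).
    static XContentBuilder example() throws java.io.IOException {
        return XContentFactory.jsonBuilder()
                .startObject()
                    .startObject("gauss")
                        .startObject("location")             // the geo field being scored
                            .field("origin", "40.0,-70.0")   // parsed via GeoUtils.parseGeoPoint
                            .field("scale", "2km")           // parsed with DistanceUnit.DEFAULT
                            .field("offset", "500m")         // optional, defaults to "0km"
                            .field("decay", 0.33)            // optional, defaults to 0.5
                        .endObject()
                    .endObject()
                .endObject();
    }
}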
Use of org.elasticsearch.common.xcontent.XContentParser in project elasticsearch by elastic.
Class DecayFunctionParser, method fromXContent:
/**
 * Parses bodies of the kind
 *
 * <pre>
 * <code>
 * {
 *     "fieldname1" : {
 *         "origin" : "someValue",
 *         "scale" : "someValue"
 *     },
 *     "multi_value_mode" : "min"
 * }
 * </code>
 * </pre>
 */
@Override
public DFB fromXContent(QueryParseContext context) throws IOException, ParsingException {
    XContentParser parser = context.parser();
    String currentFieldName;
    XContentParser.Token token;
    MultiValueMode multiValueMode = DecayFunctionBuilder.DEFAULT_MULTI_VALUE_MODE;
    String fieldName = null;
    BytesReference functionBytes = null;
    while ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME) {
        currentFieldName = parser.currentName();
        token = parser.nextToken();
        if (token == XContentParser.Token.START_OBJECT) {
            // The inner object is keyed by the field name; copy it verbatim so the
            // concrete decay builder can re-parse it later.
            fieldName = currentFieldName;
            XContentBuilder builder = XContentFactory.jsonBuilder();
            builder.copyCurrentStructure(parser);
            functionBytes = builder.bytes();
        } else if (MULTI_VALUE_MODE.match(currentFieldName)) {
            multiValueMode = MultiValueMode.fromString(parser.text());
        } else {
            throw new ParsingException(parser.getTokenLocation(), "malformed score function score parameters.");
        }
    }
    if (fieldName == null || functionBytes == null) {
        throw new ParsingException(parser.getTokenLocation(), "malformed score function score parameters.");
    }
    DFB functionBuilder = createFromBytes.apply(fieldName, functionBytes);
    functionBuilder.setMultiValueMode(multiValueMode);
    return functionBuilder;
}
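The parser only splits the body into the field name, the raw bytes of its inner object, and an optional multi_value_mode; the concrete builder re-parses the bytes. A rough programmatic counterpart of the documented JSON, assuming the Gaussian variant and a four-argument GaussDecayFunctionBuilder constructor (fieldName, origin, scale, offset); the field name and values are placeholders:

import org.elasticsearch.index.query.functionscore.GaussDecayFunctionBuilder;
import org.elasticsearch.search.MultiValueMode;

class DecayFunctionExample {
    // Roughly equivalent to {"publish_date": {"origin": "now", "scale": "10d"}, "multi_value_mode": "min"}.
    static GaussDecayFunctionBuilder example() {
        GaussDecayFunctionBuilder decay = new GaussDecayFunctionBuilder("publish_date", "now", "10d", "0d");
        decay.setMultiValueMode(MultiValueMode.MIN);
        return decay;
    }
}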
Use of org.elasticsearch.common.xcontent.XContentParser in project elasticsearch by elastic.
Class FieldValueFactorFunctionBuilder, method fromXContent:
public static FieldValueFactorFunctionBuilder fromXContent(QueryParseContext parseContext) throws IOException, ParsingException {
    XContentParser parser = parseContext.parser();
    String currentFieldName = null;
    String field = null;
    float boostFactor = FieldValueFactorFunctionBuilder.DEFAULT_FACTOR;
    FieldValueFactorFunction.Modifier modifier = FieldValueFactorFunction.Modifier.NONE;
    Double missing = null;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token.isValue()) {
            if ("field".equals(currentFieldName)) {
                field = parser.text();
            } else if ("factor".equals(currentFieldName)) {
                boostFactor = parser.floatValue();
            } else if ("modifier".equals(currentFieldName)) {
                modifier = FieldValueFactorFunction.Modifier.fromString(parser.text());
            } else if ("missing".equals(currentFieldName)) {
                missing = parser.doubleValue();
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                    NAME + " query does not support [" + currentFieldName + "]");
            }
        } else if ("factor".equals(currentFieldName)
                && (token == XContentParser.Token.START_ARRAY || token == XContentParser.Token.START_OBJECT)) {
            throw new ParsingException(parser.getTokenLocation(),
                "[" + NAME + "] field 'factor' does not support lists or objects");
        }
    }
    if (field == null) {
        throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] required field 'field' missing");
    }
    FieldValueFactorFunctionBuilder fieldValueFactorFunctionBuilder = new FieldValueFactorFunctionBuilder(field)
        .factor(boostFactor)
        .modifier(modifier);
    if (missing != null) {
        fieldValueFactorFunctionBuilder.missing(missing);
    }
    return fieldValueFactorFunctionBuilder;
}
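The same options can be set directly on the builder this parser constructs. A minimal sketch, assuming Elasticsearch 5.x package locations; the field name and values are illustrative:

import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction;
import org.elasticsearch.index.query.functionscore.FieldValueFactorFunctionBuilder;

class FieldValueFactorExample {
    // Roughly equivalent to {"field": "likes", "factor": 1.2, "modifier": "log1p", "missing": 1}.
    static FieldValueFactorFunctionBuilder example() {
        FieldValueFactorFunctionBuilder factor = new FieldValueFactorFunctionBuilder("likes")
                .factor(1.2f)
                .modifier(FieldValueFactorFunction.Modifier.LOG1P);
        factor.missing(1.0);
        return factor;
    }
}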
Use of org.elasticsearch.common.xcontent.XContentParser in project elasticsearch by elastic.
Class FunctionScoreQueryBuilder, method parseFiltersAndFunctions:
private static String parseFiltersAndFunctions(QueryParseContext parseContext,
        List<FunctionScoreQueryBuilder.FilterFunctionBuilder> filterFunctionBuilders) throws IOException {
    String currentFieldName = null;
    XContentParser.Token token;
    XContentParser parser = parseContext.parser();
    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
        QueryBuilder filter = null;
        ScoreFunctionBuilder<?> scoreFunction = null;
        Float functionWeight = null;
        if (token != XContentParser.Token.START_OBJECT) {
            throw new ParsingException(parser.getTokenLocation(),
                "failed to parse [{}]. malformed query, expected a [{}] while parsing functions but got a [{}] instead",
                NAME, XContentParser.Token.START_OBJECT, token);
        } else {
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token == XContentParser.Token.START_OBJECT) {
                    if (FILTER_FIELD.match(currentFieldName)) {
                        filter = parseContext.parseInnerQueryBuilder();
                    } else {
                        if (scoreFunction != null) {
                            throw new ParsingException(parser.getTokenLocation(),
                                "failed to parse function_score functions. already found [{}], now encountering [{}].",
                                scoreFunction.getName(), currentFieldName);
                        }
                        scoreFunction = parser.namedObject(ScoreFunctionBuilder.class, currentFieldName, parseContext);
                    }
                } else if (token.isValue()) {
                    if (WEIGHT_FIELD.match(currentFieldName)) {
                        functionWeight = parser.floatValue();
                    } else {
                        throw new ParsingException(parser.getTokenLocation(),
                            "failed to parse [{}] query. field [{}] is not supported", NAME, currentFieldName);
                    }
                }
            }
            if (functionWeight != null) {
                // A bare "weight" entry becomes a WeightBuilder; otherwise the weight is attached
                // to the score function parsed above.
                if (scoreFunction == null) {
                    scoreFunction = new WeightBuilder().setWeight(functionWeight);
                } else {
                    scoreFunction.setWeight(functionWeight);
                }
            }
        }
        if (filter == null) {
            filter = new MatchAllQueryBuilder();
        }
        if (scoreFunction == null) {
            throw new ParsingException(parser.getTokenLocation(),
                "failed to parse [{}] query. an entry in functions list is missing a function.", NAME);
        }
        filterFunctionBuilders.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(filter, scoreFunction));
    }
    return currentFieldName;
}
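Each entry in the functions array thus becomes a FilterFunctionBuilder, with match_all substituted when no filter is given and a bare weight wrapped in a WeightBuilder. A sketch of building the same structure through the query DSL helpers, assuming Elasticsearch 5.x package locations; field names and values are illustrative only:

import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;

class FunctionScoreExample {
    // Two "functions" entries: one filtered and weighted, one unfiltered (match_all implied).
    static FunctionScoreQueryBuilder example() {
        FilterFunctionBuilder[] functions = new FilterFunctionBuilder[] {
            new FilterFunctionBuilder(
                    QueryBuilders.termQuery("category", "electronics"),      // the "filter" entry
                    ScoreFunctionBuilders.weightFactorFunction(2.0f)),       // "weight" : 2.0
            new FilterFunctionBuilder(
                    ScoreFunctionBuilders.fieldValueFactorFunction("likes")) // no filter given
        };
        return QueryBuilders.functionScoreQuery(QueryBuilders.matchAllQuery(), functions);
    }
}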