Use of org.opensearch.OpenSearchParseException in project OpenSearch by opensearch-project.
From the class DateProcessorFactoryTests, method testParseMatchFormatsFailure.
public void testParseMatchFormatsFailure() throws Exception {
    Map<String, Object> config = new HashMap<>();
    String sourceField = randomAlphaOfLengthBetween(1, 10);
    config.put("field", sourceField);
    // "formats" must be a list; passing a plain String should make processor creation fail
    config.put("formats", "dd/MM/yyyy");
    try {
        factory.create(null, null, null, config);
        fail("processor creation should have failed");
    } catch (OpenSearchParseException e) {
        assertThat(e.getMessage(), containsString("[formats] property isn't a list, but of type [java.lang.String]"));
    }
}
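For contrast, a minimal sketch of a configuration the factory should accept, assuming the same factory and sourceField fixtures as in the test above: the only change is that "formats" is supplied as a list rather than a plain String.

// Sketch only: same configuration as above, but with "formats" as a list,
// which is the shape the date processor factory expects.
Map<String, Object> validConfig = new HashMap<>();
validConfig.put("field", sourceField);
validConfig.put("formats", Collections.singletonList("dd/MM/yyyy"));
// With a list, creation is expected to succeed rather than throw OpenSearchParseException.
factory.create(null, null, null, validConfig);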
Use of org.opensearch.OpenSearchParseException in project OpenSearch by opensearch-project.
From the class UserAgentParser, method init.
private void init(InputStream regexStream) throws IOException {
    // EMPTY is safe here because we don't use namedObject
    XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML)
        .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, regexStream);
    XContentParser.Token token = yamlParser.nextToken();
    if (token == XContentParser.Token.START_OBJECT) {
        token = yamlParser.nextToken();
        for (; token != null; token = yamlParser.nextToken()) {
            if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("user_agent_parsers")) {
                List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser);
                for (Map<String, String> map : parserConfigurations) {
                    uaPatterns.add(
                        new UserAgentSubpattern(
                            compilePattern(map.get("regex"), map.get("regex_flag")),
                            map.get("family_replacement"),
                            map.get("v1_replacement"),
                            map.get("v2_replacement"),
                            map.get("v3_replacement"),
                            map.get("v4_replacement")
                        )
                    );
                }
            } else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("os_parsers")) {
                List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser);
                for (Map<String, String> map : parserConfigurations) {
                    osPatterns.add(
                        new UserAgentSubpattern(
                            compilePattern(map.get("regex"), map.get("regex_flag")),
                            map.get("os_replacement"),
                            map.get("os_v1_replacement"),
                            map.get("os_v2_replacement"),
                            map.get("os_v3_replacement"),
                            map.get("os_v4_replacement")
                        )
                    );
                }
            } else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("device_parsers")) {
                List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser);
                for (Map<String, String> map : parserConfigurations) {
                    devicePatterns.add(
                        new UserAgentSubpattern(
                            compilePattern(map.get("regex"), map.get("regex_flag")),
                            map.get("device_replacement"),
                            null, null, null, null
                        )
                    );
                }
            }
        }
    }
    if (uaPatterns.isEmpty() && osPatterns.isEmpty() && devicePatterns.isEmpty()) {
        throw new OpenSearchParseException("not a valid regular expression file");
    }
}
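For context, a rough sketch of the kind of stream this initializer consumes, assuming the uap-core regexes layout implied by the field names read above (user_agent_parsers, os_parsers, device_parsers with regex and *_replacement keys); the patterns and replacements are illustrative placeholders, not real regexes.yml content.

// Sketch only: a tiny YAML stand-in using the field names init(...) looks for.
String yaml =
    "user_agent_parsers:\n"
        + "  - regex: '(Firefox)/(\\d+)\\.(\\d+)'\n"
        + "    family_replacement: 'Firefox'\n"
        + "os_parsers:\n"
        + "  - regex: '(Windows NT) (\\d+)\\.(\\d+)'\n"
        + "    os_replacement: 'Windows'\n"
        + "device_parsers:\n"
        + "  - regex: '(iPhone)'\n"
        + "    device_replacement: 'iPhone'\n";
InputStream regexStream = new ByteArrayInputStream(yaml.getBytes(StandardCharsets.UTF_8));
// A stream like this is assumed to reach init(...) via the UserAgentParser constructor.
// If none of the three sections is present, init(...) throws
// OpenSearchParseException("not a valid regular expression file").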
Use of org.opensearch.OpenSearchParseException in project OpenSearch by opensearch-project.
From the class UserAgentProcessorFactoryTests, method testInvalidProperty.
public void testInvalidProperty() throws Exception {
    UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers);
    Map<String, Object> config = new HashMap<>();
    config.put("field", "_field");
    config.put("properties", Collections.singletonList("invalid"));
    OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config));
    assertThat(
        e.getMessage(),
        equalTo(
            "[properties] illegal property value [invalid]. valid values are [NAME, MAJOR, MINOR, "
                + "PATCH, OS, OS_NAME, OS_MAJOR, OS_MINOR, DEVICE, BUILD, ORIGINAL, VERSION]"
        )
    );
}
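By contrast, a minimal sketch of a configuration the factory should accept, assuming the same factory and userAgentParsers fixtures as in the test above; the property names are taken directly from the list of valid values in the error message.

// Sketch only: "properties" restricted to values the error message above lists as valid.
Map<String, Object> validConfig = new HashMap<>();
validConfig.put("field", "_field");
validConfig.put("properties", Arrays.asList("NAME", "OS", "DEVICE"));
// Expected to create the processor without throwing OpenSearchParseException.
factory.create(null, null, null, validConfig);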
Use of org.opensearch.OpenSearchParseException in project OpenSearch by opensearch-project.
From the class RareClusterStateIT, method testDelayedMappingPropagationOnPrimary.
public void testDelayedMappingPropagationOnPrimary() throws Exception {
    // Here we want to test that things go well if there is a first request
    // that adds mappings but, before the mappings are propagated to all nodes,
    // another index request introduces the same mapping. The master node
    // will reply immediately since it did not change the cluster state,
    // but the change might not be on the node that performed the indexing
    // operation yet.
    final List<String> nodeNames = internalCluster().startNodes(2);
    assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut());
    final String master = internalCluster().getMasterName();
    assertThat(nodeNames, hasItem(master));
    String otherNode = null;
    for (String node : nodeNames) {
        if (node.equals(master) == false) {
            otherNode = node;
            break;
        }
    }
    assertNotNull(otherNode);
    // Don't allocate the shard on the master node
    assertAcked(
        prepareCreate("index").setSettings(
            Settings.builder()
                .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
                .put("index.routing.allocation.exclude._name", master)
        ).get()
    );
    ensureGreen();
    // Check routing tables
    ClusterState state = client().admin().cluster().prepareState().get().getState();
    assertEquals(master, state.nodes().getMasterNode().getName());
    List<ShardRouting> shards = state.routingTable().allShards("index");
    assertThat(shards, hasSize(1));
    for (ShardRouting shard : shards) {
        if (shard.primary()) {
            // primary must not be on the master node
            assertFalse(state.nodes().getMasterNodeId().equals(shard.currentNodeId()));
        } else {
            // only primaries
            fail();
        }
    }
    // Block cluster state processing where our shard is
    BlockClusterStateProcessing disruption = new BlockClusterStateProcessing(otherNode, random());
    internalCluster().setDisruptionScheme(disruption);
    disruption.startDisrupting();
    // Add a new mapping...
    ActionFuture<AcknowledgedResponse> putMappingResponse = executeAndCancelCommittedPublication(
        client().admin().indices().preparePutMapping("index").setSource("field", "type=long")
    );
    // ...and wait for mappings to be available on master
    assertBusy(() -> {
        MappingMetadata typeMappings = client().admin().indices().prepareGetMappings("index").get().getMappings().get("index");
        assertNotNull(typeMappings);
        Object properties;
        try {
            properties = typeMappings.getSourceAsMap().get("properties");
        } catch (OpenSearchParseException e) {
            throw new AssertionError(e);
        }
        assertNotNull(properties);
        Object fieldMapping = ((Map<String, Object>) properties).get("field");
        assertNotNull(fieldMapping);
    });
    // This request does not change the cluster state, because the mapping is already created,
    // so we don't await and cancel a committed publication.
    ActionFuture<IndexResponse> docIndexResponse = client().prepareIndex("index").setId("1").setSource("field", 42).execute();
    // Wait a bit to make sure that the reason why we did not get a response
    // is that cluster state processing is blocked and not just that it takes
    // time to process the indexing request
    Thread.sleep(100);
    assertFalse(putMappingResponse.isDone());
    assertFalse(docIndexResponse.isDone());
    // Now make sure the indexing request finishes successfully
    disruption.stopDisrupting();
    assertTrue(putMappingResponse.get(10, TimeUnit.SECONDS).isAcknowledged());
    assertThat(docIndexResponse.get(10, TimeUnit.SECONDS), instanceOf(IndexResponse.class));
    assertEquals(1, docIndexResponse.get(10, TimeUnit.SECONDS).getShardInfo().getTotal());
}
Use of org.opensearch.OpenSearchParseException in project OpenSearch by opensearch-project.
From the class GeoUtils, method parseGeoPoint.
/**
 * Parses the value as a geopoint. The following types of values are supported:
 * <p>
 * Object: has to contain either lat and lon or geohash fields
 * <p>
 * String: expected to be in "latitude, longitude" format or a geohash
 * <p>
 * Array: two or more elements, the first element is longitude, the second is latitude,
 * the rest is ignored if ignoreZValue is true
 */
public static GeoPoint parseGeoPoint(Object value, GeoPoint point, final boolean ignoreZValue) throws OpenSearchParseException {
    try (
        XContentParser parser = new MapXContentParser(
            NamedXContentRegistry.EMPTY,
            LoggingDeprecationHandler.INSTANCE,
            Collections.singletonMap("null_value", value),
            null
        )
    ) {
        parser.nextToken(); // start object
        parser.nextToken(); // field name
        parser.nextToken(); // field value
        return parseGeoPoint(parser, point, ignoreZValue);
    } catch (IOException ex) {
        throw new OpenSearchParseException("error parsing geopoint", ex);
    }
}
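A minimal usage sketch of the three accepted value shapes described in the Javadoc above; the coordinates and collection helpers are illustrative only.

// Sketch only: the three supported input shapes for parseGeoPoint(Object, GeoPoint, boolean).
GeoPoint fromObject = parseGeoPoint(Map.of("lat", 41.12, "lon", -71.34), new GeoPoint(), false);
GeoPoint fromString = parseGeoPoint("41.12,-71.34", new GeoPoint(), false);
// Array form is [lon, lat, ...]; the trailing element is ignored because ignoreZValue is true.
GeoPoint fromArray = parseGeoPoint(Arrays.asList(-71.34, 41.12, 10.0), new GeoPoint(), true);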