Use of org.opensearch.common.CheckedConsumer in project OpenSearch by opensearch-project.
From the class BulkItemResponse, method fromXContent:
/**
 * Reads a {@link BulkItemResponse} from a {@link XContentParser}.
 *
 * @param parser the {@link XContentParser}
 * @param id the id to assign to the parsed {@link BulkItemResponse}. It is usually the index of
 *           the item in the {@link BulkResponse#getItems} array.
 */
public static BulkItemResponse fromXContent(XContentParser parser, int id) throws IOException {
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
    XContentParser.Token token = parser.nextToken();
    ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
    String currentFieldName = parser.currentName();
    token = parser.nextToken();
    final OpType opType = OpType.fromString(currentFieldName);
    ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
    DocWriteResponse.Builder builder = null;
    CheckedConsumer<XContentParser, IOException> itemParser = null;
    if (opType == OpType.INDEX || opType == OpType.CREATE) {
        final IndexResponse.Builder indexResponseBuilder = new IndexResponse.Builder();
        builder = indexResponseBuilder;
        itemParser = (indexParser) -> IndexResponse.parseXContentFields(indexParser, indexResponseBuilder);
    } else if (opType == OpType.UPDATE) {
        final UpdateResponse.Builder updateResponseBuilder = new UpdateResponse.Builder();
        builder = updateResponseBuilder;
        itemParser = (updateParser) -> UpdateResponse.parseXContentFields(updateParser, updateResponseBuilder);
    } else if (opType == OpType.DELETE) {
        final DeleteResponse.Builder deleteResponseBuilder = new DeleteResponse.Builder();
        builder = deleteResponseBuilder;
        itemParser = (deleteParser) -> DeleteResponse.parseXContentFields(deleteParser, deleteResponseBuilder);
    } else {
        throwUnknownField(currentFieldName, parser.getTokenLocation());
    }
    RestStatus status = null;
    OpenSearchException exception = null;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        }
        if (ERROR.equals(currentFieldName)) {
            if (token == XContentParser.Token.START_OBJECT) {
                exception = OpenSearchException.fromXContent(parser);
            }
        } else if (STATUS.equals(currentFieldName)) {
            if (token == XContentParser.Token.VALUE_NUMBER) {
                status = RestStatus.fromCode(parser.intValue());
            }
        } else {
            itemParser.accept(parser);
        }
    }
    ensureExpectedToken(XContentParser.Token.END_OBJECT, token, parser);
    token = parser.nextToken();
    ensureExpectedToken(XContentParser.Token.END_OBJECT, token, parser);
    BulkItemResponse bulkItemResponse;
    if (exception != null) {
        Failure failure = new Failure(builder.getShardId().getIndexName(), builder.getId(), exception, status);
        bulkItemResponse = new BulkItemResponse(id, opType, failure);
    } else {
        bulkItemResponse = new BulkItemResponse(id, opType, builder.build());
    }
    return bulkItemResponse;
}
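The CheckedConsumer here is the itemParser: the operation type selects one of three parsing lambdas, each of which may throw IOException, and the shared parsing loop later just calls itemParser.accept(parser). A java.util.function.Consumer could not declare that checked exception. Below is a minimal, self-contained sketch of the same dispatch pattern; the class name, the map lookup, and the String payload are illustrative stand-ins, not part of BulkItemResponse.

import org.opensearch.common.CheckedConsumer;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class CheckedConsumerDispatchSketch {

    public static void main(String[] args) throws IOException {
        // Hypothetical per-operation handlers; each may throw IOException,
        // which a plain java.util.function.Consumer could not declare.
        Map<String, CheckedConsumer<String, IOException>> handlers = new HashMap<>();
        handlers.put("index", payload -> System.out.println("parsed index item: " + payload));
        handlers.put("delete", payload -> {
            if (payload.isEmpty()) {
                throw new IOException("empty delete payload");
            }
            System.out.println("parsed delete item: " + payload);
        });

        CheckedConsumer<String, IOException> itemParser = handlers.get("index");
        if (itemParser == null) {
            throw new IllegalArgumentException("unknown operation type");
        }
        // The checked IOException propagates to the caller instead of being wrapped.
        itemParser.accept("{\"_id\":\"1\"}");
    }
}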
Use of org.opensearch.common.CheckedConsumer in project OpenSearch by opensearch-project.
From the class BootstrapChecksTests, method testDiscoveryConfiguredCheck:
public void testDiscoveryConfiguredCheck() throws NodeValidationException {
    final List<BootstrapCheck> checks = Collections.singletonList(new BootstrapChecks.DiscoveryConfiguredCheck());
    final BootstrapContext zen2Context = createTestContext(Settings.builder().put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), ZEN2_DISCOVERY_TYPE).build(), Metadata.EMPTY_METADATA);
    // not always enforced
    BootstrapChecks.check(zen2Context, false, checks);
    // not enforced for non-zen2 discovery
    BootstrapChecks.check(createTestContext(Settings.builder().put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), randomFrom("single-node", randomAlphaOfLength(5))).build(), Metadata.EMPTY_METADATA), true, checks);
    final NodeValidationException e = expectThrows(NodeValidationException.class, () -> BootstrapChecks.check(zen2Context, true, checks));
    assertThat(e, hasToString(containsString("the default discovery settings are unsuitable for production use; at least one " + "of [discovery.seed_hosts, discovery.seed_providers, cluster.initial_master_nodes] must be configured")));
    CheckedConsumer<Settings.Builder, NodeValidationException> ensureChecksPass = b -> {
        final BootstrapContext context = createTestContext(b.put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), ZEN2_DISCOVERY_TYPE).build(), Metadata.EMPTY_METADATA);
        BootstrapChecks.check(context, true, checks);
    };
    ensureChecksPass.accept(Settings.builder().putList(ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.getKey()));
    ensureChecksPass.accept(Settings.builder().putList(DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING.getKey()));
    ensureChecksPass.accept(Settings.builder().putList(SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING.getKey()));
}
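In this test, CheckedConsumer<Settings.Builder, NodeValidationException> lets the locally defined ensureChecksPass lambda throw the checked NodeValidationException straight out of the test method, something a standard Consumer would not allow. A rough standalone sketch of that idiom, using IOException and plain strings as stand-ins for the OpenSearch types:

import org.opensearch.common.CheckedConsumer;

import java.io.IOException;

public class CheckedConsumerTestHelperSketch {

    public static void main(String[] args) throws IOException {
        // Local reusable check that may throw a checked exception; with
        // java.util.function.Consumer this would force a try/catch inside the lambda.
        CheckedConsumer<String, IOException> ensureNonEmpty = value -> {
            if (value == null || value.isEmpty()) {
                throw new IOException("expected a configured value");
            }
            System.out.println("check passed for: " + value);
        };

        // Reused for several inputs, mirroring the repeated ensureChecksPass calls above.
        ensureNonEmpty.accept("discovery.seed_hosts");
        ensureNonEmpty.accept("cluster.initial_master_nodes");
    }
}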
Use of org.opensearch.common.CheckedConsumer in project OpenSearch by opensearch-project.
From the class ActionListenerTests, method testOnFailure:
public void testOnFailure() {
    final int numListeners = randomIntBetween(1, 20);
    List<AtomicReference<Boolean>> refList = new ArrayList<>();
    List<AtomicReference<Exception>> excList = new ArrayList<>();
    List<ActionListener<Boolean>> listeners = new ArrayList<>();
    final int listenerToFail = randomBoolean() ? -1 : randomIntBetween(0, numListeners - 1);
    for (int i = 0; i < numListeners; i++) {
        AtomicReference<Boolean> reference = new AtomicReference<>();
        AtomicReference<Exception> exReference = new AtomicReference<>();
        refList.add(reference);
        excList.add(exReference);
        boolean fail = i == listenerToFail;
        CheckedConsumer<Boolean, ? extends Exception> handler = (o) -> {
            reference.set(o);
        };
        listeners.add(ActionListener.wrap(handler, (e) -> {
            exReference.set(e);
            if (fail) {
                throw new RuntimeException("double boom");
            }
        }));
    }
    try {
        ActionListener.onFailure(listeners, new Exception("booom"));
        assertTrue("unexpected succces listener to fail: " + listenerToFail, listenerToFail == -1);
    } catch (RuntimeException ex) {
        assertTrue("listener to fail: " + listenerToFail, listenerToFail >= 0);
        assertNotNull(ex.getCause());
        assertEquals("double boom", ex.getCause().getMessage());
    }
    for (int i = 0; i < numListeners; i++) {
        assertNull("listener index " + i, refList.get(i).get());
    }
    for (int i = 0; i < numListeners; i++) {
        assertEquals("listener index " + i, "booom", excList.get(i).get().getMessage());
    }
}
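ActionListener.wrap accepts the success handler as a CheckedConsumer, so the onResponse lambda may throw a checked exception and have it routed to the failure handler. The sketch below shows that adapter shape in isolation; the wrap helper here is a hypothetical stand-in built on java.util.function.Consumer, not the real ActionListener.wrap.

import org.opensearch.common.CheckedConsumer;

import java.util.function.Consumer;

public class CheckedConsumerWrapSketch {

    /** Hypothetical adapter: exceptions thrown by the checked success handler go to onFailure. */
    static <T> Consumer<T> wrap(CheckedConsumer<T, ? extends Exception> onResponse, Consumer<Exception> onFailure) {
        return value -> {
            try {
                onResponse.accept(value);
            } catch (Exception e) {
                onFailure.accept(e);
            }
        };
    }

    public static void main(String[] args) {
        Consumer<Boolean> listener = wrap(
            ok -> System.out.println("response: " + ok),
            e -> System.out.println("failed: " + e.getMessage())
        );
        listener.accept(true);
    }
}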
Use of org.opensearch.common.CheckedConsumer in project OpenSearch by opensearch-project.
From the class RemoveCorruptedShardDataCommand, method findAndProcessShardPath:
protected void findAndProcessShardPath(OptionSet options, Environment environment, Path[] dataPaths, int nodeLockId, ClusterState clusterState, CheckedConsumer<ShardPath, IOException> consumer) throws IOException {
    final Settings settings = environment.settings();
    final IndexMetadata indexMetadata;
    final int shardId;
    final int fromNodeId;
    final int toNodeId;
    if (options.has(folderOption)) {
        final Path path = getPath(folderOption.value(options)).getParent();
        final Path shardParent = path.getParent();
        final Path shardParentParent = shardParent.getParent();
        final Path indexPath = path.resolve(ShardPath.INDEX_FOLDER_NAME);
        if (Files.exists(indexPath) == false || Files.isDirectory(indexPath) == false) {
            throw new OpenSearchException("index directory [" + indexPath + "], must exist and be a directory");
        }
        final String shardIdFileName = path.getFileName().toString();
        final String nodeIdFileName = shardParentParent.getParent().getFileName().toString();
        final String indexUUIDFolderName = shardParent.getFileName().toString();
        if (Files.isDirectory(path)
            && shardIdFileName.chars().allMatch(Character::isDigit) // SHARD-ID path element check
            && NodeEnvironment.INDICES_FOLDER.equals(shardParentParent.getFileName().toString()) // `indices` check
            && nodeIdFileName.chars().allMatch(Character::isDigit) // NODE-ID check
            && NodeEnvironment.NODES_FOLDER.equals(shardParentParent.getParent().getParent().getFileName().toString()) // `nodes` check
        ) {
            shardId = Integer.parseInt(shardIdFileName);
            fromNodeId = Integer.parseInt(nodeIdFileName);
            toNodeId = fromNodeId + 1;
            indexMetadata = StreamSupport.stream(clusterState.metadata().indices().values().spliterator(), false).map(imd -> imd.value).filter(imd -> imd.getIndexUUID().equals(indexUUIDFolderName)).findFirst().orElse(null);
        } else {
            throw new OpenSearchException("Unable to resolve shard id. Wrong folder structure at [ " + path.toString() + " ], expected .../nodes/[NODE-ID]/indices/[INDEX-UUID]/[SHARD-ID]");
        }
    } else {
        // otherwise resolve shardPath based on the index name and shard id
        String indexName = Objects.requireNonNull(indexNameOption.value(options), "Index name is required");
        shardId = Objects.requireNonNull(shardIdOption.value(options), "Shard ID is required");
        indexMetadata = clusterState.metadata().index(indexName);
    }
    if (indexMetadata == null) {
        throw new OpenSearchException("Unable to find index in cluster state");
    }
    final IndexSettings indexSettings = new IndexSettings(indexMetadata, settings);
    final Index index = indexMetadata.getIndex();
    final ShardId shId = new ShardId(index, shardId);
    for (Path dataPath : dataPaths) {
        final Path shardPathLocation = dataPath.resolve(NodeEnvironment.INDICES_FOLDER).resolve(index.getUUID()).resolve(Integer.toString(shId.id()));
        if (Files.exists(shardPathLocation)) {
            final ShardPath shardPath = ShardPath.loadShardPath(logger, shId, indexSettings.customDataPath(), new Path[] { shardPathLocation }, nodeLockId, dataPath);
            if (shardPath != null) {
                consumer.accept(shardPath);
                return;
            }
        }
    }
    throw new OpenSearchException("Unable to resolve shard path for index [" + indexMetadata.getIndex().getName() + "] and shard id [" + shardId + "]");
}
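findAndProcessShardPath keeps the path-resolution logic to itself and hands the resolved ShardPath to the caller through a CheckedConsumer<ShardPath, IOException>, so the caller's I/O work can throw without wrapping. A rough sketch of that callback shape, using java.nio.file.Path in place of ShardPath and illustrative method names:

import org.opensearch.common.CheckedConsumer;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class CheckedConsumerCallbackSketch {

    /** Hypothetical resolver: validate the location, then hand it to the caller's checked callback. */
    static void findAndProcess(Path dataPath, CheckedConsumer<Path, IOException> consumer) throws IOException {
        if (Files.isDirectory(dataPath) == false) {
            throw new IOException("not a directory: " + dataPath);
        }
        consumer.accept(dataPath); // IOException thrown by the callback propagates to our caller
    }

    public static void main(String[] args) throws IOException {
        findAndProcess(Paths.get("."), path -> {
            // The callback performs I/O of its own and may throw IOException.
            System.out.println("processing " + path.toRealPath());
        });
    }
}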
Use of org.opensearch.common.CheckedConsumer in project OpenSearch by opensearch-project.
From the class GeoGridAggregatorTestCase, method testAsSubAgg:
public void testAsSubAgg() throws IOException {
    int precision = randomPrecision();
    Map<String, Map<String, Long>> expectedCountPerTPerGeoHash = new TreeMap<>();
    List<List<IndexableField>> docs = new ArrayList<>();
    for (int i = 0; i < 30; i++) {
        String t = randomAlphaOfLength(1);
        double[] latLng = randomLatLng();
        List<IndexableField> doc = new ArrayList<>();
        docs.add(doc);
        doc.add(new LatLonDocValuesField(FIELD_NAME, latLng[0], latLng[1]));
        doc.add(new SortedSetDocValuesField("t", new BytesRef(t)));
        String hash = hashAsString(latLng[1], latLng[0], precision);
        Map<String, Long> expectedCountPerGeoHash = expectedCountPerTPerGeoHash.get(t);
        if (expectedCountPerGeoHash == null) {
            expectedCountPerGeoHash = new TreeMap<>();
            expectedCountPerTPerGeoHash.put(t, expectedCountPerGeoHash);
        }
        expectedCountPerGeoHash.put(hash, expectedCountPerGeoHash.getOrDefault(hash, 0L) + 1);
    }
    CheckedConsumer<RandomIndexWriter, IOException> buildIndex = iw -> iw.addDocuments(docs);
    TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("t").field("t").size(expectedCountPerTPerGeoHash.size()).subAggregation(createBuilder("gg").field(FIELD_NAME).precision(precision));
    Consumer<StringTerms> verify = (terms) -> {
        Map<String, Map<String, Long>> actual = new TreeMap<>();
        for (StringTerms.Bucket tb : terms.getBuckets()) {
            InternalGeoGrid<?> gg = tb.getAggregations().get("gg");
            Map<String, Long> sub = new TreeMap<>();
            for (InternalGeoGridBucket<?> ggb : gg.getBuckets()) {
                sub.put(ggb.getKeyAsString(), ggb.getDocCount());
            }
            actual.put(tb.getKeyAsString(), sub);
        }
        assertThat(actual, equalTo(expectedCountPerTPerGeoHash));
    };
    testCase(aggregationBuilder, new MatchAllDocsQuery(), buildIndex, verify, keywordField("t"), geoPointField(FIELD_NAME));
}
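The buildIndex step is expressed as a CheckedConsumer<RandomIndexWriter, IOException> so the testCase harness can own the writer's lifecycle while the test only supplies the documents, and addDocuments may throw IOException without any wrapping inside the lambda. A minimal sketch of a harness built around that shape, with StringWriter standing in for RandomIndexWriter:

import org.opensearch.common.CheckedConsumer;

import java.io.IOException;
import java.io.StringWriter;

public class CheckedConsumerHarnessSketch {

    /** Hypothetical harness: it creates and owns the writer, the caller only fills it. */
    static String runCase(CheckedConsumer<StringWriter, IOException> buildIndex) throws IOException {
        StringWriter writer = new StringWriter();
        buildIndex.accept(writer); // the build step may throw IOException
        return writer.toString();
    }

    public static void main(String[] args) throws IOException {
        CheckedConsumer<StringWriter, IOException> buildIndex = w -> w.write("doc-1\ndoc-2\n");
        System.out.println(runCase(buildIndex));
    }
}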