Example usage of org.opensearch.action.ingest.PutPipelineRequest in the OpenSearch project (opensearch-project), taken from the IngestServiceTests method testExecuteBulkPipelineDoesNotExist:
public void testExecuteBulkPipelineDoesNotExist() {
    // Service with a single "mock" processor factory backed by a compound processor.
    IngestService service = createWithProcessors(
        Collections.singletonMap("mock", (factories, tag, description, config) -> mockCompoundProcessor())
    );

    // Register pipeline "_id" through a simulated cluster-state update.
    PutPipelineRequest put = new PutPipelineRequest(
        "_id",
        new BytesArray("{\"processors\": [{\"mock\" : {}}]}"),
        XContentType.JSON
    );
    ClusterState previousState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty
    ClusterState currentState = IngestService.innerPut(put, previousState);
    service.applyClusterState(new ClusterChangedEvent("", currentState, previousState));

    // Three items: no pipeline, the registered pipeline, and a pipeline that does not exist.
    BulkRequest bulk = new BulkRequest();
    bulk.add(new IndexRequest("_index").id("_id1").source(emptyMap()).setPipeline("_none").setFinalPipeline("_none"));
    bulk.add(new IndexRequest("_index").id("_id2").source(emptyMap()).setPipeline("_id").setFinalPipeline("_none"));
    bulk.add(new IndexRequest("_index").id("_id3").source(emptyMap()).setPipeline("does_not_exist").setFinalPipeline("_none"));

    @SuppressWarnings("unchecked")
    BiConsumer<Integer, Exception> onFailure = mock(BiConsumer.class);
    @SuppressWarnings("unchecked")
    final BiConsumer<Thread, Exception> onCompletion = mock(BiConsumer.class);
    service.executeBulkRequest(bulk.numberOfActions(), bulk.requests(), onFailure, onCompletion, indexReq -> {
    }, Names.WRITE);

    // Only the third item (slot 2) fails, with the "does not exist" message; the bulk still completes once.
    verify(onFailure, times(1)).accept(
        argThat((Integer slot) -> slot == 2),
        argThat((IllegalArgumentException iae) -> "pipeline with id [does_not_exist] does not exist".equals(iae.getMessage()))
    );
    verify(onCompletion, times(1)).accept(Thread.currentThread(), null);
}
Example usage of org.opensearch.action.ingest.PutPipelineRequest in the OpenSearch project (opensearch-project), taken from the IngestServiceTests method testValidate:
public void testValidate() throws Exception {
    IngestService service = createWithProcessors();

    // Pipeline with two processor types: "set" (tag1) and "remove" (tag2).
    PutPipelineRequest put = new PutPipelineRequest(
        "_id",
        new BytesArray(
            "{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\", \"tag\": \"tag1\"}},"
                + "{\"remove\" : {\"field\": \"_field\", \"tag\": \"tag2\"}}]}"
        ),
        XContentType.JSON
    );

    // node1 advertises both processors; node2 is missing "remove".
    DiscoveryNode node1 = new DiscoveryNode("_node_id1", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
    DiscoveryNode node2 = new DiscoveryNode("_node_id2", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
    Map<DiscoveryNode, IngestInfo> infos = new HashMap<>();
    infos.put(node1, new IngestInfo(Arrays.asList(new ProcessorInfo("set"), new ProcessorInfo("remove"))));
    infos.put(node2, new IngestInfo(Arrays.asList(new ProcessorInfo("set"))));

    // Validation fails on the node lacking "remove"; exception metadata pinpoints type and tag.
    OpenSearchParseException e = expectThrows(
        OpenSearchParseException.class,
        () -> service.validatePipeline(infos, put)
    );
    assertEquals("Processor type [remove] is not installed on node [" + node2 + "]", e.getMessage());
    assertEquals("remove", e.getMetadata("opensearch.processor_type").get(0));
    assertEquals("tag2", e.getMetadata("opensearch.processor_tag").get(0));

    // Once node2 also advertises "remove", validation succeeds.
    infos.put(node2, new IngestInfo(Arrays.asList(new ProcessorInfo("set"), new ProcessorInfo("remove"))));
    service.validatePipeline(infos, put);
}
Example usage of org.opensearch.action.ingest.PutPipelineRequest in the OpenSearch project (opensearch-project), taken from the IngestServiceTests method testBulkRequestExecution:
public void testBulkRequestExecution() throws Exception {
    String pipelineId = "_id";

    // Pick any content type other than the default index content type to prove
    // that ingest preserves the original source format.
    XContentType xContentType = randomFrom(
        Arrays.stream(XContentType.values())
            .filter(t -> Requests.INDEX_CONTENT_TYPE.equals(t) == false)
            .collect(Collectors.toList())
    );
    logger.info("Using [{}], not randomly determined default [{}]", xContentType, Requests.INDEX_CONTENT_TYPE);

    BulkRequest bulkRequest = new BulkRequest();
    int numRequest = scaledRandomIntBetween(8, 64);
    for (int i = 0; i < numRequest; i++) {
        IndexRequest request = new IndexRequest("_index").id("_id").setPipeline(pipelineId).setFinalPipeline("_none");
        request.source(xContentType, "field1", "value1");
        bulkRequest.add(request);
    }

    // Async processor stub: immediately reports success with a random document.
    final Processor processor = mock(Processor.class);
    when(processor.getType()).thenReturn("mock");
    when(processor.getTag()).thenReturn("mockTag");
    doAnswer(args -> {
        @SuppressWarnings("unchecked")
        BiConsumer<IngestDocument, Exception> handler = (BiConsumer) args.getArguments()[1];
        handler.accept(RandomDocumentPicks.randomIngestDocument(random()), null);
        return null;
    }).when(processor).execute(any(), any());

    Map<String, Processor.Factory> factories = new HashMap<>(2);
    factories.put("mock", (registry, tag, description, config) -> processor);
    IngestService ingestService = createWithProcessors(factories);

    // Install the pipeline through a simulated cluster-state update.
    PutPipelineRequest putRequest = new PutPipelineRequest(
        "_id",
        new BytesArray("{\"processors\": [{\"mock\": {}}], \"description\": \"_description\"}"),
        XContentType.JSON
    );
    ClusterState previousClusterState = ClusterState.builder(new ClusterName("_name")).build();
    ClusterState clusterState = IngestService.innerPut(putRequest, previousClusterState);
    ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));

    @SuppressWarnings("unchecked")
    BiConsumer<Integer, Exception> requestItemErrorHandler = mock(BiConsumer.class);
    @SuppressWarnings("unchecked")
    final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
    ingestService.executeBulkRequest(numRequest, bulkRequest.requests(), requestItemErrorHandler, completionHandler, indexReq -> {
    }, Names.WRITE);

    // No per-item failures, exactly one completion, and every request keeps its content type.
    verify(requestItemErrorHandler, never()).accept(any(), any());
    verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
    for (DocWriteRequest<?> docWriteRequest : bulkRequest.requests()) {
        IndexRequest indexRequest = TransportBulkAction.getIndexWriteRequest(docWriteRequest);
        assertThat(indexRequest, notNullValue());
        assertThat(indexRequest.getContentType(), equalTo(xContentType));
    }
}
Example usage of org.opensearch.action.ingest.PutPipelineRequest in the OpenSearch project (opensearch-project), taken from the IngestServiceTests method testCBORParsing:
public void testCBORParsing() throws Exception {
    // Captures whatever the processor reads back from the "data" field.
    AtomicReference<Object> captured = new AtomicReference<>();
    Consumer<IngestDocument> executor = doc -> captured.set(doc.getFieldValueAsBytes("data"));
    final IngestService ingestService = createWithProcessors(
        Collections.singletonMap("foo", (factories, tag, description, config) -> new FakeProcessor("foo", tag, description, executor))
    );

    // Register pipeline "_id" through a simulated cluster-state update.
    ClusterState previousClusterState = ClusterState.builder(new ClusterName("_name")).build();
    PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray("{\"processors\": [{\"foo\" : {}}]}"), XContentType.JSON);
    ClusterState clusterState = IngestService.innerPut(putRequest, previousClusterState);
    ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
    assertThat(ingestService.getPipeline("_id"), notNullValue());

    // Index a CBOR-encoded document whose "data" field holds raw bytes.
    try (XContentBuilder builder = CborXContent.contentBuilder()) {
        builder.startObject();
        builder.field("data", "This is my data".getBytes(StandardCharsets.UTF_8));
        builder.endObject();
        IndexRequest indexRequest = new IndexRequest("_index").id("_doc-id").source(builder).setPipeline("_id").setFinalPipeline("_none");
        ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), (integer, e) -> {
        }, (thread, e) -> {
        }, indexReq -> {
        }, Names.WRITE);
    }

    // CBOR carries binary natively, so the processor must observe an actual byte[].
    assertThat(captured.get(), is(instanceOf(byte[].class)));
}
Example usage of org.opensearch.action.ingest.PutPipelineRequest in the OpenSearch project (opensearch-project), taken from the IngestServiceTests method testPutWithErrorResponse:
public void testPutWithErrorResponse() throws IllegalAccessException {
    IngestService ingestService = createWithProcessors();
    String id = "_id";
    assertThat(ingestService.getPipeline(id), nullValue());

    // The body has no "processors" array, so loading the pipeline fails on apply.
    PutPipelineRequest putRequest = new PutPipelineRequest(id, new BytesArray("{\"description\": \"empty processors\"}"), XContentType.JSON);
    ClusterState previousClusterState = ClusterState.builder(new ClusterName("_name")).build();
    ClusterState clusterState = IngestService.innerPut(putRequest, previousClusterState);

    // The failure must be logged at WARN rather than thrown.
    try (MockLogAppender mockAppender = MockLogAppender.createForLoggers(LogManager.getLogger(IngestService.class))) {
        mockAppender.addExpectation(
            new MockLogAppender.SeenEventExpectation("test1", IngestService.class.getCanonicalName(), Level.WARN, "failed to update ingest pipelines")
        );
        ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
        mockAppender.assertAllExpectationsMatched();
    }

    // A placeholder pipeline with a single "unknown" processor is substituted for the broken one.
    Pipeline pipeline = ingestService.getPipeline(id);
    assertNotNull(pipeline);
    assertThat(pipeline.getId(), equalTo("_id"));
    assertThat(pipeline.getDescription(), equalTo("this is a place holder pipeline, because pipeline with" + " id [_id] could not be loaded"));
    assertThat(pipeline.getProcessors().size(), equalTo(1));
    assertNull(pipeline.getProcessors().get(0).getTag());
    assertThat(pipeline.getProcessors().get(0).getType(), equalTo("unknown"));
}
Aggregations