Usage example of com.amazon.dataprepper.pipeline.Pipeline from the data-prepper project (opensearch-project): class ListPipelinesHandlerTest, method testGivenNoPipelinesThenResponseWritten.
@Test
public void testGivenNoPipelinesThenResponseWritten() throws IOException {
    // Arrange: a DataPrepper exposing an empty pipeline map, plus a mocked HTTP exchange.
    final DataPrepper dataPrepper = mock(DataPrepper.class);
    when(dataPrepper.getTransformationPipelines()).thenReturn(new HashMap<>());

    final Headers responseHeaders = mock(Headers.class);
    final OutputStream responseBody = mock(OutputStream.class);
    final HttpExchange httpExchange = mock(HttpExchange.class);
    when(httpExchange.getResponseHeaders()).thenReturn(responseHeaders);
    when(httpExchange.getResponseBody()).thenReturn(responseBody);

    // Act: handle the request with no pipelines configured.
    new ListPipelinesHandler(dataPrepper).handle(httpExchange);

    // Assert: a plain-text HTTP 200 response is still written and the body stream is closed.
    verify(responseHeaders).add(eq("Content-Type"), eq("text/plain; charset=UTF-8"));
    verify(httpExchange).sendResponseHeaders(eq(HttpURLConnection.HTTP_OK), anyLong());
    verify(responseBody).write(any(byte[].class));
    verify(responseBody).close();
}
Usage example of com.amazon.dataprepper.pipeline.Pipeline from the data-prepper project (opensearch-project): class ListPipelinesHandlerTest, method testGivenPipelinesThenResponseWritten.
@Test
public void testGivenPipelinesThenResponseWritten() throws IOException {
    // Arrange: three named pipelines (sharing one mock instance) behind a mocked DataPrepper.
    final Pipeline pipeline = mock(Pipeline.class);
    final Map<String, Pipeline> transformationPipelines = new HashMap<>();
    for (final String pipelineName : new String[] {"Pipeline A", "Pipeline B", "Pipeline C"}) {
        transformationPipelines.put(pipelineName, pipeline);
    }
    final DataPrepper dataPrepper = mock(DataPrepper.class);
    when(dataPrepper.getTransformationPipelines()).thenReturn(transformationPipelines);

    final Headers responseHeaders = mock(Headers.class);
    final OutputStream responseBody = mock(OutputStream.class);
    final HttpExchange httpExchange = mock(HttpExchange.class);
    when(httpExchange.getResponseHeaders()).thenReturn(responseHeaders);
    when(httpExchange.getResponseBody()).thenReturn(responseBody);

    // Act: list the configured pipelines.
    new ListPipelinesHandler(dataPrepper).handle(httpExchange);

    // Assert: a plain-text HTTP 200 response is written and the body stream is closed.
    verify(responseHeaders).add(eq("Content-Type"), eq("text/plain; charset=UTF-8"));
    verify(httpExchange).sendResponseHeaders(eq(HttpURLConnection.HTTP_OK), anyLong());
    verify(responseBody).write(any(byte[].class));
    verify(responseBody).close();
}
Usage example of com.amazon.dataprepper.pipeline.Pipeline from the data-prepper project (opensearch-project): class DataPrepper, method shutdown.
/**
 * Triggers the shutdown of all configured valid pipelines. Each pipeline is logged
 * by name before its own {@code shutdown()} is invoked, in map iteration order.
 */
public void shutdown() {
    transformationPipelines.values().forEach(pipeline -> {
        LOG.info("Shutting down pipeline: {}", pipeline.getName());
        pipeline.shutdown();
    });
}
Usage example of com.amazon.dataprepper.pipeline.Pipeline from the data-prepper project (opensearch-project): class DataPrepperTests, method testDataPrepperShutdownPipeline.
@Test
public void testDataPrepperShutdownPipeline() {
    // Register a mocked pipeline under a known name in the configuration fixture.
    final String pipelineName = "Random Pipeline";
    final Pipeline randomPipeline = mock(Pipeline.class);
    parseConfigurationFixture.put(pipelineName, randomPipeline);

    // Shutting down by name must shut down exactly that pipeline.
    dataPrepper.shutdown(pipelineName);
    verify(randomPipeline).shutdown();
}
Usage example of com.amazon.dataprepper.pipeline.Pipeline from the data-prepper project (opensearch-project): class PipelineParser, method buildPipelineFromConfiguration.
/**
 * Assembles a single {@link Pipeline} (source, buffer, processor sets, sinks) from its parsed
 * configuration and, on success, registers it in {@code pipelineMap} under {@code pipelineName}.
 * If any component fails to construct, the pipeline is skipped (not added to the map) and
 * {@code processRemoveIfRequired} is invoked to clean up; connected pipelines are presumably
 * removed there as well — confirm against that method.
 *
 * @param pipelineName             name of the pipeline to build; also the key used in {@code pipelineMap}
 * @param pipelineConfigurationMap all parsed pipeline configurations, keyed by pipeline name
 * @param pipelineMap              accumulator of successfully built pipelines (mutated in place)
 */
private void buildPipelineFromConfiguration(final String pipelineName, final Map<String, PipelineConfiguration> pipelineConfigurationMap, final Map<String, Pipeline> pipelineMap) {
final PipelineConfiguration pipelineConfiguration = pipelineConfigurationMap.get(pipelineName);
LOG.info("Building pipeline [{}] from provided configuration", pipelineName);
try {
final PluginSetting sourceSetting = pipelineConfiguration.getSourcePluginSetting();
// If the source setting names another pipeline, reuse that connection; otherwise fall back
// to loading a regular Source plugin from the same setting (orElseGet defers plugin loading).
final Optional<Source> pipelineSource = getSourceIfPipelineType(pipelineName, sourceSetting, pipelineMap, pipelineConfigurationMap);
final Source source = pipelineSource.orElseGet(() -> pluginFactory.loadPlugin(Source.class, sourceSetting));
LOG.info("Building buffer for the pipeline [{}]", pipelineName);
final Buffer buffer = pluginFactory.loadPlugin(Buffer.class, pipelineConfiguration.getBufferPluginSetting());
LOG.info("Building processors for the pipeline [{}]", pipelineName);
// Worker count drives how many processor threads the Pipeline runs.
final int processorThreads = pipelineConfiguration.getWorkers();
// One processor list per configured processor setting; each list is built by newProcessor.
final List<List<Processor>> processorSets = pipelineConfiguration.getProcessorPluginSettings().stream().map(this::newProcessor).collect(Collectors.toList());
final int readBatchDelay = pipelineConfiguration.getReadBatchDelay();
LOG.info("Building sinks for the pipeline [{}]", pipelineName);
// Sinks may be regular sink plugins or pipeline connectors; buildSinkOrConnector decides.
final List<Sink> sinks = pipelineConfiguration.getSinkPluginSettings().stream().map(this::buildSinkOrConnector).collect(Collectors.toList());
final Pipeline pipeline = new Pipeline(pipelineName, source, buffer, processorSets, sinks, processorThreads, readBatchDelay);
pipelineMap.put(pipelineName, pipeline);
} catch (Exception ex) {
// If pipeline construction errors out, we will skip that pipeline and proceed
LOG.error("Construction of pipeline components failed, skipping building of pipeline [{}] and its connected " + "pipelines", pipelineName, ex);
processRemoveIfRequired(pipelineName, pipelineConfigurationMap, pipelineMap);
}
}
Aggregations