use of org.apache.hop.pipeline.RowProducer in project hop by apache.
the class PipelineDataProbeXp method executeProbingPipeline.
/**
* Execute a probing pipeline for the current pipeline. Add a listener to the transform copies
* and send the data to the PipelineDataProbe transform(s) in the probing pipeline.
*
* @param pipelineProbe The pipeline probe metadata element being handled
* @param dataProbeLocation The data probe location pointing at the source transform to listen to
* @param loggingPipelineFilename The filename of the probing pipeline to start for the location
* @param pipeline The parent pipeline to listen to
* @param variables The variables to use when loading the probing pipeline
* @throws HopException In case the probing pipeline couldn't be created or started
*/
private synchronized void executeProbingPipeline(
    PipelineProbe pipelineProbe,
    DataProbeLocation dataProbeLocation,
    String loggingPipelineFilename,
    IPipelineEngine<PipelineMeta> pipeline,
    IVariables variables)
    throws HopException {
  PipelineMeta probingPipelineMeta =
      new PipelineMeta(loggingPipelineFilename, pipeline.getMetadataProvider(), true, variables);

  // Create a local pipeline engine...
  //
  LocalPipelineEngine probingPipeline =
      new LocalPipelineEngine(probingPipelineMeta, variables, pipeline);

  // Flag it as a probing and logging pipeline so we don't try to probe or log ourselves...
  //
  probingPipeline.getExtensionDataMap().put(PIPELINE_DATA_PROBE_FLAG, "Y");
  probingPipeline.getExtensionDataMap().put(PipelineStartLoggingXp.PIPELINE_LOGGING_FLAG, "Y");

  // Only log errors
  //
  probingPipeline.setLogLevel(LogLevel.ERROR);
  probingPipeline.prepareExecution();

  List<IEngineComponent> componentCopies =
      pipeline.getComponentCopies(dataProbeLocation.getSourceTransformName());

  for (IEngineComponent componentCopy : componentCopies) {
    // Find the PipelineDataProbe transform(s) in the probing pipeline...
    //
    for (TransformMetaDataCombi combi : probingPipeline.getTransforms()) {
      if (combi.transform instanceof PipelineDataProbe) {
        // Give the transform a bit more information to work with...
        //
        PipelineDataProbe pipelineDataProbe = (PipelineDataProbe) combi.transform;
        pipelineDataProbe.setSourcePipelineName(pipeline.getPipelineMeta().getName());
        pipelineDataProbe.setSourceTransformLogChannelId(pipeline.getLogChannelId());
        pipelineDataProbe.setSourceTransformName(componentCopy.getName());
        pipelineDataProbe.setSourceTransformCopy(componentCopy.getCopyNr());
        try {
          final RowProducer rowProducer =
              probingPipeline.addRowProducer(combi.transformName, combi.copy);

          // For every copy of the source component, add a row listener which forwards the
          // rows it sees to the row producer of the probing pipeline...
          //
          componentCopy.addRowListener(
              new RowAdapter() {
                @Override
                public void rowWrittenEvent(IRowMeta rowMeta, Object[] row)
                    throws HopTransformException {
                  // Pass this row to the row producer...
                  //
                  rowProducer.putRow(rowMeta, row);
                }
              });

          // Once the parent pipeline is finished, no more rows will arrive and we can
          // safely stop streaming by signaling the row producer...
          //
          pipeline.addExecutionFinishedListener(pe -> rowProducer.finished());
        } catch (HopException e) {
          throw new HopTransformException(
              "Error adding row producer to transform '" + combi.transformName + "'", e);
        }
      }
    }
  }

  // Execute the probing pipeline to process the captured rows
  //
  probingPipeline.startThreads();

  // We'll not wait around until this is finished...
  // The pipeline should stop automatically when the parent does
  //
  pipeline.addExecutionStoppedListener(e -> probingPipeline.stopAll());
}
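The extension point above combines several moving parts, but the RowProducer hand-off it relies on is small. The following is a condensed sketch of that pattern, not code from the Hop project: it assumes a loaded PipelineMeta, an IVariables instance and a parent ILoggingObject are already available, and that the pipeline contains an injectable transform named "Injector" (a hypothetical name).

void injectRows(PipelineMeta pipelineMeta, IVariables variables, ILoggingObject parent)
    throws HopException {
  LocalPipelineEngine pipeline = new LocalPipelineEngine(pipelineMeta, variables, parent);

  // Row sets and transform threads must be allocated before a row producer can be attached.
  pipeline.prepareExecution();
  RowProducer producer = pipeline.addRowProducer("Injector", 0);

  // Describe the rows we are about to push in.
  IRowMeta rowMeta = new RowMeta();
  rowMeta.addValueMeta(new ValueMetaString("name"));

  pipeline.startThreads();
  producer.putRow(rowMeta, new Object[] {"hello"});
  producer.putRow(rowMeta, new Object[] {"world"});

  // Signal that no more rows will arrive so the pipeline can drain and finish.
  producer.finished();
  pipeline.waitUntilFinished();
}

PipelineDataProbeXp follows the same flow; it only differs in that the rows come from a RowAdapter attached to the parent pipeline instead of being produced locally.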
use of org.apache.hop.pipeline.RowProducer in project hop by apache.
the class SimpleMapping method processRow.
/**
* Process a single row. In our case, we send one row of data to a piece of pipeline. In the
* pipeline, we look up the MappingInput transform to send our rows to it. As a consequence, for
* the time being, there can only be one MappingInput and one MappingOutput transform in the
* Mapping.
*/
@Override
public boolean processRow() throws HopException {
  try {
    if (first) {
      first = false;
      data.wasStarted = true;

      // Rows read are injected into the one available Mapping Input transform
      //
      String mappingInputTransformName = data.mappingInput.getTransformName();
      RowProducer rowProducer = data.mappingPipeline.addRowProducer(mappingInputTransformName, 0);
      data.rowDataInputMapper = new RowDataInputMapper(meta.getInputMapping(), rowProducer);

      // Rows produced by the mapping are read and passed on.
      //
      String mappingOutputTransformName = data.mappingOutput.getTransformName();
      ITransform iOutputTransform = data.mappingPipeline.getTransform(mappingOutputTransformName, 0);
      RowOutputDataMapper outputDataMapper =
          new RowOutputDataMapper(meta.getInputMapping(), meta.getOutputMapping(), this::putRow);
      iOutputTransform.addRowListener(outputDataMapper);

      // Start the mapping/sub-pipeline threads
      //
      data.mappingPipeline.startThreads();
    }

    // Every row we read is passed on to the mapping
    //
    Object[] row = getRow();
    boolean rowWasPut = false;
    if (row != null) {
      while (!(data.mappingPipeline.isFinishedOrStopped() || rowWasPut)) {
        rowWasPut = data.rowDataInputMapper.putRow(getInputRowMeta(), row);
      }
    }
    if (!rowWasPut) {
      data.rowDataInputMapper.finished();
      data.mappingPipeline.waitUntilFinished();
      setOutputDone();
      return false;
    }
    return true;
  } catch (Throwable t) {
    // Something went wrong while processing the row: stop the mapping pipeline as well...
    //
    if (data.mappingPipeline != null) {
      data.mappingPipeline.stopAll();
    }
    // Re-throw as a HopException so the error surfaces in the parent pipeline...
    //
    throw new HopException(t);
  }
}
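The while loop in processRow() is what makes the hand-off robust: RowDataInputMapper.putRow() reports whether the row was actually accepted, and the loop keeps retrying until it is, unless the mapping pipeline has finished or stopped in the meantime (which is exactly what the test further down simulates). Below is a minimal sketch of that loop in isolation; it assumes the non-blocking RowProducer.putRow(rowMeta, row, false) variant (the three-argument call mocked in the tests below) and hypothetical local variables producer, mappingPipeline, rowMeta and row.

boolean rowWasPut = false;
while (!rowWasPut && !mappingPipeline.isFinishedOrStopped()) {
  // false = don't block; the return value says whether the target row set accepted the row
  rowWasPut = producer.putRow(rowMeta, row, false);
}
if (!rowWasPut) {
  // The mapping stopped before accepting the row: stop feeding it and wrap up.
  producer.finished();
}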
use of org.apache.hop.pipeline.RowProducer in project hop by apache.
the class SimpleMappingTest method setup.
@Before
public void setup() throws Exception {
  transformMockHelper =
      new TransformMockHelper<>("SIMPLE_MAPPING_TEST", SimpleMappingMeta.class, SimpleMappingData.class);
  when(transformMockHelper.logChannelFactory.create(any(), any(ILoggingObject.class)))
      .thenReturn(transformMockHelper.iLogChannel);
  when(transformMockHelper.pipeline.isRunning()).thenReturn(true);

  // Mock for MappingInput
  MappingInput mpInputMock = mock(MappingInput.class);
  when(mpInputMock.getTransformName()).thenReturn(MAPPING_INPUT_TRANSFORM_NAME);

  // Mock for MappingOutput
  MappingOutput mpOutputMock = mock(MappingOutput.class);
  when(mpOutputMock.getTransformName()).thenReturn(MAPPING_OUTPUT_TRANSFORM_NAME);

  // Mock for RowDataInputMapper
  RowDataInputMapper rdInputMpMock = mock(RowDataInputMapper.class);
  IRowMeta rwMetaInMock = mock(RowMeta.class);
  doReturn(Boolean.TRUE).when(rdInputMpMock).putRow(rwMetaInMock, new Object[] {});

  // Mock for RowProducer
  RowProducer rProducerMock = mock(RowProducer.class);
  when(rProducerMock.putRow(any(IRowMeta.class), any(Object[].class), anyBoolean())).thenReturn(true);

  // Mock for MappingIODefinition
  MappingIODefinition mpIODefMock = mock(MappingIODefinition.class);

  // Set up real SimpleMappingData with some mocked elements
  simpleMpData.mappingInput = mpInputMock;
  simpleMpData.mappingOutput = mpOutputMock;
  simpleMpData.rowDataInputMapper = rdInputMpMock;
  simpleMpData.mappingPipeline = transformMockHelper.pipeline;
  Mockito.doReturn(mpOutputMock)
      .when(transformMockHelper.pipeline)
      .getTransform(MAPPING_OUTPUT_TRANSFORM_NAME, 0);
  Mockito.doReturn(rProducerMock)
      .when(transformMockHelper.pipeline)
      .addRowProducer(MAPPING_INPUT_TRANSFORM_NAME, 0);
  when(transformMockHelper.iTransformMeta.getInputMapping()).thenReturn(mpIODefMock);
}
use of org.apache.hop.pipeline.RowProducer in project hop by apache.
the class SimpleMappingTest method testTransformShouldStopProcessingInput_IfUnderlyingTransitionIsStopped.
@Test
public void testTransformShouldStopProcessingInput_IfUnderlyingTransitionIsStopped() throws Exception {
  MappingInput mappingInput = mock(MappingInput.class);
  when(mappingInput.getTransformName()).thenReturn(MAPPING_INPUT_TRANSFORM_NAME);
  transformMockHelper.iTransformData.mappingInput = mappingInput;

  RowProducer rowProducer = mock(RowProducer.class);
  when(rowProducer.putRow(any(IRowMeta.class), any(Object[].class), anyBoolean())).thenReturn(true);

  ITransform transform = mock(ITransform.class);

  Pipeline mappingPipeline = mock(Pipeline.class);
  when(mappingPipeline.addRowProducer(anyString(), anyInt())).thenReturn(rowProducer);
  when(mappingPipeline.getTransform(anyString(), anyInt())).thenReturn(transform);
  when(mappingPipeline.isFinishedOrStopped()).thenReturn(Boolean.FALSE).thenReturn(Boolean.TRUE);
  transformMockHelper.iTransformData.mappingPipeline = mappingPipeline;

  MappingOutput mappingOutput = mock(MappingOutput.class);
  when(mappingOutput.getTransformName()).thenReturn(MAPPING_OUTPUT_TRANSFORM_NAME);
  transformMockHelper.iTransformData.mappingOutput = mappingOutput;

  smp =
      new SimpleMapping(
          transformMockHelper.transformMeta,
          transformMockHelper.iTransformMeta,
          transformMockHelper.iTransformData,
          0,
          transformMockHelper.pipelineMeta,
          transformMockHelper.pipeline);
  smp.init();
  smp.addRowSetToInputRowSets(transformMockHelper.getMockInputRowSet(new Object[] {}));
  smp.addRowSetToInputRowSets(transformMockHelper.getMockInputRowSet(new Object[] {}));

  assertTrue(smp.processRow());
  assertFalse(smp.processRow());
}
use of org.apache.hop.pipeline.RowProducer in project hop by apache.
the class CheckSumTest method executeHexTest.
/**
* Create, execute, and return the row listener attached to the output transform with complete
* results from the execution.
*
* @param checkSumType Type of checksum to use
* @param input String to calculate checksum for
* @param meta meta to be used
* @return MockRowListener with the captured result rows.
*/
private MockRowListener executeHexTest(CheckSumMeta.CheckSumType checkSumType, Object input, IValueMeta meta)
    throws Exception {
  Pipeline pipeline = buildHexadecimalChecksumPipeline(checkSumType);
  pipeline.prepareExecution();

  ITransform output = pipeline.getRunThread("Output", 0);
  MockRowListener listener = new MockRowListener();
  output.addRowListener(listener);

  RowProducer rp = pipeline.addRowProducer("CheckSum", 0);
  RowMeta inputRowMeta = createStringRowMeta(meta);
  ((BaseTransform) pipeline.getRunThread("CheckSum", 0)).setInputRowMeta(inputRowMeta);

  pipeline.startThreads();
  rp.putRow(inputRowMeta, new Object[] { input });
  rp.finished();
  pipeline.waitUntilFinished();
  pipeline.stopAll();
  pipeline.cleanup();
  return listener;
}
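MockRowListener is referenced but not shown on this page. A plausible minimal version, given how it is used above, is a RowAdapter that collects every row written by the transform it is attached to; the real helper in the Hop test sources may differ.

public static class MockRowListener extends RowAdapter {
  private final List<Object[]> written = new ArrayList<>();

  @Override
  public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
    // Keep every row the transform writes so the test can assert on it afterwards.
    written.add(row);
  }

  public List<Object[]> getWritten() {
    return written;
  }
}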