Use of org.apache.hop.pipeline.transform.ITransform in the Apache Hop project — class TransformCreatedXp, method callExtensionPoint.
/**
 * Extension point callback invoked when a transform init thread is created.
 * Copies the connection-group marker from the pipeline's extension data map
 * into the transform's extension data map, so the value is available before
 * the transform initializes (and possibly requests a new connection).
 *
 * @param log the log channel for this extension point (unused here)
 * @param variables the variable space (unused here)
 * @param initThread the init thread whose transform receives the group value
 * @throws HopException declared by the extension point contract; never thrown here
 */
@Override
public void callExtensionPoint(ILogChannel log, IVariables variables, TransformInitThread initThread) throws HopException {
  Pipeline pipeline = initThread.getPipeline();
  String group = (String) pipeline.getExtensionDataMap().get(Const.CONNECTION_GROUP);
  if (group == null) {
    return;
  }
  // Hand the value down before init() runs, in case the transform opens a connection.
  ITransform transform = initThread.getCombi().transform;
  transform.getExtensionDataMap().put(Const.CONNECTION_GROUP, group);
}
Use of org.apache.hop.pipeline.transform.ITransform in the Apache Hop project — class PipelineDebugMeta, method addRowListenersToPipeline.
/**
 * Registers a debugging row listener on every transform copy for which a
 * TransformDebugMeta entry exists in {@code transformDebugMetaMap}, so that
 * preview row buffers are filled and break-point listeners fire while the
 * pipeline runs.  Also installs an execution-finished listener that flushes
 * any buffered rows to the break-point listeners when no data was shown
 * during the run (e.g. fewer rows were produced than the preview size).
 *
 * @param pipeline the running pipeline engine to attach the listeners to
 */
public synchronized void addRowListenersToPipeline(final IPipelineEngine<PipelineMeta> pipeline) {
// for every transform in the map, add a row listener...
//
dataShown = false;
for (final TransformMeta transformMeta : transformDebugMetaMap.keySet()) {
final TransformDebugMeta transformDebugMeta = transformDebugMetaMap.get(transformMeta);
// Attach a listener to every copy (parallel instance) of this transform.
for (IEngineComponent component : pipeline.getComponentCopies(transformMeta.getName())) {
// Only components that are actual transforms can accept a row listener.
if (component instanceof ITransform) {
ITransform baseTransform = (ITransform) component;
baseTransform.addRowListener(new RowAdapter() {
@Override
public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
try {
synchronized (transformDebugMeta) {
// This block of code is called whenever there is a row written by the
// transform
// So we want to execute the debugging actions that are specified by the
// transform...
//
int rowCount = transformDebugMeta.getRowCount();
if (transformDebugMeta.isReadingFirstRows() && rowCount > 0) {
int bufferSize = transformDebugMeta.getRowBuffer().size();
if (bufferSize < rowCount) {
// This is the classic preview mode.
// We add simply add the row to the buffer.
//
transformDebugMeta.setRowBufferMeta(rowMeta);
transformDebugMeta.getRowBuffer().add(rowMeta.cloneRow(row));
} else {
// pause the pipeline...
//
pipeline.pauseExecution();
// Also call the pause / break-point listeners on the transform
// debugger...
//
dataShown = true;
transformDebugMeta.fireBreakPointListeners(PipelineDebugMeta.this);
}
} else if (transformDebugMeta.isPausingOnBreakPoint() && transformDebugMeta.getCondition() != null) {
// Break-point mode: keep a rolling window of recent rows and pause
// when the configured condition matches the row just written.
if (rowCount > 0) {
// Keep a number of rows in memory
// Store them in a reverse order to keep it intuitive for the user.
//
transformDebugMeta.setRowBufferMeta(rowMeta);
transformDebugMeta.getRowBuffer().add(0, rowMeta.cloneRow(row));
// Only keep a number of rows in memory
// If we have too many, remove the last (oldest)
//
int bufferSize = transformDebugMeta.getRowBuffer().size();
if (bufferSize > rowCount) {
transformDebugMeta.getRowBuffer().remove(bufferSize - 1);
}
} else {
// No window size configured: keep only the most recent row.
if (transformDebugMeta.getRowBuffer().isEmpty()) {
transformDebugMeta.getRowBuffer().add(rowMeta.cloneRow(row));
} else {
transformDebugMeta.getRowBuffer().set(0, rowMeta.cloneRow(row));
}
}
// Evaluate the break-point condition against the current row.
if (transformDebugMeta.getCondition().evaluate(rowMeta, row)) {
// We hit the break-point: pause the pipeline
//
pipeline.pauseExecution();
// Also fire off the break point listeners...
//
transformDebugMeta.fireBreakPointListeners(PipelineDebugMeta.this);
}
}
}
} catch (HopException e) {
throw new HopTransformException(e);
}
}
});
}
}
}
// After the pipeline finishes, show any buffered rows that were never displayed
// during the run (dataShown stays false when no buffer filled up).
try {
pipeline.addExecutionFinishedListener(p -> {
if (dataShown) {
return;
}
for (TransformMeta transformMeta : transformDebugMetaMap.keySet()) {
TransformDebugMeta transformDebugMeta = transformDebugMetaMap.get(transformMeta);
if (transformDebugMeta != null) {
List<Object[]> rowBuffer = transformDebugMeta.getRowBuffer();
if (rowBuffer != null && !rowBuffer.isEmpty()) {
transformDebugMeta.fireBreakPointListeners(this);
}
}
}
});
} catch (Exception e) {
// NOTE(review): exception is only printed, not logged or rethrown — the debug
// listeners are best-effort, but consider routing this to a log channel.
e.printStackTrace();
}
}
Use of org.apache.hop.pipeline.transform.ITransform in the Apache Hop project — class SimpleMapping, method processRow.
/**
 * Processes one incoming row by feeding it into the embedded (mapping) pipeline.
 * On the first call the mapping is wired up: a row producer is attached to the
 * single MappingInput transform, a row listener on the single MappingOutput
 * transform forwards mapped rows downstream, and the sub-pipeline threads are
 * started.  As a consequence there can, for the time being, only be one
 * MappingInput and one MappingOutput transform in the mapping.
 *
 * @return {@code true} while more rows may follow, {@code false} once the input
 *         is exhausted or the mapping stopped accepting rows
 * @throws HopException wrapping any failure; the mapping pipeline is stopped first
 */
@Override
public boolean processRow() throws HopException {
  try {
    if (first) {
      first = false;
      data.wasStarted = true;

      // Wire incoming rows into the one available MappingInput transform.
      RowProducer producer = data.mappingPipeline.addRowProducer(data.mappingInput.getTransformName(), 0);
      data.rowDataInputMapper = new RowDataInputMapper(meta.getInputMapping(), producer);

      // Forward whatever the MappingOutput transform produces to our own output.
      ITransform outputTransform = data.mappingPipeline.getTransform(data.mappingOutput.getTransformName(), 0);
      outputTransform.addRowListener(new RowOutputDataMapper(meta.getInputMapping(), meta.getOutputMapping(), this::putRow));

      // Start the mapping/sub-pipeline threads.
      data.mappingPipeline.startThreads();
    }

    // The data we read is passed on to the mapping.
    Object[] inputRow = getRow();
    boolean accepted = false;
    if (inputRow != null) {
      // Keep offering the row until it is taken, unless the mapping has
      // already finished or been stopped.
      while (!data.mappingPipeline.isFinishedOrStopped() && !accepted) {
        accepted = data.rowDataInputMapper.putRow(getInputRowMeta(), inputRow);
      }
    }

    if (!accepted) {
      // End of input (or the mapping no longer accepts rows): close the
      // producer, let the sub-pipeline drain, and signal end-of-data.
      data.rowDataInputMapper.finished();
      data.mappingPipeline.waitUntilFinished();
      setOutputDone();
      return false;
    }
    return true;
  } catch (Throwable t) {
    // Make sure the embedded pipeline does not keep running after a failure.
    if (data.mappingPipeline != null) {
      data.mappingPipeline.stopAll();
    }
    throw new HopException(t);
  }
}
Use of org.apache.hop.pipeline.transform.ITransform in the Apache Hop project — class SimpleMappingTest, method testTransformShouldStopProcessingInput_IfUnderlyingTransitionIsStopped.
/**
 * Verifies that the transform stops reading input as soon as the underlying
 * mapping pipeline reports that it is finished or stopped: the first
 * processRow() succeeds, the second signals end-of-processing.
 */
@Test
public void testTransformShouldStopProcessingInput_IfUnderlyingTransitionIsStopped() throws Exception {
  // Mapping input/output transforms of the embedded pipeline.
  MappingInput input = mock(MappingInput.class);
  when(input.getTransformName()).thenReturn(MAPPING_INPUT_TRANSFORM_NAME);
  MappingOutput output = mock(MappingOutput.class);
  when(output.getTransformName()).thenReturn(MAPPING_OUTPUT_TRANSFORM_NAME);

  // The producer always accepts rows.
  RowProducer producer = mock(RowProducer.class);
  when(producer.putRow(any(IRowMeta.class), any(Object[].class), anyBoolean())).thenReturn(true);

  // The embedded pipeline reports "running" on the first poll, "stopped" afterwards.
  Pipeline embedded = mock(Pipeline.class);
  when(embedded.addRowProducer(anyString(), anyInt())).thenReturn(producer);
  when(embedded.getTransform(anyString(), anyInt())).thenReturn(mock(ITransform.class));
  when(embedded.isFinishedOrStopped()).thenReturn(Boolean.FALSE).thenReturn(Boolean.TRUE);

  transformMockHelper.iTransformData.mappingInput = input;
  transformMockHelper.iTransformData.mappingOutput = output;
  transformMockHelper.iTransformData.mappingPipeline = embedded;

  smp = new SimpleMapping(transformMockHelper.transformMeta, transformMockHelper.iTransformMeta, transformMockHelper.iTransformData, 0, transformMockHelper.pipelineMeta, transformMockHelper.pipeline);
  smp.init();
  smp.addRowSetToInputRowSets(transformMockHelper.getMockInputRowSet(new Object[] {}));
  smp.addRowSetToInputRowSets(transformMockHelper.getMockInputRowSet(new Object[] {}));

  assertTrue(smp.processRow());
  assertFalse(smp.processRow());
}
Use of org.apache.hop.pipeline.transform.ITransform in the Apache Hop project — class SniffTransformServletTest, method testSniffTransformServletEscapesHtmlWhenPipelineFound.
/**
 * Ensures that a malicious request parameter is HTML-escaped in the servlet
 * output when the requested pipeline is found, by checking the H1 tag content
 * and verifying that Encode.forHtml was invoked.
 */
@Test
@PrepareForTest({ Encode.class })
public void testSniffTransformServletEscapesHtmlWhenPipelineFound() throws ServletException, IOException {
  HopLogStore.init();
  PowerMockito.spy(Encode.class);

  // Request/response pair; the request carries the malicious parameter value.
  HttpServletRequest request = mock(HttpServletRequest.class);
  when(request.getContextPath()).thenReturn(SniffTransformServlet.CONTEXT_PATH);
  when(request.getParameter(anyString())).thenReturn(ServletTestUtils.BAD_STRING_TO_TEST);

  StringWriter out = new StringWriter();
  HttpServletResponse response = mock(HttpServletResponse.class);
  when(response.getWriter()).thenReturn(new PrintWriter(out));

  // A found pipeline whose transform list holds a single mocked transform.
  List<ITransform> transforms = new ArrayList<>();
  transforms.add(mock(ITransform.class));
  Pipeline pipeline = mock(Pipeline.class);
  PipelineMeta pipelineMeta = mock(PipelineMeta.class);
  when(mockPipelineMap.getPipeline(any(HopServerObjectEntry.class))).thenReturn(pipeline);
  when(pipeline.getLogChannel()).thenReturn(mock(ILogChannel.class));
  when(pipeline.getLogChannelId()).thenReturn("test");
  when(pipeline.getPipelineMeta()).thenReturn(pipelineMeta);
  when(pipelineMeta.getMaximum()).thenReturn(new Point(10, 10));
  when(pipeline.getTransforms(ServletTestUtils.BAD_STRING_TO_TEST)).thenReturn(transforms);

  sniffTransformServlet.doGet(request, response);

  assertFalse(ServletTestUtils.hasBadText(ServletTestUtils.getInsideOfTag("H1", out.toString())));
  PowerMockito.verifyStatic(Encode.class);
  Encode.forHtml(anyString());
}
Aggregations