Example 1 with RowAdapter

Use of org.apache.hop.pipeline.transform.RowAdapter in project hop by apache.

From the class InjectDataSetIntoTransformExtensionPoint, method callExtensionPoint:

@Override
public void callExtensionPoint(ILogChannel log, IVariables variables, final IPipelineEngine<PipelineMeta> pipeline) throws HopException {
    if (!(pipeline instanceof LocalPipelineEngine)) {
        throw new HopPluginException("Unit tests can only run using a local pipeline engine type");
    }
    final PipelineMeta pipelineMeta = pipeline.getPipelineMeta();
    boolean dataSetEnabled = "Y".equalsIgnoreCase(pipeline.getVariable(DataSetConst.VAR_RUN_UNIT_TEST));
    if (log.isDetailed()) {
        log.logDetailed("Data Set enabled? " + dataSetEnabled);
    }
    if (!dataSetEnabled) {
        return;
    }
    String unitTestName = pipeline.getVariable(DataSetConst.VAR_UNIT_TEST_NAME);
    if (log.isDetailed()) {
        log.logDetailed("Unit test name: " + unitTestName);
    }
    try {
        IHopMetadataProvider metadataProvider = pipelineMeta.getMetadataProvider();
        // If no unit test name is given we don't need to do anything here...
        //
        if (StringUtil.isEmpty(unitTestName)) {
            return;
        }
        PipelineUnitTest unitTest = metadataProvider.getSerializer(PipelineUnitTest.class).load(unitTestName);
        if (unitTest == null) {
            if (log.isDetailed()) {
                log.logDetailed("Unit test '" + unitTestName + "' could not be found");
            }
            return;
        }
        // Look at every transform: inject input data sets and capture golden rows where needed...
        //
        for (final TransformMeta transformMeta : pipeline.getPipelineMeta().getTransforms()) {
            String transformName = transformMeta.getName();
            PipelineUnitTestSetLocation inputLocation = unitTest.findInputLocation(transformName);
            if (inputLocation != null && StringUtils.isNotEmpty(inputLocation.getDataSetName())) {
                String inputDataSetName = inputLocation.getDataSetName();
                log.logDetailed("Data Set location found for transform '" + transformName + "' and data set  " + inputDataSetName);
                // We need to inject data from the data set with the specified name into the transform
                // 
                injectDataSetIntoTransform((LocalPipelineEngine) pipeline, inputDataSetName, metadataProvider, transformMeta, inputLocation);
            }
            // How about capturing rows for golden data review?
            // 
            PipelineUnitTestSetLocation goldenLocation = unitTest.findGoldenLocation(transformName);
            if (goldenLocation != null) {
                String goldenDataSetName = goldenLocation.getDataSetName();
                if (!StringUtil.isEmpty(goldenDataSetName)) {
                    log.logDetailed("Capturing rows for validation at pipeline end, transform='" + transformMeta.getName() + "', golden set '" + goldenDataSetName);
                    final RowCollection rowCollection = new RowCollection();
                    // Create a row collection map if it's missing...
                    // 
                    @SuppressWarnings("unchecked") Map<String, RowCollection> collectionMap = (Map<String, RowCollection>) pipeline.getExtensionDataMap().get(DataSetConst.ROW_COLLECTION_MAP);
                    if (collectionMap == null) {
                        collectionMap = new HashMap<>();
                        pipeline.getExtensionDataMap().put(DataSetConst.ROW_COLLECTION_MAP, collectionMap);
                    }
                    // Keep the map for safe keeping...
                    // 
                    collectionMap.put(transformMeta.getName(), rowCollection);
                    // We'll capture the rows from this one and then evaluate them after execution...
                    // 
                    IEngineComponent component = pipeline.findComponent(transformMeta.getName(), 0);
                    component.addRowListener(new RowAdapter() {

                        @Override
                        public void rowReadEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
                            if (rowCollection.getRowMeta() == null) {
                                rowCollection.setRowMeta(rowMeta);
                            }
                            rowCollection.getRows().add(row);
                        }
                    });
                }
            }
        }
    } catch (Throwable e) {
        throw new HopException("Unable to inject data set rows", e);
    }
}
Also used : PipelineUnitTestSetLocation(org.apache.hop.testing.PipelineUnitTestSetLocation) HopException(org.apache.hop.core.exception.HopException) IRowMeta(org.apache.hop.core.row.IRowMeta) HopPluginException(org.apache.hop.core.exception.HopPluginException) HopTransformException(org.apache.hop.core.exception.HopTransformException) IEngineComponent(org.apache.hop.pipeline.engine.IEngineComponent) PipelineMeta(org.apache.hop.pipeline.PipelineMeta) LocalPipelineEngine(org.apache.hop.pipeline.engines.local.LocalPipelineEngine) RowAdapter(org.apache.hop.pipeline.transform.RowAdapter) TransformMeta(org.apache.hop.pipeline.transform.TransformMeta) IHopMetadataProvider(org.apache.hop.metadata.api.IHopMetadataProvider) PipelineUnitTest(org.apache.hop.testing.PipelineUnitTest) HashMap(java.util.HashMap) Map(java.util.Map)
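
Example 1 above, like the examples that follow, boils down to the same pattern: look up an engine component (a running transform copy), register an anonymous RowAdapter on it, and override only the row event you care about, since RowAdapter provides empty implementations of the IRowListener methods. Below is a minimal, self-contained sketch of that pattern; the class name RowCaptureSketch, the method captureWrittenRows, and the idea of returning the shared list are illustrative choices, not part of the Apache Hop API.

import java.util.ArrayList;
import java.util.List;

import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.exception.HopTransformException;
import org.apache.hop.core.row.IRowMeta;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.engine.IEngineComponent;
import org.apache.hop.pipeline.engine.IPipelineEngine;
import org.apache.hop.pipeline.transform.RowAdapter;

public class RowCaptureSketch {

    /**
     * Attach a RowAdapter to copy 0 of the named transform and collect every row it writes.
     * The pipeline is assumed to be prepared so that its components can be looked up.
     */
    public static List<Object[]> captureWrittenRows(IPipelineEngine<PipelineMeta> pipeline, String transformName) {
        final List<Object[]> capturedRows = new ArrayList<>();

        IEngineComponent component = pipeline.findComponent(transformName, 0);
        component.addRowListener(new RowAdapter() {
            @Override
            public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
                try {
                    // Clone the row before storing it: the engine may reuse the underlying array.
                    capturedRows.add(rowMeta.cloneRow(row));
                } catch (HopException e) {
                    throw new HopTransformException(e);
                }
            }
        });

        return capturedRows;
    }
}

The returned list is meant to be inspected once the pipeline has finished, for example from an execution-finished listener as Example 2 below does with pipeline.addExecutionFinishedListener(...).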

Example 2 with RowAdapter

Use of org.apache.hop.pipeline.transform.RowAdapter in project hop by apache.

From the class WriteToDataSetExtensionPoint, method passTransformRowsToDataSet:

private void passTransformRowsToDataSet(final IPipelineEngine<PipelineMeta> pipeline, final PipelineMeta pipelineMeta, final TransformMeta transformMeta, final List<SourceToTargetMapping> mappings, final DataSet dataSet) throws HopException {
    // This is the transform to inject into the specified data set
    // 
    final IRowMeta setRowMeta = dataSet.getSetRowMeta();
    IEngineComponent component = pipeline.findComponent(transformMeta.getName(), 0);
    final List<Object[]> transformsForDbRows = new ArrayList<>();
    component.addRowListener(new RowAdapter() {

        @Override
        public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
            Object[] transformForDbRow = RowDataUtil.allocateRowData(setRowMeta.size());
            for (SourceToTargetMapping mapping : mappings) {
                transformForDbRow[mapping.getTargetPosition()] = row[mapping.getSourcePosition()];
            }
            transformsForDbRows.add(transformForDbRow);
        }
    });
    // At the end of the pipeline, write it...
    // 
    pipeline.addExecutionFinishedListener(engine -> DataSetCsvUtil.writeDataSetData(pipeline, dataSet, setRowMeta, transformsForDbRows));
}
Also used : IRowMeta(org.apache.hop.core.row.IRowMeta) RowAdapter(org.apache.hop.pipeline.transform.RowAdapter) ArrayList(java.util.ArrayList) SourceToTargetMapping(org.apache.hop.core.SourceToTargetMapping) HopTransformException(org.apache.hop.core.exception.HopTransformException) IEngineComponent(org.apache.hop.pipeline.engine.IEngineComponent)

Example 3 with RowAdapter

Use of org.apache.hop.pipeline.transform.RowAdapter in project hop by apache.

From the class PipelineDebugMeta, method addRowListenersToPipeline:

public synchronized void addRowListenersToPipeline(final IPipelineEngine<PipelineMeta> pipeline) {
    // for every transform in the map, add a row listener...
    // 
    dataShown = false;
    for (final TransformMeta transformMeta : transformDebugMetaMap.keySet()) {
        final TransformDebugMeta transformDebugMeta = transformDebugMetaMap.get(transformMeta);
        // Add a row listener to every copy of this transform...
        //
        for (IEngineComponent component : pipeline.getComponentCopies(transformMeta.getName())) {
            // Only actual transforms can accept a row listener...
            //
            if (component instanceof ITransform) {
                ITransform baseTransform = (ITransform) component;
                baseTransform.addRowListener(new RowAdapter() {

                    @Override
                    public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
                        try {
                            synchronized (transformDebugMeta) {
                                // This block of code is called whenever there is a row written by the
                                // transform
                                // So we want to execute the debugging actions that are specified by the
                                // transform...
                                // 
                                int rowCount = transformDebugMeta.getRowCount();
                                if (transformDebugMeta.isReadingFirstRows() && rowCount > 0) {
                                    int bufferSize = transformDebugMeta.getRowBuffer().size();
                                    if (bufferSize < rowCount) {
                                        // This is the classic preview mode.
                                        // We simply add the row to the buffer.
                                        // 
                                        transformDebugMeta.setRowBufferMeta(rowMeta);
                                        transformDebugMeta.getRowBuffer().add(rowMeta.cloneRow(row));
                                    } else {
                                        // pause the pipeline...
                                        // 
                                        pipeline.pauseExecution();
                                        // Also call the pause / break-point listeners on the transform
                                        // debugger...
                                        // 
                                        dataShown = true;
                                        transformDebugMeta.fireBreakPointListeners(PipelineDebugMeta.this);
                                    }
                                } else if (transformDebugMeta.isPausingOnBreakPoint() && transformDebugMeta.getCondition() != null) {
                                    // Capture the rows leading up to the break-point...
                                    //
                                    if (rowCount > 0) {
                                        // Keep a number of rows in memory
                                        // Store them in a reverse order to keep it intuitive for the user.
                                        // 
                                        transformDebugMeta.setRowBufferMeta(rowMeta);
                                        transformDebugMeta.getRowBuffer().add(0, rowMeta.cloneRow(row));
                                        // Only keep a number of rows in memory
                                        // If we have too many, remove the last (oldest)
                                        // 
                                        int bufferSize = transformDebugMeta.getRowBuffer().size();
                                        if (bufferSize > rowCount) {
                                            transformDebugMeta.getRowBuffer().remove(bufferSize - 1);
                                        }
                                    } else {
                                        // Keep only the most recent row in the buffer...
                                        //
                                        if (transformDebugMeta.getRowBuffer().isEmpty()) {
                                            transformDebugMeta.getRowBuffer().add(rowMeta.cloneRow(row));
                                        } else {
                                            transformDebugMeta.getRowBuffer().set(0, rowMeta.cloneRow(row));
                                        }
                                    }
                                    // Evaluate the break-point condition against this row...
                                    //
                                    if (transformDebugMeta.getCondition().evaluate(rowMeta, row)) {
                                        // We hit the break-point: pause the pipeline
                                        // 
                                        pipeline.pauseExecution();
                                        // Also fire off the break point listeners...
                                        // 
                                        transformDebugMeta.fireBreakPointListeners(PipelineDebugMeta.this);
                                    }
                                }
                            }
                        } catch (HopException e) {
                            throw new HopTransformException(e);
                        }
                    }
                });
            }
        }
    }
    // If no data was shown during execution, show whatever was buffered once the pipeline finishes...
    //
    try {
        pipeline.addExecutionFinishedListener(p -> {
            if (dataShown) {
                return;
            }
            for (TransformMeta transformMeta : transformDebugMetaMap.keySet()) {
                TransformDebugMeta transformDebugMeta = transformDebugMetaMap.get(transformMeta);
                if (transformDebugMeta != null) {
                    List<Object[]> rowBuffer = transformDebugMeta.getRowBuffer();
                    if (rowBuffer != null && !rowBuffer.isEmpty()) {
                        transformDebugMeta.fireBreakPointListeners(this);
                    }
                }
            }
        });
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Also used : ITransform (org.apache.hop.pipeline.transform.ITransform) HopException (org.apache.hop.core.exception.HopException) IRowMeta (org.apache.hop.core.row.IRowMeta) HopTransformException (org.apache.hop.core.exception.HopTransformException) IEngineComponent (org.apache.hop.pipeline.engine.IEngineComponent) RowAdapter (org.apache.hop.pipeline.transform.RowAdapter) TransformMeta (org.apache.hop.pipeline.transform.TransformMeta)
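
The break-point branch of this listener keeps the most recent rows in reverse order and evicts the oldest entry once the buffer exceeds the configured row count. That buffer discipline is easy to miss inside the nested listener, so here is a small standalone sketch of just that part; the class name RecentRowBuffer and the maxRows parameter are illustrative, not Hop API.

import java.util.ArrayList;
import java.util.List;

/** Keeps at most maxRows rows, newest first; a sketch of the buffer logic used above. */
class RecentRowBuffer {
    private final int maxRows;
    private final List<Object[]> rows = new ArrayList<>();

    RecentRowBuffer(int maxRows) {
        this.maxRows = maxRows;
    }

    void add(Object[] row) {
        // The newest row goes to the front so the preview reads from most recent to oldest.
        rows.add(0, row);
        // Once over the limit, drop the last (oldest) entry.
        if (rows.size() > maxRows) {
            rows.remove(rows.size() - 1);
        }
    }

    List<Object[]> getRows() {
        return rows;
    }
}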

Example 4 with RowAdapter

Use of org.apache.hop.pipeline.transform.RowAdapter in project hop by apache.

From the class PipelineDataProbeXp, method executeProbingPipeline:

/**
 * Execute a probing pipeline for the current pipeline. Add a listener to the transform copies
 * and send their data to the PipelineDataProbe transform(s) in the probing pipeline.
 *
 * @param pipelineProbe The pipeline probe metadata being handled
 * @param dataProbeLocation The data probe location pointing at the source transform to listen to
 * @param loggingPipelineFilename The filename of the probing pipeline to start for the location
 * @param pipeline The parent pipeline to listen to
 * @param variables The variables to use when loading and running the probing pipeline
 * @throws HopException In case the probing pipeline can't be set up or started
 */
private synchronized void executeProbingPipeline(PipelineProbe pipelineProbe, DataProbeLocation dataProbeLocation, String loggingPipelineFilename, IPipelineEngine<PipelineMeta> pipeline, IVariables variables) throws HopException {
    PipelineMeta probingPipelineMeta = new PipelineMeta(loggingPipelineFilename, pipeline.getMetadataProvider(), true, variables);
    // Create a local pipeline engine...
    // 
    LocalPipelineEngine probingPipeline = new LocalPipelineEngine(probingPipelineMeta, variables, pipeline);
    // Flag it as a probing and logging pipeline so we don't try to probe or log ourselves...
    // 
    probingPipeline.getExtensionDataMap().put(PIPELINE_DATA_PROBE_FLAG, "Y");
    probingPipeline.getExtensionDataMap().put(PipelineStartLoggingXp.PIPELINE_LOGGING_FLAG, "Y");
    // Only log errors
    // 
    probingPipeline.setLogLevel(LogLevel.ERROR);
    probingPipeline.prepareExecution();
    List<IEngineComponent> componentCopies = pipeline.getComponentCopies(dataProbeLocation.getSourceTransformName());
    for (IEngineComponent componentCopy : componentCopies) {
        // Find the PipelineDataProbe transform(s) in the probing pipeline...
        //
        for (TransformMetaDataCombi combi : probingPipeline.getTransforms()) {
            if (combi.transform instanceof PipelineDataProbe) {
                // Give the transform a bit more information to work with...
                // 
                PipelineDataProbe pipelineDataProbe = (PipelineDataProbe) combi.transform;
                pipelineDataProbe.setSourcePipelineName(pipeline.getPipelineMeta().getName());
                pipelineDataProbe.setSourceTransformLogChannelId(pipeline.getLogChannelId());
                pipelineDataProbe.setSourceTransformName(componentCopy.getName());
                pipelineDataProbe.setSourceTransformCopy(componentCopy.getCopyNr());
                try {
                    final RowProducer rowProducer = probingPipeline.addRowProducer(combi.transformName, combi.copy);
                    // Pass every row written by this copy of the component to the row producer of the probing pipeline...
                    // 
                    componentCopy.addRowListener(new RowAdapter() {

                        @Override
                        public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
                            // Pass this row to the row producer...
                            // 
                            rowProducer.putRow(rowMeta, row);
                        }
                    });
                    // When the parent pipeline finishes, no more rows will arrive, so tell the row producer we're done streaming...
                    // 
                    pipeline.addExecutionFinishedListener(pe -> rowProducer.finished());
                } catch (HopException e) {
                    throw new HopTransformException("Error adding row producer to transform '" + combi.transformName + "'", e);
                }
            }
        }
    }
    // Start the probing pipeline so it can receive and process the forwarded rows
    // 
    probingPipeline.startThreads();
    // We won't wait around until this is finished...
    // The probing pipeline should stop automatically when the parent does
    // 
    pipeline.addExecutionStoppedListener(e -> probingPipeline.stopAll());
}
Also used : RowProducer(org.apache.hop.pipeline.RowProducer) HopException(org.apache.hop.core.exception.HopException) IRowMeta(org.apache.hop.core.row.IRowMeta) HopTransformException(org.apache.hop.core.exception.HopTransformException) IEngineComponent(org.apache.hop.pipeline.engine.IEngineComponent) PipelineMeta(org.apache.hop.pipeline.PipelineMeta) LocalPipelineEngine(org.apache.hop.pipeline.engines.local.LocalPipelineEngine) RowAdapter(org.apache.hop.pipeline.transform.RowAdapter) FileObject(org.apache.commons.vfs2.FileObject) PipelineDataProbe(org.apache.hop.reflection.probe.transform.PipelineDataProbe) TransformMetaDataCombi(org.apache.hop.pipeline.transform.TransformMetaDataCombi)

Example 5 with RowAdapter

Use of org.apache.hop.pipeline.transform.RowAdapter in project hop by apache.

From the class CalculatorUnitTest, method testAddSeconds:

@Test
public void testAddSeconds() throws HopException {
    RowMeta inputRowMeta = new RowMeta();
    ValueMetaDate dayMeta = new ValueMetaDate("Day");
    inputRowMeta.addValueMeta(dayMeta);
    ValueMetaInteger secondsMeta = new ValueMetaInteger("Seconds");
    inputRowMeta.addValueMeta(secondsMeta);
    IRowSet inputRowSet = null;
    try {
        inputRowSet = smh.getMockInputRowSet(new Object[][] { { new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2014-01-01 00:00:00"), 10L }, { new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2014-10-31 23:59:50"), 30L } });
    } catch (ParseException pe) {
        pe.printStackTrace();
        fail();
    }
    inputRowSet.setRowMeta(inputRowMeta);
    CalculatorMeta meta = new CalculatorMeta();
    meta.getFunctions().add(new CalculatorMetaFunction("new_day", CalculationType.ADD_SECONDS, "Day", "Seconds", null, "Date", 0, 0, "", "", "", "", false));
    CalculatorData data = new CalculatorData();
    Calculator calculator = new Calculator(smh.transformMeta, meta, data, 0, smh.pipelineMeta, smh.pipeline);
    calculator.addRowSetToInputRowSets(inputRowSet);
    calculator.setInputRowMeta(inputRowMeta);
    calculator.init();
    // Verify output
    try {
        calculator.addRowListener(new RowAdapter() {

            @Override
            public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
                try {
                    assertEquals(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2014-01-01 00:00:10"), row[2]);
                } catch (ParseException pe) {
                    throw new HopTransformException(pe);
                }
            }
        });
        calculator.processRow();
    } catch (HopException ke) {
        ke.printStackTrace();
        fail();
    }
}
Also used : RowMeta (org.apache.hop.core.row.RowMeta) IRowMeta (org.apache.hop.core.row.IRowMeta) HopException (org.apache.hop.core.exception.HopException) HopTransformException (org.apache.hop.core.exception.HopTransformException) IRowSet (org.apache.hop.core.IRowSet) RowAdapter (org.apache.hop.pipeline.transform.RowAdapter) ILoggingObject (org.apache.hop.core.logging.ILoggingObject) ParseException (java.text.ParseException) SimpleDateFormat (java.text.SimpleDateFormat)
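
In the test above the assertion runs inside rowWrittenEvent, so a failure surfaces as a HopTransformException thrown out of processRow(). A common alternative is to only collect the written rows in the listener and assert on the test thread afterwards. The fragment below is a sketch of that variant under a few stated assumptions: it reuses the same calculator setup as testAddSeconds(), sits in a test method declared with throws Exception (so ParseException needs no handling), adds java.util.ArrayList and java.util.List imports, and assumes one output row is written per processRow() call.

// Assumes the same inputRowSet / meta / calculator setup as testAddSeconds() above.
final List<Object[]> writtenRows = new ArrayList<>();
calculator.addRowListener(new RowAdapter() {

    @Override
    public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
        // Only collect here; assertions run on the test thread below.
        writtenRows.add(row);
    }
});
calculator.processRow();
// One input row was processed, so we expect its calculated row at index 0 (an assumption stated above).
assertEquals(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2014-01-01 00:00:10"), writtenRows.get(0)[2]);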

Aggregations

HopTransformException (org.apache.hop.core.exception.HopTransformException): 16
IRowMeta (org.apache.hop.core.row.IRowMeta): 16
RowAdapter (org.apache.hop.pipeline.transform.RowAdapter): 16
HopException (org.apache.hop.core.exception.HopException): 10
ILoggingObject (org.apache.hop.core.logging.ILoggingObject): 7
RowMeta (org.apache.hop.core.row.RowMeta): 6
PipelineMeta (org.apache.hop.pipeline.PipelineMeta): 6
IEngineComponent (org.apache.hop.pipeline.engine.IEngineComponent): 6
LocalPipelineEngine (org.apache.hop.pipeline.engines.local.LocalPipelineEngine): 6
IRowSet (org.apache.hop.core.IRowSet): 5
ArrayList (java.util.ArrayList): 4
TransformMeta (org.apache.hop.pipeline.transform.TransformMeta): 4
ITransform (org.apache.hop.pipeline.transform.ITransform): 3
ParseException (java.text.ParseException): 2
FileObject (org.apache.commons.vfs2.FileObject): 2
IValueMeta (org.apache.hop.core.row.IValueMeta): 2
IHopMetadataProvider (org.apache.hop.metadata.api.IHopMetadataProvider): 2
Pipeline (org.apache.hop.pipeline.Pipeline): 2
RowProducer (org.apache.hop.pipeline.RowProducer): 2
ConnectionStringBuilder (com.microsoft.azure.eventhubs.ConnectionStringBuilder): 1