Usage example of org.apache.hop.pipeline.engines.local.LocalPipelineEngine in the Apache Hop project:
the callExtensionPoint method of the InjectDataSetIntoTransformExtensionPoint class.
/**
 * Prepares a locally executed pipeline for a unit test run: injects the configured input
 * data sets into their target transforms and attaches row listeners that capture each
 * golden-set transform's output rows for validation after execution.
 *
 * <p>Does nothing unless the {@code VAR_RUN_UNIT_TEST} variable is "Y" and a unit test
 * name is set on the pipeline.
 *
 * @param log the log channel to write diagnostics to
 * @param variables the variable space (values used here are read from the pipeline itself)
 * @param pipeline the pipeline engine being prepared; must be a {@link LocalPipelineEngine}
 * @throws HopException if the engine is not local, the unit test metadata cannot be
 *     loaded, or data set injection fails
 */
@Override
public void callExtensionPoint(ILogChannel log, IVariables variables, final IPipelineEngine<PipelineMeta> pipeline) throws HopException {
  // Data set injection relies on local row producers/listeners, so only the
  // local engine type is supported.
  if (!(pipeline instanceof LocalPipelineEngine)) {
    throw new HopPluginException("Unit tests can only run using a local pipeline engine type");
  }
  final PipelineMeta pipelineMeta = pipeline.getPipelineMeta();
  boolean dataSetEnabled = "Y".equalsIgnoreCase(pipeline.getVariable(DataSetConst.VAR_RUN_UNIT_TEST));
  if (log.isDetailed()) {
    log.logDetailed("Data Set enabled? " + dataSetEnabled);
  }
  if (!dataSetEnabled) {
    return;
  }
  String unitTestName = pipeline.getVariable(DataSetConst.VAR_UNIT_TEST_NAME);
  if (log.isDetailed()) {
    log.logDetailed("Unit test name: " + unitTestName);
  }
  try {
    IHopMetadataProvider metadataProvider = pipelineMeta.getMetadataProvider();
    // No unit test selected: nothing to inject or capture.
    if (StringUtil.isEmpty(unitTestName)) {
      return;
    }
    PipelineUnitTest unitTest = metadataProvider.getSerializer(PipelineUnitTest.class).load(unitTestName);
    if (unitTest == null) {
      if (log.isDetailed()) {
        log.logDetailed("Unit test '" + unitTestName + "' could not be found");
      }
      return;
    }
    // Walk all transforms: inject input data sets, attach golden-set capture listeners.
    for (final TransformMeta transformMeta : pipelineMeta.getTransforms()) {
      String transformName = transformMeta.getName();
      PipelineUnitTestSetLocation inputLocation = unitTest.findInputLocation(transformName);
      if (inputLocation != null && StringUtils.isNotEmpty(inputLocation.getDataSetName())) {
        String inputDataSetName = inputLocation.getDataSetName();
        if (log.isDetailed()) {
          log.logDetailed("Data Set location found for transform '" + transformName + "' and data set " + inputDataSetName);
        }
        // We need to inject data from the data set with the specified name into the transform
        injectDataSetIntoTransform((LocalPipelineEngine) pipeline, inputDataSetName, metadataProvider, transformMeta, inputLocation);
      }
      // How about capturing rows for golden data review?
      PipelineUnitTestSetLocation goldenLocation = unitTest.findGoldenLocation(transformName);
      if (goldenLocation != null) {
        String goldenDataSetName = goldenLocation.getDataSetName();
        if (!StringUtil.isEmpty(goldenDataSetName)) {
          if (log.isDetailed()) {
            log.logDetailed("Capturing rows for validation at pipeline end, transform='" + transformMeta.getName() + "', golden set '" + goldenDataSetName + "'");
          }
          final RowCollection rowCollection = new RowCollection();
          // Create a row collection map in the pipeline's extension data if it's missing...
          @SuppressWarnings("unchecked")
          Map<String, RowCollection> collectionMap = (Map<String, RowCollection>) pipeline.getExtensionDataMap().get(DataSetConst.ROW_COLLECTION_MAP);
          if (collectionMap == null) {
            collectionMap = new HashMap<>();
            pipeline.getExtensionDataMap().put(DataSetConst.ROW_COLLECTION_MAP, collectionMap);
          }
          // Keep the collection per transform name for evaluation after execution.
          collectionMap.put(transformMeta.getName(), rowCollection);
          // We'll capture the rows from this component and evaluate them when the run ends.
          IEngineComponent component = pipeline.findComponent(transformMeta.getName(), 0);
          if (component == null) {
            // Fail with context instead of an NPE if the component can't be located.
            throw new HopException("Unable to find component for transform '" + transformMeta.getName() + "' to capture golden data set rows");
          }
          component.addRowListener(new RowAdapter() {
            @Override
            public void rowReadEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
              // Remember the row metadata from the first row seen.
              if (rowCollection.getRowMeta() == null) {
                rowCollection.setRowMeta(rowMeta);
              }
              rowCollection.getRows().add(row);
            }
          });
        }
      }
    }
  } catch (Throwable e) {
    throw new HopException("Unable to inject data set rows", e);
  }
}
Usage example of org.apache.hop.pipeline.engines.local.LocalPipelineEngine in the Apache Hop project:
the createPipeline method of the ScriptValueAddFunctions_SetVariableScopeTest class.
/**
 * Builds a Mockito spy around a fresh local pipeline engine wired to the test log channel.
 *
 * @return a spied {@link Pipeline} for use in the test
 */
private Pipeline createPipeline() {
  Pipeline engine = new LocalPipelineEngine();
  engine.setLogChannel(log);
  return spy(engine);
}
Usage example of org.apache.hop.pipeline.engines.local.LocalPipelineEngine in the Apache Hop project:
the beforeCommon method of the BaseParsingTest class.
/**
* Initialize transform info. Method is final against redefine in descendants.
*/
@Before
public final void beforeCommon() throws Exception {
// Bootstrap the Hop runtime and register the compression plugin type before
// any transform or pipeline objects are created. Order matters here.
HopEnvironment.init();
PluginRegistry.addPluginType(CompressionPluginType.getInstance());
PluginRegistry.init();
// Minimal transform metadata shared by all parsing tests.
transformMeta = new TransformMeta();
transformMeta.setName("test");
// A local engine flagged as running so transforms behave as if mid-execution.
pipeline = new LocalPipelineEngine();
pipeline.setLogChannel(log);
pipeline.setRunning(true);
// Anonymous PipelineMeta that always resolves lookups to the single test
// transform, regardless of the requested name.
pipelineMeta = new PipelineMeta() {
@Override
public TransformMeta findTransform(String name) {
return transformMeta;
}
};
// VFS manager and the classpath prefix where test fixture files live.
fs = VFS.getManager();
inPrefix = '/' + this.getClass().getPackage().getName().replace('.', '/') + "/files/";
}
Usage example of org.apache.hop.pipeline.engines.local.LocalPipelineEngine in the Apache Hop project:
the setUp method of the PGBulkLoaderMetaTest class.
/** Wires up a PGBulkLoader transform instance backed by a local pipeline engine. */
@Before
public void setUp() {
  // Pipeline metadata that hosts the loader transform under test.
  PipelineMeta meta = new PipelineMeta();
  meta.setName("loader");

  lm = new PGBulkLoaderMeta();
  ld = new PGBulkLoaderData();

  // Resolve the plugin id registered for the loader's meta class.
  String pluginId = PluginRegistry.getInstance().getPluginId(TransformPluginType.class, lm);
  transformMeta = new TransformMeta(pluginId, "loader", lm);

  Pipeline engine = new LocalPipelineEngine(meta);
  meta.addTransform(transformMeta);

  // The transform instance the tests exercise.
  loader = new PGBulkLoader(transformMeta, lm, ld, 1, meta, engine);
}
Usage example of org.apache.hop.pipeline.engines.local.LocalPipelineEngine in the Apache Hop project:
the executeProbingPipeline method of the PipelineDataProbeXp class.
/**
 * Execute a probing pipeline for the current pipeline. Adds a row listener to every copy
 * of the probed transform and streams its output rows into the PipelineDataProbe
 * transform(s) of the probing pipeline via row producers.
 *
 * @param pipelineProbe the probe definition this execution belongs to
 * @param dataProbeLocation identifies the source transform whose rows are probed
 * @param loggingPipelineFilename The pipeline to start for the location
 * @param pipeline The parent pipeline to listen to
 * @param variables the variable space used to resolve the probing pipeline's metadata
 * @throws HopException if the probing pipeline can't be loaded, prepared, or wired up
 */
private synchronized void executeProbingPipeline(PipelineProbe pipelineProbe, DataProbeLocation dataProbeLocation, String loggingPipelineFilename, IPipelineEngine<PipelineMeta> pipeline, IVariables variables) throws HopException {
PipelineMeta probingPipelineMeta = new PipelineMeta(loggingPipelineFilename, pipeline.getMetadataProvider(), true, variables);
// Create a local pipeline engine...
//
LocalPipelineEngine probingPipeline = new LocalPipelineEngine(probingPipelineMeta, variables, pipeline);
// Flag it as a probing and logging pipeline so we don't try to probe or log ourselves...
//
probingPipeline.getExtensionDataMap().put(PIPELINE_DATA_PROBE_FLAG, "Y");
probingPipeline.getExtensionDataMap().put(PipelineStartLoggingXp.PIPELINE_LOGGING_FLAG, "Y");
// Only log errors
//
probingPipeline.setLogLevel(LogLevel.ERROR);
// Prepare must run before addRowProducer() below can attach input row sets.
probingPipeline.prepareExecution();
List<IEngineComponent> componentCopies = pipeline.getComponentCopies(dataProbeLocation.getSourceTransformName());
for (IEngineComponent componentCopy : componentCopies) {
// Find every PipelineDataProbe transform in the probing pipeline and feed it
// the rows written by this component copy.
//
for (TransformMetaDataCombi combi : probingPipeline.getTransforms()) {
if (combi.transform instanceof PipelineDataProbe) {
// Give the transform a bit more information to work with...
//
PipelineDataProbe pipelineDataProbe = (PipelineDataProbe) combi.transform;
pipelineDataProbe.setSourcePipelineName(pipeline.getPipelineMeta().getName());
pipelineDataProbe.setSourceTransformLogChannelId(pipeline.getLogChannelId());
pipelineDataProbe.setSourceTransformName(componentCopy.getName());
pipelineDataProbe.setSourceTransformCopy(componentCopy.getCopyNr());
try {
final RowProducer rowProducer = probingPipeline.addRowProducer(combi.transformName, combi.copy);
// For every copy of the component, add an input row set to the parent pipeline...
//
componentCopy.addRowListener(new RowAdapter() {
@Override
public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
// Pass this row to the row producer...
//
rowProducer.putRow(rowMeta, row);
}
});
// When the parent pipeline finishes, close the producer so the probing
// pipeline's input stream ends and it can drain and stop cleanly.
//
pipeline.addExecutionFinishedListener(pe -> rowProducer.finished());
} catch (HopException e) {
throw new HopTransformException("Error adding row producer to transform '" + combi.transformName + "'", e);
}
}
}
}
// Execute the logging pipeline to save the logging information
//
probingPipeline.startThreads();
// We'll not wait around until this is finished...
// The pipeline should stop automatically when the parent does
//
pipeline.addExecutionStoppedListener(e -> probingPipeline.stopAll());
}
Aggregations