Use of org.apache.hop.pipeline.PipelineMeta in project hop by apache.
The class HopRun, method runPipeline:
private void runPipeline(CommandLine cmd, ILogChannel log, PipelineExecutionConfiguration configuration, PipelineMeta pipelineMeta) {
  try {
    String pipelineRunConfigurationName = variables.resolve(configuration.getRunConfiguration());
    IPipelineEngine<PipelineMeta> pipeline = PipelineEngineFactory.createPipelineEngine(variables, pipelineRunConfigurationName, metadataProvider, pipelineMeta);
    pipeline.getPipelineMeta().setInternalHopVariables(pipeline);
    pipeline.initializeFrom(null);
    pipeline.setVariables(configuration.getVariablesMap());

    // Configure the variables and parameters
    //
    pipeline.copyParametersFromDefinitions(pipelineMeta);
    configureParametersAndVariables(cmd, configuration, pipeline, pipeline);

    pipeline.setLogLevel(configuration.getLogLevel());
    pipeline.setMetadataProvider(metadataProvider);
    pipeline.activateParameters(pipeline);

    log.logMinimal("Starting pipeline: " + pipelineMeta.getFilename());

    // Run it!
    //
    pipeline.prepareExecution();
    pipeline.startThreads();
    pipeline.waitUntilFinished();

    // TODO: how to see if a pipeline fails? getResult() always returns true.
    setFinishedWithoutError(true);
  } catch (Exception e) {
    throw new ExecutionException(cmd, "Error running pipeline locally", e);
  }
}
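The engine sequence above is reusable outside HopRun. Below is a minimal sketch, not from the Hop source, of the same prepare/start/wait calls against a LocalPipelineEngine directly; it assumes an already-loaded PipelineMeta and that this Hop version offers the single-argument LocalPipelineEngine constructor, and it checks the error count instead of a boolean result, as the TODO above suggests.

import org.apache.hop.core.logging.LogLevel;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.engines.local.LocalPipelineEngine;

public class LocalRunSketch {
  public static boolean run(PipelineMeta pipelineMeta) throws Exception {
    // Assumption: LocalPipelineEngine can be built straight from a PipelineMeta.
    LocalPipelineEngine pipeline = new LocalPipelineEngine(pipelineMeta);
    pipeline.setLogLevel(LogLevel.BASIC);

    // The same execution sequence as runPipeline() above:
    pipeline.prepareExecution();  // build transforms and row sets
    pipeline.startThreads();      // launch one thread per transform copy
    pipeline.waitUntilFinished(); // block until every transform is done

    // Check the error count instead of a boolean result (see the TODO above).
    return pipeline.getErrors() == 0;
  }
}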
Use of org.apache.hop.pipeline.PipelineMeta in project hop by apache.
The class InjectDataSetIntoTransformExtensionPoint, method callExtensionPoint:
@Override
public void callExtensionPoint(ILogChannel log, IVariables variables, final IPipelineEngine<PipelineMeta> pipeline) throws HopException {
  if (!(pipeline instanceof LocalPipelineEngine)) {
    throw new HopPluginException("Unit tests can only run using a local pipeline engine type");
  }

  final PipelineMeta pipelineMeta = pipeline.getPipelineMeta();
  boolean dataSetEnabled = "Y".equalsIgnoreCase(pipeline.getVariable(DataSetConst.VAR_RUN_UNIT_TEST));
  if (log.isDetailed()) {
    log.logDetailed("Data Set enabled? " + dataSetEnabled);
  }
  if (!dataSetEnabled) {
    return;
  }

  String unitTestName = pipeline.getVariable(DataSetConst.VAR_UNIT_TEST_NAME);
  if (log.isDetailed()) {
    log.logDetailed("Unit test name: " + unitTestName);
  }

  try {
    IHopMetadataProvider metadataProvider = pipelineMeta.getMetadataProvider();

    // If no unit test name was given, there is nothing to inject.
    //
    if (StringUtil.isEmpty(unitTestName)) {
      return;
    }

    PipelineUnitTest unitTest = metadataProvider.getSerializer(PipelineUnitTest.class).load(unitTestName);
    if (unitTest == null) {
      if (log.isDetailed()) {
        log.logDetailed("Unit test '" + unitTestName + "' could not be found");
      }
      return;
    }

    // Inject data sets and capture golden rows for the transforms in the unit test...
    //
    for (final TransformMeta transformMeta : pipeline.getPipelineMeta().getTransforms()) {
      String transformName = transformMeta.getName();

      PipelineUnitTestSetLocation inputLocation = unitTest.findInputLocation(transformName);
      if (inputLocation != null && StringUtils.isNotEmpty(inputLocation.getDataSetName())) {
        String inputDataSetName = inputLocation.getDataSetName();
        log.logDetailed("Data Set location found for transform '" + transformName + "' and data set " + inputDataSetName);

        // We need to inject data from the data set with the specified name into the transform
        //
        injectDataSetIntoTransform((LocalPipelineEngine) pipeline, inputDataSetName, metadataProvider, transformMeta, inputLocation);
      }

      // How about capturing rows for golden data review?
      //
      PipelineUnitTestSetLocation goldenLocation = unitTest.findGoldenLocation(transformName);
      if (goldenLocation != null) {
        String goldenDataSetName = goldenLocation.getDataSetName();
        if (!StringUtil.isEmpty(goldenDataSetName)) {
          log.logDetailed("Capturing rows for validation at pipeline end, transform='" + transformMeta.getName() + "', golden set '" + goldenDataSetName + "'");
          final RowCollection rowCollection = new RowCollection();

          // Create a row collection map if it's missing...
          //
          @SuppressWarnings("unchecked")
          Map<String, RowCollection> collectionMap = (Map<String, RowCollection>) pipeline.getExtensionDataMap().get(DataSetConst.ROW_COLLECTION_MAP);
          if (collectionMap == null) {
            collectionMap = new HashMap<>();
            pipeline.getExtensionDataMap().put(DataSetConst.ROW_COLLECTION_MAP, collectionMap);
          }

          // Keep the map for safe keeping...
          //
          collectionMap.put(transformMeta.getName(), rowCollection);

          // We'll capture the rows from this one and then evaluate them after execution...
          //
          IEngineComponent component = pipeline.findComponent(transformMeta.getName(), 0);
          component.addRowListener(new RowAdapter() {
            @Override
            public void rowReadEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
              if (rowCollection.getRowMeta() == null) {
                rowCollection.setRowMeta(rowMeta);
              }
              rowCollection.getRows().add(row);
            }
          });
        }
      }
    }
  } catch (Throwable e) {
    throw new HopException("Unable to inject data set rows", e);
  }
}
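The row-capture idiom at the end of callExtensionPoint can stand alone. The sketch below mirrors the findComponent/addRowListener calls from the snippet, but collects rows into a plain List instead of a RowCollection; the pipeline and transform name are assumed inputs.

import java.util.ArrayList;
import java.util.List;
import org.apache.hop.core.exception.HopTransformException;
import org.apache.hop.core.row.IRowMeta;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.engine.IEngineComponent;
import org.apache.hop.pipeline.engine.IPipelineEngine;
import org.apache.hop.pipeline.transform.RowAdapter;

public class RowCaptureSketch {
  // Collect every row read by the first copy of the named transform.
  public static List<Object[]> captureRows(IPipelineEngine<PipelineMeta> pipeline, String transformName) {
    final List<Object[]> rows = new ArrayList<>();
    IEngineComponent component = pipeline.findComponent(transformName, 0);
    component.addRowListener(new RowAdapter() {
      @Override
      public void rowReadEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
        rows.add(row); // captured for validation after execution, as above
      }
    });
    return rows;
  }
}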
Use of org.apache.hop.pipeline.PipelineMeta in project hop by apache.
The class PipelineMetaModifier, method getTestPipeline:
public PipelineMeta getTestPipeline(ILogChannel log, IVariables variables, IHopMetadataProvider metadataProvider) throws HopException {
  // OK, so now replace an input transform with a data set attached with an Injector transform...
  // However, we don't want to have the user see this so we need to copy pipeline.pipelineMeta first...
  //
  // Clone seems to have problems so we'll take the long (XML) way around...
  //
  InputStream stream;
  try {
    stream = new ByteArrayInputStream(pipelineMeta.getXml(variables).getBytes(Const.XML_ENCODING));
  } catch (UnsupportedEncodingException e) {
    throw new HopException("Encoding error", e);
  }
  PipelineMeta copyPipelineMeta = new PipelineMeta(stream, metadataProvider, true, variables);

  // Pass the metadata references...
  //
  copyPipelineMeta.setMetadataProvider(pipelineMeta.getMetadataProvider());

  // Replace the database connections listed in the unit test...
  //
  for (PipelineUnitTestDatabaseReplacement dbReplacement : unitTest.getDatabaseReplacements()) {
    String sourceDatabaseName = variables.resolve(dbReplacement.getOriginalDatabaseName());
    String replacementDatabaseName = variables.resolve(dbReplacement.getReplacementDatabaseName());

    DatabaseMeta sourceDatabaseMeta = copyPipelineMeta.findDatabase(sourceDatabaseName);
    DatabaseMeta replacementDatabaseMeta = copyPipelineMeta.findDatabase(replacementDatabaseName);
    if (sourceDatabaseMeta == null) {
      throw new HopException("Unable to find source database connection '" + sourceDatabaseName + "', can not be replaced");
    }
    if (replacementDatabaseMeta == null) {
      throw new HopException("Unable to find replacement database connection '" + replacementDatabaseName + "', can not be used to replace");
    }
    if (log.isDetailed()) {
      log.logDetailed("Replaced database connection '" + sourceDatabaseName + "' with connection '" + replacementDatabaseName + "'");
    }
    sourceDatabaseMeta.replaceMeta(replacementDatabaseMeta);
  }

  // Replace all transforms with an Input Data Set marker with an Injector
  // Replace all transforms with a Golden Data Set marker with a Dummy
  // Apply the tweaks to the transforms:
  //   - Bypass : replace with Dummy
  //   - Remove : remove transform and all connected hops.
  //
  // Loop over the original pipeline to allow us to safely modify the copy
  //
  List<TransformMeta> transforms = pipelineMeta.getTransforms();
  for (TransformMeta transform : transforms) {
    TransformMeta transformMeta = copyPipelineMeta.findTransform(transform.getName());
    PipelineUnitTestSetLocation inputLocation = unitTest.findInputLocation(transformMeta.getName());
    PipelineUnitTestSetLocation goldenLocation = unitTest.findGoldenLocation(transformMeta.getName());
    PipelineUnitTestTweak transformTweak = unitTest.findTweak(transformMeta.getName());

    // Replace the transform with an Injector providing the input data set rows?
    //
    if (inputLocation != null) {
      handleInputDataSet(log, inputLocation, unitTest, pipelineMeta, transformMeta, metadataProvider);
    }

    // Capture golden data at this transform?
    //
    if (goldenLocation != null) {
      handleGoldenDataSet(log, goldenLocation, transformMeta, metadataProvider);
    }

    if (transformTweak != null && transformTweak.getTweak() != null) {
      switch (transformTweak.getTweak()) {
        case NONE:
          break;
        case REMOVE_TRANSFORM:
          handleTweakRemoveTransform(log, copyPipelineMeta, transformMeta);
          break;
        case BYPASS_TRANSFORM:
          handleTweakBypassTransform(log, transformMeta);
          break;
        default:
          break;
      }
    }
  }

  return copyPipelineMeta;
}
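The XML round-trip at the top of getTestPipeline is worth isolating: because clone() is unreliable here, the pipeline is serialized to XML and parsed back. The helper below is a condensed sketch of just that copy step, using only the calls already visible above.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import org.apache.hop.core.Const;
import org.apache.hop.core.variables.IVariables;
import org.apache.hop.metadata.api.IHopMetadataProvider;
import org.apache.hop.pipeline.PipelineMeta;

public class CopyPipelineMetaSketch {
  public static PipelineMeta copyViaXml(PipelineMeta pipelineMeta, IHopMetadataProvider metadataProvider, IVariables variables) throws Exception {
    // Serialize to XML and parse it back, exactly as getTestPipeline() does.
    InputStream stream = new ByteArrayInputStream(pipelineMeta.getXml(variables).getBytes(Const.XML_ENCODING));
    PipelineMeta copy = new PipelineMeta(stream, metadataProvider, true, variables);

    // Carry the metadata references over to the copy.
    copy.setMetadataProvider(pipelineMeta.getMetadataProvider());
    return copy;
  }
}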
Use of org.apache.hop.pipeline.PipelineMeta in project hop by apache.
The class WriteToDataSetExtensionPoint, method callExtensionPoint:
@Override
public void callExtensionPoint(ILogChannel log, IVariables variables, IPipelineEngine<PipelineMeta> pipeline) throws HopException {
  final PipelineMeta pipelineMeta = pipeline.getPipelineMeta();

  boolean writeToDataSet = "Y".equalsIgnoreCase(pipeline.getVariable(DataSetConst.VAR_WRITE_TO_DATASET));
  if (!writeToDataSet) {
    return;
  }

  pipeline.addExecutionFinishedListener(engine -> {
    // Remove the flag when done.
    // We don't want to write to the data set every time we run
    //
    pipeline.setVariable(DataSetConst.VAR_WRITE_TO_DATASET, null);

    // Prevent memory leaking as well
    //
    WriteToDataSetExtensionPoint.transformsMap.remove(pipelineMeta.getName());
    WriteToDataSetExtensionPoint.mappingsMap.remove(pipelineMeta.getName());
    WriteToDataSetExtensionPoint.setsMap.remove(pipelineMeta.getName());
  });

  try {
    IHopMetadataProvider metadataProvider = pipelineMeta.getMetadataProvider();
    if (metadataProvider == null) {
      // Nothing to do here, we can't reference data sets.
      return;
    }

    // Find the transform that needs to write its rows to a data set...
    //
    for (final TransformMeta transformMeta : pipeline.getPipelineMeta().getTransforms()) {
      // We might want to pass the data from this transform into a data set all by itself...
      // For this we want to attach a row listener which writes the data.
      //
      TransformMeta injectMeta = transformsMap.get(pipelineMeta.getName());
      if (injectMeta != null && injectMeta.equals(transformMeta)) {
        final List<SourceToTargetMapping> mappings = mappingsMap.get(pipelineMeta.getName());
        final DataSet dataSet = setsMap.get(pipelineMeta.getName());
        if (mappings != null && dataSet != null) {
          passTransformRowsToDataSet(pipeline, pipelineMeta, transformMeta, mappings, dataSet);
        }
      }
    }
  } catch (Throwable e) {
    throw new HopException("Unable to pass rows to data set", e);
  }
}
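The addExecutionFinishedListener call above implements a one-shot flag: the variable that triggered the data set write is cleared as soon as the run finishes, so the side effect never repeats. A minimal sketch of that pattern in isolation follows; the flag variable name is a hypothetical chosen by the caller rather than a DataSetConst constant.

import org.apache.hop.core.exception.HopException;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.engine.IPipelineEngine;

public class OneShotFlagSketch {
  // Arm a one-shot variable on the pipeline; flagVariable is caller-supplied.
  public static void armOnce(IPipelineEngine<PipelineMeta> pipeline, String flagVariable) throws HopException {
    pipeline.setVariable(flagVariable, "Y");
    pipeline.addExecutionFinishedListener(engine -> {
      // Clear the flag when the run ends so the side effect does not repeat.
      pipeline.setVariable(flagVariable, null);
    });
  }
}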
Use of org.apache.hop.pipeline.PipelineMeta in project hop by apache.
The class HopNeo4jPerspective, method openTransform:
private void openTransform(Session session, String name, String type, String id) {
  LogChannel.UI.logDetailed("Open transform : " + id + ", name : " + name + ", type: " + type);

  Map<String, Object> params = new HashMap<>();
  params.put("subjectName", name);
  params.put("subjectType", type);
  params.put("subjectId", id);

  StringBuilder cypher = new StringBuilder();
  cypher.append("MATCH(e:Execution { name : $subjectName, type : $subjectType, id : $subjectId } )"); // TRANSFORM
  cypher.append("-[:EXECUTION_OF_TRANSFORM]->(t:Transform { name : $subjectName } )"); // Transform
  cypher.append("-[:TRANSFORM_OF_PIPELINE]->(p:Pipeline) ");
  cypher.append("RETURN p.filename, t.name ");

  String[] names = session.readTransaction(tx -> {
    Result statementResult = tx.run(cypher.toString(), params);
    if (!statementResult.hasNext()) {
      statementResult.consume();
      // No file found
      return null;
    }
    Record record = statementResult.next();
    statementResult.consume();
    String filename = LoggingCore.getStringValue(record, 0);
    String transformName = LoggingCore.getStringValue(record, 1);
    return new String[] { filename, transformName };
  });

  if (names == null) {
    return;
  }
  String filename = names[0];
  String transformName = names[1];
  if (StringUtils.isEmpty(filename)) {
    return;
  }

  try {
    hopGui.fileDelegate.fileOpen(filename);
    if (StringUtils.isEmpty(transformName)) {
      return;
    }
    HopDataOrchestrationPerspective perspective = HopGui.getDataOrchestrationPerspective();
    IHopFileTypeHandler typeHandler = perspective.getActiveFileTypeHandler();
    if (!(typeHandler instanceof HopGuiPipelineGraph)) {
      return;
    }
    HopGuiPipelineGraph graph = (HopGuiPipelineGraph) typeHandler;
    PipelineMeta pipelineMeta = graph.getPipelineMeta();
    TransformMeta transformMeta = pipelineMeta.findTransform(transformName);
    if (transformMeta == null) {
      return;
    }
    pipelineMeta.unselectAll();
    transformMeta.setSelected(true);
    graph.editTransform(pipelineMeta, transformMeta);
  } catch (Exception e) {
    new ErrorDialog(hopGui.getShell(), BaseMessages.getString(PKG, "Neo4jPerspectiveDialog.OpeningTransform.Dialog.Header"), BaseMessages.getString(PKG, "Neo4jPerspectiveDialog.OpeningTransform.Dialog.Message"), e);
  }
}
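The lookup inside openTransform is a standard Neo4j Java driver pattern: a parameterized Cypher query inside a read transaction, with the result consumed before the transaction closes. The sketch below reduces it to a single returned value; the query text is illustrative, and readTransaction is the 4.x API (newer drivers prefer executeRead).

import java.util.Map;
import org.neo4j.driver.Record;
import org.neo4j.driver.Result;
import org.neo4j.driver.Session;

public class CypherLookupSketch {
  // Return the pipeline filename for an execution id, or null when nothing matches.
  public static String findPipelineFilename(Session session, String executionId) {
    Map<String, Object> params = Map.of("subjectId", executionId);
    return session.readTransaction(tx -> {
      Result result = tx.run(
          "MATCH(e:Execution { id : $subjectId })"
              + "-[:EXECUTION_OF_TRANSFORM]->(t:Transform)"
              + "-[:TRANSFORM_OF_PIPELINE]->(p:Pipeline) "
              + "RETURN p.filename",
          params);
      if (!result.hasNext()) {
        result.consume(); // always exhaust the result inside the transaction
        return null;
      }
      Record record = result.next();
      result.consume();
      return record.get(0).asString();
    });
  }
}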