Usage of org.apache.hop.core.parameters.NamedParameters in the Apache Hop project.
Class: PipelineMeta — method: clear().
/**
 * Clears the pipeline's meta-data, including the lists of databases, transforms, hops, notes,
 * dependencies, partition schemas, hop servers, and cluster schemas. Logging information and
 * timeouts are reset to defaults, and recent connection info is cleared.
 *
 * <p>Inherited state (notes, changed flags, etc.) is reset by the {@code super.clear()} call
 * below; only pipeline-specific fields are reset here.
 */
@Override
public void clear() {
  // By default the pipeline name tracks the filename after a reset.
  nameSynchronizedWithFilename = true;
  // Replace (not just empty) the collections so no stale references survive.
  transforms = new ArrayList<>();
  hops = new ArrayList<>();
  namedParams = new NamedParameters();
  transformChangeListeners = new ArrayList<>();
  // -1 marks "status/version unknown".
  pipelineStatus = -1;
  pipelineVersion = null;
  // Reset the undo stack to its defaults.
  undo = new ArrayList<>();
  maxUndo = Const.MAX_UNDO;
  undoPosition = -1;
  super.clear();
  // The performance monitoring options
  //
  capturingTransformPerformanceSnapShots = false;
  // capture a snapshot every 1000 ms (one second) by default
  transformPerformanceCapturingDelay = 1000;
  // maximum 100 data points
  transformPerformanceCapturingSizeLimit = "100";
  // Invalidate all derived caches.
  transformFieldsCache = new HashMap<>();
  loopCache = new HashMap<>();
  previousTransformCache = new HashMap<>();
  pipelineType = PipelineType.Normal;
}
Usage of org.apache.hop.core.parameters.NamedParameters in the Apache Hop project.
Class: PipelineMeta — method: realClone().
/**
 * Perform a real clone of the pipeline meta-data object, including cloning all lists and copying
 * all values. If the doClear parameter is true, the clone will be cleared of ALL values before
 * the copy. If false, only the copied fields will be cleared.
 *
 * @param doClear Whether to clear all of the clone's data before copying from the source object
 * @return a real clone of the calling object, or {@code null} if cloning fails (see note in the
 *     catch block below)
 */
public Object realClone(boolean doClear) {
  try {
    // Start from a shallow clone, then rebuild the collections so the clone does not
    // share mutable state with this instance.
    PipelineMeta pipelineMeta = (PipelineMeta) super.clone();
    if (doClear) {
      pipelineMeta.clear();
    } else {
      // Clear out the things we're replacing below
      pipelineMeta.transforms = new ArrayList<>();
      pipelineMeta.hops = new ArrayList<>();
      pipelineMeta.notes = new ArrayList<>();
      pipelineMeta.namedParams = new NamedParameters();
      pipelineMeta.transformChangeListeners = new ArrayList<>();
    }
    // Transforms must be added before the info-stream fix-up loop below, which looks
    // them up by name in the clone.
    for (TransformMeta transform : transforms) {
      pipelineMeta.addTransform((TransformMeta) transform.clone());
    }
    // Transform references are original yet. Set them to the clones.
    for (TransformMeta transform : pipelineMeta.getTransforms()) {
      final ITransformMeta transformMeta = transform.getTransform();
      if (transformMeta != null) {
        final ITransformIOMeta transformIOMeta = transformMeta.getTransformIOMeta();
        if (transformIOMeta != null) {
          for (IStream stream : transformIOMeta.getInfoStreams()) {
            String streamTransformName = stream.getTransformName();
            if (streamTransformName != null) {
              // Re-point the info stream at the cloned transform with the same name.
              TransformMeta streamTransformMeta = pipelineMeta.findTransform(streamTransformName);
              stream.setTransformMeta(streamTransformMeta);
            }
          }
        }
      }
    }
    for (PipelineHopMeta hop : hops) {
      pipelineMeta.addPipelineHop((PipelineHopMeta) hop.clone());
    }
    for (NotePadMeta note : notes) {
      pipelineMeta.addNote(note.clone());
    }
    // Copy the parameter definitions (name, default, description).
    for (String key : listParameters()) {
      pipelineMeta.addParameterDefinition(key, getParameterDefault(key), getParameterDescription(key));
    }
    return pipelineMeta;
  } catch (Exception e) {
    // NOTE(review): the failure is effectively swallowed — the stack trace goes to
    // stderr only and null is returned, so callers must null-check. Consider routing
    // this through the logging channel and/or rethrowing a runtime exception.
    e.printStackTrace();
    return null;
  }
}
Usage of org.apache.hop.core.parameters.NamedParameters in the Apache Hop project.
Class: WorkflowMeta — method: realClone().
/**
 * Perform a real clone of the workflow meta-data object, including cloning all lists and copying
 * all values. If the doClear parameter is true, the clone will be cleared of ALL values before
 * the copy. If false, only the copied fields will be cleared.
 *
 * @param doClear Whether to clear all of the clone's data before copying from the source object
 * @return a real clone of the calling object, or {@code null} if cloning fails (see note in the
 *     catch block below)
 */
public Object realClone(boolean doClear) {
  try {
    // Start from a shallow clone, then rebuild the collections so the clone does not
    // share mutable state with this instance.
    WorkflowMeta workflowMeta = (WorkflowMeta) super.clone();
    if (doClear) {
      workflowMeta.clear();
    } else {
      // Clear out only the fields we repopulate below.
      workflowMeta.workflowActions = new ArrayList<>();
      workflowMeta.workflowHops = new ArrayList<>();
      workflowMeta.notes = new ArrayList<>();
      workflowMeta.namedParams = new NamedParameters();
    }
    for (ActionMeta action : workflowActions) {
      workflowMeta.workflowActions.add((ActionMeta) action.cloneDeep());
    }
    for (WorkflowHopMeta hop : workflowHops) {
      workflowMeta.workflowHops.add(hop.clone());
    }
    for (NotePadMeta notePad : notes) {
      workflowMeta.notes.add(notePad.clone());
    }
    // Copy the parameter definitions (name, default, description).
    for (String key : listParameters()) {
      workflowMeta.addParameterDefinition(key, getParameterDefault(key), getParameterDescription(key));
    }
    return workflowMeta;
  } catch (Exception e) {
    // NOTE(review): the exception is silently swallowed and null returned — no logging
    // at all here, unlike PipelineMeta.realClone() which at least prints the stack
    // trace. Callers must null-check; consider logging the failure.
    return null;
  }
}
Usage of org.apache.hop.core.parameters.NamedParameters in the Apache Hop project.
Class: ActionPipeline — method: execute().
/**
 * Execute this action and return the result. In this case it means, just set the result boolean
 * in the Result class.
 *
 * <p>Loads the pipeline once, then runs it either a single time or once per incoming result row
 * (when {@code execPerRow} is set), copying parameter values from the action configuration and/or
 * the previous result rows into the pipeline before each run.
 *
 * @param result The result of the previous execution
 * @param nr the action number
 * @return The Result of the execution; failures are reported through the result's error count
 *     and boolean rather than thrown
 */
@Override
public Result execute(Result result, int nr) throws HopException {
  result.setEntryNr(nr);
  LogChannelFileWriter logChannelFileWriter = null;
  LogLevel pipelineLogLevel = parentWorkflow.getLogLevel();
  String realLogFilename = "";
  // Phase 1: optional dedicated log file for this action's pipeline run.
  if (setLogfile) {
    pipelineLogLevel = logFileLevel;
    realLogFilename = resolve(getLogFilename());
    // if we do not have one, we must fail
    if (Utils.isEmpty(realLogFilename)) {
      logError(BaseMessages.getString(PKG, "ActionPipeline.Exception.LogFilenameMissing"));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    // create parent folder?
    if (!FileUtil.createParentFolder(PKG, realLogFilename, createParentFolder, this.getLogChannel())) {
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    try {
      logChannelFileWriter = new LogChannelFileWriter(this.getLogChannelId(), HopVfs.getFileObject(realLogFilename), setAppendLogfile);
      logChannelFileWriter.startLogging();
    } catch (HopException e) {
      logError(BaseMessages.getString(PKG, "ActionPipeline.Error.UnableOpenAppender", realLogFilename, e.toString()));
      logError(Const.getStackTracker(e));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
  }
  logDetailed(BaseMessages.getString(PKG, "ActionPipeline.Log.OpeningPipeline", resolve(getFilename())));
  // Load the pipeline only once for the complete loop!
  // Throws an exception if it was not possible to load the pipeline, for example if the XML file
  // doesn't exist.
  // Log the stack trace and return an error condition from this
  //
  PipelineMeta pipelineMeta = null;
  try {
    pipelineMeta = getPipelineMeta(getMetadataProvider(), this);
  } catch (HopException e) {
    logError(BaseMessages.getString(PKG, "ActionPipeline.Exception.UnableToRunWorkflow", parentWorkflowMeta.getName(), getName(), StringUtils.trim(e.getMessage())), e);
    result.setNrErrors(1);
    result.setResult(false);
    return result;
  }
  // Phase 2: execution loop — exactly one iteration when execPerRow is false,
  // otherwise one iteration per incoming result row.
  int iteration = 0;
  RowMetaAndData resultRow = null;
  boolean first = true;
  // NOTE(review): rows is never null after this line, so the "rows != null" checks
  // below are always true.
  List<RowMetaAndData> rows = new ArrayList<>(result.getRows());
  // NOTE(review): because && binds tighter than ||, the !parentWorkflow.isStopped()
  // check only guards the exec-per-row clause; a non-per-row first iteration runs
  // even if the parent workflow was stopped. Confirm this is intentional.
  while ((first && !execPerRow) || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0) && !parentWorkflow.isStopped()) {
    //
    if (execPerRow) {
      result.getRows().clear();
    }
    if (rows != null && execPerRow) {
      resultRow = rows.get(iteration);
    } else {
      resultRow = null;
    }
    // Collect the parameter values for this iteration into a fresh parameter set.
    INamedParameters namedParam = new NamedParameters();
    if (parameters != null) {
      for (int idx = 0; idx < parameters.length; idx++) {
        if (!Utils.isEmpty(parameters[idx])) {
          // We have a parameter
          //
          namedParam.addParameterDefinition(parameters[idx], "", "Action runtime");
          if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
            // There is no field name specified.
            //
            String value = Const.NVL(resolve(parameterValues[idx]), "");
            namedParam.setParameterValue(parameters[idx], value);
          } else {
            // something filled in, in the field column...
            //
            String value = "";
            if (resultRow != null) {
              value = resultRow.getString(parameterFieldNames[idx], "");
            }
            namedParam.setParameterValue(parameters[idx], value);
          }
        }
      }
    }
    first = false;
    Result previousResult = result;
    try {
      if (isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "ActionPipeline.StartingPipeline", getFilename(), getName(), getDescription()));
      }
      if (clearResultRows) {
        previousResult.setRows(new ArrayList<>());
      }
      if (clearResultFiles) {
        previousResult.getResultFiles().clear();
      }
      /*
       * Set one or more "result" rows on the pipeline...
       */
      if (execPerRow) {
        // Execute for each input row
        // Just pass a single row
        List<RowMetaAndData> newList = new ArrayList<>();
        newList.add(resultRow);
        // This previous result rows list can be either empty or not.
        // Depending on the checkbox "clear result rows"
        // In this case, it would execute the pipeline with one extra row each time
        // Can't figure out a real use-case for it, but hey, who am I to decide that, right?
        // :-)
        //
        previousResult.getRows().addAll(newList);
        // NOTE(review): this paramsFromPrevious block is byte-identical to the one in
        // the else branch below — a candidate for extraction into a private helper.
        if (paramsFromPrevious) {
          if (parameters != null) {
            for (int idx = 0; idx < parameters.length; idx++) {
              if (!Utils.isEmpty(parameters[idx])) {
                // We have a parameter
                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                  namedParam.setParameterValue(parameters[idx], Const.NVL(resolve(parameterValues[idx]), ""));
                } else {
                  String fieldValue = "";
                  if (resultRow != null) {
                    fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                  }
                  // Get the value from the input stream
                  namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                }
              }
            }
          }
        }
      } else {
        if (paramsFromPrevious) {
          // Copy the input the parameters
          if (parameters != null) {
            for (int idx = 0; idx < parameters.length; idx++) {
              if (!Utils.isEmpty(parameters[idx])) {
                // We have a parameter
                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                  namedParam.setParameterValue(parameters[idx], Const.NVL(resolve(parameterValues[idx]), ""));
                } else {
                  String fieldValue = "";
                  if (resultRow != null) {
                    fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                  }
                  // Get the value from the input stream
                  namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                }
              }
            }
          }
        }
      }
      // Handle the parameters...
      //
      String[] parameterNames = pipelineMeta.listParameters();
      prepareFieldNamesParameters(parameters, parameterFieldNames, parameterValues, namedParam, this);
      if (StringUtils.isEmpty(runConfiguration)) {
        throw new HopException("This action needs a run configuration to use to execute the specified pipeline");
      }
      runConfiguration = resolve(runConfiguration);
      log.logBasic(BaseMessages.getString(PKG, "ActionPipeline.RunConfig.Message", runConfiguration));
      // Create the pipeline from meta-data
      //
      pipeline = PipelineEngineFactory.createPipelineEngine(this, runConfiguration, getMetadataProvider(), pipelineMeta);
      pipeline.setParent(this);
      // set the parent workflow on the pipeline, variables are taken from here...
      //
      pipeline.setParentWorkflow(parentWorkflow);
      pipeline.setParentVariables(parentWorkflow);
      pipeline.setLogLevel(pipelineLogLevel);
      pipeline.setPreviousResult(previousResult);
      // inject the metadataProvider
      pipeline.setMetadataProvider(getMetadataProvider());
      // Handle parameters...
      //
      pipeline.initializeFrom(null);
      pipeline.copyParametersFromDefinitions(pipelineMeta);
      // Pass the parameter values and activate...
      //
      TransformWithMappingMeta.activateParams(pipeline, pipeline, this, parameterNames, parameters, parameterValues, isPassingAllParameters());
      // First get the root workflow
      //
      // NOTE(review): rootWorkflow is computed here but not used in the visible code
      // below — confirm whether it is still needed.
      IWorkflowEngine<WorkflowMeta> rootWorkflow = parentWorkflow;
      while (rootWorkflow.getParentWorkflow() != null) {
        rootWorkflow = rootWorkflow.getParentWorkflow();
      }
      try {
        // Start execution...
        //
        pipeline.execute();
        // Only wait/collect results when configured to; otherwise this is fire-and-forget.
        if (isWaitingToFinish()) {
          pipeline.waitUntilFinished();
          if (parentWorkflow.isStopped() || pipeline.getErrors() != 0) {
            pipeline.stopAll();
            result.setNrErrors(1);
          }
          updateResult(result);
        }
        if (setLogfile) {
          ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, HopVfs.getFileObject(realLogFilename), parentWorkflow.getWorkflowName(), toString());
          result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
        }
      } catch (HopException e) {
        logError(BaseMessages.getString(PKG, "ActionPipeline.Error.UnablePrepareExec"), e);
        result.setNrErrors(1);
      }
    } catch (Exception e) {
      // Any other failure in this iteration is logged and counted; the loop condition
      // then stops further per-row iterations (nrErrors != 0).
      logError(BaseMessages.getString(PKG, "ActionPipeline.ErrorUnableOpenPipeline", e.getMessage()));
      logError(Const.getStackTracker(e));
      result.setNrErrors(1);
    }
    iteration++;
  }
  // Phase 3: close the dedicated log file and register it as a result file.
  if (setLogfile) {
    if (logChannelFileWriter != null) {
      logChannelFileWriter.stopLogging();
      ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, logChannelFileWriter.getLogFile(), parentWorkflow.getWorkflowName(), getName());
      result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
      //
      if (logChannelFileWriter.getException() != null) {
        logError("Unable to open log file [" + getLogFilename() + "] : ");
        logError(Const.getStackTracker(logChannelFileWriter.getException()));
        result.setNrErrors(1);
        result.setResult(false);
        return result;
      }
    }
  }
  if (result.getNrErrors() == 0) {
    result.setResult(true);
  } else {
    result.setResult(false);
  }
  return result;
}
Usage of org.apache.hop.core.parameters.NamedParameters in the Apache Hop project.
Class: ActionWorkflow — method: execute().
/**
 * Executes the configured sub-workflow and returns the result.
 *
 * <p>Loads the workflow once, then runs it either a single time or once per incoming result row
 * (when {@code execPerRow} is set), copying parameter values from the parent workflow, the action
 * configuration, and/or the previous result rows before each run. Failures are reported through
 * the result's error count and boolean rather than thrown.
 *
 * @param result The result of the previous execution
 * @param nr the action number
 * @return The Result of the execution
 */
@Override
public Result execute(Result result, int nr) throws HopException {
  result.setEntryNr(nr);
  LogChannelFileWriter logChannelFileWriter = null;
  LogLevel jobLogLevel = parentWorkflow.getLogLevel();
  // Phase 1: optional dedicated log file for this action's sub-workflow run.
  if (setLogfile) {
    String realLogFilename = resolve(getLogFilename());
    // if we do not have one, we must fail
    if (Utils.isEmpty(realLogFilename)) {
      logError(BaseMessages.getString(PKG, "ActionWorkflow.Exception.LogFilenameMissing"));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    // create parent folder?
    if (!createParentFolder(realLogFilename)) {
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    try {
      logChannelFileWriter = new LogChannelFileWriter(this.getLogChannelId(), HopVfs.getFileObject(realLogFilename), setAppendLogfile);
      logChannelFileWriter.startLogging();
    } catch (HopException e) {
      logError("Unable to open file appender for file [" + getLogFilename() + "] : " + e.toString());
      logError(Const.getStackTracker(e));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    jobLogLevel = logFileLevel;
  }
  try {
    // First load the workflow, outside of the loop...
    if (parentWorkflow.getWorkflowMeta() != null) {
      // reset the internal variables again.
      // Maybe we should split up the variables even more like in UNIX shells.
      // The internal variables need to be reset to be able use them properly
      // in 2 sequential sub workflows.
      parentWorkflow.getWorkflowMeta().setInternalHopVariables(this);
    }
    // Explain what we are loading...
    //
    logDetailed("Loading workflow from XML file : [" + resolve(filename) + "]");
    WorkflowMeta workflowMeta = getWorkflowMeta(getMetadataProvider(), this);
    //
    if (workflowMeta == null) {
      throw new HopException("Unable to load the workflow: please specify a filename");
    }
    // Guard against a workflow (transitively) invoking itself.
    verifyRecursiveExecution(parentWorkflow, workflowMeta);
    int iteration = 0;
    copyFrom(parentWorkflow);
    setParentVariables(parentWorkflow);
    RowMetaAndData resultRow = null;
    boolean first = true;
    // NOTE(review): rows is never null after this line, so the "rows != null" checks
    // below are always true.
    List<RowMetaAndData> rows = new ArrayList<>(result.getRows());
    // Phase 2: execution loop — one iteration when execPerRow is false, otherwise one
    // per incoming row. NOTE(review): unlike ActionPipeline.execute(), this condition
    // has no parentWorkflow.isStopped() check; stopping is only detected inside the
    // isWaitingToFinish() wait below. Confirm this is intentional.
    while ((first && !execPerRow) || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0)) {
      first = false;
      //
      if (execPerRow) {
        result.getRows().clear();
      }
      if (rows != null && execPerRow) {
        resultRow = rows.get(iteration);
      } else {
        resultRow = null;
      }
      // Collect the parameter values for this iteration into a fresh parameter set.
      INamedParameters namedParam = new NamedParameters();
      // Inherit all of the parent workflow's parameter definitions and values first.
      if (paramsFromPrevious) {
        String[] parentParameters = parentWorkflow.listParameters();
        for (int idx = 0; idx < parentParameters.length; idx++) {
          String par = parentParameters[idx];
          String def = parentWorkflow.getParameterDefault(par);
          String val = parentWorkflow.getParameterValue(par);
          String des = parentWorkflow.getParameterDescription(par);
          namedParam.addParameterDefinition(par, def, des);
          namedParam.setParameterValue(par, val);
        }
      }
      // Then apply the parameters configured on this action, overriding inherited values.
      if (parameters != null) {
        for (int idx = 0; idx < parameters.length; idx++) {
          if (!Utils.isEmpty(parameters[idx])) {
            // Only define the parameter if it wasn't already inherited above.
            if (Const.indexOfString(parameters[idx], namedParam.listParameters()) < 0) {
              // We have a parameter
              try {
                namedParam.addParameterDefinition(parameters[idx], "", "Action runtime");
              } catch (DuplicateParamException e) {
                // Should never happen
                //
                logError("Duplicate parameter definition for " + parameters[idx]);
              }
            }
            if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
              namedParam.setParameterValue(parameters[idx], Const.NVL(resolve(parameterValues[idx]), ""));
            } else {
              // something filled in, in the field column...
              //
              String value = "";
              if (resultRow != null) {
                value = resultRow.getString(parameterFieldNames[idx], "");
              }
              namedParam.setParameterValue(parameters[idx], value);
            }
          }
        }
      }
      Result oneResult = new Result();
      List<RowMetaAndData> sourceRows = null;
      if (execPerRow) {
        // Execute for each input row
        // Just pass a single row
        //
        List<RowMetaAndData> newList = new ArrayList<>();
        newList.add(resultRow);
        sourceRows = newList;
        // NOTE(review): this paramsFromPrevious block is byte-identical to the one in
        // the else branch below — a candidate for extraction into a private helper.
        if (paramsFromPrevious) {
          if (parameters != null) {
            for (int idx = 0; idx < parameters.length; idx++) {
              if (!Utils.isEmpty(parameters[idx])) {
                // We have a parameter
                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                  namedParam.setParameterValue(parameters[idx], Const.NVL(resolve(parameterValues[idx]), ""));
                } else {
                  String fieldValue = "";
                  if (resultRow != null) {
                    fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                  }
                  // Get the value from the input stream
                  namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                }
              }
            }
          }
        }
      } else {
        // Keep it as it was...
        //
        sourceRows = result.getRows();
        if (paramsFromPrevious) {
          if (parameters != null) {
            for (int idx = 0; idx < parameters.length; idx++) {
              if (!Utils.isEmpty(parameters[idx])) {
                // We have a parameter
                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                  namedParam.setParameterValue(parameters[idx], Const.NVL(resolve(parameterValues[idx]), ""));
                } else {
                  String fieldValue = "";
                  if (resultRow != null) {
                    fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                  }
                  // Get the value from the input stream
                  namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                }
              }
            }
          }
        }
      }
      // Create a new workflow
      //
      workflow = WorkflowEngineFactory.createWorkflowEngine(this, resolve(runConfiguration), getMetadataProvider(), workflowMeta, this);
      workflow.setParentWorkflow(parentWorkflow);
      workflow.setLogLevel(jobLogLevel);
      workflow.shareWith(this);
      workflow.setResult(result);
      workflow.setInternalHopVariables();
      workflow.copyParametersFromDefinitions(workflowMeta);
      workflow.setInteractive(parentWorkflow.isInteractive());
      if (workflow.isInteractive()) {
        workflow.getActionListeners().addAll(parentWorkflow.getActionListeners());
      }
      // Set the parameters calculated above on this instance.
      //
      workflow.clearParameterValues();
      String[] parameterNames = workflow.listParameters();
      for (int idx = 0; idx < parameterNames.length; idx++) {
        // Grab the parameter value set in the action
        //
        String thisValue = namedParam.getParameterValue(parameterNames[idx]);
        if (!Utils.isEmpty(thisValue)) {
          // Set the value as specified by the user in the action
          //
          workflow.setParameterValue(parameterNames[idx], thisValue);
        } else {
          // Otherwise fall back to the parent workflow's value, if configured to pass all.
          if (isPassingAllParameters()) {
            String parentValue = parentWorkflow.getParameterValue(parameterNames[idx]);
            if (!Utils.isEmpty(parentValue)) {
              workflow.setParameterValue(parameterNames[idx], parentValue);
            }
          }
        }
      }
      workflow.activateParameters(workflow);
      // Set the source rows we calculated above...
      //
      workflow.setSourceRows(sourceRows);
      // Link the workflow with the sub-workflow
      parentWorkflow.getWorkflowTracker().addWorkflowTracker(workflow.getWorkflowTracker());
      // Link both ways!
      workflow.getWorkflowTracker().setParentWorkflowTracker(parentWorkflow.getWorkflowTracker());
      // Run the sub-workflow on its own thread.
      ActionWorkflowRunner runner = new ActionWorkflowRunner(workflow, result, nr, log);
      Thread workflowRunnerThread = new Thread(runner);
      // added UUID to thread name, otherwise threads do share names if workflows actions are
      // executed in parallel in a
      // parent workflow
      // if that happens, contained pipelines start closing each other's connections
      workflowRunnerThread.setName(Const.NVL(workflow.getWorkflowMeta().getName(), workflow.getWorkflowMeta().getFilename()) + " UUID: " + UUID.randomUUID().toString());
      workflowRunnerThread.start();
      if (isWaitingToFinish()) {
        // NOTE(review): this is a busy-wait — Thread.sleep(0, 1) effectively spins.
        // A join with timeout or a latch would avoid burning CPU here.
        while (!runner.isFinished() && !parentWorkflow.isStopped()) {
          try {
            Thread.sleep(0, 1);
          } catch (InterruptedException e) {
            // Ignore
          }
        }
        // if the parent-workflow was stopped, stop the sub-workflow too...
        if (parentWorkflow.isStopped()) {
          workflow.stopExecution();
          // Wait until finished!
          runner.waitUntilFinished();
        }
        oneResult = runner.getResult();
        // clear only the numbers, NOT the files or rows.
        result.clear();
        result.add(oneResult);
        // Set the result rows too, if any ...
        if (!Utils.isEmpty(oneResult.getRows())) {
          result.setRows(new ArrayList<>(oneResult.getRows()));
        }
        // A failed sub-workflow counts as one more error on this action's result.
        if (oneResult.getResult() == false) {
          result.setNrErrors(result.getNrErrors() + 1);
        }
      }
      iteration++;
    }
  } catch (HopException ke) {
    logError("Error running action 'workflow' : ", ke);
    result.setResult(false);
    result.setNrErrors(1L);
  }
  // Phase 3: close the dedicated log file and register it as a result file.
  if (setLogfile) {
    if (logChannelFileWriter != null) {
      logChannelFileWriter.stopLogging();
      ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, logChannelFileWriter.getLogFile(), parentWorkflow.getWorkflowName(), getName());
      result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
      //
      if (logChannelFileWriter.getException() != null) {
        logError("Unable to open log file [" + getLogFilename() + "] : ");
        logError(Const.getStackTracker(logChannelFileWriter.getException()));
        result.setNrErrors(1);
        result.setResult(false);
        return result;
      }
    }
  }
  if (result.getNrErrors() > 0) {
    result.setResult(false);
  } else {
    result.setResult(true);
  }
  return result;
}
Aggregations