Use of org.apache.hop.core.logging.LogLevel in project hop by apache.
The class WorkflowActionWriteToLogLoadSaveTest, method createAttributeValidatorsMap:
@Override
protected Map<String, IFieldLoadSaveValidator<?>> createAttributeValidatorsMap() {
  EnumSet<LogLevel> logLevels = EnumSet.allOf(LogLevel.class);
  LogLevel random = (LogLevel) logLevels.toArray()[new Random().nextInt(logLevels.size())];
  return toMap("loglevel", new EnumLoadSaveValidator<>(random));
}
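The test picks a random LogLevel so that the load/save round trip is exercised with a different level on each run. The EnumSet/toArray() detour and the cast can be avoided with the enum's generated values() array; a minimal self-contained sketch, using a stand-in enum since the real LogLevel lives in the Hop code base:

import java.util.Random;

public class RandomEnumDemo {
  // Stand-in enum; the real org.apache.hop.core.logging.LogLevel defines values such as
  // NOTHING, ERROR, MINIMAL, BASIC, DETAILED, DEBUG, ROWLEVEL.
  enum LogLevel { NOTHING, ERROR, MINIMAL, BASIC, DETAILED, DEBUG, ROWLEVEL }

  public static void main(String[] args) {
    // values() returns a typed array, so no cast is needed
    LogLevel[] levels = LogLevel.values();
    LogLevel random = levels[new Random().nextInt(levels.length)];
    System.out.println("Randomly chosen level: " + random);
  }
}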
Use of org.apache.hop.core.logging.LogLevel in project hop by apache.
The class ActionShell, method execute:
@Override
public Result execute(Result result, int nr) throws HopException {
  FileLoggingEventListener loggingEventListener = null;
  LogLevel shellLogLevel = parentWorkflow.getLogLevel();
  if (setLogfile) {
    String realLogFilename = resolve(getLogFilename());
    // If we do not have one, we must fail
    if (Utils.isEmpty(realLogFilename)) {
      logError(BaseMessages.getString(PKG, "ActionShell.Exception.LogFilenameMissing"));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    try {
      loggingEventListener = new FileLoggingEventListener(getLogChannelId(), realLogFilename, setAppendLogfile);
      HopLogStore.getAppender().addLoggingEventListener(loggingEventListener);
    } catch (HopException e) {
      logError(BaseMessages.getString(PKG, "ActionShell.Error.UnableopenAppenderFile", getLogFilename(), e.toString()));
      logError(Const.getStackTracker(e));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    shellLogLevel = logFileLevel;
  }
  log.setLogLevel(shellLogLevel);
  result.setEntryNr(nr);
  // "Translate" the arguments for later
  String[] substArgs = null;
  if (arguments != null) {
    substArgs = new String[arguments.length];
    for (int idx = 0; idx < arguments.length; idx++) {
      substArgs[idx] = resolve(arguments[idx]);
    }
  }
  int iteration = 0;
  String[] args = substArgs;
  RowMetaAndData resultRow = null;
  boolean first = true;
  List<RowMetaAndData> rows = result.getRows();
  if (log.isDetailed()) {
    logDetailed(BaseMessages.getString(PKG, "ActionShell.Log.FoundPreviousRows", "" + (rows != null ? rows.size() : 0)));
  }
  while ((first && !execPerRow) || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0)) {
    first = false;
    if (rows != null && execPerRow) {
      resultRow = rows.get(iteration);
    } else {
      resultRow = null;
    }
    List<RowMetaAndData> cmdRows = null;
    if (execPerRow) {
      if (argFromPrevious) {
        if (resultRow != null) {
          args = new String[resultRow.size()];
          for (int i = 0; i < resultRow.size(); i++) {
            args[i] = resultRow.getString(i, null);
          }
        }
      } else {
        // Just pass a single row
        List<RowMetaAndData> newList = new ArrayList<>();
        newList.add(resultRow);
        cmdRows = newList;
      }
    } else {
      if (argFromPrevious) {
        // Only put the first row on the arguments
        args = null;
        if (resultRow != null) {
          args = new String[resultRow.size()];
          for (int i = 0; i < resultRow.size(); i++) {
            args[i] = resultRow.getString(i, null);
          }
        } else {
          cmdRows = rows;
        }
      } else {
        // Keep it as it was...
        cmdRows = rows;
      }
    }
    executeShell(result, cmdRows, args);
    iteration++;
  }
  if (setLogfile) {
    if (loggingEventListener != null) {
      HopLogStore.getAppender().removeLoggingEventListener(loggingEventListener);
      loggingEventListener.close();
      ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, loggingEventListener.getFile(), parentWorkflow.getWorkflowName(), getName());
      result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
    }
  }
  return result;
}
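The part of this method worth isolating is the loop condition: the shell command runs exactly once when execPerRow is false, and once per previous result row (stopping at the first error) when it is true. A stripped-down sketch of just that loop shape; the run() method and the use of plain strings as rows are illustrative stand-ins, not Hop API:

import java.util.List;

public class ExecPerRowDemo {
  // Mirrors the loop shape of ActionShell.execute(): run once when execPerRow is
  // false, otherwise run once per previous result row, stopping on the first error.
  static int run(boolean execPerRow, List<String> rows) {
    int iteration = 0;
    int errors = 0;
    boolean first = true;
    while ((first && !execPerRow)
        || (execPerRow && rows != null && iteration < rows.size() && errors == 0)) {
      first = false;
      String resultRow = (execPerRow && rows != null) ? rows.get(iteration) : null;
      System.out.println("executing with row: " + resultRow);
      iteration++;
    }
    return iteration;
  }

  public static void main(String[] args) {
    System.out.println(run(false, List.of("a", "b"))); // 1: a single execution
    System.out.println(run(true, List.of("a", "b")));  // 2: one execution per row
  }
}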
Use of org.apache.hop.core.logging.LogLevel in project hop by apache.
The class WriteToLogMetaSymmetric, method setLogLevelString:
public void setLogLevelString(String value) {
  LogLevel lvl = LogLevel.getLogLevelForCode(value);
  super.setLogLevel(lvl.getLevel());
}
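This setter bridges the string code stored in serialized metadata to the numeric level used internally. A self-contained sketch of such a code-to-level mapping; the exact codes and the fallback-to-BASIC behavior are assumptions modeled on the snippet above, not the real LogLevel implementation:

public class LogLevelCodeDemo {
  // Stand-in for Hop's LogLevel: each level carries a string code and a numeric rank.
  enum Level {
    NOTHING("Nothing", 0), ERROR("Error", 1), MINIMAL("Minimal", 2),
    BASIC("Basic", 3), DETAILED("Detailed", 4), DEBUG("Debug", 5), ROWLEVEL("Rowlevel", 6);

    final String code;
    final int level;

    Level(String code, int level) {
      this.code = code;
      this.level = level;
    }

    // Analogous to LogLevel.getLogLevelForCode(String); falls back to BASIC
    // for unknown codes (an assumption for this sketch).
    static Level forCode(String code) {
      for (Level l : values()) {
        if (l.code.equalsIgnoreCase(code)) {
          return l;
        }
      }
      return BASIC;
    }
  }

  public static void main(String[] args) {
    System.out.println(Level.forCode("Debug").level);   // 5
    System.out.println(Level.forCode("unknown").level); // 3 (BASIC fallback)
  }
}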
Use of org.apache.hop.core.logging.LogLevel in project hop by apache.
The class ActionPipeline, method execute:
/**
 * Execute this action and return the result. In this case it means just setting the result
 * boolean in the Result class.
 *
 * @param result The result of the previous execution
 * @param nr the action number
 * @return The Result of the execution.
 */
@Override
public Result execute(Result result, int nr) throws HopException {
  result.setEntryNr(nr);
  LogChannelFileWriter logChannelFileWriter = null;
  LogLevel pipelineLogLevel = parentWorkflow.getLogLevel();
  String realLogFilename = "";
  if (setLogfile) {
    pipelineLogLevel = logFileLevel;
    realLogFilename = resolve(getLogFilename());
    // If we do not have one, we must fail
    if (Utils.isEmpty(realLogFilename)) {
      logError(BaseMessages.getString(PKG, "ActionPipeline.Exception.LogFilenameMissing"));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    // Create the parent folder if needed
    if (!FileUtil.createParentFolder(PKG, realLogFilename, createParentFolder, this.getLogChannel())) {
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    try {
      logChannelFileWriter = new LogChannelFileWriter(this.getLogChannelId(), HopVfs.getFileObject(realLogFilename), setAppendLogfile);
      logChannelFileWriter.startLogging();
    } catch (HopException e) {
      logError(BaseMessages.getString(PKG, "ActionPipeline.Error.UnableOpenAppender", realLogFilename, e.toString()));
      logError(Const.getStackTracker(e));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
  }
  logDetailed(BaseMessages.getString(PKG, "ActionPipeline.Log.OpeningPipeline", resolve(getFilename())));
  // Load the pipeline only once for the complete loop!
  // Throws an exception if it was not possible to load the pipeline, for example if the XML file
  // doesn't exist.
  // Log the stack trace and return an error condition from this
  //
  PipelineMeta pipelineMeta = null;
  try {
    pipelineMeta = getPipelineMeta(getMetadataProvider(), this);
  } catch (HopException e) {
    logError(BaseMessages.getString(PKG, "ActionPipeline.Exception.UnableToRunWorkflow", parentWorkflowMeta.getName(), getName(), StringUtils.trim(e.getMessage())), e);
    result.setNrErrors(1);
    result.setResult(false);
    return result;
  }
  int iteration = 0;
  RowMetaAndData resultRow = null;
  boolean first = true;
  List<RowMetaAndData> rows = new ArrayList<>(result.getRows());
  while ((first && !execPerRow) || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0) && !parentWorkflow.isStopped()) {
    //
    if (execPerRow) {
      result.getRows().clear();
    }
    if (rows != null && execPerRow) {
      resultRow = rows.get(iteration);
    } else {
      resultRow = null;
    }
    INamedParameters namedParam = new NamedParameters();
    if (parameters != null) {
      for (int idx = 0; idx < parameters.length; idx++) {
        if (!Utils.isEmpty(parameters[idx])) {
          // We have a parameter
          //
          namedParam.addParameterDefinition(parameters[idx], "", "Action runtime");
          if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
            // There is no field name specified.
            //
            String value = Const.NVL(resolve(parameterValues[idx]), "");
            namedParam.setParameterValue(parameters[idx], value);
          } else {
            // Something was filled in, in the field column...
            //
            String value = "";
            if (resultRow != null) {
              value = resultRow.getString(parameterFieldNames[idx], "");
            }
            namedParam.setParameterValue(parameters[idx], value);
          }
        }
      }
    }
    first = false;
    Result previousResult = result;
    try {
      if (isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "ActionPipeline.StartingPipeline", getFilename(), getName(), getDescription()));
      }
      if (clearResultRows) {
        previousResult.setRows(new ArrayList<>());
      }
      if (clearResultFiles) {
        previousResult.getResultFiles().clear();
      }
      /*
       * Set one or more "result" rows on the pipeline...
       */
      if (execPerRow) {
        // Execute for each input row
        // Just pass a single row
        List<RowMetaAndData> newList = new ArrayList<>();
        newList.add(resultRow);
        // This previous result rows list can be either empty or not.
        // Depending on the checkbox "clear result rows"
        // In this case, it would execute the pipeline with one extra row each time
        // Can't figure out a real use-case for it, but hey, who am I to decide that, right?
        // :-)
        //
        previousResult.getRows().addAll(newList);
        if (paramsFromPrevious) {
          if (parameters != null) {
            for (int idx = 0; idx < parameters.length; idx++) {
              if (!Utils.isEmpty(parameters[idx])) {
                // We have a parameter
                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                  namedParam.setParameterValue(parameters[idx], Const.NVL(resolve(parameterValues[idx]), ""));
                } else {
                  String fieldValue = "";
                  if (resultRow != null) {
                    fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                  }
                  // Get the value from the input stream
                  namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                }
              }
            }
          }
        }
      } else {
        if (paramsFromPrevious) {
          // Copy the input parameters
          if (parameters != null) {
            for (int idx = 0; idx < parameters.length; idx++) {
              if (!Utils.isEmpty(parameters[idx])) {
                // We have a parameter
                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                  namedParam.setParameterValue(parameters[idx], Const.NVL(resolve(parameterValues[idx]), ""));
                } else {
                  String fieldValue = "";
                  if (resultRow != null) {
                    fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                  }
                  // Get the value from the input stream
                  namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                }
              }
            }
          }
        }
      }
      // Handle the parameters...
      //
      String[] parameterNames = pipelineMeta.listParameters();
      prepareFieldNamesParameters(parameters, parameterFieldNames, parameterValues, namedParam, this);
      if (StringUtils.isEmpty(runConfiguration)) {
        throw new HopException("This action needs a run configuration to use to execute the specified pipeline");
      }
      runConfiguration = resolve(runConfiguration);
      log.logBasic(BaseMessages.getString(PKG, "ActionPipeline.RunConfig.Message", runConfiguration));
      // Create the pipeline from meta-data
      //
      pipeline = PipelineEngineFactory.createPipelineEngine(this, runConfiguration, getMetadataProvider(), pipelineMeta);
      pipeline.setParent(this);
      // Set the parent workflow on the pipeline, variables are taken from here...
      //
      pipeline.setParentWorkflow(parentWorkflow);
      pipeline.setParentVariables(parentWorkflow);
      pipeline.setLogLevel(pipelineLogLevel);
      pipeline.setPreviousResult(previousResult);
      // Inject the metadataProvider
      pipeline.setMetadataProvider(getMetadataProvider());
      // Handle parameters...
      //
      pipeline.initializeFrom(null);
      pipeline.copyParametersFromDefinitions(pipelineMeta);
      // Pass the parameter values and activate...
      //
      TransformWithMappingMeta.activateParams(pipeline, pipeline, this, parameterNames, parameters, parameterValues, isPassingAllParameters());
      // First get the root workflow
      //
      IWorkflowEngine<WorkflowMeta> rootWorkflow = parentWorkflow;
      while (rootWorkflow.getParentWorkflow() != null) {
        rootWorkflow = rootWorkflow.getParentWorkflow();
      }
      try {
        // Start execution...
        //
        pipeline.execute();
        //
        if (isWaitingToFinish()) {
          pipeline.waitUntilFinished();
          if (parentWorkflow.isStopped() || pipeline.getErrors() != 0) {
            pipeline.stopAll();
            result.setNrErrors(1);
          }
          updateResult(result);
        }
        if (setLogfile) {
          ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, HopVfs.getFileObject(realLogFilename), parentWorkflow.getWorkflowName(), toString());
          result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
        }
      } catch (HopException e) {
        logError(BaseMessages.getString(PKG, "ActionPipeline.Error.UnablePrepareExec"), e);
        result.setNrErrors(1);
      }
    } catch (Exception e) {
      logError(BaseMessages.getString(PKG, "ActionPipeline.ErrorUnableOpenPipeline", e.getMessage()));
      logError(Const.getStackTracker(e));
      result.setNrErrors(1);
    }
    iteration++;
  }
  if (setLogfile) {
    if (logChannelFileWriter != null) {
      logChannelFileWriter.stopLogging();
      ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, logChannelFileWriter.getLogFile(), parentWorkflow.getWorkflowName(), getName());
      result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
      // Report any error the log file writer ran into
      if (logChannelFileWriter.getException() != null) {
        logError("Unable to open log file [" + getLogFilename() + "] : ");
        logError(Const.getStackTracker(logChannelFileWriter.getException()));
        result.setNrErrors(1);
        result.setResult(false);
        return result;
      }
    }
  }
  if (result.getNrErrors() == 0) {
    result.setResult(true);
  } else {
    result.setResult(false);
  }
  return result;
}
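The parameter handling in both branches applies the same rule: when the field-name column is empty, the static value column (after variable resolution) is used; otherwise the value is read from the incoming result row. A condensed sketch of that rule, with plain maps and arrays standing in for Hop's RowMetaAndData and INamedParameters (all names here are illustrative):

import java.util.HashMap;
import java.util.Map;

public class ParamResolutionDemo {
  // Resolve each named parameter either from a static value or from a field
  // of the incoming "result row" (modeled here as a name->value map).
  static Map<String, String> resolveParams(
      String[] names, String[] fieldNames, String[] staticValues, Map<String, String> resultRow) {
    Map<String, String> resolved = new HashMap<>();
    for (int idx = 0; idx < names.length; idx++) {
      if (names[idx] == null || names[idx].isEmpty()) {
        continue; // no parameter in this slot
      }
      String fieldName = fieldNames[idx] == null ? "" : fieldNames[idx].trim();
      if (fieldName.isEmpty()) {
        // No field column: use the static value column
        resolved.put(names[idx], staticValues[idx] == null ? "" : staticValues[idx]);
      } else {
        // Field column set: read the value from the incoming row
        resolved.put(names[idx], resultRow.getOrDefault(fieldName, ""));
      }
    }
    return resolved;
  }

  public static void main(String[] args) {
    Map<String, String> row = Map.of("customerId", "42");
    System.out.println(resolveParams(
        new String[] {"PARAM_A", "PARAM_B"},
        new String[] {"", "customerId"},
        new String[] {"static-value", null},
        row));
    // {PARAM_A=static-value, PARAM_B=42} (map iteration order may vary)
  }
}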
Use of org.apache.hop.core.logging.LogLevel in project hop by apache.
The class SetTransformDebugLevelExtensionPoint, method callExtensionPoint:
@Override
public void callExtensionPoint(ILogChannel log, IVariables variables, IPipelineEngine<PipelineMeta> pipeline) throws HopException {
  Map<String, String> transformLevelMap = pipeline.getPipelineMeta().getAttributesMap().get(Defaults.DEBUG_GROUP);
  if (transformLevelMap != null) {
    log.logDetailed("Set debug level information on pipeline : " + pipeline.getPipelineMeta().getName());
    // Figure out which transforms were involved from the map.
    // Trying to go after each transform in a very large pipeline might otherwise
    // slow things down.
    //
    List<String> transformNames = new ArrayList<>();
    for (String key : transformLevelMap.keySet()) {
      int index = key.indexOf(" : ");
      if (index > 0) {
        String transformName = key.substring(0, index);
        if (!transformNames.contains(transformName)) {
          transformNames.add(transformName);
        }
      }
    }
    for (String transformName : transformNames) {
      log.logDetailed("Handling debug level for transform : " + transformName);
      try {
        final TransformDebugLevel debugLevel = DebugLevelUtil.getTransformDebugLevel(transformLevelMap, transformName);
        if (debugLevel != null) {
          log.logDetailed("Found debug level info for transform " + transformName);
          List<IEngineComponent> transformCopies = pipeline.getComponentCopies(transformName);
          if (debugLevel.getStartRow() < 0 && debugLevel.getEndRow() < 0 && debugLevel.getCondition().isEmpty()) {
            log.logDetailed("Set logging level for transform " + transformName + " to " + debugLevel.getLogLevel().getDescription());
            //
            for (IEngineComponent transformCopy : transformCopies) {
              LogLevel logLevel = debugLevel.getLogLevel();
              transformCopy.getLogChannel().setLogLevel(logLevel);
              log.logDetailed("Applied logging level " + logLevel.getDescription() + " on transform copy " + transformCopy.getName() + "." + transformCopy.getCopyNr());
            }
          } else {
            //
            for (IEngineComponent transformCopy : transformCopies) {
              final LogLevel baseLogLevel = transformCopy.getLogChannel().getLogLevel();
              final AtomicLong rowCounter = new AtomicLong(0L);
              transformCopy.addRowListener(new IRowListener() {
                @Override
                public void rowReadEvent(IRowMeta rowMeta, Object[] row) {
                  rowCounter.incrementAndGet();
                  boolean enabled = false;
                  Condition condition = debugLevel.getCondition();
                  if (debugLevel.getStartRow() > 0 && rowCounter.get() >= debugLevel.getStartRow() && debugLevel.getEndRow() >= 0 && debugLevel.getEndRow() >= rowCounter.get()) {
                    // If we have a start and an end, we want to stay between start and end
                    enabled = true;
                  } else if (debugLevel.getStartRow() <= 0 && debugLevel.getEndRow() >= 0 && rowCounter.get() <= debugLevel.getEndRow()) {
                    // If we don't have a start row, just an end...
                    enabled = true;
                  } else if (debugLevel.getEndRow() <= 0 && debugLevel.getStartRow() >= 0 && rowCounter.get() >= debugLevel.getStartRow()) {
                    enabled = true;
                  }
                  if ((debugLevel.getStartRow() <= 0 && debugLevel.getEndRow() <= 0 || enabled) && !condition.isEmpty()) {
                    enabled = condition.evaluate(rowMeta, row);
                  }
                  if (enabled) {
                    transformCopy.setLogLevel(debugLevel.getLogLevel());
                  }
                }

                @Override
                public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) throws HopTransformException {
                  // Set the log level back to the original value.
                  //
                  transformCopy.getLogChannel().setLogLevel(baseLogLevel);
                }

                @Override
                public void errorRowWrittenEvent(IRowMeta rowMeta, Object[] row) {
                }
              });
            }
          }
        }
      } catch (Exception e) {
        log.logError("Unable to handle specific debug level for transform : " + transformName, e);
      }
    }
  }
}
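The row listener toggles the channel's log level per row: rowReadEvent raises it when the row falls inside the configured window (and the optional condition matches), and rowWrittenEvent restores the original level. The window checks are easiest to read in isolation; the following sketch mirrors the three branches of the listener above and is not a Hop API:

import java.util.concurrent.atomic.AtomicLong;

public class RowWindowDemo {
  // A row is "in the debug window" when it falls between the (optional)
  // start and end row numbers; non-positive values mean "no bound".
  static boolean inWindow(long rowNr, long startRow, long endRow) {
    if (startRow > 0 && rowNr >= startRow && endRow >= 0 && endRow >= rowNr) {
      return true; // both bounds set: stay between start and end
    } else if (startRow <= 0 && endRow >= 0 && rowNr <= endRow) {
      return true; // only an end row
    } else if (endRow <= 0 && startRow >= 0 && rowNr >= startRow) {
      return true; // only a start row
    }
    return false;
  }

  public static void main(String[] args) {
    AtomicLong rowCounter = new AtomicLong(0L);
    for (int i = 0; i < 8; i++) {
      long rowNr = rowCounter.incrementAndGet();
      System.out.println("row " + rowNr + " in window [3,5]: " + inWindow(rowNr, 3, 5));
    }
  }
}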