Use of org.apache.hop.core.logging.LogChannelFileWriter in project hop by Apache.
The class AddPipelineServlet, method doGet.
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response)
    throws ServletException, IOException {
  if (isJettyMode() && !request.getRequestURI().startsWith(CONTEXT_PATH)) {
    return;
  }

  if (log.isDebug()) {
    logDebug("Addition of pipeline requested");
  }

  boolean useXML = "Y".equalsIgnoreCase(request.getParameter("xml"));

  PrintWriter out = response.getWriter();
  BufferedReader in = request.getReader();

  if (log.isDetailed()) {
    logDetailed("Encoding: " + request.getCharacterEncoding());
  }

  if (useXML) {
    response.setContentType("text/xml");
    out.print(XmlHandler.getXmlHeader());
  } else {
    response.setContentType("text/html");
    out.println("<HTML>");
    out.println("<HEAD><TITLE>Add pipeline</TITLE></HEAD>");
    out.println("<BODY>");
  }

  response.setStatus(HttpServletResponse.SC_OK);

  String realLogFilename = null;
  PipelineExecutionConfiguration pipelineExecutionConfiguration = null;
  try {
    // First read the complete pipeline in memory from the request
    //
    StringBuilder xml = new StringBuilder(request.getContentLength());
    int c;
    while ((c = in.read()) != -1) {
      xml.append((char) c);
    }

    // Parse the XML, create a pipeline configuration
    //
    PipelineConfiguration pipelineConfiguration = PipelineConfiguration.fromXml(xml.toString());
    PipelineMeta pipelineMeta = pipelineConfiguration.getPipelineMeta();
    pipelineExecutionConfiguration = pipelineConfiguration.getPipelineExecutionConfiguration();

    if (log.isDetailed()) {
      logDetailed("Logging level set to " + log.getLogLevel().getDescription());
    }

    String serverObjectId = UUID.randomUUID().toString();
    SimpleLoggingObject servletLoggingObject =
        new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.HOP_SERVER, null);
    servletLoggingObject.setContainerObjectId(serverObjectId);
    servletLoggingObject.setLogLevel(pipelineExecutionConfiguration.getLogLevel());

    IHopMetadataProvider metadataProvider =
        new MultiMetadataProvider(
            variables,
            getServerConfig().getMetadataProvider(),
            pipelineConfiguration.getMetadataProvider());

    String runConfigurationName = pipelineExecutionConfiguration.getRunConfiguration();
    final IPipelineEngine<PipelineMeta> pipeline =
        PipelineEngineFactory.createPipelineEngine(
            variables, runConfigurationName, metadataProvider, pipelineMeta);
    pipeline.setParent(servletLoggingObject);

    if (pipelineExecutionConfiguration.isSetLogfile()) {
      realLogFilename = pipelineExecutionConfiguration.getLogFileName();
      final LogChannelFileWriter logChannelFileWriter;
      try {
        FileUtil.createParentFolder(
            AddPipelineServlet.class,
            realLogFilename,
            pipelineExecutionConfiguration.isCreateParentFolder(),
            pipeline.getLogChannel());
        logChannelFileWriter =
            new LogChannelFileWriter(
                servletLoggingObject.getLogChannelId(),
                HopVfs.getFileObject(realLogFilename),
                pipelineExecutionConfiguration.isSetAppendLogfile());
        logChannelFileWriter.startLogging();

        pipeline.addExecutionFinishedListener(
            pipelineEngine -> {
              if (logChannelFileWriter != null) {
                logChannelFileWriter.stopLogging();
              }
            });
      } catch (HopException e) {
        logError(Const.getStackTracker(e));
      }
    }

    getPipelineMap().addPipeline(pipelineMeta.getName(), serverObjectId, pipeline, pipelineConfiguration);
    pipeline.setContainerId(serverObjectId);

    String message =
        "Pipeline '"
            + pipeline.getPipelineMeta().getName()
            + "' was added to HopServer with id "
            + serverObjectId;

    if (useXML) {
      // Return the log channel id as well
      //
      out.println(new WebResult(WebResult.STRING_OK, message, serverObjectId));
    } else {
      out.println("<H1>" + message + "</H1>");
      out.println(
          "<p><a href=\""
              + convertContextPath(GetPipelineStatusServlet.CONTEXT_PATH)
              + "?name="
              + pipeline.getPipelineMeta().getName()
              + "&id="
              + serverObjectId
              + "\">Go to the pipeline status page</a><p>");
    }
  } catch (Exception ex) {
    if (useXML) {
      out.println(new WebResult(WebResult.STRING_ERROR, Const.getStackTracker(ex)));
    } else {
      out.println("<p>");
      out.println("<pre>");
      ex.printStackTrace(out);
      out.println("</pre>");
    }
  }

  if (!useXML) {
    out.println("<p>");
    out.println("</BODY>");
    out.println("</HTML>");
  }
}
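Every snippet on this page follows the same lifecycle: construct a LogChannelFileWriter from a log channel id, a VFS file object, and an append flag; call startLogging() before execution begins; and make sure stopLogging() runs afterwards, via an execution-finished listener as in the servlet above or via a finally block as in the Repeat examples below. A minimal, self-contained sketch of that lifecycle; the channel name and the log file path are placeholders:

import org.apache.commons.vfs2.FileObject;
import org.apache.hop.core.logging.LogChannel;
import org.apache.hop.core.logging.LogChannelFileWriter;
import org.apache.hop.core.vfs.HopVfs;

public class LogChannelFileWriterSketch {
  public static void main(String[] args) throws Exception {
    // Any log channel owner works; a plain LogChannel and "/tmp/sketch.log"
    // are used here purely for illustration.
    LogChannel log = new LogChannel("sketch");
    FileObject logFile = HopVfs.getFileObject("/tmp/sketch.log");

    // The third argument selects append-to rather than replace-the-file.
    LogChannelFileWriter writer =
        new LogChannelFileWriter(log.getLogChannelId(), logFile, false);
    writer.startLogging(); // begin copying this channel's log lines to the file
    try {
      log.logBasic("Hello from the sketch");
    } finally {
      writer.stopLogging(); // flush and close, as in the finally blocks below
      if (writer.getException() != null) {
        log.logError("Trouble writing the log file", writer.getException());
      }
    }
  }
}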
Use of org.apache.hop.core.logging.LogChannelFileWriter in project hop by Apache.
The class Repeat, method executePipeline.
private ExecutionResult executePipeline(
    String realFilename, int nr, ExecutionResult previousResult, int repetitionNr)
    throws HopException {
  PipelineMeta pipelineMeta = loadPipeline(realFilename, getMetadataProvider(), this);
  IPipelineEngine<PipelineMeta> pipeline =
      PipelineEngineFactory.createPipelineEngine(
          this, runConfigurationName, getMetadataProvider(), pipelineMeta);
  pipeline.setParentWorkflow(getParentWorkflow());
  pipeline.setParent(this);
  if (keepingValues && previousResult != null) {
    pipeline.copyFrom(previousResult.variables);
  } else {
    pipeline.initializeFrom(getParentWorkflow());

    // Also copy the parameters over...
    //
    pipeline.copyParametersFromDefinitions(pipelineMeta);
  }

  pipeline.getPipelineMeta().setInternalHopVariables(pipeline);
  pipeline.setVariables(getVariablesMap(pipeline, previousResult));

  // TODO: check this!
  INamedParameters previousParams =
      previousResult == null ? null : (INamedParameters) previousResult.variables;
  IVariables previousVars = previousResult == null ? null : previousResult.variables;
  updateParameters(pipeline, previousVars, getParentWorkflow(), previousParams);

  pipeline.setLogLevel(getLogLevel());
  pipeline.setMetadataProvider(getMetadataProvider());

  // Start logging before execution...
  //
  LogChannelFileWriter fileWriter = null;
  try {
    if (logFileEnabled) {
      fileWriter = logToFile(pipeline, repetitionNr);
    }

    // Run it!
    //
    pipeline.prepareExecution();
    pipeline.startThreads();
    pipeline.waitUntilFinished();

    boolean flagSet = pipeline.getExtensionDataMap().get(REPEAT_END_LOOP) != null;
    Result result = pipeline.getResult();

    return new ExecutionResult(result, pipeline, flagSet);
  } finally {
    if (logFileEnabled && fileWriter != null) {
      fileWriter.stopLogging();
    }
  }
}
Use of org.apache.hop.core.logging.LogChannelFileWriter in project hop by Apache.
The class Repeat, method executeWorkflow.
private ExecutionResult executeWorkflow(
    String realFilename, int nr, ExecutionResult previousResult, int repetitionNr)
    throws HopException {
  WorkflowMeta workflowMeta = loadWorkflow(realFilename, getMetadataProvider(), this);
  IWorkflowEngine<WorkflowMeta> workflow =
      WorkflowEngineFactory.createWorkflowEngine(
          this, runConfigurationName, getMetadataProvider(), workflowMeta, this);
  workflow.setParentWorkflow(getParentWorkflow());
  workflow.setParentVariables(this);
  if (keepingValues && previousResult != null) {
    workflow.copyFrom(previousResult.variables);
  } else {
    workflow.initializeFrom(this);

    // Also copy the parameters over...
    //
    workflow.copyParametersFromDefinitions(workflowMeta);
  }

  workflow.getWorkflowMeta().setInternalHopVariables(workflow);
  workflow.setVariables(getVariablesMap(workflow, previousResult));

  // TODO: check this!
  INamedParameters previousParams =
      previousResult == null ? null : (INamedParameters) previousResult.variables;
  IVariables previousVars = previousResult == null ? null : previousResult.variables;
  updateParameters(workflow, previousVars, getParentWorkflow(), previousParams);

  workflow.setLogLevel(getLogLevel());

  if (parentWorkflow.isInteractive()) {
    workflow.setInteractive(true);
    workflow.getActionListeners().addAll(parentWorkflow.getActionListeners());
  }

  // Link the workflow with the sub-workflow
  parentWorkflow.getWorkflowTracker().addWorkflowTracker(workflow.getWorkflowTracker());
  // Link both ways!
  workflow.getWorkflowTracker().setParentWorkflowTracker(parentWorkflow.getWorkflowTracker());

  // Start logging before execution...
  //
  LogChannelFileWriter fileWriter = null;
  try {
    if (logFileEnabled) {
      fileWriter = logToFile(workflow, repetitionNr);
    }

    Result result = workflow.startExecution();

    boolean flagSet = workflow.getExtensionDataMap().get(REPEAT_END_LOOP) != null;
    if (flagSet) {
      log.logBasic("End loop flag found, stopping loop.");
    }

    return new ExecutionResult(result, workflow, flagSet);
  } finally {
    if (logFileEnabled && fileWriter != null) {
      fileWriter.stopLogging();
    }
  }
}
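Both Repeat helpers above detect loop termination through the engine's extension data map rather than through the Result: an action somewhere inside the executed pipeline or workflow is expected to store a value under the REPEAT_END_LOOP key. A hedged sketch of the signalling side; the plugin's real End Repeat action may work differently, and the constant's value here is a placeholder:

import java.util.Map;

public class EndLoopSignalSketch {
  // Placeholder value; the snippets above reference the Repeat plugin's own constant.
  public static final String REPEAT_END_LOOP = "RepeatEndLoop";

  // executePipeline() and executeWorkflow() only test
  // get(REPEAT_END_LOOP) != null, so any non-null value ends the loop.
  public static void signalEndOfLoop(Map<String, Object> extensionDataMap) {
    extensionDataMap.put(REPEAT_END_LOOP, Boolean.TRUE);
  }
}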
Use of org.apache.hop.core.logging.LogChannelFileWriter in project hop by Apache.
The class ActionPipeline, method execute.
/**
 * Execute this action and return the result. In this case that means simply setting the result
 * boolean in the Result class.
 *
 * @param result The result of the previous execution
 * @param nr the action number
 * @return The Result of the execution.
 */
@Override
public Result execute(Result result, int nr) throws HopException {
  result.setEntryNr(nr);

  LogChannelFileWriter logChannelFileWriter = null;

  LogLevel pipelineLogLevel = parentWorkflow.getLogLevel();

  String realLogFilename = "";
  if (setLogfile) {
    pipelineLogLevel = logFileLevel;

    realLogFilename = resolve(getLogFilename());

    // If we do not have one, we must fail
    if (Utils.isEmpty(realLogFilename)) {
      logError(BaseMessages.getString(PKG, "ActionPipeline.Exception.LogFilenameMissing"));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }

    // create parent folder?
    if (!FileUtil.createParentFolder(PKG, realLogFilename, createParentFolder, this.getLogChannel())) {
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    try {
      logChannelFileWriter =
          new LogChannelFileWriter(
              this.getLogChannelId(), HopVfs.getFileObject(realLogFilename), setAppendLogfile);
      logChannelFileWriter.startLogging();
    } catch (HopException e) {
      logError(
          BaseMessages.getString(
              PKG, "ActionPipeline.Error.UnableOpenAppender", realLogFilename, e.toString()));
      logError(Const.getStackTracker(e));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
  }

  logDetailed(BaseMessages.getString(PKG, "ActionPipeline.Log.OpeningPipeline", resolve(getFilename())));

  // Load the pipeline only once for the complete loop!
  // Throws an exception if it was not possible to load the pipeline, for example if the XML file
  // doesn't exist.
  // Log the stack trace and return an error condition from this
  //
  PipelineMeta pipelineMeta = null;
  try {
    pipelineMeta = getPipelineMeta(getMetadataProvider(), this);
  } catch (HopException e) {
    logError(
        BaseMessages.getString(
            PKG,
            "ActionPipeline.Exception.UnableToRunWorkflow",
            parentWorkflowMeta.getName(),
            getName(),
            StringUtils.trim(e.getMessage())),
        e);
    result.setNrErrors(1);
    result.setResult(false);
    return result;
  }

  int iteration = 0;

  RowMetaAndData resultRow = null;
  boolean first = true;
  List<RowMetaAndData> rows = new ArrayList<>(result.getRows());

  while ((first && !execPerRow)
      || ((execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0)
          && !parentWorkflow.isStopped())) {
    //
    if (execPerRow) {
      result.getRows().clear();
    }

    if (rows != null && execPerRow) {
      resultRow = rows.get(iteration);
    } else {
      resultRow = null;
    }

    INamedParameters namedParam = new NamedParameters();
    if (parameters != null) {
      for (int idx = 0; idx < parameters.length; idx++) {
        if (!Utils.isEmpty(parameters[idx])) {
          // We have a parameter
          //
          namedParam.addParameterDefinition(parameters[idx], "", "Action runtime");

          if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
            // There is no field name specified.
            //
            String value = Const.NVL(resolve(parameterValues[idx]), "");
            namedParam.setParameterValue(parameters[idx], value);
          } else {
            // something filled in, in the field column...
            //
            String value = "";
            if (resultRow != null) {
              value = resultRow.getString(parameterFieldNames[idx], "");
            }
            namedParam.setParameterValue(parameters[idx], value);
          }
        }
      }
    }

    first = false;

    Result previousResult = result;

    try {
      if (isDetailed()) {
        logDetailed(
            BaseMessages.getString(
                PKG, "ActionPipeline.StartingPipeline", getFilename(), getName(), getDescription()));
      }

      if (clearResultRows) {
        previousResult.setRows(new ArrayList<>());
      }

      if (clearResultFiles) {
        previousResult.getResultFiles().clear();
      }

      /*
       * Set one or more "result" rows on the pipeline...
       */
      if (execPerRow) {
        // Execute for each input row
        // Just pass a single row
        List<RowMetaAndData> newList = new ArrayList<>();
        newList.add(resultRow);

        // This previous result rows list can be either empty or not.
        // Depending on the checkbox "clear result rows"
        // In this case, it would execute the pipeline with one extra row each time
        // Can't figure out a real use-case for it, but hey, who am I to decide that, right?
        // :-)
        //
        previousResult.getRows().addAll(newList);

        if (paramsFromPrevious) {
          if (parameters != null) {
            for (int idx = 0; idx < parameters.length; idx++) {
              if (!Utils.isEmpty(parameters[idx])) {
                // We have a parameter
                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                  namedParam.setParameterValue(
                      parameters[idx], Const.NVL(resolve(parameterValues[idx]), ""));
                } else {
                  String fieldValue = "";
                  if (resultRow != null) {
                    fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                  }
                  // Get the value from the input stream
                  namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                }
              }
            }
          }
        }
      } else {
        if (paramsFromPrevious) {
          // Copy the input parameters
          if (parameters != null) {
            for (int idx = 0; idx < parameters.length; idx++) {
              if (!Utils.isEmpty(parameters[idx])) {
                // We have a parameter
                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                  namedParam.setParameterValue(
                      parameters[idx], Const.NVL(resolve(parameterValues[idx]), ""));
                } else {
                  String fieldValue = "";
                  if (resultRow != null) {
                    fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                  }
                  // Get the value from the input stream
                  namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                }
              }
            }
          }
        }
      }

      // Handle the parameters...
      //
      String[] parameterNames = pipelineMeta.listParameters();

      prepareFieldNamesParameters(parameters, parameterFieldNames, parameterValues, namedParam, this);

      if (StringUtils.isEmpty(runConfiguration)) {
        throw new HopException("This action needs a run configuration to use to execute the specified pipeline");
      }
      runConfiguration = resolve(runConfiguration);
      log.logBasic(BaseMessages.getString(PKG, "ActionPipeline.RunConfig.Message", runConfiguration));

      // Create the pipeline from meta-data
      //
      pipeline =
          PipelineEngineFactory.createPipelineEngine(
              this, runConfiguration, getMetadataProvider(), pipelineMeta);
      pipeline.setParent(this);

      // set the parent workflow on the pipeline, variables are taken from here...
      //
      pipeline.setParentWorkflow(parentWorkflow);
      pipeline.setParentVariables(parentWorkflow);
      pipeline.setLogLevel(pipelineLogLevel);
      pipeline.setPreviousResult(previousResult);

      // inject the metadataProvider
      pipeline.setMetadataProvider(getMetadataProvider());

      // Handle parameters...
      //
      pipeline.initializeFrom(null);
      pipeline.copyParametersFromDefinitions(pipelineMeta);

      // Pass the parameter values and activate...
      //
      TransformWithMappingMeta.activateParams(
          pipeline, pipeline, this, parameterNames, parameters, parameterValues, isPassingAllParameters());

      // First get the root workflow
      //
      IWorkflowEngine<WorkflowMeta> rootWorkflow = parentWorkflow;
      while (rootWorkflow.getParentWorkflow() != null) {
        rootWorkflow = rootWorkflow.getParentWorkflow();
      }

      try {
        // Start execution...
        //
        pipeline.execute();

        //
        if (isWaitingToFinish()) {
          pipeline.waitUntilFinished();

          if (parentWorkflow.isStopped() || pipeline.getErrors() != 0) {
            pipeline.stopAll();
            result.setNrErrors(1);
          }
          updateResult(result);
        }
        if (setLogfile) {
          ResultFile resultFile =
              new ResultFile(
                  ResultFile.FILE_TYPE_LOG,
                  HopVfs.getFileObject(realLogFilename),
                  parentWorkflow.getWorkflowName(),
                  toString());
          result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
        }
      } catch (HopException e) {
        logError(BaseMessages.getString(PKG, "ActionPipeline.Error.UnablePrepareExec"), e);
        result.setNrErrors(1);
      }
    } catch (Exception e) {
      logError(BaseMessages.getString(PKG, "ActionPipeline.ErrorUnableOpenPipeline", e.getMessage()));
      logError(Const.getStackTracker(e));
      result.setNrErrors(1);
    }
    iteration++;
  }

  if (setLogfile) {
    if (logChannelFileWriter != null) {
      logChannelFileWriter.stopLogging();

      ResultFile resultFile =
          new ResultFile(
              ResultFile.FILE_TYPE_LOG,
              logChannelFileWriter.getLogFile(),
              parentWorkflow.getWorkflowName(),
              getName());
      result.getResultFiles().put(resultFile.getFile().toString(), resultFile);

      //
      if (logChannelFileWriter.getException() != null) {
        logError("Unable to open log file [" + getLogFilename() + "] : ");
        logError(Const.getStackTracker(logChannelFileWriter.getException()));
        result.setNrErrors(1);
        result.setResult(false);
        return result;
      }
    }
  }

  if (result.getNrErrors() == 0) {
    result.setResult(true);
  } else {
    result.setResult(false);
  }

  return result;
}
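Note the contrast with the servlet example: ActionPipeline keeps a single LogChannelFileWriter open across every per-row iteration of the loop. The writer is started once before the pipeline is loaded, stopped once after the loop ends, and only then is the log file registered as a ResultFile and the writer checked for an internal exception.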
Use of org.apache.hop.core.logging.LogChannelFileWriter in project hop by Apache.
The class Repeat, method logToFile.
private LogChannelFileWriter logToFile(ILoggingObject loggingObject, int repetitionNr)
    throws HopException {
  // Calculate the filename
  //
  Date currentDate = new Date();
  String filename = resolve(logFileBase);
  if (logFileDateAdded) {
    filename += "_" + new SimpleDateFormat("yyyyMMdd").format(currentDate);
  }
  if (logFileTimeAdded) {
    filename += "_" + new SimpleDateFormat("HHmmss").format(currentDate);
  }
  if (logFileRepetitionAdded) {
    filename += "_" + new DecimalFormat("0000").format(repetitionNr);
  }
  filename += "." + resolve(logFileExtension);

  String logChannelId = loggingObject.getLogChannelId();
  LogChannelFileWriter fileWriter =
      new LogChannelFileWriter(
          logChannelId,
          HopVfs.getFileObject(filename),
          logFileAppended,
          Const.toInt(logFileUpdateInterval, 5000));

  fileWriter.startLogging();

  return fileWriter;
}
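As a worked example with hypothetical values: if logFileBase resolves to /tmp/repeat, the date, time, and repetition options are all enabled, logFileExtension resolves to log, and repetitionNr is 7, then a run on 2024-03-15 at 10:15:00 writes to /tmp/repeat_20240315_101500_0007.log. The fourth constructor argument is the writer's polling interval in milliseconds, falling back to 5000 when logFileUpdateInterval is not set.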