Use of org.jenkinsci.plugins.workflow.actions.LogAction in project blueocean-plugin by jenkinsci.
The class NodeLogResource, method doIndex:
public void doIndex(StaplerRequest req, StaplerResponse rsp, @Header("Accept") AcceptHeader accept) {
    String download = req.getParameter("download");
    if ("true".equalsIgnoreCase(download)) {
        rsp.setHeader("Content-Disposition", "attachment; filename=log.txt");
    }
    rsp.setContentType("text/plain;charset=UTF-8");
    rsp.setStatus(HttpServletResponse.SC_OK);
    long count = 0;
    try (CharSpool spool = new CharSpool()) {
        for (BluePipelineStep blueStep : steps) {
            if (blueStep instanceof PipelineStepImpl) {
                PipelineStepImpl step = (PipelineStepImpl) blueStep;
                final FlowNodeWrapper node = step.getFlowNodeWrapper();
                if (node.isLoggable()) {
                    // Stream the node's log text into the spool, then append any block error.
                    LogAction logAction = node.getNode().getAction(LogAction.class);
                    if (logAction != null) {
                        count += logAction.getLogText().writeLogTo(0, spool);
                        String errorLog = node.blockError();
                        if (errorLog != null) {
                            count += appendError(errorLog, new WriterOutputStream(spool));
                        }
                    }
                } else {
                    // Node is not loggable: fall back to its node error, then its block error.
                    String errorLog = step.getFlowNodeWrapper().nodeError();
                    if (errorLog == null) {
                        errorLog = step.getFlowNodeWrapper().blockError();
                    }
                    if (errorLog != null) {
                        count += appendError(errorLog, new WriterOutputStream(spool));
                    }
                }
            }
        }
        Writer writer;
        if (count > 0) {
            // Compress larger responses; write the spooled text with normalized line endings.
            writer = (count > 4096) ? rsp.getCompressedWriter(req) : rsp.getWriter();
            spool.flush();
            spool.writeTo(new LineEndNormalizingWriter(writer));
            rsp.addHeader("X-Text-Size", String.valueOf(count));
            writer.close();
        }
    } catch (IOException e) {
        throw new ServiceException.UnexpectedErrorException("Error reading log");
    }
}
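For comparison, a minimal sketch of the core read pattern the method above relies on: fetch the LogAction from a FlowNode and, if present, stream its log text into a Writer from offset 0. This is not blueocean-plugin code; the class name LogActionSketch and the helper readNodeLog are illustrative.

import java.io.IOException;
import java.io.StringWriter;
import org.jenkinsci.plugins.workflow.actions.LogAction;
import org.jenkinsci.plugins.workflow.graph.FlowNode;

public class LogActionSketch {
    /** Returns the node's log text, or an empty string if the node produced no log. */
    static String readNodeLog(FlowNode node) throws IOException {
        LogAction la = node.getAction(LogAction.class); // null when the node logged nothing
        if (la == null) {
            return "";
        }
        StringWriter out = new StringWriter();
        la.getLogText().writeLogTo(0, out); // 0 = start offset; returns the offset after the written text
        return out.toString();
    }
}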
Use of org.jenkinsci.plugins.workflow.actions.LogAction in project workflow-job-plugin by jenkinsci.
The class WorkflowRun, method copyLogs:
@GuardedBy("completed")
private void copyLogs() {
    if (logsToCopy == null) {
        // finished
        return;
    }
    if (logsToCopy instanceof LinkedHashMap) {
        // upgrade while build is running
        logsToCopy = new ConcurrentSkipListMap<>(logsToCopy);
    }
    boolean modified = false;
    for (Map.Entry<String, Long> entry : logsToCopy.entrySet()) {
        String id = entry.getKey();
        FlowNode node;
        try {
            if (execution == null) {
                // broken somehow
                return;
            }
            node = execution.getNode(id);
        } catch (IOException x) {
            LOGGER.log(Level.WARNING, null, x);
            logsToCopy.remove(id);
            modified = true;
            continue;
        }
        if (node == null) {
            LOGGER.log(Level.WARNING, "no such node {0}", id);
            logsToCopy.remove(id);
            modified = true;
            continue;
        }
        LogAction la = node.getAction(LogAction.class);
        if (la != null) {
            AnnotatedLargeText<? extends FlowNode> logText = la.getLogText();
            try {
                // entry.getValue() is the offset already copied into the build log.
                long old = entry.getValue();
                OutputStream logger;
                String prefix = getLogPrefix(node);
                if (prefix != null) {
                    logger = new LogLinePrefixOutputFilter(listener.getLogger(), "[" + prefix + "] ");
                } else {
                    logger = listener.getLogger();
                }
                try {
                    long revised = writeRawLogTo(logText, old, logger);
                    if (revised != old) {
                        logsToCopy.put(id, revised);
                        modified = true;
                    }
                    if (logText.isComplete()) {
                        // defend against race condition?
                        writeRawLogTo(logText, revised, logger);
                        assert !node.isRunning() : "LargeText.complete yet " + node + " claims to still be running";
                        logsToCopy.remove(id);
                        modified = true;
                    }
                } finally {
                    if (prefix != null) {
                        ((LogLinePrefixOutputFilter) logger).forceEol();
                    }
                }
            } catch (IOException x) {
                LOGGER.log(Level.WARNING, null, x);
                logsToCopy.remove(id);
                modified = true;
            }
        } else if (!node.isRunning()) {
            // Node finished without ever producing a log; stop tracking it.
            logsToCopy.remove(id);
            modified = true;
        }
    }
    if (modified) {
        try {
            if (this.execution != null && this.execution.getDurabilityHint().isPersistWithEveryStep()) {
                save();
            }
        } catch (IOException x) {
            LOGGER.log(Level.WARNING, null, x);
        }
    }
}
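The same idea, reduced to a minimal sketch of the incremental, offset-tracked copy that copyLogs performs per node: pass the offset already copied to writeRawLogTo and keep the returned offset for the next poll. This is not workflow-job-plugin code; IncrementalCopySketch and copyNewOutput are assumed names.

import java.io.IOException;
import java.io.OutputStream;
import hudson.console.AnnotatedLargeText;
import org.jenkinsci.plugins.workflow.actions.LogAction;
import org.jenkinsci.plugins.workflow.graph.FlowNode;

public class IncrementalCopySketch {
    /**
     * Copies any log text the node produced since {@code offset} into {@code sink}
     * and returns the new offset to pass on the next call.
     */
    static long copyNewOutput(FlowNode node, long offset, OutputStream sink) throws IOException {
        LogAction la = node.getAction(LogAction.class);
        if (la == null) {
            return offset; // nothing logged yet; keep the old offset
        }
        AnnotatedLargeText<? extends FlowNode> text = la.getLogText();
        return text.writeRawLogTo(offset, sink); // returns the position after the copied bytes
    }
}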