use of io.kestra.core.runners.RunContext in project kestra by kestra-io.
the class AbstractBash method run.
@SuppressWarnings("deprecation")
protected ScriptOutput run(RunContext runContext, Supplier<String> supplier) throws Exception {
    Logger logger = runContext.logger();

    if (this.workingDirectory == null) {
        this.workingDirectory = runContext.tempDir();
    }

    additionalVars.put("workingDir", workingDirectory.toAbsolutePath().toString());

    List<String> allOutputs = new ArrayList<>();

    // deprecated properties
    if (this.outputFiles != null && this.outputFiles.size() > 0) {
        allOutputs.addAll(this.outputFiles);
    }

    if (this.outputsFiles != null && this.outputsFiles.size() > 0) {
        allOutputs.addAll(this.outputsFiles);
    }

    if (files != null && files.size() > 0) {
        allOutputs.addAll(files);
    }

    Map<String, String> outputFiles = BashService.createOutputFiles(workingDirectory, allOutputs, additionalVars);
    BashService.createInputFiles(runContext, workingDirectory, this.finalInputFiles(), additionalVars);

    String commandAsString = supplier.get();

    // run
    RunResult runResult = this.run(
        runContext,
        logger,
        workingDirectory,
        finalCommandsWithInterpreter(commandAsString),
        this.finalEnv(),
        (inputStream, isStdErr) -> {
            AbstractLogThread thread = new LogThread(logger, inputStream, isStdErr, runContext);
            thread.setName("bash-log-" + (isStdErr ? "-err" : "-out"));
            thread.start();
            return thread;
        }
    );

    // upload output files
    Map<String, URI> uploaded = new HashMap<>();
    outputFiles.forEach(throwBiConsumer((k, v) -> uploaded.put(
        k,
        runContext.putTempFile(new File(runContext.render(v, additionalVars)))
    )));

    Map<String, Object> outputsVars = new HashMap<>();
    outputsVars.putAll(runResult.getStdOut().getOutputs());
    outputsVars.putAll(runResult.getStdErr().getOutputs());

    // output
    return ScriptOutput.builder()
        .exitCode(runResult.getExitCode())
        .stdOutLineCount(runResult.getStdOut().getLogsCount())
        .stdErrLineCount(runResult.getStdErr().getLogsCount())
        .warningOnStdErr(this.warningOnStdErr)
        .vars(outputsVars)
        .files(uploaded)
        .outputFiles(uploaded)
        .build();
}
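The Supplier<String> parameter lets a concrete task hand over its command text; the method only calls supplier.get() after the working directory, additionalVars and input/output files have been prepared. A minimal, hypothetical subclass could look like the sketch below. The MyBash class and its commands property are illustrative and not part of the Kestra code above, and the sketch assumes the additionalVars field is visible to subclasses; a real task would also carry the usual plugin/builder annotations.

// Hypothetical subclass, for illustration only.
public class MyBash extends AbstractBash {
    private String commands;

    public ScriptOutput run(RunContext runContext) throws Exception {
        // Hand the command template to the protected run(RunContext, Supplier<String>) above.
        // Rendering inside the supplier means it happens after additionalVars (e.g. workingDir)
        // has been populated; rendering failures are wrapped to satisfy the Supplier contract.
        return this.run(runContext, () -> {
            try {
                return runContext.render(this.commands, this.additionalVars);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
    }
}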
use of io.kestra.core.runners.RunContext in project kestra by kestra-io.
the class Counts method run.
@Override
public Output run(RunContext runContext) throws Exception {
    Logger logger = runContext.logger();
    ExecutionRepositoryInterface executionRepository = runContext.getApplicationContext()
        .getBean(ExecutionRepositoryInterface.class);

    String query = null;
    if (this.states != null) {
        query = "state.current:(" + this.states.stream().map(Enum::name).collect(Collectors.joining(" OR ")) + ")";
    }

    List<ExecutionCount> executionCounts = executionRepository.executionCounts(
        flows,
        query,
        startDate != null ? ZonedDateTime.parse(runContext.render(startDate)) : null,
        endDate != null ? ZonedDateTime.parse(runContext.render(endDate)) : null
    );

    logger.trace("{} flows matching filters", executionCounts.size());

    List<Result> count = executionCounts.stream()
        .filter(throwPredicate(item -> runContext
            .render(this.expression, ImmutableMap.of("count", item.getCount().intValue()))
            .equals("true")))
        .map(item -> Result.builder()
            .namespace(item.getNamespace())
            .flowId(item.getFlowId())
            .count(item.getCount())
            .build())
        .collect(Collectors.toList());

    logger.debug("{} flows matching the expression", count.size());

    return Output.builder().results(count).build();
}
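The filter step renders the configured expression once per ExecutionCount, with a single template variable named count, and keeps a row only when the rendered string is exactly "true". A hedged usage sketch follows; the builder property names mirror the fields referenced in run() (states, startDate, expression), states is assumed to be a list of State.Type, the flows filter is omitted for brevity, and the getters on Output and Result are assumed to follow their builders.

// Usage sketch, not taken from the Kestra test suite.
Counts counts = Counts.builder()
    .states(List.of(State.Type.SUCCESS))        // becomes the "state.current:(SUCCESS)" query
    .startDate("2023-01-01T00:00:00Z")          // rendered, then parsed with ZonedDateTime.parse
    .expression("{{ count >= 5 }}")             // rendered per flow with the variable `count`;
                                                // a row is kept only when the result is "true"
    .build();

Counts.Output output = counts.run(runContext);
output.getResults().forEach(r ->
    System.out.println(r.getNamespace() + "/" + r.getFlowId() + " -> " + r.getCount())
);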
use of io.kestra.core.runners.RunContext in project kestra by kestra-io.
the class AbstractScheduler method computeSchedulable.
private void computeSchedulable(List<Flow> flows) {
    schedulableNextDate = new HashMap<>();

    this.schedulable = flows
        .stream()
        .filter(flow -> flow.getTriggers() != null && flow.getTriggers().size() > 0)
        .flatMap(flow -> flow.getTriggers().stream().map(trigger -> {
            RunContext runContext = runContextFactory.of(flow, trigger);
            return new FlowWithTrigger(flow, trigger, runContext, conditionService.conditionContext(runContext, flow, null));
        }))
        .filter(flowWithTrigger -> flowWithTrigger.getTrigger() instanceof PollingTriggerInterface)
        .collect(Collectors.toList());
}
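Each (flow, trigger) pair gets its own RunContext and ConditionContext, and only polling triggers survive the final filter. For readers less used to the stream pipeline, a behaviour-equivalent imperative sketch is shown below; it reuses only the calls from the method above, and the element type AbstractTrigger for flow.getTriggers() is an assumption.

// Imperative sketch of the same filtering.
List<FlowWithTrigger> result = new ArrayList<>();

for (Flow flow : flows) {
    if (flow.getTriggers() == null || flow.getTriggers().size() == 0) {
        continue; // flows without triggers can never be scheduled
    }

    for (AbstractTrigger trigger : flow.getTriggers()) {
        // one RunContext and one ConditionContext per (flow, trigger) pair
        RunContext runContext = runContextFactory.of(flow, trigger);
        FlowWithTrigger flowWithTrigger = new FlowWithTrigger(
            flow,
            trigger,
            runContext,
            conditionService.conditionContext(runContext, flow, null)
        );

        // only polling triggers are handled by this scheduler
        if (flowWithTrigger.getTrigger() instanceof PollingTriggerInterface) {
            result.add(flowWithTrigger);
        }
    }
}

this.schedulable = result;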
use of io.kestra.core.runners.RunContext in project kestra by kestra-io.
the class ProcessBuilderScriptRunner method run.
public RunResult run(
    AbstractBash abstractBash,
    RunContext runContext,
    Logger logger,
    Path workingDirectory,
    List<String> commandsWithInterpreter,
    Map<String, String> env,
    AbstractBash.LogSupplier logSupplier,
    Map<String, Object> additionalVars
) throws Exception {
    ProcessBuilder processBuilder = new ProcessBuilder();

    if (env != null && env.size() > 0) {
        Map<String, String> environment = processBuilder.environment();
        environment.putAll(env
            .entrySet()
            .stream()
            .map(throwFunction(r -> new AbstractMap.SimpleEntry<>(
                runContext.render(r.getKey(), additionalVars),
                runContext.render(r.getValue(), additionalVars)
            )))
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))
        );
    }

    if (workingDirectory != null) {
        processBuilder.directory(workingDirectory.toFile());
    }

    processBuilder.command(commandsWithInterpreter);

    Process process = processBuilder.start();
    long pid = process.pid();
    logger.debug("Starting command with pid {} [{}]", pid, String.join(" ", commandsWithInterpreter));

    try {
        // logs
        AbstractLogThread stdOut = logSupplier.call(process.getInputStream(), false);
        AbstractLogThread stdErr = logSupplier.call(process.getErrorStream(), true);

        int exitCode = process.waitFor();
        stdOut.join();
        stdErr.join();

        if (exitCode != 0) {
            throw new AbstractBash.BashException(exitCode, stdOut.getLogsCount(), stdErr.getLogsCount());
        } else {
            logger.debug("Command succeed with code " + exitCode);
        }

        return new RunResult(exitCode, stdOut, stdErr);
    } catch (InterruptedException e) {
        logger.warn("Killing process {} for InterruptedException", pid);
        process.destroy();
        throw e;
    }
}
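The LogSupplier returns a thread that drains one of the process streams; the runner waits for the process and then joins both threads so no trailing output is lost. A self-contained illustration of that drain-then-join pattern, using only JDK classes (not Kestra's AbstractLogThread, which additionally parses the ::{...}:: output markers shown in the NodeTest example below and forwards lines to the RunContext logger), could look like:

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

// Sketch of a stream-draining thread comparable in spirit to AbstractLogThread.
class StreamDrainer extends Thread {
    private final InputStream inputStream;
    private final boolean isStdErr;
    private int logsCount;

    StreamDrainer(InputStream inputStream, boolean isStdErr) {
        this.inputStream = inputStream;
        this.isStdErr = isStdErr;
    }

    @Override
    public void run() {
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                logsCount++;
                // a real implementation would forward the line to a logger and parse output markers
                System.out.println((isStdErr ? "ERR " : "OUT ") + line);
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    int getLogsCount() {
        return logsCount;
    }
}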
use of io.kestra.core.runners.RunContext in project kestra by kestra-io.
the class NodeTest method requirements.
@Test
void requirements() throws Exception {
    RunContext runContext = runContextFactory.of();

    Map<String, String> files = new HashMap<>();
    files.put("main.js", "require('axios').get('http://google.com').then(r => { console.log('::{\"outputs\": {\"extract\":\"' + r.status + '\"}}::') })");
    files.put("package.json", "{\"dependencies\":{\"axios\":\"^0.20.0\"}}");

    Node node = Node.builder()
        .id("test-node-task")
        .nodePath("node")
        .npmPath("npm")
        .inputFiles(files)
        .build();

    ScriptOutput run = node.run(runContext);

    assertThat(run.getExitCode(), is(0));
    assertThat(run.getVars().get("extract"), is("200"));
}
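The ::{"outputs": {...}}:: marker printed by main.js is what surfaces in run.getVars(). A variant of the same test that avoids the HTTP call could look like the sketch below; it is not an existing Kestra test, it reuses the builder properties from the test above, and the assumption that the package.json file can simply be omitted is unverified.

@Test
void outputsMarker() throws Exception {
    RunContext runContext = runContextFactory.of();

    // Print a static output marker instead of calling an HTTP endpoint, so no
    // axios dependency is needed (package.json omitted, which is an assumption).
    Map<String, String> files = new HashMap<>();
    files.put("main.js", "console.log('::{\"outputs\": {\"extract\": \"hello\"}}::')");

    Node node = Node.builder()
        .id("test-node-output")
        .nodePath("node")
        .npmPath("npm")
        .inputFiles(files)
        .build();

    ScriptOutput run = node.run(runContext);

    assertThat(run.getExitCode(), is(0));
    assertThat(run.getVars().get("extract"), is("hello"));
}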