Use of org.apache.commons.io.output.TeeOutputStream in project gradle by gradle.
The class InProcessGradleExecuter, method createLoggingManager:
private LoggingManagerInternal createLoggingManager(StartParameter startParameter, final StandardOutputListener outputListener) {
    LoggingManagerInternal loggingManager = GLOBAL_SERVICES.getFactory(LoggingManagerInternal.class).create();
    loggingManager.captureSystemSources();
    ConsoleOutput consoleOutput = startParameter.getConsoleOutput();
    if (consoleOutput == ConsoleOutput.Auto) {
        // IDEA runs tests attached to a console; use Plain so tests can assume they are never attached to one.
        // We should really run all tests against both a plain and a rich console to make these assumptions explicit.
        consoleOutput = ConsoleOutput.Plain;
    }
    loggingManager.attachConsole(new TeeOutputStream(System.out, new LineBufferingOutputStream(new TextStream() {
        @Override
        public void text(String text) {
            outputListener.onOutput(text);
        }

        @Override
        public void endOfStream(@Nullable Throwable failure) {
        }
    })), consoleOutput);
    return loggingManager;
}
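The essence of this usage is that one write lands in two places: the real console (System.out) and a listener that the test harness inspects. Below is a minimal, self-contained sketch of the same pattern, with a plain byte buffer as an illustrative stand-in for Gradle's LineBufferingOutputStream/TextStream pair (the class and variable names here are hypothetical):

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import org.apache.commons.io.output.TeeOutputStream;

public class TeeToListenerDemo {
    public static void main(String[] args) throws Exception {
        // Stand-in for the LineBufferingOutputStream/listener pair used above.
        ByteArrayOutputStream capture = new ByteArrayOutputStream();
        // Every byte written through the tee reaches both System.out and the buffer.
        PrintStream out = new PrintStream(new TeeOutputStream(System.out, capture), true, "UTF-8");
        out.println("hello from the tee");
        System.err.println("captured: " + capture.toString("UTF-8"));
    }
}

In the real executer the second branch is line-buffered, so the listener receives whole lines of text rather than arbitrary byte chunks.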
Use of org.apache.commons.io.output.TeeOutputStream in project data-prep by Talend.
The class PreparationExportStrategy, method performPreparation:
public void performPreparation(final ExportParameters parameters, final OutputStream outputStream) {
    final String stepId = parameters.getStepId();
    final String preparationId = parameters.getPreparationId();
    final String formatName = parameters.getExportType();
    final PreparationMessage preparation = getPreparation(preparationId, stepId);
    final String dataSetId = preparation.getDataSetId();
    final ExportFormat format = getFormat(parameters.getExportType());
    boolean releasedIdentity = false;
    // Allow access to the dataset and its metadata whatever the share status is.
    securityProxy.asTechnicalUser();
    final DataSetGet dataSetGet = applicationContext.getBean(DataSetGet.class, dataSetId, false, true);
    final DataSetGetMetadata dataSetGetMetadata = applicationContext.getBean(DataSetGetMetadata.class, dataSetId);
    // Get the dataset content (in try-with-resources blocks to make sure it is properly closed).
    try (InputStream datasetContent = dataSetGet.execute()) {
        try (JsonParser parser = mapper.getFactory().createParser(new InputStreamReader(datasetContent, UTF_8))) {
            // "head" is not allowed as a step id.
            final String version = getCleanStepId(preparation, stepId);
            // Create the dataset.
            final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(parser);
            dataSet.setMetadata(dataSetGetMetadata.execute());
            // All good, the technical identity can be released already.
            securityProxy.releaseIdentity();
            releasedIdentity = true;
            // Get the actions to apply (no preparation ==> dataset export ==> no actions).
            final String actions = getActions(preparationId, version);
            final TransformationCacheKey key = cacheKeyGenerator.generateContentKey(
                    dataSetId,
                    preparationId,
                    version,
                    formatName,
                    parameters.getFrom(),
                    parameters.getArguments(),
                    parameters.getFilter());
            LOGGER.debug("Cache key: {}", key.getKey());
            LOGGER.debug("Cache key details: {}", key);
            // Tee the output so a single write feeds both the caller and a new cache entry.
            try (final TeeOutputStream tee = new TeeOutputStream(outputStream,
                    contentCache.put(key, ContentCache.TimeToLive.DEFAULT))) {
                final Configuration configuration = Configuration.builder()
                        .args(parameters.getArguments())
                        .outFilter(rm -> filterService.build(parameters.getFilter(), rm))
                        .sourceType(parameters.getFrom())
                        .format(format.getName())
                        .actions(actions)
                        .preparation(preparation)
                        .stepId(version)
                        .volume(Configuration.Volume.SMALL)
                        .output(tee)
                        .limit(limit)
                        .build();
                factory.get(configuration).buildExecutable(dataSet, configuration).execute();
                tee.flush();
            } catch (Throwable e) { // NOSONAR
                // Do not leave a partial entry in the cache.
                contentCache.evict(key);
                throw e;
            }
        }
    } catch (TDPException e) {
        throw e;
    } catch (Exception e) {
        throw new TDPException(TransformationErrorCodes.UNABLE_TO_TRANSFORM_DATASET, e);
    } finally {
        if (!releasedIdentity) {
            // Release the technical identity in case of error.
            securityProxy.releaseIdentity();
        }
    }
}
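The tee here implements a write-once, serve-twice export: the transformation engine writes to a single stream, the bytes reach both the caller and a new cache entry, and on failure the partial entry is evicted. A minimal sketch of that shape, using in-memory buffers as hypothetical stand-ins for the service output and the cache entry stream:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.commons.io.output.TeeOutputStream;

public class TeeToCacheDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream serviceOutput = new ByteArrayOutputStream(); // stand-in for the caller's stream
        ByteArrayOutputStream cacheEntry = new ByteArrayOutputStream();    // stand-in for contentCache.put(key, ttl)
        try (OutputStream tee = new TeeOutputStream(serviceOutput, cacheEntry)) {
            tee.write("transformed records".getBytes("UTF-8"));
            tee.flush();
        } catch (IOException e) {
            cacheEntry.reset(); // stand-in for contentCache.evict(key)
            throw e;
        }
        System.out.println("served: " + serviceOutput.toString("UTF-8"));
        System.out.println("cached: " + cacheEntry.toString("UTF-8"));
    }
}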
Use of org.apache.commons.io.output.TeeOutputStream in project data-prep by Talend.
The class ApplyPreparationExportStrategy, method executeApplyPreparation:
private void executeApplyPreparation(ExportParameters parameters, OutputStream outputStream) {
    final String stepId = parameters.getStepId();
    final String preparationId = parameters.getPreparationId();
    final String formatName = parameters.getExportType();
    final Preparation preparation = getPreparation(preparationId);
    final String dataSetId = parameters.getDatasetId();
    final ExportFormat format = getFormat(parameters.getExportType());
    // The dataset content must be retrieved as the technical user because it might not be shared.
    boolean technicianIdentityReleased = false;
    securityProxy.asTechnicalUser();
    // Get the dataset content (in a try-with-resources block to make sure it is properly closed).
    final boolean fullContent = parameters.getFrom() == ExportParameters.SourceType.FILTER;
    final DataSetGet dataSetGet = applicationContext.getBean(DataSetGet.class, dataSetId, fullContent, true);
    try (final InputStream datasetContent = dataSetGet.execute();
            final JsonParser parser = mapper.getFactory().createParser(new InputStreamReader(datasetContent, UTF_8))) {
        // Release the technical user identity.
        securityProxy.releaseIdentity();
        technicianIdentityReleased = true;
        // "head" is not allowed as a step id.
        final String version = getCleanStepId(preparation, stepId);
        // Create the dataset.
        final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(parser);
        // Get the actions to apply (no preparation ==> dataset export ==> no actions).
        final String actions = getActions(preparationId, version);
        // Create a tee to broadcast to both the cache and the service output.
        final TransformationCacheKey key = cacheKeyGenerator.generateContentKey(
                dataSetId,
                preparationId,
                version,
                formatName,
                parameters.getFrom(),
                parameters.getArguments(),
                parameters.getFilter());
        LOGGER.debug("Cache key: {}", key.getKey());
        LOGGER.debug("Cache key details: {}", key);
        try (final TeeOutputStream tee = new TeeOutputStream(outputStream,
                contentCache.put(key, ContentCache.TimeToLive.DEFAULT))) {
            final Configuration.Builder configurationBuilder = Configuration.builder()
                    .args(parameters.getArguments())
                    .outFilter(rm -> filterService.build(parameters.getFilter(), rm))
                    .sourceType(parameters.getFrom())
                    .format(format.getName())
                    .actions(actions)
                    .preparation(getPreparation(preparationId))
                    .stepId(version)
                    .volume(SMALL)
                    .output(tee)
                    .limit(this.limit);
            // No need for statistics if the output is not JSON.
            if (!Objects.equals(format.getName(), JSON)) {
                configurationBuilder.globalStatistics(false);
            }
            final Configuration configuration = configurationBuilder.build();
            factory.get(configuration).buildExecutable(dataSet, configuration).execute();
            tee.flush();
        } catch (Throwable e) { // NOSONAR
            LOGGER.debug("Evicting cache entry {}", key.getKey());
            contentCache.evict(key);
            throw e;
        }
    } catch (TDPException e) {
        throw e;
    } catch (Exception e) {
        throw new TDPException(TransformationErrorCodes.UNABLE_TO_TRANSFORM_DATASET, e);
    } finally {
        if (!technicianIdentityReleased) {
            // Release the technical identity in case of error.
            securityProxy.releaseIdentity();
        }
    }
}
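One detail both export strategies rely on when they wrap the TeeOutputStream in try-with-resources: closing a commons-io TeeOutputStream closes both branches (the documented behavior of recent commons-io releases), which is what finalizes the cache entry stream alongside the caller's stream. A small sketch to make that visible; TrackingSink is a hypothetical helper, not part of either project:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.commons.io.output.TeeOutputStream;

public class TeeCloseDemo {
    // A sink that records whether close() was called, standing in for the cache entry stream.
    static class TrackingSink extends ByteArrayOutputStream {
        boolean closed;
        @Override
        public void close() throws IOException {
            closed = true;
            super.close();
        }
    }

    public static void main(String[] args) throws IOException {
        TrackingSink caller = new TrackingSink();
        TrackingSink cacheEntry = new TrackingSink();
        // Closing the tee (here via try-with-resources) should close BOTH branches.
        try (TeeOutputStream tee = new TeeOutputStream(caller, cacheEntry)) {
            tee.write("payload".getBytes("UTF-8"));
        }
        System.out.println("caller closed: " + caller.closed + ", cache closed: " + cacheEntry.closed);
    }
}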
Use of org.apache.commons.io.output.TeeOutputStream in project elasticsearch-maven-plugin by alexcojocaru.
The class ProcessUtil, method executeScript:
/**
 * Run the given command as a process within the supplied instance config context
 * and wait until it terminates. An ElasticsearchSetupException is thrown if the exit code
 * is not 0.
 * @param config - the instance config
 * @param command - the command to execute
 * @param environment - a map of environment variables; can be null
 * @param processDestroyer - a destroyer handler for the spawned process; can be null
 * @param disableLogging - whether to disable the logging of the command or not
 * @return the output (not trimmed of whitespace) of the given command, as separate lines
 */
public static List<String> executeScript(InstanceConfiguration config, CommandLine command,
        Map<String, String> environment, ProcessDestroyer processDestroyer, boolean disableLogging) {
    Log log = config.getClusterConfiguration().getLog();
    int instanceId = config.getId();
    File baseDir = new File(config.getBaseDir());
    Map<String, String> completeEnvironment = createEnvironment(environment);
    DefaultExecutor executor = new DefaultExecutor();
    executor.setWorkingDirectory(baseDir);
    // A null process destroyer is allowed.
    executor.setProcessDestroyer(processDestroyer);
    // Set up a tap on the output streams, to collect the output and return it from this method.
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    ByteArrayOutputStream errorStream = new ByteArrayOutputStream();
    executor.setStreamHandler(new PumpStreamHandler(
            disableLogging ? outputStream : new TeeOutputStream(System.out, outputStream),
            disableLogging ? errorStream : new TeeOutputStream(System.err, errorStream)));
    try {
        log.debug(String.format("Using environment: %s", completeEnvironment));
        String commandMessage = String.format("Elasticsearch[%d]: Executing command '%s' in directory '%s'",
                instanceId, command.toString(), baseDir);
        if (disableLogging) {
            log.debug(commandMessage);
        } else {
            log.info(commandMessage);
        }
        int exitCode = executor.execute(command, completeEnvironment);
        if (exitCode != 0) {
            throw new ElasticsearchSetupException(String.format(
                    "Elasticsearch[%d]: Command '%s' in directory '%s' finished with exit code %d; see above for details",
                    instanceId, command, baseDir, exitCode));
        }
        String resultMessage = String.format("Elasticsearch[%d]: The process finished with exit code %d",
                instanceId, exitCode);
        if (disableLogging) {
            log.debug(resultMessage);
        } else {
            log.info(resultMessage);
        }
    } catch (IOException e) {
        List<String> output = readBuffer(outputStream);
        List<String> error = readBuffer(errorStream);
        String lineSeparator = System.getProperty("line.separator");
        StringBuilder message = new StringBuilder();
        message.append("Elasticsearch[").append(instanceId).append("]: Cannot execute command '");
        message.append(command).append("' in directory '").append(baseDir).append("'");
        message.append(lineSeparator).append("Output:").append(lineSeparator);
        message.append(StringUtils.join(output, lineSeparator));
        message.append(lineSeparator).append("Error:").append(lineSeparator);
        message.append(StringUtils.join(error, lineSeparator));
        throw new ElasticsearchSetupException(message.toString(), e);
    }
    return readBuffer(outputStream);
}
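Here the tee serves interactive tooling: the child process's output is pumped to the console in real time while also being captured for the return value; with logging disabled, only the capture buffers are used. A minimal sketch of the live-echo-plus-capture arrangement with commons-exec (the echo command is an assumption about the host system):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.PumpStreamHandler;
import org.apache.commons.io.output.TeeOutputStream;

public class TeeProcessOutputDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream captured = new ByteArrayOutputStream();
        DefaultExecutor executor = new DefaultExecutor();
        // Echo the child's output live while also collecting it for inspection afterwards.
        executor.setStreamHandler(new PumpStreamHandler(new TeeOutputStream(System.out, captured)));
        executor.execute(CommandLine.parse("echo hello")); // assumes an 'echo' binary on the PATH
        System.out.println("captured: " + captured.toString("UTF-8"));
    }
}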
Use of org.apache.commons.io.output.TeeOutputStream in project oozie by apache.
The class Hive2Main, method runBeeline:
private void runBeeline(String[] args, String logFile) throws Exception {
    // We do this instead of calling BeeLine.main so that we can duplicate the error stream
    // for harvesting Hadoop child job IDs.
    BeeLine beeLine = new BeeLine();
    beeLine.setErrorStream(new PrintStream(
            new TeeOutputStream(System.err, new FileOutputStream(logFile)), false, "UTF-8"));
    int status = beeLine.begin(args, null);
    beeLine.close();
    if (status != 0) {
        System.exit(status);
    }
}
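The same duplication works for an error stream: everything BeeLine writes to its error stream goes both to the real System.err and to a log file, which the caller can later scan for Hadoop child job IDs. A minimal sketch that applies the pattern to the JVM's own stderr instead of BeeLine's (the file name is illustrative):

import java.io.FileOutputStream;
import java.io.PrintStream;
import org.apache.commons.io.output.TeeOutputStream;

public class TeeStderrToFileDemo {
    public static void main(String[] args) throws Exception {
        // Duplicate everything written to System.err into a log file.
        PrintStream teed = new PrintStream(
                new TeeOutputStream(System.err, new FileOutputStream("beeline.log")), false, "UTF-8");
        System.setErr(teed);
        System.err.println("this line goes to both the console and beeline.log");
        System.err.flush();
    }
}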