Example use of java.io.PipedInputStream from project gocd (gocd):
class StreamPumperTest, method shouldNotHaveExpiredTimeoutWhenCompleted.
@Test
public void shouldNotHaveExpiredTimeoutWhenCompleted() throws Exception {
    // Wire a piped pair so the pumper reads exactly what the test writes.
    PipedOutputStream writerEnd = new PipedOutputStream();
    InputStream readerEnd = new PipedInputStream(writerEnd);
    TestingClock fakeClock = new TestingClock();
    StreamPumper pumper = new StreamPumper(readerEnd, new TestConsumer(), "", "utf-8", fakeClock);
    new Thread(pumper).start();

    // Feed one line, then close the pipe to signal end of stream.
    writerEnd.write("line1\n".getBytes());
    writerEnd.flush();
    writerEnd.close();
    pumper.readToEnd();

    // Even after the clock advances past the 1-second timeout, a pumper
    // that already drained its stream must not report a timeout.
    fakeClock.addSeconds(2);
    assertThat(pumper.didTimeout(1L, TimeUnit.SECONDS), is(false));
}
Example use of java.io.PipedInputStream from project JMRI (JMRI):
class RfidStreamPortControllerTest, method setUp.
// Minimal log4j/test-harness setup before each test.
@Override
@Before
public void setUp() {
    apps.tests.Log4JFixture.setUp();
    JUnitUtil.resetInstanceManager();
    try {
        // Output side: controller writes drain into a pipe nobody reads.
        DataOutputStream ostream =
                new DataOutputStream(new PipedOutputStream(new PipedInputStream()));
        // Input side: a second, independent pipe backs the controller's
        // reads; the test never feeds any data into it.
        DataInputStream istream = new DataInputStream(new PipedInputStream());
        apc = new RfidStreamPortController(istream, ostream, "Test");
    } catch (java.io.IOException ioe) {
        Assert.fail("IOException creating stream");
    }
}
Example use of java.io.PipedInputStream from project JMRI (JMRI):
class SprogCSStreamPortControllerTest, method setUp.
// Minimal log4j/test-harness setup before each test.
@Before
@Override
public void setUp() {
    apps.tests.Log4JFixture.setUp();
    JUnitUtil.resetInstanceManager();
    try {
        // Output side: controller writes drain into a pipe nobody reads.
        DataOutputStream ostream =
                new DataOutputStream(new PipedOutputStream(new PipedInputStream()));
        // Input side: a second, independent pipe backs the controller's
        // reads; the test never feeds any data into it.
        DataInputStream istream = new DataInputStream(new PipedInputStream());
        apc = new SprogCSStreamPortController(istream, ostream, "Test");
    } catch (java.io.IOException ioe) {
        Assert.fail("IOException creating stream");
    }
}
Example use of java.io.PipedInputStream from project gradle (gradle):
class ForwardClientInput, method execute.
public void execute(final DaemonCommandExecution execution) {
    // Pipe pair: bytes the client forwards are written into the sink and
    // surface on the stream that temporarily replaces the daemon's stdin.
    final PipedOutputStream clientInputSink = new PipedOutputStream();
    final PipedInputStream daemonStdin;
    try {
        daemonStdin = new PipedInputStream(clientInputSink);
    } catch (IOException e) {
        throw UncheckedException.throwAsUncheckedException(e);
    }

    // Forward each client stdin chunk into the pipe; close it on EOF.
    execution.getConnection().onStdin(new StdinHandler() {
        public void onInput(ForwardInput input) {
            LOGGER.debug("Writing forwarded input on daemon's stdin.");
            try {
                clientInputSink.write(input.getBytes());
            } catch (IOException e) {
                LOGGER.warn("Received exception trying to forward client input.", e);
            }
        }

        public void onEndOfInput() {
            LOGGER.info("Closing daemon's stdin at end of input.");
            try {
                clientInputSink.close();
            } catch (IOException e) {
                LOGGER.warn("Problem closing output stream connected to replacement stdin", e);
            } finally {
                LOGGER.info("The daemon will no longer process any standard input.");
            }
        }
    });

    try {
        try {
            // Run the remainder of the command with System.in swapped for
            // the replacement pipe.
            new StdinSwapper().swap(daemonStdin, new Callable<Void>() {
                public Void call() {
                    execution.proceed();
                    return null;
                }
            });
        } finally {
            // Always deregister the handler and release both pipe ends,
            // even when proceed() throws.
            execution.getConnection().onStdin(null);
            IOUtils.closeQuietly(daemonStdin);
            IOUtils.closeQuietly(clientInputSink);
        }
    } catch (Exception e) {
        throw UncheckedException.throwAsUncheckedException(e);
    }
}
Example use of java.io.PipedInputStream from project zeppelin (apache):
class PySparkInterpreter, method createGatewayServerAndStartScript.
// Creates the Py4J gateway server, then launches the external python process
// that runs the generated PySpark bootstrap script and connects back to it.
private void createGatewayServerAndStartScript() {
// create python script
createPythonScript();
// Pick a free port and start the gateway before launching python, so the
// script can connect to it immediately.
port = findRandomOpenPortOnAllLocalInterfaces();
gatewayServer = new GatewayServer(this, port);
gatewayServer.start();
// Run python shell
// Choose python in the order of
// PYSPARK_DRIVER_PYTHON > PYSPARK_PYTHON > zeppelin.pyspark.python
String pythonExec = getProperty("zeppelin.pyspark.python");
if (System.getenv("PYSPARK_PYTHON") != null) {
pythonExec = System.getenv("PYSPARK_PYTHON");
}
if (System.getenv("PYSPARK_DRIVER_PYTHON") != null) {
pythonExec = System.getenv("PYSPARK_DRIVER_PYTHON");
}
// Command line: <python> <script path> <gateway port> <spark version number>
CommandLine cmd = CommandLine.parse(pythonExec);
cmd.addArgument(scriptPath, false);
cmd.addArgument(Integer.toString(port), false);
cmd.addArgument(Integer.toString(getSparkInterpreter().getSparkVersion().toNumber()), false);
executor = new DefaultExecutor();
outputStream = new InterpreterOutputStream(logger);
// Pipe pair: bytes written to 'ps' (via the 'ins' writer) surface on 'in',
// which is handed to the stream handler as the python process's stdin.
PipedOutputStream ps = new PipedOutputStream();
in = null;
try {
in = new PipedInputStream(ps);
} catch (IOException e1) {
throw new InterpreterException(e1);
}
// NOTE(review): OutputStreamWriter and getBytes() below use the platform
// default charset — presumably UTF-8 is intended; confirm.
ins = new BufferedWriter(new OutputStreamWriter(ps));
input = new ByteArrayOutputStream();
// stdout and stderr both go to the interpreter log stream; stdin is fed
// from the pipe above. The watchdog imposes no timeout.
PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
executor.setStreamHandler(streamHandler);
executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));
try {
Map env = setupPySparkEnv();
// Asynchronous execution; 'this' is notified when the process exits.
executor.execute(cmd, env, this);
pythonscriptRunning = true;
} catch (IOException e) {
throw new InterpreterException(e);
}
try {
// NOTE(review): this writes to 'input' (a ByteArrayOutputStream) but
// flushes 'ins' (the writer feeding the process's stdin), so nothing is
// actually sent to the python process here. If the intent is to send
// "import sys, getopt" to the interpreter, the write should presumably
// go through 'ins' — verify against the other users of 'input'.
input.write("import sys, getopt\n".getBytes());
ins.flush();
} catch (IOException e) {
throw new InterpreterException(e);
}
}
Aggregations