Use of java.io.PipedOutputStream in project h2database by h2database.
The class TestShell, method test().
private void test(final boolean commandLineArgs) throws IOException {
    // pipe 1: the shell writes to toolOut, the test reads the shell's output from testIn
    PipedInputStream testIn = new PipedInputStream();
    PipedOutputStream out = new PipedOutputStream(testIn);
    toolOut = new PrintStream(out, true);
    // pipe 2: the test writes commands to testOut, the shell reads them from toolIn
    out = new PipedOutputStream();
    PrintStream testOut = new PrintStream(out, true);
    toolIn = new PipedInputStream(out);
    // run the Shell in a background thread so that reading and writing the pipes cannot deadlock
    Task task = new Task() {
        @Override
        public void call() throws Exception {
            try {
                Shell shell = new Shell();
                shell.setIn(toolIn);
                shell.setOut(toolOut);
                shell.setErr(toolOut);
                if (commandLineArgs) {
                    shell.runTool("-url", "jdbc:h2:mem:", "-user", "sa", "-password", "sa");
                } else {
                    shell.runTool();
                }
            } finally {
                toolOut.close();
            }
        }
    };
    task.execute();
    InputStreamReader reader = new InputStreamReader(testIn);
    lineReader = new LineNumberReader(reader);
    read("");
    read("Welcome to H2 Shell");
    read("Exit with");
    if (!commandLineArgs) {
        read("[Enter]");
        testOut.println("jdbc:h2:mem:");
        read("URL");
        testOut.println("");
        read("Driver");
        testOut.println("sa");
        read("User");
        testOut.println("sa");
        read("Password");
    }
    read("Commands are case insensitive");
    read("help or ?");
    read("list");
    read("maxwidth");
    read("autocommit");
    read("history");
    read("quit or exit");
    read("");
    testOut.println("history");
    read("sql> No history");
    testOut.println("1");
    read("sql> Not found");
    testOut.println("select 1 a;");
    read("sql> A");
    read("1");
    read("(1 row,");
    testOut.println("history");
    read("sql> #1: select 1 a");
    read("To re-run a statement, type the number and press and enter");
    testOut.println("1");
    read("sql> select 1 a");
    read("A");
    read("1");
    read("(1 row,");
    testOut.println("select 'x' || space(1000) large, 'y' small;");
    read("sql> LARGE");
    read("x");
    read("(data is partially truncated)");
    read("(1 row,");
    testOut.println("select x, 's' s from system_range(0, 10001);");
    read("sql> X | S");
    for (int i = 0; i < 10000; i++) {
        read((i + "    ").substring(0, 4) + " | s");
    }
    for (int i = 10000; i <= 10001; i++) {
        read((i + " ").substring(0, 5) + " | s");
    }
    read("(10002 rows,");
    testOut.println("select error;");
    read("sql> Error:");
    if (read("").startsWith("Column \"ERROR\" not found")) {
        read("");
    }
    testOut.println("create table test(id int primary key, name varchar)\n;");
    read("sql> ...>");
    testOut.println("insert into test values(1, 'Hello');");
    read("sql>");
    testOut.println("select null n, * from test;");
    read("sql> N | ID | NAME");
    read("null | 1 | Hello");
    read("(1 row,");
    // test history
    for (int i = 0; i < 30; i++) {
        testOut.println("select " + i + " ID from test;");
        read("sql> ID");
        read("" + i);
        read("(1 row,");
    }
    testOut.println("20");
    read("sql> select 10 ID from test");
    read("ID");
    read("10");
    read("(1 row,");
    testOut.println("maxwidth");
    read("sql> Usage: maxwidth <integer value>");
    read("Maximum column width is now 100");
    testOut.println("maxwidth 80");
    read("sql> Maximum column width is now 80");
    testOut.println("autocommit");
    read("sql> Usage: autocommit [true|false]");
    read("Autocommit is now true");
    testOut.println("autocommit false");
    read("sql> Autocommit is now false");
    testOut.println("autocommit true");
    read("sql> Autocommit is now true");
    testOut.println("list");
    read("sql> Result list mode is now on");
    testOut.println("select 1 first, 2 second;");
    read("sql> FIRST : 1");
    read("SECOND: 2");
    read("(1 row, ");
    testOut.println("select x from system_range(1, 3);");
    read("sql> X: 1");
    read("");
    read("X: 2");
    read("");
    read("X: 3");
    read("(3 rows, ");
    testOut.println("select x, 2 as y from system_range(1, 3) where 1 = 0;");
    read("sql> X");
    read("Y");
    read("(0 rows, ");
    testOut.println("list");
    read("sql> Result list mode is now off");
    testOut.println("help");
    read("sql> Commands are case insensitive");
    read("help or ?");
    read("list");
    read("maxwidth");
    read("autocommit");
    read("history");
    read("quit or exit");
    read("");
    testOut.println("exit");
    read("sql>");
    task.get();
}
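The test above is built around two pipe pairs: the Shell's output is written to toolOut and read back by the test through testIn, while the test's simulated keystrokes go through testOut and arrive at the Shell via toolIn. A minimal sketch of just that wiring is shown below; runConsoleTool and its one-line echo behaviour are hypothetical stand-ins for Shell.runTool(), not part of the H2 API.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.PrintStream;
import java.io.UncheckedIOException;

public class PipeWiringSketch {

    public static void main(String[] args) throws IOException {
        // pipe 1: the tool writes to toolOut, the test reads the output from testIn
        PipedInputStream testIn = new PipedInputStream();
        PrintStream toolOut = new PrintStream(new PipedOutputStream(testIn), true);

        // pipe 2: the test writes commands to testOut, the tool reads them from toolIn
        PipedOutputStream testSide = new PipedOutputStream();
        PrintStream testOut = new PrintStream(testSide, true);
        PipedInputStream toolIn = new PipedInputStream(testSide);

        // the tool runs in its own thread; driving both ends of a pipe from a
        // single thread risks blocking on the pipe buffer
        new Thread(() -> runConsoleTool(toolIn, toolOut)).start();

        BufferedReader reader = new BufferedReader(new InputStreamReader(testIn));
        testOut.println("help");                  // send a command
        System.out.println(reader.readLine());    // read the tool's reply
    }

    // hypothetical stand-in for Shell.runTool(): echoes one line and returns
    private static void runConsoleTool(InputStream in, PrintStream out) {
        try (BufferedReader r = new BufferedReader(new InputStreamReader(in))) {
            out.println("you typed: " + r.readLine());
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}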
Use of java.io.PipedOutputStream in project h2database by h2database.
The class JdbcBlob, method setBinaryStream().
/**
* Get a writer to update the Blob. This is only supported for new, empty
* Blob objects that were created with Connection.createBlob(). The Blob is
* created in a separate thread, and the object is only updated when
* OutputStream.close() is called. The position must be 1, meaning the whole
* Blob data is set.
*
* @param pos where to start writing (the first byte is at position 1)
* @return an output stream
*/
@Override
public OutputStream setBinaryStream(long pos) throws SQLException {
    try {
        if (isDebugEnabled()) {
            debugCode("setBinaryStream(" + pos + ");");
        }
        checkClosed();
        if (pos != 1) {
            throw DbException.getInvalidValueException("pos", pos);
        }
        if (value.getPrecision() != 0) {
            throw DbException.getInvalidValueException("length", value.getPrecision());
        }
        // local variable avoids generating synthetic accessor method
        final JdbcConnection c = conn;
        final PipedInputStream in = new PipedInputStream();
        final Task task = new Task() {
            @Override
            public void call() {
                value = c.createBlob(in, -1);
            }
        };
        PipedOutputStream out = new PipedOutputStream(in) {
            @Override
            public void close() throws IOException {
                super.close();
                try {
                    task.get();
                } catch (Exception e) {
                    throw DbException.convertToIOException(e);
                }
            }
        };
        task.execute();
        return new BufferedOutputStream(out);
    } catch (Exception e) {
        throw logAndConvert(e);
    }
}
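From the caller's side, the contract in the Javadoc translates into a short sequence: create an empty Blob, request the stream at position 1, write, and close the stream before using the Blob. The sketch below is illustrative rather than taken from the H2 sources; the TEST table and the in-memory URL are assumptions.

import java.io.OutputStream;
import java.sql.Blob;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.Statement;

public class BlobStreamUsage {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:", "sa", "sa");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TEST(ID INT PRIMARY KEY, DATA BLOB)");
            Blob blob = conn.createBlob();                       // new, empty Blob
            try (OutputStream out = blob.setBinaryStream(1)) {   // position must be 1
                out.write(new byte[] { 1, 2, 3 });
            }                                                    // value is attached on close()
            try (PreparedStatement prep = conn.prepareStatement("INSERT INTO TEST VALUES(?, ?)")) {
                prep.setInt(1, 1);
                prep.setBlob(2, blob);
                prep.execute();
            }
        }
    }
}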
Use of java.io.PipedOutputStream in project h2database by h2database.
The class JdbcClob, method setCharacterStream().
/**
* Get a writer to update the Clob. This is only supported for new, empty
* Clob objects that were created with Connection.createClob() or
* createNClob(). The Clob is created in a separate thread, and the object
* is only updated when Writer.close() is called. The position must be 1,
* meaning the whole Clob data is set.
*
* @param pos where to start writing (the first character is at position 1)
* @return a writer
*/
@Override
public Writer setCharacterStream(long pos) throws SQLException {
    try {
        if (isDebugEnabled()) {
            debugCodeCall("setCharacterStream(" + pos + ");");
        }
        checkClosed();
        if (pos != 1) {
            throw DbException.getInvalidValueException("pos", pos);
        }
        if (value.getPrecision() != 0) {
            throw DbException.getInvalidValueException("length", value.getPrecision());
        }
        // required to avoid synthetic method creation
        final JdbcConnection c = conn;
        // PipedReader / PipedWriter are a lot slower
        // than PipedInputStream / PipedOutputStream
        // (Sun/Oracle Java 1.6.0_20)
        final PipedInputStream in = new PipedInputStream();
        final Task task = new Task() {
            @Override
            public void call() {
                value = c.createClob(IOUtils.getReader(in), -1);
            }
        };
        PipedOutputStream out = new PipedOutputStream(in) {
            @Override
            public void close() throws IOException {
                super.close();
                try {
                    task.get();
                } catch (Exception e) {
                    throw DbException.convertToIOException(e);
                }
            }
        };
        task.execute();
        return IOUtils.getBufferedWriter(out);
    } catch (Exception e) {
        throw logAndConvert(e);
    }
}
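The Clob variant mirrors the Blob case: only a fresh Clob obtained from Connection.createClob() can be written, the position must be 1, and the value is attached when the Writer is closed. A minimal usage sketch, with an illustrative DOCS table that is not part of the snippet above:

import java.io.Writer;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.Statement;

public class ClobWriterUsage {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:", "sa", "sa");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE DOCS(ID INT PRIMARY KEY, BODY CLOB)");
            Clob clob = conn.createClob();                        // new, empty Clob
            try (Writer writer = clob.setCharacterStream(1)) {    // position must be 1
                writer.write("Hello, world");
            }                                                     // value is attached on close()
            try (PreparedStatement prep = conn.prepareStatement("INSERT INTO DOCS VALUES(?, ?)")) {
                prep.setInt(1, 1);
                prep.setClob(2, clob);
                prep.execute();
            }
        }
    }
}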
Use of java.io.PipedOutputStream in project cdap by caskdata.
The class RecordWithString, method testReduceProjection().
@Test
public void testReduceProjection() throws IOException, UnsupportedTypeException {
    PipedOutputStream output = new PipedOutputStream();
    PipedInputStream input = new PipedInputStream(output);
    Schema sourceSchema = new ReflectionSchemaGenerator().generate(MoreFields.class);
    Schema targetSchema = new ReflectionSchemaGenerator().generate(LessFields.class);
    MoreFields moreFields = new MoreFields(10, 20.2, "30", ImmutableList.of("1", "2"));
    new ReflectionDatumWriter<MoreFields>(sourceSchema).encode(moreFields, new BinaryEncoder(output));
    LessFields lessFields = new ReflectionDatumReader<>(targetSchema, TypeToken.of(LessFields.class))
            .read(new BinaryDecoder(input), sourceSchema);
    Assert.assertEquals("30", lessFields.k);
    Assert.assertEquals(moreFields.inner.b, lessFields.inner.b);
}
Use of java.io.PipedOutputStream in project cdap by caskdata.
The class ASMDatumCodecTest, method testPrimitiveArray().
@Test
public void testPrimitiveArray() throws IOException, UnsupportedTypeException {
    TypeToken<int[]> type = new TypeToken<int[]>() { };
    PipedOutputStream os = new PipedOutputStream();
    PipedInputStream is = new PipedInputStream(os);
    int[] writeValue = { 1, 2, 3, 4, -5, -6, -7, -8 };
    DatumWriter<int[]> writer = getWriter(type);
    writer.encode(writeValue, new BinaryEncoder(os));
    ReflectionDatumReader<int[]> reader = new ReflectionDatumReader<>(getSchema(type), type);
    int[] value = reader.read(new BinaryDecoder(is), getSchema(type));
    Assert.assertArrayEquals(writeValue, value);
}
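Both cdap tests encode and decode on the same thread, which only works because the serialized data fits into the pipe's internal buffer (1024 bytes by default for PipedInputStream); a larger payload would block the writer with nothing left to drain the pipe. A plain java.io sketch of the same single-thread round trip, with an explicitly sized buffer:

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class SameThreadPipeSketch {
    public static void main(String[] args) throws IOException {
        PipedOutputStream out = new PipedOutputStream();
        // give the pipe enough capacity to hold everything written before
        // reading starts (the default buffer is only 1024 bytes)
        PipedInputStream in = new PipedInputStream(out, 64 * 1024);

        try (DataOutputStream encoder = new DataOutputStream(out)) {
            for (int i = 0; i < 1000; i++) {
                encoder.writeInt(i);   // 4000 bytes in total, well within the buffer
            }
        }
        try (DataInputStream decoder = new DataInputStream(in)) {
            for (int i = 0; i < 1000; i++) {
                if (decoder.readInt() != i) {
                    throw new AssertionError("round trip failed at " + i);
                }
            }
        }
    }
}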