Use of java.io.OutputStream in project hadoop by apache: the class CryptoStreamsTestBase, method testPositionedRead.
/** Test positioned read. */
@Test(timeout = 120000)
public void testPositionedRead() throws Exception {
  OutputStream out = getOutputStream(defaultBufferSize);
  writeData(out);

  InputStream in = getInputStream(defaultBufferSize);
  // Pos: 1/3 dataLen
  positionedReadCheck(in, dataLen / 3);

  // Pos: 1/2 dataLen
  positionedReadCheck(in, dataLen / 2);
  in.close();
}
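positionedReadCheck is a private helper of CryptoStreamsTestBase and is not shown on this page. Below is a minimal sketch of what such a check could look like, assuming the input stream implements org.apache.hadoop.fs.PositionedReadable and that data and dataLen are the test fixture fields used above; the name positionedReadCheckSketch and its body are illustrative, not the actual Hadoop helper.

// Imports assumed: java.io.InputStream, org.apache.hadoop.fs.PositionedReadable, org.junit.Assert.

// Illustrative sketch, not the Hadoop helper: read everything from 'pos' to the end
// of the test data via positioned reads and compare against the expected bytes.
// 'data' and 'dataLen' are the test fixture fields used by the methods above.
private void positionedReadCheckSketch(InputStream in, int pos) throws Exception {
  byte[] result = new byte[dataLen - pos];
  int total = 0;
  while (total < result.length) {
    int n = ((PositionedReadable) in).read(pos + total, result, total,
        result.length - total);
    if (n < 0) {
      break;
    }
    total += n;
  }
  Assert.assertEquals(result.length, total);
  byte[] expected = new byte[result.length];
  System.arraycopy(data, pos, expected, 0, expected.length);
  Assert.assertArrayEquals(expected, result);
}

A positioned read takes an explicit offset, so checking at dataLen / 3 and then dataLen / 2 does not depend on where the stream's current offset happens to be.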
Use of java.io.OutputStream in project hadoop by apache: the class CryptoStreamsTestBase, method testByteBufferRead.
/** Test byte buffer read with different buffer size. */
@Test(timeout = 120000)
public void testByteBufferRead() throws Exception {
  OutputStream out = getOutputStream(defaultBufferSize);
  writeData(out);

  // Default buffer size, initial buffer position is 0
  InputStream in = getInputStream(defaultBufferSize);
  ByteBuffer buf = ByteBuffer.allocate(dataLen + 100);
  byteBufferReadCheck(in, buf, 0);
  in.close();

  // Default buffer size, initial buffer position is not 0
  in = getInputStream(defaultBufferSize);
  buf.clear();
  byteBufferReadCheck(in, buf, 11);
  in.close();

  // Small buffer size, initial buffer position is 0
  in = getInputStream(smallBufferSize);
  buf.clear();
  byteBufferReadCheck(in, buf, 0);
  in.close();

  // Small buffer size, initial buffer position is not 0
  in = getInputStream(smallBufferSize);
  buf.clear();
  byteBufferReadCheck(in, buf, 11);
  in.close();

  // Direct buffer, default buffer size, initial buffer position is 0
  in = getInputStream(defaultBufferSize);
  buf = ByteBuffer.allocateDirect(dataLen + 100);
  byteBufferReadCheck(in, buf, 0);
  in.close();

  // Direct buffer, default buffer size, initial buffer position is not 0
  in = getInputStream(defaultBufferSize);
  buf.clear();
  byteBufferReadCheck(in, buf, 11);
  in.close();

  // Direct buffer, small buffer size, initial buffer position is 0
  in = getInputStream(smallBufferSize);
  buf.clear();
  byteBufferReadCheck(in, buf, 0);
  in.close();

  // Direct buffer, small buffer size, initial buffer position is not 0
  in = getInputStream(smallBufferSize);
  buf.clear();
  byteBufferReadCheck(in, buf, 11);
  in.close();
}
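byteBufferReadCheck is likewise a helper defined elsewhere in the class. The sketch below shows one plausible shape for it, assuming the stream implements org.apache.hadoop.fs.ByteBufferReadable and that a single read(ByteBuffer) call is enough for the comparison; byteBufferReadCheckSketch is an illustrative name, not the Hadoop source.

// Imports assumed: java.io.InputStream, java.nio.ByteBuffer,
// org.apache.hadoop.fs.ByteBufferReadable, org.junit.Assert.

// Illustrative sketch, not the Hadoop helper: start reading into 'buf' at 'bufPos'
// and compare whatever one read(ByteBuffer) call returned with the test data.
private void byteBufferReadCheckSketch(InputStream in, ByteBuffer buf, int bufPos)
    throws Exception {
  buf.position(bufPos);
  int n = ((ByteBufferReadable) in).read(buf);
  Assert.assertEquals(bufPos + n, buf.position());

  byte[] readData = new byte[n];
  buf.position(bufPos);
  buf.get(readData);
  byte[] expectedData = new byte[n];
  System.arraycopy(data, 0, expectedData, 0, n);
  Assert.assertArrayEquals(expectedData, readData);
}

The test pairs heap and direct buffers with start positions 0 and 11 because a crypto stream has to decrypt correctly into whatever ByteBuffer the caller supplies, regardless of its backing storage or starting offset.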
Use of java.io.OutputStream in project hadoop by apache: the class CryptoStreamsTestBase, method testSkip.
/** Test skip. */
@Test(timeout = 120000)
public void testSkip() throws Exception {
  OutputStream out = getOutputStream(defaultBufferSize);
  writeData(out);

  // Default buffer size
  InputStream in = getInputStream(defaultBufferSize);
  byte[] result = new byte[dataLen];
  int n1 = readAll(in, result, 0, dataLen / 3);
  Assert.assertEquals(n1, ((Seekable) in).getPos());

  long skipped = in.skip(dataLen / 3);
  int n2 = readAll(in, result, 0, dataLen);
  Assert.assertEquals(dataLen, n1 + skipped + n2);
  byte[] readData = new byte[n2];
  System.arraycopy(result, 0, readData, 0, n2);
  byte[] expectedData = new byte[n2];
  System.arraycopy(data, dataLen - n2, expectedData, 0, n2);
  Assert.assertArrayEquals(readData, expectedData);

  try {
    skipped = in.skip(-3);
    Assert.fail("Skip Negative length should fail.");
  } catch (IllegalArgumentException e) {
    GenericTestUtils.assertExceptionContains("Negative skip length", e);
  }

  // Skip after EOF
  skipped = in.skip(3);
  Assert.assertEquals(skipped, 0);

  in.close();
}
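readAll is another helper that this snippet calls but does not define. The assertion dataLen == n1 + skipped + n2 only holds if readAll keeps reading until either the requested length is reached or the stream hits EOF, so a plausible sketch looks like this (readAllSketch is an illustrative name, not the Hadoop helper).

// Imports assumed: java.io.InputStream.

// Illustrative sketch, not the Hadoop helper: loop until 'len' bytes are read or
// EOF is hit, returning how many bytes were actually read.
private int readAllSketch(InputStream in, byte[] b, int off, int len)
    throws Exception {
  int total = 0;
  while (total < len) {
    int n = in.read(b, off + total, len - total);
    if (n == -1) {
      break;
    }
    total += n;
  }
  return total;
}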
Use of java.io.OutputStream in project hadoop by apache: the class CryptoStreamsTestBase, method syncableCheck.
private void syncableCheck() throws IOException {
  OutputStream out = getOutputStream(smallBufferSize);
  try {
    int bytesWritten = dataLen / 3;
    out.write(data, 0, bytesWritten);
    ((Syncable) out).hflush();

    InputStream in = getInputStream(defaultBufferSize);
    verify(in, bytesWritten, data);
    in.close();

    out.write(data, bytesWritten, dataLen - bytesWritten);
    ((Syncable) out).hsync();

    in = getInputStream(defaultBufferSize);
    verify(in, dataLen, data);
    in.close();
  } finally {
    out.close();
  }
}
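The (Syncable) casts rely on the output stream under test implementing org.apache.hadoop.fs.Syncable, whose hflush()/hsync() make the bytes written so far visible to a new reader. verify is also not shown on this page; the sketch below assumes it simply reads the expected number of bytes and compares them with the corresponding prefix of the test data (verifySketch is an illustrative name, not the Hadoop helper).

// Imports assumed: java.io.InputStream, org.junit.Assert.

// Illustrative sketch, not the Hadoop helper: read 'expectedLen' bytes and check
// that they equal the first 'expectedLen' bytes of 'expectedData'.
private void verifySketch(InputStream in, int expectedLen, byte[] expectedData)
    throws Exception {
  byte[] readData = new byte[expectedLen];
  int total = 0;
  while (total < expectedLen) {
    int n = in.read(readData, total, expectedLen - total);
    if (n == -1) {
      break;
    }
    total += n;
  }
  Assert.assertEquals(expectedLen, total);
  byte[] expected = new byte[expectedLen];
  System.arraycopy(expectedData, 0, expected, 0, expectedLen);
  Assert.assertArrayEquals(expected, readData);
}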
Use of java.io.OutputStream in project hadoop by apache: the class CryptoStreamsTestBase, method testReadFully.
/** Test read fully. */
@Test(timeout = 120000)
public void testReadFully() throws Exception {
  OutputStream out = getOutputStream(defaultBufferSize);
  writeData(out);

  InputStream in = getInputStream(defaultBufferSize);
  final int len1 = dataLen / 4;
  // Read len1 bytes
  byte[] readData = new byte[len1];
  readAll(in, readData, 0, len1);
  byte[] expectedData = new byte[len1];
  System.arraycopy(data, 0, expectedData, 0, len1);
  Assert.assertArrayEquals(readData, expectedData);

  // Pos: 1/3 dataLen
  readFullyCheck(in, dataLen / 3);

  // Read len1 bytes
  readData = new byte[len1];
  readAll(in, readData, 0, len1);
  expectedData = new byte[len1];
  System.arraycopy(data, len1, expectedData, 0, len1);
  Assert.assertArrayEquals(readData, expectedData);

  // Pos: 1/2 dataLen
  readFullyCheck(in, dataLen / 2);

  // Read len1 bytes
  readData = new byte[len1];
  readAll(in, readData, 0, len1);
  expectedData = new byte[len1];
  System.arraycopy(data, 2 * len1, expectedData, 0, len1);
  Assert.assertArrayEquals(readData, expectedData);

  in.close();
}
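readFullyCheck is the last helper referenced but not defined here. The PositionedReadable contract is that readFully(position, buffer) fills the whole buffer from the given position without changing the stream's current offset, which is why the sequential reads above continue from len1 and 2 * len1 as if the positioned reads never happened. A sketch under that assumption (readFullyCheckSketch is an illustrative name, not the Hadoop helper):

// Imports assumed: java.io.InputStream, org.apache.hadoop.fs.PositionedReadable,
// org.apache.hadoop.fs.Seekable, org.junit.Assert.

// Illustrative sketch, not the Hadoop helper: readFully(pos, ...) must fill the
// whole buffer from 'pos' and must not move the stream's current offset.
private void readFullyCheckSketch(InputStream in, int pos) throws Exception {
  long oldPos = ((Seekable) in).getPos();
  byte[] result = new byte[dataLen - pos];
  ((PositionedReadable) in).readFully(pos, result);

  byte[] expected = new byte[dataLen - pos];
  System.arraycopy(data, pos, expected, 0, expected.length);
  Assert.assertArrayEquals(expected, result);

  // The current offset is unchanged, so the next sequential readAll in
  // testReadFully continues where the previous one stopped.
  Assert.assertEquals(oldPos, ((Seekable) in).getPos());
}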