Use of java.io.ByteArrayInputStream in project hadoop by apache.
From the class TestSnappyCompressorDecompressor, method testCompressorDecompressorEmptyStreamLogic.
@Test
public void testCompressorDecompressorEmptyStreamLogic() {
  ByteArrayInputStream bytesIn = null;
  ByteArrayOutputStream bytesOut = null;
  byte[] buf = null;
  BlockDecompressorStream blockDecompressorStream = null;
  try {
    // compress empty stream
    bytesOut = new ByteArrayOutputStream();
    BlockCompressorStream blockCompressorStream =
        new BlockCompressorStream(bytesOut, new SnappyCompressor(), 1024, 0);
    // close without write
    blockCompressorStream.close();
    // check compressed output
    buf = bytesOut.toByteArray();
    assertEquals("empty stream compressed output size != 4", 4, buf.length);
    // use compressed output as input for decompression
    bytesIn = new ByteArrayInputStream(buf);
    // create decompression stream
    blockDecompressorStream =
        new BlockDecompressorStream(bytesIn, new SnappyDecompressor(), 1024);
    // no byte is available because stream was closed
    assertEquals("return value is not -1", -1, blockDecompressorStream.read());
  } catch (Exception e) {
    fail("testCompressorDecompressorEmptyStreamLogic ex error !!!" + e.getMessage());
  } finally {
    if (blockDecompressorStream != null)
      try {
        bytesIn.close();
        bytesOut.close();
        blockDecompressorStream.close();
      } catch (IOException e) {
      }
  }
}
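To see the same in-memory round trip without Hadoop's native Snappy bindings, here is a minimal, self-contained sketch that swaps the Snappy codec for the JDK's DeflaterOutputStream/InflaterInputStream. It only illustrates the pattern the test exercises (close a compressor stream without writing anything, then expect the first read of the decompressor stream to return -1); the size of the empty frame differs from Snappy's 4 bytes.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;

public class EmptyStreamRoundTrip {
  public static void main(String[] args) throws IOException {
    // "compress" an empty stream: open and close without writing anything
    ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
    new DeflaterOutputStream(bytesOut).close();
    byte[] compressed = bytesOut.toByteArray();
    System.out.println("empty stream compressed to " + compressed.length + " bytes");

    // decompress from the in-memory buffer; the first read should hit EOF (-1)
    InflaterInputStream in =
        new InflaterInputStream(new ByteArrayInputStream(compressed));
    System.out.println("first read returns " + in.read());
    in.close();
  }
}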
Use of java.io.ByteArrayInputStream in project hadoop by apache.
From the class TestResponseBuffer, method checkBuffer.
private void checkBuffer(ResponseBuffer buf, String expected) throws IOException {
  // buffer payload length matches expected length
  int expectedLength = expected.getBytes().length;
  assertEquals(expectedLength, buf.size());
  // buffer has the framing bytes (int)
  byte[] framed = buf.toByteArray();
  assertEquals(expectedLength + 4, framed.length);
  // verify encoding of buffer: framing (int) + payload bytes
  DataInputStream dis = new DataInputStream(new ByteArrayInputStream(framed));
  assertEquals(expectedLength, dis.readInt());
  assertEquals(expectedLength, dis.available());
  byte[] payload = new byte[expectedLength];
  dis.readFully(payload);
  assertEquals(expected, new String(payload));
}
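ResponseBuffer is Hadoop's internal IPC buffer, but the framing the test verifies (a 4-byte length prefix followed by the payload) can be reproduced with plain JDK streams. A small sketch, with the class and variable names invented for illustration:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class FramedPayloadDemo {
  public static void main(String[] args) throws IOException {
    byte[] payload = "hello rpc".getBytes(StandardCharsets.UTF_8);

    // write the frame: a 4-byte big-endian length followed by the payload
    ByteArrayOutputStream frame = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(frame);
    out.writeInt(payload.length);
    out.write(payload);
    out.flush();

    // read it back from an in-memory buffer, exactly as checkBuffer does
    DataInputStream in =
        new DataInputStream(new ByteArrayInputStream(frame.toByteArray()));
    int length = in.readInt();
    byte[] decoded = new byte[length];
    in.readFully(decoded);
    System.out.println(new String(decoded, StandardCharsets.UTF_8)); // hello rpc
  }
}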
Use of java.io.ByteArrayInputStream in project hadoop by apache.
From the class TestLineReader, method testCustomDelimiter.
@Test
public void testCustomDelimiter() throws Exception {
  // Note: Delimiter, TestData, lineReader and line are fields of the
  // enclosing TestLineReader class.
  /* TEST_1
   * Scenario: the tail of the current buffer equals the starting
   * character(s) of the delimiter.
   *
   * The test data is arranged so that:
   *
   * 1) "</entity>" is the delimiter.
   *
   * 2) The tail of the current buffer is "</", which matches the
   *    starting character sequence of the delimiter.
   *
   * 3) The head of the next buffer is "id>", which does NOT match the
   *    remaining characters of the delimiter.
   *
   * 4) The input data is prefixed with the char 'a' repeated
   *    numberOfCharToFillTheBuffer times, so that the first buffered
   *    chunk of input ends exactly at "</", i.e. the first 2 chars of
   *    the delimiter.
   *
   * 5) For this, the buffer size is taken as 64 * 1024.
   *
   * Check condition:
   * In the second key-value pair, the value should contain
   * "</" from the current token and "id>" from the next token.
   */
  Delimiter = "</entity>";
  // Ending part of the input data buffer; it contains "</", i.e. the
  // starting characters of the delimiter
  String CurrentBufferTailToken = "</entity><entity><id>Gelesh</";
  // Supposed start of the next buffer
  String NextBufferHeadToken = "id><name>Omathil</name></entity>";
  // Expected must capture data from both buffers, excluding the delimiter
  String Expected = (CurrentBufferTailToken + NextBufferHeadToken).replace(Delimiter, "");
  String TestPartOfInput = CurrentBufferTailToken + NextBufferHeadToken;
  int BufferSize = 64 * 1024;
  int numberOfCharToFillTheBuffer = BufferSize - CurrentBufferTailToken.length();
  StringBuilder fillerString = new StringBuilder();
  for (int i = 0; i < numberOfCharToFillTheBuffer; i++) {
    // char 'a' as a filler for the test string
    fillerString.append('a');
  }
  TestData = fillerString + TestPartOfInput;
  lineReader = new LineReader(new ByteArrayInputStream(TestData.getBytes()), Delimiter.getBytes());
  line = new Text();
  lineReader.readLine(line);
  Assert.assertEquals(fillerString.toString(), line.toString());
  lineReader.readLine(line);
  Assert.assertEquals(Expected, line.toString());
  /* TEST_2
   * Scenario: the character(s) preceding the delimiter equal the
   * starting character(s) of the delimiter.
   */
  Delimiter = "record";
  StringBuilder TestStringBuilder = new StringBuilder();
  TestStringBuilder.append(Delimiter + "Kerala ");
  TestStringBuilder.append(Delimiter + "Bangalore");
  TestStringBuilder.append(Delimiter + " North Korea");
  TestStringBuilder.append(Delimiter + Delimiter + "Guantanamo");
  TestStringBuilder.append(Delimiter + "ecord" + "recor" + // ~EOF with 're'
      "core");
  TestData = TestStringBuilder.toString();
  lineReader = new LineReader(new ByteArrayInputStream(TestData.getBytes()), Delimiter.getBytes());
  lineReader.readLine(line);
  Assert.assertEquals("", line.toString());
  lineReader.readLine(line);
  Assert.assertEquals("Kerala ", line.toString());
  lineReader.readLine(line);
  Assert.assertEquals("Bangalore", line.toString());
  lineReader.readLine(line);
  Assert.assertEquals(" North Korea", line.toString());
  lineReader.readLine(line);
  Assert.assertEquals("", line.toString());
  lineReader.readLine(line);
  Assert.assertEquals("Guantanamo", line.toString());
  lineReader.readLine(line);
  Assert.assertEquals(("ecord" + "recor" + "core"), line.toString());
  /* TEST_3
   * Scenario: "aaaabccc" split by the delimiter "aaab".
   */
  TestData = "aaaabccc";
  Delimiter = "aaab";
  lineReader = new LineReader(new ByteArrayInputStream(TestData.getBytes()), Delimiter.getBytes());
  lineReader.readLine(line);
  Assert.assertEquals("a", line.toString());
  lineReader.readLine(line);
  Assert.assertEquals("ccc", line.toString());
}
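Assuming org.apache.hadoop.util.LineReader and org.apache.hadoop.io.Text are on the classpath (both already appear in the test above), basic usage of a custom delimiter over an in-memory buffer might look like the sketch below; the loop relies on readLine returning the number of bytes consumed and 0 at end of stream.

import java.io.ByteArrayInputStream;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.LineReader;

public class CustomDelimiterDemo {
  public static void main(String[] args) throws Exception {
    byte[] delimiter = "</entity>".getBytes();
    String xml = "<entity><id>1</id></entity><entity><id>2</id></entity>";

    LineReader reader =
        new LineReader(new ByteArrayInputStream(xml.getBytes()), delimiter);
    Text record = new Text();
    // readLine returns the number of bytes consumed; 0 signals end of stream
    while (reader.readLine(record) > 0) {
      System.out.println(record); // each <entity>...</id> chunk, delimiter stripped
    }
    reader.close();
  }
}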
Use of java.io.ByteArrayInputStream in project hadoop by apache.
From the class TestProtoUtil, method doVarIntTest.
private void doVarIntTest(int value) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  CodedOutputStream cout = CodedOutputStream.newInstance(baos);
  cout.writeRawVarint32(value);
  cout.flush();
  DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
  assertEquals(value, ProtoUtil.readRawVarint32(dis));
}
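To make the wire format explicit, here is a self-contained sketch of the same base-128 varint encoding (7 payload bits per byte, high bit set as a continuation flag) written by hand against ByteArrayOutputStream/ByteArrayInputStream instead of protobuf's CodedOutputStream and ProtoUtil. The helper names are illustrative and not part of any Hadoop or protobuf API.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

public class VarintDemo {
  // write an int as a base-128 varint: 7 payload bits per byte,
  // high bit set on every byte except the last
  static void writeVarint32(ByteArrayOutputStream out, int value) {
    while ((value & ~0x7F) != 0) {
      out.write((value & 0x7F) | 0x80);
      value >>>= 7;
    }
    out.write(value);
  }

  // read the varint back, least-significant 7-bit group first
  static int readVarint32(InputStream in) throws IOException {
    int result = 0;
    int shift = 0;
    int b;
    do {
      b = in.read();
      result |= (b & 0x7F) << shift;
      shift += 7;
    } while ((b & 0x80) != 0);
    return result;
  }

  public static void main(String[] args) throws IOException {
    for (int value : new int[] { 0, 1, 127, 128, 300, Integer.MAX_VALUE, -1 }) {
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      writeVarint32(out, value);
      byte[] encoded = out.toByteArray();
      int decoded = readVarint32(new ByteArrayInputStream(encoded));
      System.out.println(value + " -> " + encoded.length + " bytes -> " + decoded);
    }
  }
}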
Use of java.io.ByteArrayInputStream in project hadoop by apache.
From the class TestHttpExceptionUtils, method testValidateResponseJsonErrorKnownException.
@Test
public void testValidateResponseJsonErrorKnownException() throws IOException {
  Map<String, Object> json = new HashMap<String, Object>();
  json.put(HttpExceptionUtils.ERROR_EXCEPTION_JSON, IllegalStateException.class.getSimpleName());
  json.put(HttpExceptionUtils.ERROR_CLASSNAME_JSON, IllegalStateException.class.getName());
  json.put(HttpExceptionUtils.ERROR_MESSAGE_JSON, "EX");
  Map<String, Object> response = new HashMap<String, Object>();
  response.put(HttpExceptionUtils.ERROR_JSON, json);
  ObjectMapper jsonMapper = new ObjectMapper();
  String msg = jsonMapper.writeValueAsString(response);
  InputStream is = new ByteArrayInputStream(msg.getBytes());
  HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
  Mockito.when(conn.getErrorStream()).thenReturn(is);
  Mockito.when(conn.getResponseMessage()).thenReturn("msg");
  Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_BAD_REQUEST);
  try {
    HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_CREATED);
    Assert.fail();
  } catch (IllegalStateException ex) {
    Assert.assertEquals("EX", ex.getMessage());
  }
}
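The key trick in this test is that a ByteArrayInputStream lets a mocked HttpURLConnection serve an arbitrary error body without any network. As a stand-alone illustration, the sketch below parses such a JSON error envelope back out of an in-memory stream with Jackson; the literal field names are stand-ins for the HttpExceptionUtils constants used above and are not guaranteed to match them.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ErrorEnvelopeDemo {
  @SuppressWarnings("unchecked")
  public static void main(String[] args) throws IOException {
    // field names are illustrative; the real constants live in HttpExceptionUtils
    String body = "{\"RemoteException\":{\"exception\":\"IllegalStateException\","
        + "\"javaClassName\":\"java.lang.IllegalStateException\",\"message\":\"EX\"}}";
    InputStream is = new ByteArrayInputStream(body.getBytes());

    // parse the envelope the way a client would parse conn.getErrorStream()
    ObjectMapper mapper = new ObjectMapper();
    Map<String, Object> response = mapper.readValue(is, Map.class);
    Map<String, Object> error = (Map<String, Object>) response.get("RemoteException");
    System.out.println(error.get("javaClassName") + ": " + error.get("message"));
  }
}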