use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.
the class SerializationTestUtil method testSerialization.
/**
* A utility that tests serialization/deserialization.
* @param conf configuration to use, "io.serializations" is read to
* determine the serialization
* @param <K> the class of the item
* @param before item to (de)serialize
* @return deserialized item
*/
/**
 * A utility that round-trips an item through the configured serialization
 * framework and returns the reconstructed copy.
 *
 * @param conf configuration to use; "io.serializations" is read to
 *             determine the serialization
 * @param <K> the class of the item
 * @param before item to (de)serialize
 * @return deserialized item
 * @throws Exception if serialization or deserialization fails
 */
public static <K> K testSerialization(Configuration conf, K before) throws Exception {
    SerializationFactory factory = new SerializationFactory(conf);
    // Resolve the serializer/deserializer pair once from the runtime class.
    Serializer<K> serializer = factory.getSerializer(GenericsUtil.getClass(before));
    Deserializer<K> deserializer = factory.getDeserializer(GenericsUtil.getClass(before));
    // Serialize the item into an in-memory byte buffer.
    DataOutputBuffer serialized = new DataOutputBuffer();
    serializer.open(serialized);
    serializer.serialize(before);
    serializer.close();
    // Feed the exact bytes written above back through the deserializer.
    DataInputBuffer source = new DataInputBuffer();
    source.reset(serialized.getData(), serialized.getLength());
    deserializer.open(source);
    K after = deserializer.deserialize(null);
    deserializer.close();
    return after;
}
use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.
the class TestWritableSerialization method testWritableComparatorJavaSerialization.
@Test
@SuppressWarnings({ "rawtypes", "unchecked" })
public void testWritableComparatorJavaSerialization() throws Exception {
    // Round-trip a TestWC through plain Java serialization and verify equality.
    Serialization ser = new JavaSerialization();
    TestWC original = new TestWC(0);
    DataOutputBuffer outBuf = new DataOutputBuffer();
    Serializer<TestWC> serializer = ser.getSerializer(TestWC.class);
    serializer.open(outBuf);
    serializer.serialize(original);
    serializer.close();
    // Deserialize from the exact bytes just written.
    DataInputBuffer inBuf = new DataInputBuffer();
    inBuf.reset(outBuf.getData(), 0, outBuf.getLength());
    Deserializer<TestWC> deserializer = ser.getDeserializer(TestWC.class);
    deserializer.open(inBuf);
    TestWC roundTripped = deserializer.deserialize(null);
    deserializer.close();
    assertEquals(original, roundTripped);
}
use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.
the class TestViewFsFileStatusHdfs method testFileStatusSerialziation.
// NOTE(review): "Serialziation" typo is the published test-method name; kept as-is.
@Test
public void testFileStatusSerialziation() throws IOException, URISyntaxException {
    long expectedLen = fileSystemTestHelper.createFile(fHdfs, testfilename);
    FileStatus status = vfs.getFileStatus(new Path(testfilename));
    assertEquals(expectedLen, status.getLen());
    // Round-trip the FileStatus through its Writable encoding.
    DataOutputBuffer outBuf = new DataOutputBuffer();
    status.write(outBuf);
    DataInputBuffer inBuf = new DataInputBuffer();
    inBuf.reset(outBuf.getData(), 0, outBuf.getLength());
    FileStatus decoded = new FileStatus();
    decoded.readFields(inBuf);
    // Length must survive the write/readFields cycle.
    assertEquals(expectedLen, decoded.getLen());
}
use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.
the class TestLz4CompressorDecompressor method testCompressorDecopressorLogicWithCompressionStreams.
// test compress/decompress process through CompressionOutputStream/CompressionInputStream api
@Test
public void testCompressorDecopressorLogicWithCompressionStreams() {
    DataOutputStream deflateOut = null;
    DataInputStream inflateIn = null;
    int BYTE_SIZE = 1024 * 100;
    byte[] bytes = generate(BYTE_SIZE);
    int bufferSize = 262144;
    // Worst-case expansion allowance for incompressible blocks.
    int compressionOverhead = (bufferSize / 6) + 32;
    try {
        // Compress the generated bytes into an in-memory buffer.
        DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
        CompressionOutputStream deflateFilter = new BlockCompressorStream(compressedDataBuffer, new Lz4Compressor(bufferSize), bufferSize, compressionOverhead);
        deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
        deflateOut.write(bytes, 0, bytes.length);
        deflateOut.flush();
        deflateFilter.finish();
        // Decompress from the exact compressed bytes just produced.
        DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
        deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0, compressedDataBuffer.getLength());
        CompressionInputStream inflateFilter = new BlockDecompressorStream(deCompressedDataBuffer, new Lz4Decompressor(bufferSize), bufferSize);
        inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));
        byte[] result = new byte[BYTE_SIZE];
        // FIX: read() may return fewer than BYTE_SIZE bytes, leaving the tail of
        // result zeroed; readFully guarantees the whole array is populated (or throws).
        inflateIn.readFully(result);
        // FIX: JUnit's assertArrayEquals takes (message, expecteds, actuals);
        // the original input 'bytes' is the expected value.
        assertArrayEquals("original array not equals compress/decompressed array", bytes, result);
    } catch (IOException e) {
        fail("testLz4CompressorDecopressorLogicWithCompressionStreams ex error !!!");
    } finally {
        try {
            if (deflateOut != null)
                deflateOut.close();
            if (inflateIn != null)
                inflateIn.close();
        } catch (Exception ignored) {
            // Best-effort cleanup: a close failure must not mask the test result.
        }
    }
}
use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.
the class TestDelegationToken method testDelegationTokenIdentiferSerializationRoundTrip.
/**
 * Round-trips a delegation token identifier through write/readFields and
 * reports whether the bytes could be decoded.
 * NOTE(review): "Identifer" typo is the established method name; kept as-is.
 *
 * @return true when deserialization succeeds (equality is also asserted);
 *         false when readFields throws an IOException
 */
private boolean testDelegationTokenIdentiferSerializationRoundTrip(Text owner, Text renewer, Text realUser) throws IOException {
    TestDelegationTokenIdentifier original = new TestDelegationTokenIdentifier(owner, renewer, realUser);
    // Encode the identifier into an in-memory buffer.
    DataOutputBuffer outBuf = new DataOutputBuffer();
    original.writeImpl(outBuf);
    DataInputBuffer inBuf = new DataInputBuffer();
    inBuf.reset(outBuf.getData(), outBuf.getLength());
    try {
        TestDelegationTokenIdentifier decoded = new TestDelegationTokenIdentifier();
        decoded.readFields(inBuf);
        // A successful decode must reproduce an equal identifier.
        assertTrue(original.equals(decoded));
        return true;
    } catch (IOException e) {
        // Deliberate: callers probe malformed encodings and expect false, not a throw.
        return false;
    }
}
Aggregations