Use of org.apache.cassandra.io.util.DataInputBuffer in the Apache Cassandra project.
Class MessageOutBench, method serialize.
private int serialize(int messagingVersion) throws IOException {
    try (DataOutputBuffer out = new DataOutputBuffer()) {
        // msgOut and addr are fields of the enclosing benchmark class
        Message.serializer.serialize(Message.builder(msgOut).withCreatedAt(nanoTime()).withId(42).build(),
                                     out, messagingVersion);
        // Wrap the written bytes without duplicating the buffer and read the message back
        DataInputBuffer in = new DataInputBuffer(out.buffer(), false);
        Message.serializer.deserialize(in, addr, messagingVersion);
        return msgOut.serializedSize(messagingVersion);
    }
}
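Each of the usages below follows the same round trip: serialize into a DataOutputBuffer, wrap the written bytes from out.buffer() in a DataInputBuffer (passing false so the buffer is consumed directly rather than a duplicate), then deserialize. A minimal sketch of that pattern with plain primitives, assuming only the Cassandra io.util classes on the classpath; the values written here are purely illustrative:

try (DataOutputBuffer out = new DataOutputBuffer()) {
    out.writeInt(42);
    out.writeUTF("hello");
    // out.buffer() exposes a readable view of the bytes written so far
    try (DataInputBuffer in = new DataInputBuffer(out.buffer(), false)) {
        assert in.readInt() == 42;
        assert "hello".equals(in.readUTF());
    }
}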
Use of org.apache.cassandra.io.util.DataInputBuffer in the Apache Cassandra project.
Class ColumnsTest, method testSerialize.
private void testSerialize(Columns columns, List<ColumnMetadata> definitions) throws IOException {
    try (DataOutputBuffer out = new DataOutputBuffer()) {
        Columns.serializer.serialize(columns, out);
        // The size reported by the serializer must match the bytes actually written
        Assert.assertEquals(Columns.serializer.serializedSize(columns), out.buffer().remaining());
        // mock(columns) is a test helper that builds table metadata covering these columns
        Columns deserialized = Columns.serializer.deserialize(new DataInputBuffer(out.buffer(), false), mock(columns));
        Assert.assertEquals(columns, deserialized);
        Assert.assertEquals(columns.hashCode(), deserialized.hashCode());
        assertContents(deserialized, definitions);
    }
}
Use of org.apache.cassandra.io.util.DataInputBuffer in the Apache Cassandra project.
Class CassandraEntireSSTableStreamWriterTest, method testBlockReadingAndWritingOverWire.
@Test
public void testBlockReadingAndWritingOverWire() throws Throwable {
    StreamSession session = setupStreamingSessionForTest();
    InetAddressAndPort peer = FBUtilities.getBroadcastAddressAndPort();
    // This is needed as Netty releases the ByteBuffers as soon as the channel is flushed
    ByteBuf serializedFile = Unpooled.buffer(8192);
    EmbeddedChannel channel = createMockNettyChannel(serializedFile);
    try (AsyncStreamingOutputPlus out = new AsyncStreamingOutputPlus(channel);
         ComponentContext context = ComponentContext.create(descriptor)) {
        CassandraEntireSSTableStreamWriter writer = new CassandraEntireSSTableStreamWriter(sstable, session, context);
        writer.write(out);
        session.prepareReceiving(new StreamSummary(sstable.metadata().id, 1, 5104));
        CassandraStreamHeader header = CassandraStreamHeader.builder()
                                                            .withSSTableFormat(sstable.descriptor.formatType)
                                                            .withSSTableVersion(sstable.descriptor.version)
                                                            .withSSTableLevel(0)
                                                            .withEstimatedKeys(sstable.estimatedKeys())
                                                            .withSections(Collections.emptyList())
                                                            .withSerializationHeader(sstable.header.toComponent())
                                                            .withComponentManifest(context.manifest())
                                                            .isEntireSSTable(true)
                                                            .withFirstKey(sstable.first)
                                                            .withTableId(sstable.metadata().id)
                                                            .build();
        CassandraEntireSSTableStreamReader reader = new CassandraEntireSSTableStreamReader(
            new StreamMessageHeader(sstable.metadata().id, peer, session.planId(), false, 0, 0, 0, null), header, session);
        // Read the streamed SSTable back from the bytes captured by the mock Netty channel
        SSTableMultiWriter sstableWriter = reader.read(new DataInputBuffer(serializedFile.nioBuffer(), false));
        Collection<SSTableReader> newSstables = sstableWriter.finished();
        assertEquals(1, newSstables.size());
    }
}
Use of org.apache.cassandra.io.util.DataInputBuffer in the Apache Cassandra project.
Class ComponentManifestTest, method testSerialization_FailsOnBadBytes.
@Test(expected = EOFException.class)
public void testSerialization_FailsOnBadBytes() throws IOException {
    ByteBuffer buf = ByteBuffer.allocate(512);
    ComponentManifest expected = new ComponentManifest(new LinkedHashMap<Component, Long>() {
        {
            put(Component.DATA, 100L);
        }
    });
    DataOutputBufferFixed out = new DataOutputBufferFixed(buf);
    ComponentManifest.serializer.serialize(expected, out, MessagingService.VERSION_40);
    // Overwrite the leading bytes of the serialized manifest so deserialization runs off the end of the buffer
    buf.putInt(0, -100);
    DataInputBuffer in = new DataInputBuffer(out.buffer(), false);
    ComponentManifest actual = ComponentManifest.serializer.deserialize(in, MessagingService.VERSION_40);
    assertNotEquals(expected, actual);
}
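Corrupting the leading bytes makes the deserializer attempt to read more data than the buffer actually holds, and DataInputBuffer reports that by throwing EOFException, which is what the test expects. A minimal, hypothetical illustration of that behaviour (not taken from the test above):

ByteBuffer fourBytes = ByteBuffer.allocate(4);
try (DataInputBuffer in = new DataInputBuffer(fourBytes, false)) {
    in.readInt();   // consumes all four available bytes
    in.readInt();   // no bytes left, so this throws java.io.EOFException
}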
Use of org.apache.cassandra.io.util.DataInputBuffer in the Apache Cassandra project.
Class StreamRequestTest, method serializationRoundTrip.
@Test
public void serializationRoundTrip() throws Throwable {
    StreamRequest orig = new StreamRequest(ks,
                                           atEndpoint(Arrays.asList(range(1, 2), range(3, 4), range(5, 6)), Collections.emptyList()),
                                           atEndpoint(Collections.emptyList(), Arrays.asList(range(5, 6), range(7, 8))),
                                           Arrays.asList("a", "b", "c"));
    int expectedSize = (int) StreamRequest.serializer.serializedSize(orig, version);
    try (DataOutputBuffer out = new DataOutputBuffer(expectedSize)) {
        StreamRequest.serializer.serialize(orig, out, version);
        Assert.assertEquals(expectedSize, out.buffer().limit());
        try (DataInputBuffer in = new DataInputBuffer(out.buffer(), false)) {
            StreamRequest decoded = StreamRequest.serializer.deserialize(in, version);
            Assert.assertEquals(orig.keyspace, decoded.keyspace);
            Util.assertRCEquals(orig.full, decoded.full);
            Util.assertRCEquals(orig.transientReplicas, decoded.transientReplicas);
            Assert.assertEquals(orig.columnFamilies, decoded.columnFamilies);
        }
    }
}