Usage example of org.apache.cassandra.io.util.DataOutputBuffer in the Apache Cassandra project.
From class ChecksummedDataInputTest, method testResetCrc.
/**
 * Verifies that {@code ChecksummedDataInput.resetCrc()} restarts the running
 * checksum, so a second {@code checkCrc()} covers only the bytes read after
 * the reset.
 *
 * @throws IOException on any I/O failure while writing or reading the file
 */
@Test
public void testResetCrc() throws IOException {
    CRC32 crc = new CRC32();
    ByteBuffer buffer;

    // Fill a buffer with two sections, each followed by the CRC of that
    // section alone. The CRC is reset between sections so the second
    // checksum covers only the bytes written after the first checksum.
    try (DataOutputBuffer out = new DataOutputBuffer()) {
        out.write(127);
        out.writeBoolean(false);
        out.writeByte(10);
        out.writeChar('t');
        buffer = out.buffer();
        FBUtilities.updateChecksum(crc, buffer);
        out.writeInt((int) crc.getValue());

        // Remember where section 2 starts so we can checksum it in isolation.
        int bufferPos = out.getLength();
        out.writeDouble(3.3);
        out.writeFloat(2.2f);
        out.writeInt(42);
        buffer = out.buffer();
        buffer.position(bufferPos);
        crc.reset();
        FBUtilities.updateChecksum(crc, buffer);
        out.writeInt((int) crc.getValue());
        buffer = out.buffer();
    }

    // Persist the buffer to a file so it can be re-read through a
    // ChecksummedDataInput.
    File file = File.createTempFile("testResetCrc", "1");
    file.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(file)) {
        writer.write(buffer);
        writer.finish();
    }

    assertTrue(file.exists());
    assertEquals(buffer.remaining(), file.length());

    try (ChecksummedDataInput reader = ChecksummedDataInput.open(file)) {
        reader.limit(buffer.remaining());

        // Read back section 1 and validate its embedded CRC.
        assertEquals(127, reader.read());
        assertEquals(false, reader.readBoolean());
        assertEquals(10, reader.readByte());
        assertEquals('t', reader.readChar());
        assertTrue(reader.checkCrc());

        // After resetCrc() the next checkCrc() must cover only section 2.
        reader.resetCrc();
        // NOTE: JUnit 4's assertEquals(double, double) / (float, float)
        // without a delta is deprecated and fails unconditionally, so an
        // explicit delta is required; 0 demands exact bit-round-trip.
        assertEquals(3.3, reader.readDouble(), 0.0);
        assertEquals(2.2f, reader.readFloat(), 0.0f);
        assertEquals(42, reader.readInt());
        assertTrue(reader.checkCrc());
        assertTrue(reader.isEOF());
    }
}
Usage example of org.apache.cassandra.io.util.DataOutputBuffer in the Apache Cassandra project.
From class GossipDigestTest, method test.
/**
 * Checks GossipDigest accessors and round-trips an instance through its
 * serializer, verifying the deserialized copy compares equal.
 *
 * @throws IOException on serialization failure
 */
@Test
public void test() throws IOException {
    InetAddress endpoint = InetAddress.getByName("127.0.0.1");
    int generation = 0;
    int maxVersion = 123;
    GossipDigest expected = new GossipDigest(endpoint, generation, maxVersion);

    // Accessors must return exactly what was passed to the constructor.
    assertEquals(endpoint, expected.getEndpoint());
    assertEquals(generation, expected.getGeneration());
    assertEquals(maxVersion, expected.getMaxVersion());

    // Serialize then deserialize; compareTo == 0 means the round trip
    // preserved all ordering-relevant state.
    // try-with-resources: DataOutputBuffer is AutoCloseable and was
    // previously leaked.
    try (DataOutputBuffer output = new DataOutputBuffer()) {
        GossipDigest.serializer.serialize(expected, output, MessagingService.current_version);
        DataInputPlus input = new DataInputBuffer(output.getData());
        GossipDigest actual = GossipDigest.serializer.deserialize(input, MessagingService.current_version);
        assertEquals(0, expected.compareTo(actual));
    }
}
Usage example of org.apache.cassandra.io.util.DataOutputBuffer in the Apache Cassandra project.
From class IndexSummaryTest, method testAddEmptyKey.
/**
 * Verifies that an empty key can be added to an IndexSummary and survives a
 * serialize/deserialize round trip.
 *
 * @throws Exception on any builder, serialization, or I/O failure
 */
@Test
public void testAddEmptyKey() throws Exception {
    IPartitioner p = new RandomPartitioner();
    try (IndexSummaryBuilder builder = new IndexSummaryBuilder(1, 1, BASE_SAMPLING_LEVEL)) {
        builder.maybeAddEntry(p.decorateKey(ByteBufferUtil.EMPTY_BYTE_BUFFER), 0);
        IndexSummary summary = builder.build(p);
        assertEquals(1, summary.size());
        assertEquals(0, summary.getPosition(0));
        assertArrayEquals(new byte[0], summary.getKey(0));

        // Round-trip the summary through the serializer.
        DataOutputBuffer dos = new DataOutputBuffer();
        IndexSummary.serializer.serialize(summary, dos);
        DataInputStream dis = new DataInputStream(new ByteArrayInputStream(dos.toByteArray()));
        IndexSummary loaded = IndexSummary.serializer.deserialize(dis, p, 1, 1);

        assertEquals(1, loaded.size());
        assertEquals(summary.getPosition(0), loaded.getPosition(0));
        // BUG FIX: the original compared summary.getKey(0) with itself,
        // which can never fail; the deserialized key is what must match.
        assertArrayEquals(summary.getKey(0), loaded.getKey(0));
        summary.close();
        loaded.close();
    }
}
Usage example of org.apache.cassandra.io.util.DataOutputBuffer in the Apache Cassandra project.
From class MerkleTreesTest, method testSerialization.
/**
 * Round-trips a populated MerkleTrees through its serializer and verifies
 * the declared serialized size and the hash of a range are preserved.
 *
 * @throws Exception on serialization or tree-construction failure
 */
@Test
public void testSerialization() throws Exception {
    Range<Token> first = new Range<>(tok(3), tok(4));
    Collection<Range<Token>> ranges = new ArrayList<>();
    ranges.add(first);
    // A wrapping range (5, 2] exercises the wrap-around case.
    ranges.add(new Range<Token>(tok(5), tok(2)));
    mts = new MerkleTrees(partitioner);
    mts.addMerkleTrees(256, ranges);

    // Populate and hash the tree before serializing.
    mts.init();
    for (TreeRange range : mts.invalids())
        range.addAll(new HIterator(range.right));
    byte[] initialhash = mts.hash(first);

    long serializedSize = MerkleTrees.serializer.serializedSize(mts, MessagingService.current_version);
    byte[] serialized;
    // try-with-resources: DataOutputBuffer is AutoCloseable and was
    // previously leaked.
    try (DataOutputBuffer out = new DataOutputBuffer()) {
        MerkleTrees.serializer.serialize(mts, out, MessagingService.current_version);
        serialized = out.toByteArray();
    }
    // serializedSize() must agree with the bytes actually produced.
    assertEquals(serializedSize, serialized.length);

    DataInputBuffer in = new DataInputBuffer(serialized);
    MerkleTrees restored = MerkleTrees.serializer.deserialize(in, MessagingService.current_version);
    assertHashEquals(initialhash, restored.hash(first));
}
Usage example of org.apache.cassandra.io.util.DataOutputBuffer in the Apache Cassandra project.
From class BloomFilterTest, method testSerialize.
/**
 * Adds one element to the given filter, round-trips it through
 * FilterFactory serialization, and checks membership on the deserialized
 * copy (present for "a", absent for "b").
 *
 * @param f filter under test; mutated by adding the element "a"
 * @return the deserialized filter, for further assertions by callers
 * @throws IOException on serialization failure
 */
public static IFilter testSerialize(IFilter f) throws IOException {
    f.add(FilterTestHelper.bytes("a"));
    IFilter f2;
    // try-with-resources: DataOutputBuffer is AutoCloseable and was
    // previously leaked.
    try (DataOutputBuffer out = new DataOutputBuffer()) {
        FilterFactory.serialize(f, out);
        ByteArrayInputStream in = new ByteArrayInputStream(out.getData(), 0, out.getLength());
        f2 = FilterFactory.deserialize(new DataInputStream(in), true);
    }
    assert f2.isPresent(FilterTestHelper.bytes("a"));
    assert !f2.isPresent(FilterTestHelper.bytes("b"));
    return f2;
}
Aggregations