Use of org.apache.cassandra.io.util.DataOutputBuffer in project cassandra by apache: class ChecksummedDataInputTest, method testReadMethods.
@Test
public void testReadMethods() throws IOException {
    // Make sure this array is bigger than the reader buffer size
    // so we test updating the crc across buffer boundaries
    byte[] b = new byte[RandomAccessReader.DEFAULT_BUFFER_SIZE * 2];
    for (int i = 0; i < b.length; i++)
        b[i] = (byte) i;

    ByteBuffer buffer;

    // fill a bytebuffer with some input
    try (DataOutputBuffer out = new DataOutputBuffer()) {
        out.write(127);
        out.write(b);
        out.writeBoolean(false);
        out.writeByte(10);
        out.writeChar('t');
        out.writeDouble(3.3);
        out.writeFloat(2.2f);
        out.writeInt(42);
        out.writeLong(Long.MAX_VALUE);
        out.writeShort(Short.MIN_VALUE);
        out.writeUTF("utf");
        out.writeVInt(67L);
        out.writeUnsignedVInt(88L);
        out.writeBytes("abcdefghi");
        buffer = out.buffer();
    }

    // calculate expected CRC
    CRC32 crc = new CRC32();
    FBUtilities.updateChecksum(crc, buffer);

    // save the buffer to file to create a RAR
    File file = File.createTempFile("testReadMethods", "1");
    file.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(file)) {
        writer.write(buffer);
        writer.writeInt((int) crc.getValue());
        writer.finish();
    }

    assertTrue(file.exists());
    assertEquals(buffer.remaining() + 4, file.length());

    try (ChecksummedDataInput reader = ChecksummedDataInput.open(file)) {
        reader.limit(buffer.remaining() + 4);

        // assert that we read all the right values back
        assertEquals(127, reader.read());
        byte[] bytes = new byte[b.length];
        reader.readFully(bytes);
        assertTrue(Arrays.equals(bytes, b));
        assertEquals(false, reader.readBoolean());
        assertEquals(10, reader.readByte());
        assertEquals('t', reader.readChar());
        assertEquals(3.3, reader.readDouble(), 0.0);
        assertEquals(2.2f, reader.readFloat(), 0.0);
        assertEquals(42, reader.readInt());
        assertEquals(Long.MAX_VALUE, reader.readLong());
        assertEquals(Short.MIN_VALUE, reader.readShort());
        assertEquals("utf", reader.readUTF());
        assertEquals(67L, reader.readVInt());
        assertEquals(88L, reader.readUnsignedVInt());
        assertEquals("abcdefghi", new String(ByteBufferUtil.read(reader, 9).array(), StandardCharsets.UTF_8));

        // assert that the crc matches, and that we've read exactly as many bytes as expected
        assertTrue(reader.checkCrc());
        assertTrue(reader.isEOF());
        reader.checkLimit(0);
    }
}
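
The pattern to take away from this test: all values are serialized into an in-memory DataOutputBuffer, the CRC is computed over the ByteBuffer returned by buffer(), and the checksum is written after the data so that ChecksummedDataInput.checkCrc() can verify it on read. Below is a minimal sketch of that write side, reusing only the classes exercised by the test; the writeChecksummed name and the String payload are illustrative, not Cassandra API.

// Hypothetical helper distilled from the test above: serialize a value,
// checksum the serialized bytes, then append the 4-byte CRC trailer.
private static void writeChecksummed(File file, String payload) throws IOException {
    ByteBuffer serialized;
    try (DataOutputBuffer out = new DataOutputBuffer()) {
        out.writeUTF(payload);        // any sequence of write*() calls works here
        serialized = out.buffer();    // ByteBuffer over the bytes written so far
    }

    CRC32 crc = new CRC32();
    FBUtilities.updateChecksum(crc, serialized);   // CRC over the serialized bytes

    try (SequentialWriter writer = new SequentialWriter(file)) {
        writer.write(serialized);                  // the data ...
        writer.writeInt((int) crc.getValue());     // ... followed by the CRC trailer
        writer.finish();
    }
}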
Use of org.apache.cassandra.io.util.DataOutputBuffer in project cassandra by apache: class ChecksummedDataInputTest, method testFailedCrc.
@Test
public void testFailedCrc() throws IOException {
    CRC32 crc = new CRC32();
    ByteBuffer buffer;

    // fill a bytebuffer with some input
    try (DataOutputBuffer out = new DataOutputBuffer()) {
        out.write(127);
        out.writeBoolean(false);
        out.writeByte(10);
        out.writeChar('t');
        buffer = out.buffer();
        FBUtilities.updateChecksum(crc, buffer);
        // update twice so it won't match
        FBUtilities.updateChecksum(crc, buffer);
        out.writeInt((int) crc.getValue());
        buffer = out.buffer();
    }

    // save the buffer to file to create a RAR
    File file = File.createTempFile("testFailedCrc", "1");
    file.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(file)) {
        writer.write(buffer);
        writer.finish();
    }

    assertTrue(file.exists());
    assertEquals(buffer.remaining(), file.length());

    try (ChecksummedDataInput reader = ChecksummedDataInput.open(file)) {
        reader.limit(buffer.remaining());

        // assert that we read all the right values back
        assertEquals(127, reader.read());
        assertEquals(false, reader.readBoolean());
        assertEquals(10, reader.readByte());
        assertEquals('t', reader.readChar());
        assertFalse(reader.checkCrc());
        assertTrue(reader.isEOF());
    }
}
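
Together the two tests document the contract of ChecksummedDataInput: set the limit to the full number of bytes to consume (payload plus the 4-byte CRC), read the values back, then call checkCrc(), which returns false on a mismatch rather than throwing. Below is a minimal read-side counterpart to the write sketch above, using only the calls exercised here; the readChecksummed name and totalLength parameter are illustrative.

// Hypothetical read-side helper: read the payload, then verify the CRC trailer.
// totalLength covers payload plus CRC, mirroring reader.limit(buffer.remaining() + 4)
// in testReadMethods.
private static String readChecksummed(File file, int totalLength) throws IOException {
    try (ChecksummedDataInput reader = ChecksummedDataInput.open(file)) {
        reader.limit(totalLength);
        String payload = reader.readUTF();   // mirror whatever was written

        if (!reader.checkCrc())              // false on mismatch, as in testFailedCrc
            throw new IOException("CRC mismatch in " + file);
        return payload;
    }
}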
Use of org.apache.cassandra.io.util.DataOutputBuffer in project cassandra by apache: class HintMessageTest, method testSerializer.
@Test
public void testSerializer() throws IOException {
    SchemaLoader.prepareServer();
    SchemaLoader.createKeyspace(KEYSPACE, KeyspaceParams.simple(1), SchemaLoader.standardCFMD(KEYSPACE, TABLE));

    UUID hostId = UUID.randomUUID();
    long now = FBUtilities.timestampMicros();

    TableMetadata table = Schema.instance.getTableMetadata(KEYSPACE, TABLE);
    Mutation mutation = new RowUpdateBuilder(table, now, bytes("key")).clustering("column").add("val", "val" + 1234).build();
    Hint hint = Hint.create(mutation, now / 1000);
    HintMessage message = new HintMessage(hostId, hint);

    // serialize
    int serializedSize = (int) HintMessage.serializer.serializedSize(message, MessagingService.current_version);
    DataOutputBuffer dob = new DataOutputBuffer();
    HintMessage.serializer.serialize(message, dob, MessagingService.current_version);
    assertEquals(serializedSize, dob.getLength());

    // deserialize
    DataInputPlus di = new DataInputBuffer(dob.buffer(), true);
    HintMessage deserializedMessage = HintMessage.serializer.deserialize(di, MessagingService.current_version);

    // compare before/after
    assertEquals(hostId, deserializedMessage.hostId);
    assertHintsEqual(message.hint, deserializedMessage.hint);
}
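
The three-step round trip here (serializedSize, serialize into a DataOutputBuffer, deserialize from a DataInputBuffer wrapped around its buffer()) is the usual way Cassandra tests exercise a versioned serializer. Below is a generic sketch of that pattern, assuming Cassandra's IVersionedSerializer interface; the roundTrip helper itself is illustrative, not part of the codebase.

// Hypothetical generic helper capturing the round trip in testSerializer:
// check the declared size, serialize, then deserialize from the same bytes.
private static <T> T roundTrip(IVersionedSerializer<T> serializer, T value, int version) throws IOException {
    try (DataOutputBuffer out = new DataOutputBuffer()) {
        serializer.serialize(value, out, version);
        assertEquals(serializer.serializedSize(value, version), out.getLength());

        DataInputPlus in = new DataInputBuffer(out.buffer(), true);
        return serializer.deserialize(in, version);
    }
}

With such a helper, the body of the test would reduce to roundTrip(HintMessage.serializer, message, MessagingService.current_version) followed by the before/after assertions.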
Use of org.apache.cassandra.io.util.DataOutputBuffer in project cassandra by apache: class IndexSummaryTest, method testSerialization.
@Test
public void testSerialization() throws IOException {
    Pair<List<DecoratedKey>, IndexSummary> random = generateRandomIndex(100, 1);

    DataOutputBuffer dos = new DataOutputBuffer();
    IndexSummary.serializer.serialize(random.right, dos);
    // write junk
    dos.writeUTF("JUNK");
    dos.writeUTF("JUNK");
    FileUtils.closeQuietly(dos);

    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(dos.toByteArray()));
    IndexSummary is = IndexSummary.serializer.deserialize(dis, partitioner, 1, 1);
    for (int i = 0; i < 100; i++)
        assertEquals(i, is.binarySearch(random.left.get(i)));

    // read the junk
    assertEquals(dis.readUTF(), "JUNK");
    assertEquals(dis.readUTF(), "JUNK");

    is.close();
    FileUtils.closeQuietly(dis);
    random.right.close();
}
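
Writing two "JUNK" strings after the summary and reading them back at the end is a compact way to prove that deserialize consumed exactly the bytes serialize produced, no more and no fewer. The same sentinel trick in isolation, as a sketch with a stand-in payload (a single long) instead of an IndexSummary:

// Hypothetical illustration of the sentinel trick used above.
try (DataOutputBuffer out = new DataOutputBuffer()) {
    out.writeLong(42L);            // stands in for IndexSummary.serializer.serialize(...)
    out.writeUTF("JUNK");          // sentinels written right after the payload
    out.writeUTF("JUNK");

    DataInputStream in = new DataInputStream(new ByteArrayInputStream(out.toByteArray()));
    assertEquals(42L, in.readLong());     // stands in for ...deserialize(...)
    assertEquals("JUNK", in.readUTF());   // fails if the reader consumed too few or too many bytes
    assertEquals("JUNK", in.readUTF());
}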
Use of org.apache.cassandra.io.util.DataOutputBuffer in project cassandra by apache: class OnDiskIndexTest, method testPrefixSearchWithCONTAINSMode.
@Test
public void testPrefixSearchWithCONTAINSMode() throws Exception {
    Map<ByteBuffer, TokenTreeBuilder> data = new HashMap<ByteBuffer, TokenTreeBuilder>() {
        {
            put(UTF8Type.instance.decompose("lady gaga"), keyBuilder(1L));

            // Partial term for 'lady of bells'
            DataOutputBuffer ladyOfBellsBuffer = new DataOutputBuffer();
            ladyOfBellsBuffer.writeShort(UTF8Type.instance.decompose("lady of bells").remaining() | (1 << OnDiskIndexBuilder.IS_PARTIAL_BIT));
            ladyOfBellsBuffer.write(UTF8Type.instance.decompose("lady of bells"));
            put(ladyOfBellsBuffer.asNewBuffer(), keyBuilder(2L));

            put(UTF8Type.instance.decompose("lady pank"), keyBuilder(3L));
        }
    };

    OnDiskIndexBuilder builder = new OnDiskIndexBuilder(UTF8Type.instance, UTF8Type.instance, OnDiskIndexBuilder.Mode.CONTAINS);
    for (Map.Entry<ByteBuffer, TokenTreeBuilder> e : data.entrySet())
        addAll(builder, e.getKey(), e.getValue());

    File index = File.createTempFile("on-disk-sa-prefix-contains-search", "db");
    index.deleteOnExit();
    builder.finish(index);

    OnDiskIndex onDisk = new OnDiskIndex(index, UTF8Type.instance, new KeyConverter());

    // check that lady% returns lady gaga (1) and lady pank (3) but not lady of bells (2)
    Assert.assertEquals(convert(1, 3), convert(onDisk.search(expressionFor("lady", Operator.LIKE_PREFIX))));

    onDisk.close();
}
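
The hand-built ladyOfBellsBuffer is the point of this example: the entry is framed the way a CONTAINS-mode index stores partial terms, with IS_PARTIAL_BIT set in the length prefix ahead of the raw term bytes, and the LIKE_PREFIX search skips such entries, which is why key 2 is absent from the result. Below is a small sketch of that framing, reusing only the calls from the test; the buildPartialTerm name is illustrative, not part of the OnDiskIndexBuilder API.

// Hypothetical helper mirroring the ladyOfBellsBuffer construction above:
// mark a term as partial by setting IS_PARTIAL_BIT in its length prefix.
private static ByteBuffer buildPartialTerm(String term) throws IOException {
    ByteBuffer raw = UTF8Type.instance.decompose(term);
    try (DataOutputBuffer out = new DataOutputBuffer()) {
        out.writeShort(raw.remaining() | (1 << OnDiskIndexBuilder.IS_PARTIAL_BIT)); // length | partial flag
        out.write(raw);               // the term bytes themselves
        return out.asNewBuffer();     // copy of everything written, as in the test
    }
}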