Use of org.apache.cassandra.io.util.DataInputPlus in project cassandra by apache.
From class BatchlogTest, method testSerialization.
@Test
public void testSerialization() throws IOException {
    TableMetadata cfm = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF_STANDARD).metadata();
    long now = FBUtilities.timestampMicros();
    int version = MessagingService.current_version;
    UUID uuid = UUIDGen.getTimeUUID();

    List<Mutation> mutations = new ArrayList<>(10);
    for (int i = 0; i < 10; i++) {
        mutations.add(new RowUpdateBuilder(cfm, FBUtilities.timestampMicros(), bytes(i))
                      .clustering("name" + i)
                      .add("val", "val" + i)
                      .build());
    }

    Batch batch1 = Batch.createLocal(uuid, now, mutations);
    assertEquals(uuid, batch1.id);
    assertEquals(now, batch1.creationTime);
    assertEquals(mutations, batch1.decodedMutations);

    // serialize
    DataOutputBuffer out = new DataOutputBuffer();
    Batch.serializer.serialize(batch1, out, version);
    assertEquals(out.getLength(), Batch.serializer.serializedSize(batch1, version));

    // deserialize
    DataInputPlus dis = new DataInputBuffer(out.getData());
    Batch batch2 = Batch.serializer.deserialize(dis, version);

    // compare before/after: a locally created batch carries decoded mutations,
    // a deserialized one carries them in encoded (serialized) form
    assertEquals(batch1.id, batch2.id);
    assertEquals(batch1.creationTime, batch2.creationTime);
    assertEquals(batch1.decodedMutations.size(), batch2.encodedMutations.size());

    Iterator<Mutation> it1 = batch1.decodedMutations.iterator();
    Iterator<ByteBuffer> it2 = batch2.encodedMutations.iterator();
    while (it1.hasNext()) {
        try (DataInputBuffer in = new DataInputBuffer(it2.next().array())) {
            assertEquals(it1.next().toString(), Mutation.serializer.deserialize(in, version).toString());
        }
    }
}
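The test above follows the usual round-trip shape for Cassandra's versioned serializers: write into a DataOutputBuffer, check serializedSize against the bytes actually written, then re-read the same bytes through a DataInputBuffer, which implements DataInputPlus. A minimal, generic sketch of that pattern, assuming the IVersionedSerializer interface; the roundTrip helper name is hypothetical, not code from the project:

import java.io.IOException;

import org.apache.cassandra.io.IVersionedSerializer;
import org.apache.cassandra.io.util.DataInputBuffer;
import org.apache.cassandra.io.util.DataInputPlus;
import org.apache.cassandra.io.util.DataOutputBuffer;

import static org.junit.Assert.assertEquals;

public final class SerializationRoundTrip {
    // Hypothetical helper: serialize a value at the given messaging version,
    // verify serializedSize() matches the bytes written, and deserialize it back.
    public static <T> T roundTrip(IVersionedSerializer<T> serializer, T value, int version) throws IOException {
        try (DataOutputBuffer out = new DataOutputBuffer()) {
            serializer.serialize(value, out, version);
            assertEquals(serializer.serializedSize(value, version), out.getLength());

            // DataInputBuffer(byte[]) exposes the written bytes as a DataInputPlus
            DataInputPlus in = new DataInputBuffer(out.getData());
            return serializer.deserialize(in, version);
        }
    }
}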
Use of org.apache.cassandra.io.util.DataInputPlus in project cassandra by apache.
From class HintMessageTest, method testSerializer.
@Test
public void testSerializer() throws IOException {
    UUID hostId = UUID.randomUUID();
    long now = FBUtilities.timestampMicros();
    TableMetadata table = Schema.instance.getTableMetadata(KEYSPACE, TABLE);

    Mutation mutation = new RowUpdateBuilder(table, now, bytes("key"))
                        .clustering("column")
                        .add("val", "val" + 1234)
                        .build();
    Hint hint = Hint.create(mutation, now / 1000);
    HintMessage message = new HintMessage(hostId, hint);

    // serialize
    int serializedSize = (int) HintMessage.serializer.serializedSize(message, MessagingService.current_version);
    HintMessage deserializedMessage;
    try (DataOutputBuffer dob = new DataOutputBuffer()) {
        HintMessage.serializer.serialize(message, dob, MessagingService.current_version);
        assertEquals(serializedSize, dob.getLength());

        // deserialize
        DataInputPlus di = new DataInputBuffer(dob.buffer(), true);
        deserializedMessage = HintMessage.serializer.deserialize(di, MessagingService.current_version);
    }

    // compare before/after
    assertEquals(hostId, deserializedMessage.hostId);
    assertNotNull(deserializedMessage.hint);
    assertHintsEqual(hint, deserializedMessage.hint);
}
Use of org.apache.cassandra.io.util.DataInputPlus in project cassandra by apache.
From class HintTest, method testSerializer.
@Test
public void testSerializer() throws IOException {
    long now = FBUtilities.timestampMicros();
    Mutation mutation = createMutation("testSerializer", now);
    Hint hint = Hint.create(mutation, now / 1000);

    // serialize
    int serializedSize = (int) Hint.serializer.serializedSize(hint, MessagingService.current_version);
    DataOutputBuffer dob = new DataOutputBuffer();
    Hint.serializer.serialize(hint, dob, MessagingService.current_version);
    assertEquals(serializedSize, dob.getLength());

    // deserialize
    DataInputPlus di = new DataInputBuffer(dob.buffer(), true);
    Hint deserializedHint = Hint.serializer.deserialize(di, MessagingService.current_version);

    // compare before/after
    assertHintsEqual(hint, deserializedHint);
}
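Unlike the HintMessage test earlier in this list, this test leaves its DataOutputBuffer open. Since DataOutputBuffer is usable as a try-with-resources resource, the same round trip can be sketched with the buffer scoped and closed deterministically; this is a variant of the test body above, not code from the project:

// Same round trip, but with the DataOutputBuffer managed by try-with-resources.
Hint deserializedHint;
try (DataOutputBuffer dob = new DataOutputBuffer()) {
    Hint.serializer.serialize(hint, dob, MessagingService.current_version);
    assertEquals(serializedSize, dob.getLength());

    // DataInputBuffer(ByteBuffer, boolean) wraps the written buffer as a DataInputPlus
    DataInputPlus di = new DataInputBuffer(dob.buffer(), true);
    deserializedHint = Hint.serializer.deserialize(di, MessagingService.current_version);
}
assertHintsEqual(hint, deserializedHint);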
Use of org.apache.cassandra.io.util.DataInputPlus in project cassandra by apache.
From class BigTableZeroCopyWriterTest, method getSSTableComponentData.
// Read the entire on-disk component into memory, then expose it as a
// DataInputPlus via the caller-supplied mapper.
private Pair<DataInputPlus, Long> getSSTableComponentData(SSTableReader sstable, Component component,
                                                          Function<ByteBuffer, DataInputPlus> bufferMapper) {
    FileHandle componentFile = new FileHandle.Builder(sstable.descriptor.filenameFor(component))
                               .bufferSize(1024)
                               .complete();
    ByteBuffer buffer = ByteBuffer.allocate((int) componentFile.channel.size());
    componentFile.channel.read(buffer, 0);
    buffer.flip();

    DataInputPlus inputPlus = bufferMapper.apply(buffer);
    return Pair.create(inputPlus, componentFile.channel.size());
}
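The bufferMapper parameter decides how the raw ByteBuffer is exposed as a DataInputPlus. A minimal sketch of a mapper that satisfies the Function<ByteBuffer, DataInputPlus> signature, reusing only the DataInputBuffer(ByteBuffer, boolean) constructor seen in the hint tests above; the call site and variable names are hypothetical:

// Hypothetical call site: wrap the component bytes in a DataInputBuffer
// without duplicating the underlying buffer.
Function<ByteBuffer, DataInputPlus> bufferMapper = buffer -> new DataInputBuffer(buffer, false);
Pair<DataInputPlus, Long> data = getSSTableComponentData(sstable, component, bufferMapper);
DataInputPlus input = data.left;
long length = data.right;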
Use of org.apache.cassandra.io.util.DataInputPlus in project cassandra by apache.
From class BytesReadTrackerTest, method internalTestReadLine.
public void internalTestReadLine(boolean inputStream) throws Exception {
    DataInputStream in = new DataInputStream(new ByteArrayInputStream("1".getBytes()));
    BytesReadTracker tracker = inputStream ? new TrackedInputStream(in) : new TrackedDataInputPlus(in);
    DataInputPlus reader = inputStream
                         ? new DataInputPlus.DataInputStreamPlus((TrackedInputStream) tracker)
                         : (DataInputPlus) tracker;
    try {
        // readLine() is only supported by the stream-backed wrapper;
        // TrackedDataInputPlus is expected to reject it
        String line = reader.readLine();
        if (inputStream)
            assertEquals(line, "1");
        else
            fail("Should have thrown UnsupportedOperationException");
    } catch (UnsupportedOperationException e) {
        if (inputStream)
            fail("Should have not thrown UnsupportedOperationException");
    } finally {
        in.close();
    }
}
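Beyond the readLine() behaviour checked here, the purpose of the two trackers is byte accounting. A short sketch, assuming TrackedDataInputPlus exposes getBytesRead() from the BytesReadTracker interface (not shown in this excerpt):

DataInputStream in = new DataInputStream(new ByteArrayInputStream(new byte[]{ 1, 2, 3, 4 }));
TrackedDataInputPlus tracker = new TrackedDataInputPlus(in);

// reading through the tracker advances its byte count
tracker.readInt();
assertEquals(4, tracker.getBytesRead());
in.close();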