Use of org.apache.cassandra.io.util.DataOutputBuffer in the Apache Cassandra project: class ByteBufferUtilTest, method checkReadWrite.
/**
 * Round-trips {@code bb} through both length-prefixed encodings
 * ({@code writeWithLength} and {@code writeWithShortLength}) and asserts that
 * each value read back equals the original buffer.
 *
 * @param bb the buffer to serialize and read back
 * @throws IOException if serialization or deserialization fails
 */
private void checkReadWrite(ByteBuffer bb) throws IOException {
    // try-with-resources guarantees the buffer and stream are closed even when
    // an assertion fails mid-method (the original leaked both on failure).
    try (DataOutputBuffer out = new DataOutputBuffer())
    {
        ByteBufferUtil.writeWithLength(bb, out);
        ByteBufferUtil.writeWithShortLength(bb, out);

        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(out.toByteArray())))
        {
            // Reads must come back in the same order they were written.
            assert bb.equals(ByteBufferUtil.readWithLength(in));
            assert bb.equals(ByteBufferUtil.readWithShortLength(in));
        }
    }
}
Use of org.apache.cassandra.io.util.DataOutputBuffer in the Apache Cassandra project: class IntervalTreeTest, method testSerialization.
/**
 * Builds an {@link IntervalTree} from a fixed set of intervals, serializes it,
 * deserializes it, and asserts the round-tripped tree yields the same intervals
 * in the same order.
 */
@Test
public void testSerialization() throws Exception {
    // Fixture: a mix of negative, overlapping, adjacent, and disjoint intervals.
    List<Interval<Integer, String>> intervals = new ArrayList<>();
    intervals.add(Interval.<Integer, String>create(-300, -200, "a"));
    intervals.add(Interval.<Integer, String>create(-3, -2, "b"));
    intervals.add(Interval.<Integer, String>create(1, 2, "c"));
    intervals.add(Interval.<Integer, String>create(1, 3, "d"));
    intervals.add(Interval.<Integer, String>create(2, 4, "e"));
    intervals.add(Interval.<Integer, String>create(3, 6, "f"));
    intervals.add(Interval.<Integer, String>create(4, 6, "g"));
    intervals.add(Interval.<Integer, String>create(5, 7, "h"));
    intervals.add(Interval.<Integer, String>create(8, 9, "i"));
    intervals.add(Interval.<Integer, String>create(15, 20, "j"));
    intervals.add(Interval.<Integer, String>create(40, 50, "k"));
    intervals.add(Interval.<Integer, String>create(49, 60, "l"));
    IntervalTree<Integer, String, Interval<Integer, String>> it = IntervalTree.build(intervals);

    IVersionedSerializer<IntervalTree<Integer, String, Interval<Integer, String>>> serializer = IntervalTree.serializer(new ISerializer<Integer>() {
        // Fixed-width 4-byte encoding for the interval bounds.
        public void serialize(Integer i, DataOutputPlus out) throws IOException {
            out.writeInt(i);
        }

        public Integer deserialize(DataInputPlus in) throws IOException {
            return in.readInt();
        }

        public long serializedSize(Integer i) {
            return 4;
        }
    }, new ISerializer<String>() {
        public void serialize(String v, DataOutputPlus out) throws IOException {
            out.writeUTF(v);
        }

        public String deserialize(DataInputPlus in) throws IOException {
            return in.readUTF();
        }

        public long serializedSize(String v) {
            // NOTE(review): this undercounts writeUTF output — it omits the
            // 2-byte length prefix and assumes ASCII-only payloads. Harmless
            // here as long as the test never checks serialized sizes; confirm
            // before reusing this serializer elsewhere.
            return v.length();
        }
    }, (Constructor<Interval<Integer, String>>) (Object) Interval.class.getConstructor(Object.class, Object.class, Object.class));

    // try-with-resources closes the output buffer even if an assertion fails
    // (the original never closed it).
    try (DataOutputBuffer out = new DataOutputBuffer())
    {
        serializer.serialize(it, out, 0);
        DataInputPlus in = new DataInputBuffer(out.toByteArray());
        IntervalTree<Integer, String, Interval<Integer, String>> it2 = serializer.deserialize(in, 0);

        // Iterating the deserialized tree must reproduce the original interval list.
        List<Interval<Integer, String>> intervals2 = new ArrayList<>();
        for (Interval<Integer, String> i : it2)
            intervals2.add(i);
        assertEquals(intervals, intervals2);
    }
}
Use of org.apache.cassandra.io.util.DataOutputBuffer in the Apache Cassandra project: class StreamingHistogramTest, method testSerDe.
/**
 * Feeds 7 samples into a 5-bin {@link StreamingHistogram}, serializes and
 * deserializes it, and asserts the resulting bins match the expected merged
 * (centroid, count) pairs.
 */
@Test
public void testSerDe() throws Exception {
    StreamingHistogram hist = new StreamingHistogram(5, 0, 1);
    long[] samples = new long[] { 23, 19, 10, 16, 36, 2, 9 };
    // add 7 points to histogram of 5 bins — forces two bin merges
    for (int i = 0; i < samples.length; i++) {
        hist.update(samples[i]);
    }

    // try-with-resources closes the buffer even if an assertion fails
    // (the original never closed it).
    byte[] bytes;
    try (DataOutputBuffer out = new DataOutputBuffer())
    {
        StreamingHistogram.serializer.serialize(hist, out);
        bytes = out.toByteArray();
    }
    StreamingHistogram deserialized = StreamingHistogram.serializer.deserialize(new DataInputBuffer(bytes));

    // deserialized histogram should have following values
    Map<Double, Long> expected1 = new LinkedHashMap<Double, Long>(5);
    expected1.put(2.0, 1L);
    expected1.put(9.5, 2L);
    expected1.put(17.5, 2L);
    expected1.put(23.0, 1L);
    expected1.put(36.0, 1L);

    // Guard against a silently-passing loop: bin counts must match exactly,
    // otherwise a short deserialized map would skip assertions below.
    assertEquals(expected1.size(), deserialized.getAsMap().size());

    Iterator<Map.Entry<Double, Long>> expectedItr = expected1.entrySet().iterator();
    for (Map.Entry<Number, long[]> actual : deserialized.getAsMap().entrySet()) {
        Map.Entry<Double, Long> entry = expectedItr.next();
        assertEquals(entry.getKey(), actual.getKey().doubleValue(), 0.01);
        assertEquals(entry.getValue().longValue(), actual.getValue()[0]);
    }
}
Use of org.apache.cassandra.io.util.DataOutputBuffer in the Apache Cassandra project: class VIntCodingTest, method assertEncodedAtExpectedSize.
/**
 * Asserts that {@code value} encodes as an unsigned vint in exactly
 * {@code expectedSize} bytes, checked three ways: the size computation, the
 * stream-based writer, and the {@link DataOutputBuffer} writer.
 *
 * @param value        the value to encode
 * @param expectedSize the expected encoded length in bytes
 */
private void assertEncodedAtExpectedSize(long value, int expectedSize) throws Exception {
    Assert.assertEquals(expectedSize, VIntCoding.computeUnsignedVIntSize(value));

    // Stream path: write through a DataOutputStream and measure the bytes produced.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (DataOutputStream dos = new DataOutputStream(baos))
    {
        VIntCoding.writeUnsignedVInt(value, dos);
        dos.flush();
        Assert.assertEquals(expectedSize, baos.toByteArray().length);
    }

    // Buffer path: try-with-resources replaces the manual close(), which the
    // original skipped whenever an assertion above it failed.
    try (DataOutputBuffer dob = new DataOutputBuffer())
    {
        dob.writeUnsignedVInt(value);
        Assert.assertEquals(expectedSize, dob.buffer().remaining());
    }
}
Use of org.apache.cassandra.io.util.DataOutputBuffer in the Apache Cassandra project: class VIntCodingTest, method testOneByteCapacity.
/*
 * Quick sanity check that 1 byte encodes up to 127 as expected
 */
@Test
public void testOneByteCapacity() throws Exception {
    // 127 (0x7F) is the largest value whose unsigned vint fits in one byte.
    int biggestOneByte = 127;

    // Stream path: one byte out of the DataOutputStream encoder.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (DataOutputStream dos = new DataOutputStream(baos))
    {
        VIntCoding.writeUnsignedVInt(biggestOneByte, dos);
        dos.flush();
        Assert.assertEquals(1, baos.toByteArray().length);
    }

    // Buffer path: try-with-resources replaces the manual close(), which the
    // original skipped whenever an assertion failed first.
    try (DataOutputBuffer dob = new DataOutputBuffer())
    {
        dob.writeUnsignedVInt(biggestOneByte);
        Assert.assertEquals(1, dob.buffer().remaining());
    }
}
Aggregations