Use of org.apache.cassandra.io.util.DataOutputBuffer in project eiger by wlloyd.
From the class LazilyCompactedRowTest, method assertBytes.
/**
 * Asserts that two compaction iterables produce byte-identical rows when written out.
 * Each pair of rows is serialized to a temp file and re-read field by field
 * (row size, bloom filter, index, CF metadata, columns) so mismatches are localized.
 *
 * FIX over the original: the two FileOutputStreams were created inline and never
 * closed (resource leak, and unflushed bytes could be missing when re-read);
 * the CloseableIterators were likewise never closed. Both are now released
 * in finally blocks. Verification logic is unchanged.
 *
 * @param sstables source sstables; only used to obtain a Descriptor for readRowSize
 * @param ci1 first compaction iterable
 * @param ci2 second compaction iterable, expected to yield identical rows
 * @throws IOException on any serialization or temp-file failure
 */
private static void assertBytes(Collection<SSTableReader> sstables, AbstractCompactionIterable ci1, AbstractCompactionIterable ci2) throws IOException {
    CloseableIterator<AbstractCompactedRow> iter1 = ci1.iterator();
    CloseableIterator<AbstractCompactedRow> iter2 = ci2.iterator();
    try {
        while (true) {
            if (!iter1.hasNext()) {
                // both iterables must be exhausted at the same time
                assert !iter2.hasNext();
                break;
            }
            AbstractCompactedRow row1 = iter1.next();
            AbstractCompactedRow row2 = iter2.next();
            DataOutputBuffer out1 = new DataOutputBuffer();
            DataOutputBuffer out2 = new DataOutputBuffer();
            row1.write(out1);
            row2.write(out2);
            File tmpFile1 = File.createTempFile("lcrt1", null);
            File tmpFile2 = File.createTempFile("lcrt2", null);
            tmpFile1.deleteOnExit();
            tmpFile2.deleteOnExit();
            // write row1's bytes, closing the stream so the data is flushed to disk
            FileOutputStream fos1 = new FileOutputStream(tmpFile1);
            try {
                fos1.write(out1.getData());
            } finally {
                fos1.close();
            }
            // write row2's bytes
            FileOutputStream fos2 = new FileOutputStream(tmpFile2);
            try {
                fos2.write(out2.getData());
            } finally {
                fos2.close();
            }
            MappedFileDataInput in1 = new MappedFileDataInput(new FileInputStream(tmpFile1), tmpFile1.getAbsolutePath(), 0);
            MappedFileDataInput in2 = new MappedFileDataInput(new FileInputStream(tmpFile2), tmpFile2.getAbsolutePath(), 0);
            // key isn't part of what CompactedRow writes, that's done by SSTW.append
            // row size can differ b/c of bloom filter counts being different
            long rowSize1 = SSTableReader.readRowSize(in1, sstables.iterator().next().descriptor);
            long rowSize2 = SSTableReader.readRowSize(in2, sstables.iterator().next().descriptor);
            // +8 accounts for the long row-size header itself
            assertEquals(rowSize1 + 8, out1.getLength());
            assertEquals(rowSize2 + 8, out2.getLength());
            // bloom filter
            IndexHelper.defreezeBloomFilter(in1, rowSize1, false);
            IndexHelper.defreezeBloomFilter(in2, rowSize2, false);
            // index
            int indexSize1 = in1.readInt();
            int indexSize2 = in2.readInt();
            assertEquals(indexSize1, indexSize2);
            ByteBuffer bytes1 = in1.readBytes(indexSize1);
            ByteBuffer bytes2 = in2.readBytes(indexSize2);
            assert bytes1.equals(bytes2);
            // cf metadata
            ColumnFamily cf1 = ColumnFamily.create("Keyspace1", "Standard1");
            ColumnFamily cf2 = ColumnFamily.create("Keyspace1", "Standard1");
            ColumnFamily.serializer().deserializeFromSSTableNoColumns(cf1, in1);
            ColumnFamily.serializer().deserializeFromSSTableNoColumns(cf2, in2);
            assert cf1.getLocalDeletionTime() == cf2.getLocalDeletionTime();
            assert cf1.getMarkedForDeleteAt() == cf2.getMarkedForDeleteAt();
            // columns
            int columns = in1.readInt();
            assert columns == in2.readInt();
            for (int i = 0; i < columns; i++) {
                IColumn c1 = cf1.getColumnSerializer().deserialize(in1);
                IColumn c2 = cf2.getColumnSerializer().deserialize(in2);
                assert c1.equals(c2);
            }
            // that should be everything
            assert in1.available() == 0;
            assert in2.available() == 0;
        }
    } finally {
        // release the compaction iterators even if an assertion fires
        iter1.close();
        iter2.close();
    }
}
Use of org.apache.cassandra.io.util.DataOutputBuffer in project eiger by wlloyd.
From the class GossipDigestTest, method test.
/**
 * Verifies GossipDigest's accessors echo the constructor arguments and that a
 * serialize/deserialize round trip yields a digest that compares as equal.
 */
@Test
public void test() throws IOException {
    InetAddress addr = InetAddress.getByName("127.0.0.1");
    int gen = 0;
    int maxVer = 123;
    GossipDigest digest = new GossipDigest(addr, gen, maxVer);

    // accessors must return exactly what went into the constructor
    assertEquals(addr, digest.getEndpoint());
    assertEquals(gen, digest.getGeneration());
    assertEquals(maxVer, digest.getMaxVersion());

    // round-trip through the serializer and confirm equality via compareTo
    DataOutputBuffer out = new DataOutputBuffer();
    GossipDigest.serializer().serialize(digest, out, MessagingService.version_);
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(out.getData(), 0, out.getLength()));
    GossipDigest roundTripped = GossipDigest.serializer().deserialize(in, MessagingService.version_);
    assertEquals(0, digest.compareTo(roundTripped));
}
Use of org.apache.cassandra.io.util.DataOutputBuffer in project eiger by wlloyd.
From the class ColumnFamilyTest, method testManyColumns.
/**
 * Serializes a column family holding 900 columns and verifies every
 * name/value pair survives a deserialization round trip intact.
 */
@Test
public void testManyColumns() throws IOException {
    // build a sorted set of deterministic name/value pairs
    TreeMap<String, String> expected = new TreeMap<String, String>();
    for (int i = 100; i < 1000; ++i)
        expected.put(Integer.toString(i), "Avinash Lakshman is a good man: " + i);

    // write: populate a CF with every pair and serialize it
    ColumnFamily cf = ColumnFamily.create("Keyspace1", "Standard1");
    DataOutputBuffer out = new DataOutputBuffer();
    for (String name : expected.navigableKeySet())
        cf.addColumn(column(name, expected.get(name), 314));
    ColumnFamily.serializer().serialize(cf, out);

    // verify: each deserialized column must carry its original value
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(out.getData(), 0, out.getLength()));
    cf = ColumnFamily.serializer().deserialize(in);
    for (String name : expected.navigableKeySet()) {
        ByteBuffer value = cf.getColumn(ByteBufferUtil.bytes(name)).value();
        assert new String(value.array(), value.position(), value.remaining()).equals(expected.get(name));
    }
    assert cf.getColumnNames().size() == expected.size();
}
Use of org.apache.cassandra.io.util.DataOutputBuffer in project eiger by wlloyd.
From the class ColumnFamilyTest, method testSingleColumn.
// TODO test SuperColumns more
/**
 * Round-trips a column family containing a single column and checks the
 * deserialized CF keeps its metadata and exactly one column.
 */
@Test
public void testSingleColumn() throws IOException {
    ColumnFamily cf = ColumnFamily.create("Keyspace1", "Standard1");
    cf.addColumn(column("C", "v", 1));

    // serialize, then read straight back out of the buffer
    DataOutputBuffer out = new DataOutputBuffer();
    ColumnFamily.serializer().serialize(cf, out);
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(out.getData(), 0, out.getLength()));
    cf = ColumnFamily.serializer().deserialize(in);

    assert cf != null;
    assert cf.metadata().cfName.equals("Standard1");
    assert cf.getSortedColumns().size() == 1;
}
Use of org.apache.cassandra.io.util.DataOutputBuffer in project eiger by wlloyd.
From the class ReadMessageTest, method serializeAndDeserializeReadMessage.
/**
 * Round-trips a ReadCommand through its serializer using an in-memory buffer.
 *
 * @param rm the command to serialize
 * @return the command reconstructed from the serialized bytes
 * @throws IOException if (de)serialization fails
 */
private ReadCommand serializeAndDeserializeReadMessage(ReadCommand rm) throws IOException {
    ReadCommandSerializer serializer = ReadCommand.serializer();
    // serialize into an in-memory buffer...
    DataOutputBuffer out = new DataOutputBuffer();
    serializer.serialize(rm, out, MessagingService.version_);
    // ...then deserialize straight back from that buffer
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(out.getData(), 0, out.getLength()));
    return serializer.deserialize(in, MessagingService.version_);
}
Aggregations