
Example 36 with DataOutputBuffer

use of org.apache.cassandra.io.util.DataOutputBuffer in project cassandra by apache.

the class ReadCommandTest method testSinglePartitionGroupMerge.

@Test
public void testSinglePartitionGroupMerge() throws Exception {
    ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF3);
    // "1" indicates to create the data, "-1" to delete the row
    String[][][] groups = new String[][][] {
        new String[][] { new String[] { "1", "key1", "aa", "a" }, new String[] { "1", "key2", "bb", "b" }, new String[] { "1", "key3", "cc", "c" } },
        new String[][] { new String[] { "1", "key3", "dd", "d" }, new String[] { "1", "key2", "ee", "e" }, new String[] { "1", "key1", "ff", "f" } },
        new String[][] { new String[] { "1", "key6", "aa", "a" }, new String[] { "1", "key5", "bb", "b" }, new String[] { "1", "key4", "cc", "c" } },
        new String[][] { new String[] { "-1", "key6", "aa", "a" }, new String[] { "-1", "key2", "bb", "b" } }
    };
    // Given the data above, when the keys are sorted and the deletions removed, we should
    // get these clustering rows in this order
    String[] expectedRows = new String[] { "aa", "ff", "ee", "cc", "dd", "cc", "bb" };
    List<ByteBuffer> buffers = new ArrayList<>(groups.length);
    int nowInSeconds = FBUtilities.nowInSeconds();
    ColumnFilter columnFilter = ColumnFilter.allRegularColumnsBuilder(cfs.metadata()).build();
    RowFilter rowFilter = RowFilter.create();
    Slice slice = Slice.make(ClusteringBound.BOTTOM, ClusteringBound.TOP);
    ClusteringIndexSliceFilter sliceFilter = new ClusteringIndexSliceFilter(Slices.with(cfs.metadata().comparator, slice), false);
    for (String[][] group : groups) {
        cfs.truncateBlocking();
        List<SinglePartitionReadCommand> commands = new ArrayList<>(group.length);
        for (String[] data : group) {
            if (data[0].equals("1")) {
                new RowUpdateBuilder(cfs.metadata(), 0, ByteBufferUtil.bytes(data[1]))
                    .clustering(data[2])
                    .add(data[3], ByteBufferUtil.bytes("blah"))
                    .build()
                    .apply();
            } else {
                RowUpdateBuilder.deleteRow(cfs.metadata(), FBUtilities.timestampMicros(),
                                           ByteBufferUtil.bytes(data[1]), data[2]).apply();
            }
            commands.add(SinglePartitionReadCommand.create(cfs.metadata(), nowInSeconds, columnFilter, rowFilter,
                                                           DataLimits.NONE, Util.dk(data[1]), sliceFilter));
        }
        cfs.forceBlockingFlush();
        ReadQuery query = new SinglePartitionReadCommand.Group(commands, DataLimits.NONE);
        try (ReadExecutionController executionController = query.executionController();
            UnfilteredPartitionIterator iter = query.executeLocally(executionController);
            DataOutputBuffer buffer = new DataOutputBuffer()) {
            UnfilteredPartitionIterators.serializerForIntraNode().serialize(iter, columnFilter, buffer, MessagingService.current_version);
            buffers.add(buffer.buffer());
        }
    }
    // deserialize, merge and check the results are all there
    List<UnfilteredPartitionIterator> iterators = new ArrayList<>();
    for (ByteBuffer buffer : buffers) {
        try (DataInputBuffer in = new DataInputBuffer(buffer, true)) {
            iterators.add(UnfilteredPartitionIterators.serializerForIntraNode().deserialize(in, MessagingService.current_version, cfs.metadata(), columnFilter, SerializationHelper.Flag.LOCAL));
        }
    }
    try (PartitionIterator partitionIterator = UnfilteredPartitionIterators.mergeAndFilter(iterators, nowInSeconds, new UnfilteredPartitionIterators.MergeListener() {

        public UnfilteredRowIterators.MergeListener getRowMergeListener(DecoratedKey partitionKey, List<UnfilteredRowIterator> versions) {
            return null;
        }

        public void close() {
        }
    })) {
        int i = 0;
        int numPartitions = 0;
        while (partitionIterator.hasNext()) {
            numPartitions++;
            try (RowIterator rowIterator = partitionIterator.next()) {
                while (rowIterator.hasNext()) {
                    Row row = rowIterator.next();
                    assertEquals("col=" + expectedRows[i++], row.clustering().toString(cfs.metadata()));
                //System.out.print(row.toString(cfs.metadata, true));
                }
            }
        }
        assertEquals(5, numPartitions);
        assertEquals(expectedRows.length, i);
    }
}
Also used : UnfilteredRowIterator(org.apache.cassandra.db.rows.UnfilteredRowIterator) ArrayList(java.util.ArrayList) UnfilteredPartitionIterator(org.apache.cassandra.db.partitions.UnfilteredPartitionIterator) ClusteringIndexSliceFilter(org.apache.cassandra.db.filter.ClusteringIndexSliceFilter) RowFilter(org.apache.cassandra.db.filter.RowFilter) DataOutputBuffer(org.apache.cassandra.io.util.DataOutputBuffer) ColumnFilter(org.apache.cassandra.db.filter.ColumnFilter) ByteBuffer(java.nio.ByteBuffer) UnfilteredPartitionIterators(org.apache.cassandra.db.partitions.UnfilteredPartitionIterators) DataInputBuffer(org.apache.cassandra.io.util.DataInputBuffer) UnfilteredPartitionIterator(org.apache.cassandra.db.partitions.UnfilteredPartitionIterator) PartitionIterator(org.apache.cassandra.db.partitions.PartitionIterator) UnfilteredRowIterator(org.apache.cassandra.db.rows.UnfilteredRowIterator) RowIterator(org.apache.cassandra.db.rows.RowIterator) Row(org.apache.cassandra.db.rows.Row) Test(org.junit.Test)

Example 37 with DataOutputBuffer

use of org.apache.cassandra.io.util.DataOutputBuffer in project cassandra by apache.

the class ReadMessageTest method serializeAndDeserializeReadMessage.

private ReadCommand serializeAndDeserializeReadMessage(ReadCommand rm) throws IOException {
    IVersionedSerializer<ReadCommand> rms = ReadCommand.serializer;
    DataOutputBuffer out = new DataOutputBuffer();
    rms.serialize(rm, out, MessagingService.current_version);
    DataInputPlus dis = new DataInputBuffer(out.getData());
    return rms.deserialize(dis, MessagingService.current_version);
}
Also used : DataInputBuffer(org.apache.cassandra.io.util.DataInputBuffer) DataOutputBuffer(org.apache.cassandra.io.util.DataOutputBuffer) DataInputPlus(org.apache.cassandra.io.util.DataInputPlus)
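
The same write-then-wrap round trip works for plain primitives, since DataOutputBuffer implements java.io.DataOutput and DataInputBuffer exposes the matching reads. A minimal sketch of the pattern used in Example 37 (the helper name roundTripLong is hypothetical, not part of the Cassandra test suite):

private static long roundTripLong(long value) throws IOException {
    try (DataOutputBuffer out = new DataOutputBuffer()) {
        // primitive writes come from java.io.DataOutput
        out.writeLong(value);
        // getData() exposes the backing byte array, as in serializeAndDeserializeReadMessage above
        try (DataInputBuffer in = new DataInputBuffer(out.getData())) {
            return in.readLong();
        }
    }
}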

Example 38 with DataOutputBuffer

use of org.apache.cassandra.io.util.DataOutputBuffer in project cassandra by apache.

the class PartitionTest method testManyColumns.

@Test
public void testManyColumns() throws IOException {
    ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore(CF_TENCOL);
    RowUpdateBuilder builder = new RowUpdateBuilder(cfs.metadata(), 5, "key1").clustering("c").add("val", "val1");
    for (int i = 0; i < 10; i++) builder.add("val" + i, "val" + i);
    PartitionUpdate update = builder.buildUpdate();
    CachedBTreePartition partition = CachedBTreePartition.create(update.unfilteredIterator(), FBUtilities.nowInSeconds());
    DataOutputBuffer bufOut = new DataOutputBuffer();
    CachedPartition.cacheSerializer.serialize(partition, bufOut);
    CachedPartition deserialized = CachedPartition.cacheSerializer.deserialize(new DataInputBuffer(bufOut.getData()));
    assertEquals(partition.columns().regulars.size(), deserialized.columns().regulars.size());
    assertTrue(deserialized.columns().regulars.getSimple(1).equals(partition.columns().regulars.getSimple(1)));
    assertTrue(deserialized.columns().regulars.getSimple(5).equals(partition.columns().regulars.getSimple(5)));
    ColumnMetadata cDef = cfs.metadata().getColumn(ByteBufferUtil.bytes("val8"));
    assertTrue(partition.lastRow().getCell(cDef).value().equals(deserialized.lastRow().getCell(cDef).value()));
    assert deserialized.partitionKey().equals(partition.partitionKey());
}
Also used : ColumnMetadata(org.apache.cassandra.schema.ColumnMetadata) DataInputBuffer(org.apache.cassandra.io.util.DataInputBuffer) DataOutputBuffer(org.apache.cassandra.io.util.DataOutputBuffer) Test(org.junit.Test)

Example 39 with DataOutputBuffer

use of org.apache.cassandra.io.util.DataOutputBuffer in project cassandra by apache.

the class HintTest method testSerializer.

@Test
public void testSerializer() throws IOException {
    long now = FBUtilities.timestampMicros();
    Mutation mutation = createMutation("testSerializer", now);
    Hint hint = Hint.create(mutation, now / 1000);
    // serialize
    int serializedSize = (int) Hint.serializer.serializedSize(hint, MessagingService.current_version);
    DataOutputBuffer dob = new DataOutputBuffer();
    Hint.serializer.serialize(hint, dob, MessagingService.current_version);
    assertEquals(serializedSize, dob.getLength());
    // deserialize
    DataInputPlus di = new DataInputBuffer(dob.buffer(), true);
    Hint deserializedHint = Hint.serializer.deserialize(di, MessagingService.current_version);
    // compare before/after
    assertHintsEqual(hint, deserializedHint);
}
Also used : DataInputBuffer(org.apache.cassandra.io.util.DataInputBuffer) DataOutputBuffer(org.apache.cassandra.io.util.DataOutputBuffer) DataInputPlus(org.apache.cassandra.io.util.DataInputPlus) Test(org.junit.Test)
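
Example 39 computes serializedSize up front and only checks it against getLength() after serializing into the growable buffer. When the size is known in advance, the fixed-capacity variant DataOutputBufferFixed (it appears in the Aggregations list below) can be used instead; the sketch assumes its int-capacity constructor and is illustrative only, not the test's actual code:

    int size = (int) Hint.serializer.serializedSize(hint, MessagingService.current_version);
    // Assumption: DataOutputBufferFixed(int) pre-allocates exactly `size` bytes and does not grow
    try (DataOutputBufferFixed dob = new DataOutputBufferFixed(size)) {
        Hint.serializer.serialize(hint, dob, MessagingService.current_version);
        assertEquals(size, dob.getLength());
    }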

Example 40 with DataOutputBuffer

use of org.apache.cassandra.io.util.DataOutputBuffer in project cassandra by apache.

the class HintsDescriptorTest method serializeDescriptor.

private static byte[] serializeDescriptor(HintsDescriptor descriptor) throws IOException {
    DataOutputBuffer dob = new DataOutputBuffer();
    descriptor.serialize(dob);
    return dob.toByteArray();
}
Also used : DataOutputBuffer(org.apache.cassandra.io.util.DataOutputBuffer)
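
Across these examples the written bytes are retrieved in three different ways: buffer() returns a ByteBuffer over the written region (Examples 36 and 39), getData() returns the raw backing array (Examples 37 and 38), and toByteArray() returns a standalone copy, as here. A short sketch contrasting them, assuming an enclosing test method that declares throws IOException, and under the assumption that the backing array from getData() can be longer than what was actually written (which is why callers pair it with getLength()):

    try (DataOutputBuffer dob = new DataOutputBuffer()) {
        dob.writeInt(42);
        ByteBuffer asByteBuffer = dob.buffer();   // view over the bytes written so far
        byte[] backingArray = dob.getData();      // backing array; may extend past getLength()
        byte[] trimmedCopy = dob.toByteArray();   // independent copy of exactly getLength() bytes
        assertEquals(dob.getLength(), trimmedCopy.length);
    }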

Aggregations

DataOutputBuffer (org.apache.cassandra.io.util.DataOutputBuffer): 68
Test (org.junit.Test): 25
DataInputBuffer (org.apache.cassandra.io.util.DataInputBuffer): 17
IOException (java.io.IOException): 16
ByteBuffer (java.nio.ByteBuffer): 15
ByteArrayInputStream (java.io.ByteArrayInputStream): 10
DataInputStream (java.io.DataInputStream): 10
DataInputPlus (org.apache.cassandra.io.util.DataInputPlus): 8
File (java.io.File): 6
CRC32 (java.util.zip.CRC32): 5
InetAddress (java.net.InetAddress): 3
ArrayList (java.util.ArrayList): 3
DataOutputBufferFixed (org.apache.cassandra.io.util.DataOutputBufferFixed): 3
SequentialWriter (org.apache.cassandra.io.util.SequentialWriter): 3
Message (org.apache.cassandra.net.Message): 3
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 2
DataOutputStream (java.io.DataOutputStream): 2
Map (java.util.Map): 2
UUID (java.util.UUID): 2
Mutation (org.apache.cassandra.db.Mutation): 2