Use of org.apache.cassandra.io.util.SequentialWriter in project cassandra by apache.
The class SSTableHeaderFixTest, method buildFakeSSTable.
private File buildFakeSSTable(File dir, int generation, TableMetadata.Builder cols, Function<ColumnMetadata, ColumnMetadata> freezer) {
    TableMetadata headerMetadata = cols.build();
    TableMetadata.Builder schemaCols = TableMetadata.builder("ks", "cf");
    for (ColumnMetadata cm : cols.columns())
        schemaCols.addColumn(freezer.apply(cm));
    tableMetadata = schemaCols.build();
    try {
        Descriptor desc = new Descriptor(version, dir, "ks", "cf", generation, SSTableFormat.Type.BIG);
        // Just create the component files - we don't really need those.
        for (Component component : requiredComponents)
            assertTrue(new File(desc.filenameFor(component)).createFileIfNotExists());
        AbstractType<?> partitionKey = headerMetadata.partitionKeyType;
        List<AbstractType<?>> clusteringKey = headerMetadata.clusteringColumns().stream()
                .map(cd -> cd.type)
                .collect(Collectors.toList());
        Map<ByteBuffer, AbstractType<?>> staticColumns = headerMetadata.columns().stream()
                .filter(cd -> cd.kind == ColumnMetadata.Kind.STATIC)
                .collect(Collectors.toMap(cd -> cd.name.bytes, cd -> cd.type, (a, b) -> a));
        Map<ByteBuffer, AbstractType<?>> regularColumns = headerMetadata.columns().stream()
                .filter(cd -> cd.kind == ColumnMetadata.Kind.REGULAR)
                .collect(Collectors.toMap(cd -> cd.name.bytes, cd -> cd.type, (a, b) -> a));
        File statsFile = new File(desc.filenameFor(Component.STATS));
        SerializationHeader.Component header = SerializationHeader.Component.buildComponentForTools(partitionKey,
                clusteringKey, staticColumns, regularColumns, EncodingStats.NO_STATS);
        try (SequentialWriter out = new SequentialWriter(statsFile)) {
            desc.getMetadataSerializer().serialize(Collections.singletonMap(MetadataType.HEADER, header), out, version);
            out.finish();
        }
        return new File(desc.filenameFor(Component.DATA));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
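The only piece of this helper that actually exercises SequentialWriter is the short try-with-resources block near the end: the metadata serializer writes the HEADER component straight into the open writer, and finish() completes the write before the writer is closed. The same write-and-finish pattern on its own, as a minimal hedged sketch (the file name and payload are invented for illustration and assume the same imports as the test above):

// Minimal sketch, not from the test: write a small payload through
// SequentialWriter and call finish() so the buffered bytes are flushed to disk.
File sketchFile = FileUtils.createTempFile("sequential-writer-sketch", "bin");
sketchFile.deleteOnExit();
try (SequentialWriter out = new SequentialWriter(sketchFile)) {
    out.write(ByteBuffer.wrap(new byte[]{ 1, 2, 3 }));
    out.finish();
}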
Use of org.apache.cassandra.io.util.SequentialWriter in project cassandra by apache.
The class ChecksummedDataInputTest, method testResetCrc.
@Test
public void testResetCrc() throws IOException {
    CRC32 crc = new CRC32();
    ByteBuffer buffer;
    // fill a bytebuffer with some input
    try (DataOutputBuffer out = new DataOutputBuffer()) {
        out.write(127);
        out.writeBoolean(false);
        out.writeByte(10);
        out.writeChar('t');
        buffer = out.buffer();
        FBUtilities.updateChecksum(crc, buffer);
        out.writeInt((int) crc.getValue());
        int bufferPos = out.getLength();
        out.writeDouble(3.3);
        out.writeFloat(2.2f);
        out.writeInt(42);
        buffer = out.buffer();
        buffer.position(bufferPos);
        crc.reset();
        FBUtilities.updateChecksum(crc, buffer);
        out.writeInt((int) crc.getValue());
        buffer = out.buffer();
    }
    // save the buffer to file to create a RAR
    File file = FileUtils.createTempFile("testResetCrc", "1");
    file.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(file)) {
        writer.write(buffer);
        writer.finish();
    }
    assertTrue(file.exists());
    assertEquals(buffer.remaining(), file.length());
    try (ChecksummedDataInput reader = ChecksummedDataInput.open(file)) {
        reader.limit(buffer.remaining());
        // assert that we read all the right values back
        assertEquals(127, reader.read());
        assertEquals(false, reader.readBoolean());
        assertEquals(10, reader.readByte());
        assertEquals('t', reader.readChar());
        assertTrue(reader.checkCrc());
        reader.resetCrc();
        assertEquals(3.3, reader.readDouble(), 0);
        assertEquals(2.2f, reader.readFloat(), 0);
        assertEquals(42, reader.readInt());
        assertTrue(reader.checkCrc());
        assertTrue(reader.isEOF());
    }
}
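The test writes two groups of fields, each followed by a CRC computed over just that group; crc.reset() on the write side and reader.resetCrc() on the read side keep the two checksums independent, so the second checkCrc() covers only the double, float and int. A small write-side helper in the same spirit could factor out that framing (a hedged sketch with a hypothetical name, not part of the Cassandra test):

// Hypothetical helper (sketch): append a CRC covering everything written
// since groupStart, mirroring what testResetCrc does by hand for each group.
static void appendCrc(DataOutputBuffer out, CRC32 crc, int groupStart) throws IOException {
    ByteBuffer group = out.buffer();       // view of the bytes written so far
    group.position(groupStart);            // restrict the checksum to the current group
    crc.reset();
    FBUtilities.updateChecksum(crc, group);
    out.writeInt((int) crc.getValue());    // trailing CRC that checkCrc() verifies on read
}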
Use of org.apache.cassandra.io.util.SequentialWriter in project cassandra by apache.
The class TokenTreeTest, method buildSerializeAndIterate.
public void buildSerializeAndIterate(TokenTreeBuilder builder, SortedMap<Long, LongSet> tokenMap) throws Exception {
    builder.finish();
    final File treeFile = FileUtils.createTempFile("token-tree-iterate-test1", "tt");
    treeFile.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(treeFile, DEFAULT_OPT)) {
        builder.write(writer);
        writer.sync();
    }
    final RandomAccessReader reader = RandomAccessReader.open(treeFile);
    final TokenTree tokenTree = new TokenTree(new MappedBuffer(reader));
    final Iterator<Token> tokenIterator = tokenTree.iterator(KEY_CONVERTER);
    final Iterator<Map.Entry<Long, LongSet>> listIterator = tokenMap.entrySet().iterator();
    while (tokenIterator.hasNext() && listIterator.hasNext()) {
        Token treeNext = tokenIterator.next();
        Map.Entry<Long, LongSet> listNext = listIterator.next();
        Assert.assertEquals(listNext.getKey(), treeNext.get());
        Assert.assertEquals(convert(listNext.getValue()), convert(treeNext));
    }
    Assert.assertFalse("token iterator not finished", tokenIterator.hasNext());
    Assert.assertFalse("list iterator not finished", listIterator.hasNext());
    reader.close();
}
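This test and the two TokenTree tests that follow share the same serialize-then-map skeleton: finish the builder, stream it to disk through SequentialWriter, sync(), then reopen the file through a RandomAccessReader wrapped in a MappedBuffer. Condensed into one hedged sketch (builder and KEY_CONVERTER stand in for the test fixtures):

// Shared skeleton of the TokenTree tests (sketch, not runnable on its own).
builder.finish();
File treeFile = FileUtils.createTempFile("token-tree-sketch", "tt");
treeFile.deleteOnExit();
try (SequentialWriter writer = new SequentialWriter(treeFile, DEFAULT_OPT)) {
    builder.write(writer);
    writer.sync();  // force the serialized tree to disk before it is mapped for reading
}
RandomAccessReader reader = RandomAccessReader.open(treeFile);
RangeIterator<Long, Token> iterator = new TokenTree(new MappedBuffer(reader)).iterator(KEY_CONVERTER);
// ... compare against the expected token map, then close the reader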
Use of org.apache.cassandra.io.util.SequentialWriter in project cassandra by apache.
The class TokenTreeTest, method skipPastEnd.
public void skipPastEnd(TokenTreeBuilder builder, SortedMap<Long, LongSet> tokens) throws Exception {
    builder.finish();
    final File treeFile = FileUtils.createTempFile("token-tree-skip-past-test", "tt");
    treeFile.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(treeFile, DEFAULT_OPT)) {
        builder.write(writer);
        writer.sync();
    }
    final RandomAccessReader reader = RandomAccessReader.open(treeFile);
    final RangeIterator<Long, Token> tokenTree = new TokenTree(new MappedBuffer(reader)).iterator(KEY_CONVERTER);
    tokenTree.skipTo(tokens.lastKey() + 10);
}
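As written, skipPastEnd only verifies that skipping to a key ten past the last token does not throw; there is no assertion after skipTo. If the intent should be explicit, a follow-up check along these lines could be added (illustrative only, and it assumes the iterator reports exhaustion after skipping past its maximum token):

Assert.assertFalse("iterator should be exhausted after skipping past the end", tokenTree.hasNext());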
Use of org.apache.cassandra.io.util.SequentialWriter in project cassandra by apache.
The class TokenTreeTest, method buildSerializeIterateAndSkip.
// Works with maps other than bigTokensMap, but skips to a rather large token,
// so for maps other than bigTokensMap the skip itself is likely not exercised by this test.
public void buildSerializeIterateAndSkip(TokenTreeBuilder builder, SortedMap<Long, LongSet> tokens) throws Exception {
    builder.finish();
    final File treeFile = FileUtils.createTempFile("token-tree-iterate-test2", "tt");
    treeFile.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(treeFile, DEFAULT_OPT)) {
        builder.write(writer);
        writer.sync();
    }
    final RandomAccessReader reader = RandomAccessReader.open(treeFile);
    final TokenTree tokenTree = new TokenTree(new MappedBuffer(reader));
    final RangeIterator<Long, Token> treeIterator = tokenTree.iterator(KEY_CONVERTER);
    final RangeIterator<Long, TokenWithOffsets> listIterator = new EntrySetSkippableIterator(tokens);
    long lastToken = 0L;
    while (treeIterator.hasNext() && lastToken < 12) {
        Token treeNext = treeIterator.next();
        TokenWithOffsets listNext = listIterator.next();
        Assert.assertEquals(listNext.token, (lastToken = treeNext.get()));
        Assert.assertEquals(convert(listNext.offsets), convert(treeNext));
    }
    treeIterator.skipTo(100548L);
    listIterator.skipTo(100548L);
    while (treeIterator.hasNext() && listIterator.hasNext()) {
        Token treeNext = treeIterator.next();
        TokenWithOffsets listNext = listIterator.next();
        Assert.assertEquals(listNext.token, (long) treeNext.get());
        Assert.assertEquals(convert(listNext.offsets), convert(treeNext));
    }
    Assert.assertFalse("Tree iterator not completed", treeIterator.hasNext());
    Assert.assertFalse("List iterator not completed", listIterator.hasNext());
    reader.close();
}
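The first loop compares entries element by element only while the tree token stays below 12; skipTo(100548L) then jumps both iterators forward, and the second loop together with the two assertFalse checks verifies that the tree and the reference list finish in lockstep. As the comment above the method notes, for token maps whose keys never reach 100548 the skip lands past the end on both sides, so the second loop simply never executes and the skip itself is not really exercised.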