Use of net.openhft.chronicle.queue.ChronicleQueue in project Chronicle-Queue by OpenHFT.
The class SingleChronicleQueueTest, method testLastWrittenIndexPerAppenderNoData.
@Test(expected = IllegalStateException.class)
public void testLastWrittenIndexPerAppenderNoData() {
    final File file = createTempFile("testLastWrittenIndexPerAppenderNoData");
    try {
        final ChronicleQueue chronicle = createQueue(file);
        final ExcerptAppender appender = chronicle.acquireAppender();
        // nothing has been appended yet, so this call is expected to throw IllegalStateException
        appender.lastWrittenIndex();
    } finally {
        file.delete();
    }
}
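For contrast, here is a minimal happy-path sketch (an assumption on my part, reusing the createTempFile/createQueue test helpers and the lastWrittenIndex() call shown above): once at least one excerpt has been appended, the call can return an index instead of throwing.

// Hypothetical sketch, not part of the test class above.
final File file = createTempFile("lastWrittenIndexWithData");
try (final ChronicleQueue chronicle = createQueue(file)) {
    final ExcerptAppender appender = chronicle.acquireAppender();
    appender.writeText("hello");                    // append one excerpt
    final long index = appender.lastWrittenIndex(); // should now return a valid index
    System.out.println("last written index: " + index);
} finally {
    file.delete();
}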
Use of net.openhft.chronicle.queue.ChronicleQueue in project Chronicle-Queue by OpenHFT.
The class ExcerptsSkippedWhenTailerDirectionNoneTest, method shouldNotSkipMessageAtStartOfQueue.
@Test
public void shouldNotSkipMessageAtStartOfQueue() throws Exception {
    final File tmpDir = DirectoryUtils.tempDir(ExcerptsSkippedWhenTailerDirectionNoneTest.class.getSimpleName());
    try (final ChronicleQueue writeQueue = SingleChronicleQueueBuilder.binary(tmpDir).testBlockSize().rollCycle(TEST_DAILY).build()) {
        final ExcerptAppender excerptAppender = writeQueue.acquireAppender();
        try (final DocumentContext ctx = excerptAppender.writingDocument()) {
            ctx.wire().getValueOut().object("first");
        }
        try (final DocumentContext ctx = excerptAppender.writingDocument()) {
            ctx.wire().getValueOut().object("second");
        }
    }
    try (final SingleChronicleQueue readQueue = SingleChronicleQueueBuilder.binary(tmpDir).testBlockSize().rollCycle(TEST_DAILY).build()) {
        final ExcerptTailer tailer = readQueue.createTailer();
        final RollCycle rollCycle = readQueue.rollCycle();
        assertThat(rollCycle.toSequenceNumber(tailer.index()), is(0L));
        try (final DocumentContext ctx = tailer.direction(TailerDirection.NONE).readingDocument()) {
            // access the first document without incrementing the sequence number
        }
        assertThat(rollCycle.toSequenceNumber(tailer.index()), is(0L));
        String value;
        try (DocumentContext dc = tailer.direction(TailerDirection.FORWARD).readingDocument()) {
            ValueIn valueIn = dc.wire().getValueIn();
            value = (String) valueIn.object();
        }
        assertThat(rollCycle.toSequenceNumber(tailer.index()), is(1L));
        assertThat(value, is("first"));
        try (DocumentContext dc = tailer.direction(TailerDirection.NONE).readingDocument()) {
            ValueIn valueIn = dc.wire().getValueIn();
            value = (String) valueIn.object();
        }
        assertThat(rollCycle.toSequenceNumber(tailer.index()), is(1L));
        assertThat(value, is("second"));
        try (DocumentContext dc = tailer.direction(TailerDirection.NONE).readingDocument()) {
            ValueIn valueIn = dc.wire().getValueIn();
            value = (String) valueIn.object();
        }
        assertThat(rollCycle.toSequenceNumber(tailer.index()), is(1L));
        assertThat(value, is("second"));
    }
}
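The behaviour being asserted is that a read in TailerDirection.NONE returns the excerpt at the tailer's current position without moving the position, so switching back to FORWARD afterwards does not skip a message. A condensed peek-then-consume sketch (an assumption, reusing the builder calls and tmpDir from this test, against a queue already populated as above):

// Hypothetical sketch: peek at the next message without consuming it, then consume it.
try (final ChronicleQueue queue = SingleChronicleQueueBuilder.binary(tmpDir).testBlockSize().rollCycle(TEST_DAILY).build()) {
    final ExcerptTailer tailer = queue.createTailer();
    try (DocumentContext dc = tailer.direction(TailerDirection.NONE).readingDocument()) {
        if (dc.isPresent())
            System.out.println("peek: " + dc.wire().getValueIn().object()); // index unchanged
    }
    try (DocumentContext dc = tailer.direction(TailerDirection.FORWARD).readingDocument()) {
        if (dc.isPresent())
            System.out.println("read: " + dc.wire().getValueIn().object()); // same message, index advances
    }
}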
Use of net.openhft.chronicle.queue.ChronicleQueue in project Chronicle-Queue by OpenHFT.
The class MoveToIndexTest, method shouldMoveToPreviousIndexAfterDocumentIsConsumed.
@Test
public void shouldMoveToPreviousIndexAfterDocumentIsConsumed() throws Exception {
    File queuePath = tmpFolder.newFolder("cq");
    try (ChronicleQueue queue = SingleChronicleQueueBuilder.binary(queuePath).build()) {
        ExcerptAppender appender = queue.acquireAppender();
        for (int i = 1; i < 10; ++i) {
            appender.writeText("id" + i);
        }
        ExcerptTailer tailer = queue.createTailer();
        assertNext(tailer, "id1");
        long index = tailer.index();
        assertNext(tailer, "id2");
        tailer.moveToIndex(index);
        assertNext(tailer, "id2");
        tailer.moveToIndex(index);
        assertNext(tailer, "id2");
    }
}
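After "id1" is consumed, tailer.index() points at the next unread excerpt ("id2"); moveToIndex() rewinds to that captured position, and the same excerpt can be replayed any number of times. A standalone sketch of the same pattern without the assertNext helper (an assumption, using the readText() call that the writer loop above mirrors with writeText()):

// Hypothetical sketch: capture an index, read past it, then rewind and re-read.
try (ChronicleQueue queue = SingleChronicleQueueBuilder.binary(queuePath).build()) {
    ExcerptTailer tailer = queue.createTailer();
    tailer.readText();                   // consume "id1"
    long replayFrom = tailer.index();    // index of the next unread excerpt ("id2")
    String first = tailer.readText();    // consume "id2"
    tailer.moveToIndex(replayFrom);      // rewind to the captured index
    String replayed = tailer.readText(); // "id2" is delivered again
    // first and replayed should both be "id2"
}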
Use of net.openhft.chronicle.queue.ChronicleQueue in project Chronicle-Queue by OpenHFT.
The class NotCompleteTest, method testMessageLeftNotComplete.
@Test
public void testMessageLeftNotComplete() {
    File tmpDir = DirectoryUtils.tempDir("testMessageLeftNotComplete");
    try (final ChronicleQueue queue = binary(tmpDir).testBlockSize().rollCycle(RollCycles.TEST_DAILY).build()) {
        ExcerptAppender appender = queue.acquireAppender().lazyIndexing(lazyIndexing);
        // start a message which is deliberately left incomplete
        DocumentContext dc = appender.writingDocument();
        dc.wire().write("some").text("data");
        // dc.close() is intentionally not called
    }
    try (final ChronicleQueue queue = binary(tmpDir).testBlockSize().build()) {
        ExcerptTailer tailer = queue.createTailer();
        try (DocumentContext dc = tailer.readingDocument()) {
            assertFalse(dc.isPresent());
        }
String expectedEager = "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 0,\n" + " 0\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 8,\n" + " indexSpacing: 1,\n" + " index2Index: 442,\n" + " lastIndex: 0\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: -1\n" + "--- !!meta-data #binary\n" + "index2index: [\n" + " # length: 8, used: 1\n" + " 544,\n" + " 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 544, header: -1\n" + "--- !!meta-data #binary\n" + "index: [\n" + " # length: 8, used: 0\n" + " 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 640, header: -1 or 0\n" + "--- !!not-ready-data! #binary\n" + "...\n" + "# 130428 bytes remaining\n";
String expectedLazy = "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 0,\n" + " 0\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 8,\n" + " indexSpacing: 1,\n" + " index2Index: 0,\n" + " lastIndex: 0\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: -1 or 0\n" + "--- !!not-ready-data! #binary\n" + "...\n" + "# 130626 bytes remaining\n";
        assertEquals(lazyIndexing ? expectedLazy : expectedEager, queue.dump());
    }
    try (final ChronicleQueue queue = binary(tmpDir).testBlockSize().timeoutMS(500).build()) {
        ExcerptAppender appender = queue.acquireAppender();
        try (DocumentContext dc = appender.writingDocument()) {
            dc.wire().write("some").text("data");
        }
String expectedEager = "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 33412,\n" + " 143503447293952\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 8,\n" + " indexSpacing: 1,\n" + " index2Index: 442,\n" + " lastIndex: 1\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: -1\n" + "--- !!meta-data #binary\n" + "index2index: [\n" + " # length: 8, used: 1\n" + " 544,\n" + " 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 544, header: -1\n" + "--- !!meta-data #binary\n" + "index: [\n" + " # length: 8, used: 1\n" + " 33412,\n" + " 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 640, header: -1\n" + "--- !!meta-data #binary\n" + "\"!! Skipped due to recovery of locked header !!";
String expectedEagerFooter = "# position: 33412, header: 0\n" + "--- !!data #binary\n" + "some: data\n" + "...\n" + "# 97642 bytes remaining\n";
String expectedLazy = "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 33408,\n" + " 143486267424768\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 8,\n" + " indexSpacing: 1,\n" + " index2Index: 33212,\n" + " lastIndex: 1\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: -1\n" + "--- !!meta-data #binary\n" + "\"!! Skipped due to recovery of locked header !!";
String expectedLazyFooter = "# position: 33212, header: -1\n" + "--- !!meta-data #binary\n" + "index2index: [\n" + " # length: 8, used: 1\n" + " 33312,\n" + " 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 33312, header: -1\n" + "--- !!meta-data #binary\n" + "index: [\n" + " # length: 8, used: 1\n" + " 33408,\n" + " 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 33408, header: 0\n" + "--- !!data #binary\n" + "some: data\n" + "...\n" + "# 97646 bytes remaining\n";
        if (lazyIndexing) {
            assertThat(queue.dump(), containsString(expectedLazy));
            assertThat(queue.dump(), containsString(expectedLazyFooter));
        } else {
            assertThat(queue.dump(), containsString(expectedEager));
            assertThat(queue.dump(), containsString(expectedEagerFooter));
        }
    }
}
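The first block leaves the DocumentContext open on purpose, which is why the tailer later sees no complete excerpt and the dump contains a not-ready-data entry. In normal application code the context is closed via try-with-resources so the excerpt is always completed; a minimal sketch of that pattern, assuming the same binary(...) builder used in this test:

// Hypothetical sketch: closing the DocumentContext marks the excerpt complete and visible to tailers.
try (ChronicleQueue queue = binary(tmpDir).testBlockSize().build()) {
    ExcerptAppender appender = queue.acquireAppender();
    try (DocumentContext dc = appender.writingDocument()) {
        dc.wire().write("some").text("data");
    } // the excerpt becomes readable here
}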
Use of net.openhft.chronicle.queue.ChronicleQueue in project Chronicle-Queue by OpenHFT.
The class NotCompleteTest, method testInterruptedDuringSerialisation.
@Test
public void testInterruptedDuringSerialisation() throws InterruptedException {
    final File tmpDir = DirectoryUtils.tempDir("testInterruptedDuringSerialisation_" + (lazyIndexing ? "lazy" : "eager"));
    DirectoryUtils.deleteDir(tmpDir);
    tmpDir.mkdirs();
    final List<String> names = Collections.synchronizedList(new ArrayList<>());
    final Person person1 = new Person(40, "Terry");
    final Person interrupter = new Person(50, Person.INTERRUPT);
    final Person thrower = new Person(80, Person.THROW);
    final Person person2 = new Person(90, "Bert");
    try (final ChronicleQueue queueReader = binary(tmpDir).testBlockSize().rollCycle(RollCycles.TEST_DAILY).timeoutMS(500).build();
         final ChronicleQueue queueWriter = binary(tmpDir).testBlockSize().rollCycle(RollCycles.TEST_DAILY).build()) {
        ExcerptTailer tailer = queueReader.createTailer();
        MethodReader reader = tailer.methodReader((PersonListener) person -> names.add(person.name));
        final StringBuilder queueDumpBeforeInterruptedWrite = new StringBuilder();
        // set up
        doWrite(queueWriter, (proxy, queue) -> {
            proxy.accept(person1);
            queueDumpBeforeInterruptedWrite.append(queue.dump());
        });
        String cleanedQueueDump = cleanQueueDump(queueDumpBeforeInterruptedWrite.toString());
        // start up the writer thread
        Thread writerThread = new Thread(() -> doWrite(queueWriter, (proxy, queue) -> {
            // the thread is interrupted during this write
            proxy.accept(interrupter);
        }));
        writerThread.start();
        writerThread.join();
        try (final ChronicleQueue queue = binary(tmpDir).testBlockSize().rollCycle(RollCycles.TEST_DAILY).build()) {
            String dump = cleanQueueDump(queue.dump());
            assertEquals("queue should be unchanged by the interrupted write", cleanedQueueDump, dump);
        }
        // check that only one message was written
        assertTrue(reader.readOne());
        assertEquals(1, names.size());
        assertEquals(person1.name, names.get(0));
        assertFalse(reader.readOne());
        // do a write that throws an exception
        doWrite(queueWriter, (proxy, queue) -> {
            try {
                proxy.accept(thrower);
            } catch (NullPointerException npe) {
                // ignore
            }
        });
        try (final ChronicleQueue queue = binary(tmpDir).testBlockSize().rollCycle(RollCycles.TEST_DAILY).build()) {
            String dump = cleanQueueDump(queue.dump());
            if (lazyIndexing) {
                // reading the queue creates the index, thus changing it, so do a text comparison here
cleanedQueueDump = "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 442,\n" + " 0\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 8,\n" + " indexSpacing: 1,\n" + " index2Index: 475,\n" + " lastIndex: 0\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: 0\n" + "--- !!data #binary\n" + "accept: {\n" + " age: 40,\n" + " name: Terry\n" + "}\n" + "# position: 475, header: 0\n" + "--- !!meta-data #binary\n" + "index2index: [\n" + " # length: 8, used: 0\n" + " 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "...\n" + "\n";
            }
            assertEquals("queue should be unchanged by the failed write", cleanedQueueDump, dump);
        }
        // check that nothing else was written
        assertFalse(reader.readOne());
        // do an empty write
        ExcerptAppender appender = queueWriter.acquireAppender().lazyIndexing(lazyIndexing);
        DocumentContext wd = appender.writingDocument();
        wd.rollbackOnClose();
        wd.close();
        // check that the queue is unchanged
        String dump = cleanQueueDump(queueWriter.dump());
        assertEquals("queue should be unchanged by the failed write", cleanedQueueDump, dump);
        // check that nothing else was written
        assertFalse(reader.readOne());
        // write another person to the same queue in this thread
        doWrite(queueWriter, (proxy, queue) -> proxy.accept(person2));
        assertTrue(reader.readOne());
        assertEquals(2, names.size());
        assertEquals(person2.name, names.get(1));
        assertFalse(reader.readOne());
    }
}
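The "empty write" step above relies on rollbackOnClose(): when the context is closed, the pending excerpt is discarded instead of committed, so the queue dump is unchanged and the reader sees nothing new. A condensed sketch of that pattern, assuming the same binary(...) builder used in this test:

// Hypothetical sketch: discard a started excerpt instead of committing it.
try (ChronicleQueue queue = binary(tmpDir).testBlockSize().build()) {
    ExcerptAppender appender = queue.acquireAppender();
    try (DocumentContext dc = appender.writingDocument()) {
        dc.wire().write("maybe").text("discarded");
        dc.rollbackOnClose(); // nothing becomes visible to tailers when dc closes
    }
}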