Use of net.openhft.chronicle.queue.ExcerptTailer in project Chronicle-Queue by OpenHFT.
From the class MessageHistoryTest, method shouldAccessMessageHistoryWhenTailerIsMovedToEnd.
@Test
public void shouldAccessMessageHistoryWhenTailerIsMovedToEnd() throws Exception {
    try (final SingleChronicleQueue inputQueue = createQueue(inputQueueDir, 1);
         final SingleChronicleQueue outputQueue = createQueue(outputQueueDir, 2)) {
        generateTestData(inputQueue, outputQueue);
        final ExcerptTailer tailer = outputQueue.createTailer();
        tailer.direction(TailerDirection.BACKWARD).toEnd();
        final ValidatingSecond validatingSecond = new ValidatingSecond();
        final MethodReader validator = tailer.methodReader(validatingSecond);
        assertThat(validator.readOne(), is(true));
        assertThat(validatingSecond.messageHistoryPresent(), is(true));
    }
}
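The helpers createQueue, generateTestData and ValidatingSecond are defined elsewhere in MessageHistoryTest and are not part of this excerpt. As a rough, hypothetical sketch of what a history-checking listener could look like, assuming the thread-local MessageHistory from chronicle-wire and a method name chosen only for illustration:

import net.openhft.chronicle.wire.MessageHistory;

// Hypothetical stand-in for ValidatingSecond: records whether the message just
// dispatched by the MethodReader carried a MessageHistory.
final class HistoryCheckingListener {
    private boolean messageHistoryPresent;

    // called by the MethodReader for each routed message (method name is an assumption)
    public void onMessage(final String payload) {
        final MessageHistory history = MessageHistory.get();
        // a populated history has at least one recorded source
        messageHistoryPresent = history != null && history.sources() > 0;
    }

    boolean messageHistoryPresent() {
        return messageHistoryPresent;
    }
}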
Use of net.openhft.chronicle.queue.ExcerptTailer in project Chronicle-Queue by OpenHFT.
From the class MoveToIndexTest, method shouldMoveToPreviousIndexAfterDocumentIsConsumed.
@Test
public void shouldMoveToPreviousIndexAfterDocumentIsConsumed() throws Exception {
    File queuePath = tmpFolder.newFolder("cq");
    try (ChronicleQueue queue = SingleChronicleQueueBuilder.binary(queuePath).build()) {
        ExcerptAppender appender = queue.acquireAppender();
        for (int i = 1; i < 10; ++i) {
            appender.writeText("id" + i);
        }
        ExcerptTailer tailer = queue.createTailer();
        assertNext(tailer, "id1");
        long index = tailer.index();
        assertNext(tailer, "id2");
        tailer.moveToIndex(index);
        assertNext(tailer, "id2");
        tailer.moveToIndex(index);
        assertNext(tailer, "id2");
    }
}
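As the assertions show, tailer.index() taken after "id1" has been consumed refers to the next excerpt, so moving back to that index re-reads "id2" both times. The assertNext helper is not shown in the excerpt; a plausible sketch, assuming JUnit's assertEquals and ExcerptTailer.readText(), is:

// Hypothetical shape of the assertNext helper used above.
private static void assertNext(ExcerptTailer tailer, String expected) {
    // readText() returns the next text excerpt, or null if none is available
    assertEquals(expected, tailer.readText());
}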
Use of net.openhft.chronicle.queue.ExcerptTailer in project Chronicle-Queue by OpenHFT.
From the class MultiThreadedRollTest, method test.
@Test(timeout = 10000)
public void test() throws ExecutionException, InterruptedException {
    final SetTimeProvider timeProvider = new SetTimeProvider();
    timeProvider.currentTimeMillis(1000);
    final File path = DirectoryUtils.tempDir("MultiThreadedRollTest");
    try (final RollingChronicleQueue wqueue = binary(path).testBlockSize().timeProvider(timeProvider).rollCycle(TEST_SECONDLY).build()) {
        wqueue.acquireAppender().writeText("hello world");
        try (final RollingChronicleQueue rqueue = binary(path).testBlockSize().timeProvider(timeProvider).rollCycle(TEST_SECONDLY).build()) {
            ExcerptTailer tailer = rqueue.createTailer();
            Future f = reader.submit(() -> {
                long index;
                do {
                    try (DocumentContext documentContext = tailer.readingDocument()) {
                        System.out.println("tailer.state: " + tailer.state());
                        // index is only meaningful if present.
                        index = documentContext.index();
                        // if (documentContext.isPresent())
                        final boolean present = documentContext.isPresent();
                        System.out.println("documentContext.isPresent=" + present + (present ? ",index=" + Long.toHexString(index) : ", no index"));
                        Jvm.pause(50);
                    }
                } while (index != 0x200000000L && !reader.isShutdown());
            });
            timeProvider.currentTimeMillis(2000);
            ((SingleChronicleQueueExcerpts.StoreAppender) wqueue.acquireAppender()).writeEndOfCycleIfRequired();
            Jvm.pause(200);
            wqueue.acquireAppender().writeText("hello world");
            f.get();
        }
    }
}
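The polling thread exits once it sees index 0x200000000, i.e. an index encoding cycle 2, sequence 0: the excerpt appended after the clock is moved to 2000 ms and the TEST_SECONDLY cycle rolls. The reader used for submit() and isShutdown() is a field that is not part of this excerpt; presumably it is an ExecutorService owned by the test class, for example (an assumption, not the project's actual declaration):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.junit.After;

// Assumed field: a single worker thread that runs the tailer loop.
private final ExecutorService reader = Executors.newSingleThreadExecutor();

@After
public void shutDown() {
    // the polling loop's isShutdown() check lets the worker exit once the test tears down
    reader.shutdownNow();
}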
Use of net.openhft.chronicle.queue.ExcerptTailer in project Chronicle-Queue by OpenHFT.
From the class NotCompleteTest, method testMessageLeftNotComplete.
@Test
public void testMessageLeftNotComplete() {
    File tmpDir = DirectoryUtils.tempDir("testMessageLeftNotComplete");
    try (final ChronicleQueue queue = binary(tmpDir).testBlockSize().rollCycle(RollCycles.TEST_DAILY).build()) {
        ExcerptAppender appender = queue.acquireAppender().lazyIndexing(lazyIndexing);
        // start a message which was not completed.
        DocumentContext dc = appender.writingDocument();
        dc.wire().write("some").text("data");
        // didn't call dc.close();
    }
    final SingleChronicleQueue singleChronicleQueue = null;
    try (final ChronicleQueue queue = binary(tmpDir).testBlockSize().build()) {
        ExcerptTailer tailer = queue.createTailer();
        try (DocumentContext dc = tailer.readingDocument()) {
            assertFalse(dc.isPresent());
        }
String expectedEager = "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 0,\n" + " 0\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 8,\n" + " indexSpacing: 1,\n" + " index2Index: 442,\n" + " lastIndex: 0\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: -1\n" + "--- !!meta-data #binary\n" + "index2index: [\n" + " # length: 8, used: 1\n" + " 544,\n" + " 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 544, header: -1\n" + "--- !!meta-data #binary\n" + "index: [\n" + " # length: 8, used: 0\n" + " 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 640, header: -1 or 0\n" + "--- !!not-ready-data! #binary\n" + "...\n" + "# 130428 bytes remaining\n";
String expectedLazy = "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 0,\n" + " 0\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 8,\n" + " indexSpacing: 1,\n" + " index2Index: 0,\n" + " lastIndex: 0\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: -1 or 0\n" + "--- !!not-ready-data! #binary\n" + "...\n" + "# 130626 bytes remaining\n";
        assertEquals(lazyIndexing ? expectedLazy : expectedEager, queue.dump());
    }
    try (final ChronicleQueue queue = binary(tmpDir).testBlockSize().timeoutMS(500).build()) {
        ExcerptAppender appender = queue.acquireAppender();
        try (DocumentContext dc = appender.writingDocument()) {
            dc.wire().write("some").text("data");
        }
String expectedEager = "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 33412,\n" + " 143503447293952\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 8,\n" + " indexSpacing: 1,\n" + " index2Index: 442,\n" + " lastIndex: 1\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: -1\n" + "--- !!meta-data #binary\n" + "index2index: [\n" + " # length: 8, used: 1\n" + " 544,\n" + " 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 544, header: -1\n" + "--- !!meta-data #binary\n" + "index: [\n" + " # length: 8, used: 1\n" + " 33412,\n" + " 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 640, header: -1\n" + "--- !!meta-data #binary\n" + "\"!! Skipped due to recovery of locked header !!";
String expectedEagerFooter = "# position: 33412, header: 0\n" + "--- !!data #binary\n" + "some: data\n" + "...\n" + "# 97642 bytes remaining\n";
String expectedLazy = "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 33408,\n" + " 143486267424768\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 8,\n" + " indexSpacing: 1,\n" + " index2Index: 33212,\n" + " lastIndex: 1\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: -1\n" + "--- !!meta-data #binary\n" + "\"!! Skipped due to recovery of locked header !!";
String expectedLazyFooter = "# position: 33212, header: -1\n" + "--- !!meta-data #binary\n" + "index2index: [\n" + " # length: 8, used: 1\n" + " 33312,\n" + " 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 33312, header: -1\n" + "--- !!meta-data #binary\n" + "index: [\n" + " # length: 8, used: 1\n" + " 33408,\n" + " 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 33408, header: 0\n" + "--- !!data #binary\n" + "some: data\n" + "...\n" + "# 97646 bytes remaining\n";
        if (lazyIndexing) {
            assertThat(queue.dump(), containsString(expectedLazy));
            assertThat(queue.dump(), containsString(expectedLazyFooter));
        } else {
            assertThat(queue.dump(), containsString(expectedEager));
            assertThat(queue.dump(), containsString(expectedEagerFooter));
        }
    }
}
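Both NotCompleteTest methods branch on a lazyIndexing field that is not part of this excerpt; the expected dumps differ because, in the lazy case, the index2index structure is not written eagerly. A plausible JUnit 4 parameterisation for that field would look like the following (an assumption, not the project's actual code):

import java.util.Arrays;
import java.util.Collection;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

@RunWith(Parameterized.class)
public class NotCompleteTest {
    private final boolean lazyIndexing;

    public NotCompleteTest(boolean lazyIndexing) {
        this.lazyIndexing = lazyIndexing;
    }

    // run every test once with eager indexing and once with lazy indexing
    @Parameterized.Parameters(name = "lazyIndexing={0}")
    public static Collection<Object[]> data() {
        return Arrays.asList(new Object[][]{{false}, {true}});
    }
}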
Use of net.openhft.chronicle.queue.ExcerptTailer in project Chronicle-Queue by OpenHFT.
From the class NotCompleteTest, method testInterruptedDuringSerialisation.
@Test
public void testInterruptedDuringSerialisation() throws InterruptedException {
    final File tmpDir = DirectoryUtils.tempDir("testInterruptedDuringSerialisation_" + (lazyIndexing ? "lazy" : "eager"));
    DirectoryUtils.deleteDir(tmpDir);
    tmpDir.mkdirs();
    final List<String> names = Collections.synchronizedList(new ArrayList<>());
    final Person person1 = new Person(40, "Terry");
    final Person interrupter = new Person(50, Person.INTERRUPT);
    final Person thrower = new Person(80, Person.THROW);
    final Person person2 = new Person(90, "Bert");
    try (final ChronicleQueue queueReader = binary(tmpDir).testBlockSize().rollCycle(RollCycles.TEST_DAILY).timeoutMS(500).build();
         final ChronicleQueue queueWriter = binary(tmpDir).testBlockSize().rollCycle(RollCycles.TEST_DAILY).build()) {
        ExcerptTailer tailer = queueReader.createTailer();
        MethodReader reader = tailer.methodReader((PersonListener) person -> names.add(person.name));
        final StringBuilder queueDumpBeforeInterruptedWrite = new StringBuilder();
        // set up
        doWrite(queueWriter, (proxy, queue) -> {
            proxy.accept(person1);
            queueDumpBeforeInterruptedWrite.append(queue.dump());
        });
        String cleanedQueueDump = cleanQueueDump(queueDumpBeforeInterruptedWrite.toString());
        // start up writer thread
        Thread writerThread = new Thread(() -> doWrite(queueWriter, (proxy, queue) -> {
            // thread is interrupted during this
            proxy.accept(interrupter);
        }));
        writerThread.start();
        writerThread.join();
        try (final ChronicleQueue queue = binary(tmpDir).testBlockSize().rollCycle(RollCycles.TEST_DAILY).build()) {
            String dump = cleanQueueDump(queue.dump());
            assertEquals("queue should be unchanged by the interrupted write", cleanedQueueDump, dump);
        }
        // check only 1 written
        assertTrue(reader.readOne());
        assertEquals(1, names.size());
        assertEquals(person1.name, names.get(0));
        assertFalse(reader.readOne());
        // do a write that throws an exception
        doWrite(queueWriter, (proxy, queue) -> {
            try {
                proxy.accept(thrower);
            } catch (NullPointerException npe) {
                // ignore
            }
        });
        try (final ChronicleQueue queue = binary(tmpDir).testBlockSize().rollCycle(RollCycles.TEST_DAILY).build()) {
            String dump = cleanQueueDump(queue.dump());
            if (lazyIndexing) {
                // reading the queue creates the index, thus changing it, so do a text comparison here
cleanedQueueDump = "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 442,\n" + " 0\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 8,\n" + " indexSpacing: 1,\n" + " index2Index: 475,\n" + " lastIndex: 0\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: 0\n" + "--- !!data #binary\n" + "accept: {\n" + " age: 40,\n" + " name: Terry\n" + "}\n" + "# position: 475, header: 0\n" + "--- !!meta-data #binary\n" + "index2index: [\n" + " # length: 8, used: 0\n" + " 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "...\n" + "\n";
            }
            assertEquals("queue should be unchanged by the failed write", cleanedQueueDump, dump);
        }
        // check nothing else written
        assertFalse(reader.readOne());
        // do an empty write
        ExcerptAppender appender = queueWriter.acquireAppender().lazyIndexing(lazyIndexing);
        DocumentContext wd = appender.writingDocument();
        wd.rollbackOnClose();
        wd.close();
        // check queue unchanged
        String dump = cleanQueueDump(queueWriter.dump());
        assertEquals("queue should be unchanged by the failed write", cleanedQueueDump, dump);
        // check nothing else written
        assertFalse(reader.readOne());
        // write another person to same queue in this thread
        doWrite(queueWriter, (proxy, queue) -> proxy.accept(person2));
        assertTrue(reader.readOne());
        assertEquals(2, names.size());
        assertEquals(person2.name, names.get(1));
        assertFalse(reader.readOne());
    }
}
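The Person value type, the PersonListener interface and the doWrite/cleanQueueDump helpers live elsewhere in NotCompleteTest. A hypothetical reconstruction of doWrite, assuming it builds a methodWriter proxy on a fresh appender and passes it to the supplied action as the (proxy, queue) lambdas above suggest, might be:

import java.util.function.BiConsumer;

// Listener interface dispatched by methodReader/methodWriter; its single accept(Person)
// method is taken from the lambdas and proxy calls shown above.
interface PersonListener {
    void accept(Person person);
}

// Hypothetical helper: each accept(...) call on the proxy is serialised as one excerpt,
// then the proxy and the queue are handed to the supplied action.
private void doWrite(ChronicleQueue queue, BiConsumer<PersonListener, ChronicleQueue> action) {
    final PersonListener proxy = queue.acquireAppender()
            .lazyIndexing(lazyIndexing)   // mirrors the field used elsewhere in the test (assumption)
            .methodWriter(PersonListener.class);
    action.accept(proxy, queue);
}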