Use of net.openhft.chronicle.queue.impl.single.SingleChronicleQueue in the Chronicle-Queue project by OpenHFT.
From the class RollingChronicleQueueTest, the method testCountExcerptsWhenTheCycleIsRolled:
@Test
public void testCountExcerptsWhenTheCycleIsRolled() {
    final AtomicLong time = new AtomicLong();
    File name = DirectoryUtils.tempDir("testCountExcerptsWhenTheCycleIsRolled");
    try (final SingleChronicleQueue q = binary(name).testBlockSize().timeProvider(time::get).rollCycle(TEST2_DAILY).build()) {
        final ExcerptAppender appender = q.acquireAppender().lazyIndexing(lazyIndexing);
        time.set(0);
        appender.writeText("1. some text");
        long start = appender.lastIndexAppended();
        appender.writeText("2. some more text");
        appender.writeText("3. some more text");
        time.set(TimeUnit.DAYS.toMillis(1));
        appender.writeText("4. some text - first cycle");
        time.set(TimeUnit.DAYS.toMillis(2));
        // large gap to miss a cycle file
        time.set(TimeUnit.DAYS.toMillis(3));
        time.set(TimeUnit.DAYS.toMillis(4));
        appender.writeText("5. some text - second cycle");
        appender.writeText("some more text");
        long end = appender.lastIndexAppended();
String expectedEager = "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 806,\n" + " 3461743640578\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 16,\n" + " indexSpacing: 2,\n" + " index2Index: 442,\n" + " lastIndex: 4\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: -1\n" + "--- !!meta-data #binary\n" + "index2index: [\n" + " # length: 16, used: 1\n" + " 608,\n" + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 608, header: -1\n" + "--- !!meta-data #binary\n" + "index: [\n" + " # length: 16, used: 2\n" + " 768,\n" + " 806,\n" + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 768, header: 0\n" + "--- !!data\n" + "1. some text\n" + "# position: 785, header: 1\n" + "--- !!data\n" + "2. some more text\n" + "# position: 806, header: 2\n" + "--- !!data\n" + "3. some more text\n" + "# position: 827, header: 2 EOF\n" + "--- !!not-ready-meta-data! #binary\n" + "...\n" + "# 130241 bytes remaining\n" + "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 768,\n" + " 3298534883328\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 16,\n" + " indexSpacing: 2,\n" + " index2Index: 442,\n" + " lastIndex: 2\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: -1\n" + "--- !!meta-data #binary\n" + "index2index: [\n" + " # length: 16, used: 1\n" + " 608,\n" + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 608, header: -1\n" + "--- !!meta-data #binary\n" + "index: [\n" + " # length: 16, used: 1\n" + " 768,\n" + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 768, header: 0\n" + "--- !!data\n" + "4. some text - first cycle\n" + "# position: 798, header: 0 EOF\n" + "--- !!not-ready-meta-data! #binary\n" + "...\n" + "# 130270 bytes remaining\n" + "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 799,\n" + " 3431678869505\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 16,\n" + " indexSpacing: 2,\n" + " index2Index: 442,\n" + " lastIndex: 2\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: -1\n" + "--- !!meta-data #binary\n" + "index2index: [\n" + " # length: 16, used: 1\n" + " 608,\n" + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 608, header: -1\n" + "--- !!meta-data #binary\n" + "index: [\n" + " # length: 16, used: 1\n" + " 768,\n" + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 768, header: 0\n" + "--- !!data\n" + "5. some text - second cycle\n" + "# position: 799, header: 1\n" + "--- !!data\n" + "some more text\n" + "...\n" + "# 130251 bytes remaining\n";
String expectedLazy = "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 805,\n" + " 0\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 16,\n" + " indexSpacing: 2,\n" + " index2Index: 459,\n" + " lastIndex: 0\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: 0\n" + "--- !!data\n" + "1. some text\n" + "# position: 459, header: 0\n" + "--- !!meta-data #binary\n" + "index2index: [\n" + " # length: 16, used: 1\n" + " 624,\n" + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 624, header: 0\n" + "--- !!meta-data #binary\n" + "index: [\n" + " # length: 16, used: 0\n" + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 784, header: 1\n" + "--- !!data\n" + "2. some more text\n" + "# position: 805, header: 2\n" + "--- !!data\n" + "3. some more text\n" + "# position: 826, header: 2 EOF\n" + "--- !!not-ready-meta-data! #binary\n" + "...\n" + "# 130242 bytes remaining\n" + "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 442,\n" + " 0\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 16,\n" + " indexSpacing: 2,\n" + " index2Index: 0,\n" + " lastIndex: 0\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: 0\n" + "--- !!data\n" + "4. some text - first cycle\n" + "# position: 472, header: 0 EOF\n" + "--- !!not-ready-meta-data! #binary\n" + "...\n" + "# 130596 bytes remaining\n" + "--- !!meta-data #binary\n" + "header: !SCQStore {\n" + " wireType: !WireType BINARY_LIGHT,\n" + " writePosition: [\n" + " 473,\n" + " 0\n" + " ],\n" + " roll: !SCQSRoll {\n" + " length: !int 86400000,\n" + " format: yyyyMMdd,\n" + " epoch: 0\n" + " },\n" + " indexing: !SCQSIndexing {\n" + " indexCount: 16,\n" + " indexSpacing: 2,\n" + " index2Index: 491,\n" + " lastIndex: 0\n" + " },\n" + " lastAcknowledgedIndexReplicated: -1,\n" + " recovery: !TimedStoreRecovery {\n" + " timeStamp: 0\n" + " },\n" + " deltaCheckpointInterval: 0,\n" + " lastIndexReplicated: -1,\n" + " sourceId: 0\n" + "}\n" + "# position: 442, header: 0\n" + "--- !!data\n" + "5. some text - second cycle\n" + "# position: 473, header: 1\n" + "--- !!data\n" + "some more text\n" + "# position: 491, header: 1\n" + "--- !!meta-data #binary\n" + "index2index: [\n" + " # length: 16, used: 1\n" + " 656,\n" + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "# position: 656, header: 1\n" + "--- !!meta-data #binary\n" + "index: [\n" + " # length: 16, used: 0\n" + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n" + "]\n" + "...\n" + "# 130252 bytes remaining\n";
        assertEquals(lazyIndexing ? expectedLazy : expectedEager, q.dump());
        assertEquals(5, q.countExcerpts(start, end));
    }
}
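The final assertion is the point of the test: six entries are appended across three cycle files, and countExcerpts(start, end) returns 5, i.e. it behaves like the distance between the two indices rather than an inclusive count. Below is a minimal sketch (not part of the original test) of the same pattern in isolation; it uses only the builder calls and static imports shown above (binary, TEST2_DAILY, DirectoryUtils), and the directory name, payload strings, and clock variable are illustrative.

// Minimal sketch, assuming the same static imports as the test above; names are illustrative.
final AtomicLong clock = new AtomicLong();
File dir = DirectoryUtils.tempDir("countAcrossRollSketch");
try (final SingleChronicleQueue queue = binary(dir).testBlockSize().timeProvider(clock::get).rollCycle(TEST2_DAILY).build()) {
    final ExcerptAppender appender = queue.acquireAppender();
    appender.writeText("entry in cycle 0");
    long first = appender.lastIndexAppended();
    clock.set(TimeUnit.DAYS.toMillis(1)); // advance the queue's clock so the next append rolls to a new cycle file
    appender.writeText("entry in cycle 1");
    long last = appender.lastIndexAppended();
    // two entries with one roll between them: the distance from first to last is 1,
    // consistent with the 6-entries/count-of-5 result asserted in the test above
    assertEquals(1, queue.countExcerpts(first, last));
}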
Use of net.openhft.chronicle.queue.impl.single.SingleChronicleQueue in the Chronicle-Queue project by OpenHFT.
From the class ChronicleHistoryReader, the method readChronicle:
public Map<String, Histogram> readChronicle() {
    final SingleChronicleQueue q = createQueue();
    final ExcerptTailer tailer = q.createTailer();
    final WireParselet parselet = parselet();
    final MethodReader mr = new VanillaMethodReader(tailer, true, parselet, null, parselet);
    MessageHistory.set(new VanillaMessageHistory());
    while (!Thread.currentThread().isInterrupted() && mr.readOne()) {
        ++counter;
        if (this.progress && counter % 1_000_000L == 0) {
            System.out.println("Progress: " + counter);
        }
    }
    return histos;
}
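The loop above is the standard MethodReader drain pattern: readOne() returns false once the tailer has no further messages, so the interrupt-aware while loop visits each excerpt exactly once and the histograms accumulated by the parselet are returned at the end. A stripped-down sketch of that drain pattern follows; it is not the ChronicleHistoryReader API itself, the queue path is illustrative, and Msg is assumed to be the single-method listener interface from the LastAppendedTest example further down this page.

// Sketch of the drain loop readChronicle() is built around; path and Msg listener are assumptions.
try (final SingleChronicleQueue queue = SingleChronicleQueueBuilder.binary(new File("history-queue")).build()) {
    final ExcerptTailer tailer = queue.createTailer();
    final MethodReader reader = tailer.methodReader((Msg) text -> System.out.println("seen: " + text));
    long count = 0;
    while (!Thread.currentThread().isInterrupted() && reader.readOne()) {
        count++; // one successful readOne() call per excerpt
    }
    System.out.println("excerpts read: " + count);
}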
Use of net.openhft.chronicle.queue.impl.single.SingleChronicleQueue in the Chronicle-Queue project by OpenHFT.
From the class ChronicleReader, the method execute:
public void execute() {
    try {
        long lastObservedTailIndex = Long.MAX_VALUE;
        long highestReachedIndex = 0L;
        boolean isFirstIteration = true;
        boolean retryLastOperation = false;
        boolean queueHasBeenModified = false;
        do {
            try (final SingleChronicleQueue queue = createQueue();
                 final QueueEntryHandler messageConverter = entryHandlerFactory.get()) {
                final ExcerptTailer tailer = queue.createTailer();
                queueHasBeenModified = false;
                if (highestReachedIndex != 0L) {
                    tailer.moveToIndex(highestReachedIndex);
                }
                final Bytes textConversionTarget = Bytes.elasticByteBuffer();
                try {
                    moveToSpecifiedPosition(queue, tailer, isFirstIteration);
                    lastObservedTailIndex = tailer.index();
                    while (!Thread.currentThread().isInterrupted()) {
                        try (DocumentContext dc = pollMethod.apply(tailer)) {
                            if (!dc.isPresent()) {
                                if (tailInputSource) {
                                    pauser.pause();
                                }
                                break;
                            }
                            pauser.reset();
                            if (customPlugin == null) {
                                messageConverter.accept(dc.wire(), text -> {
                                    applyFiltersAndLog(text, tailer.index());
                                });
                            } else {
                                customPlugin.onReadDocument(dc);
                            }
                        }
                    }
                } finally {
                    textConversionTarget.release();
                    highestReachedIndex = tailer.index();
                    isFirstIteration = false;
                }
                queueHasBeenModified = queueHasBeenModifiedSinceLastCheck(lastObservedTailIndex);
            } catch (final RuntimeException e) {
                if (e.getCause() != null && e.getCause() instanceof DateTimeParseException) {
                    // ignore this error - due to a race condition between
                    // the reader creating a Queue (with default roll-cycle due to no files on disk)
                    // and the writer appending to the Queue with a non-default roll-cycle
                    retryLastOperation = true;
                } else {
                    throw e;
                }
            }
        } while (tailInputSource || retryLastOperation || queueHasBeenModified);
    } catch (Throwable t) {
        t.printStackTrace();
        throw t;
    }
}
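The essential shape of execute() is the DocumentContext polling loop in the middle; everything else is positioning, pausing, and retry handling. Here is a minimal sketch of just that loop, under the assumption that the poll is a plain ExcerptTailer.readingDocument() call (the source abstracts this behind pollMethod) and with an illustrative queue path; there is no filtering, plugin, pauser, or retry logic.

// Minimal tailing sketch; path is illustrative, behaviour is read-until-empty.
try (final SingleChronicleQueue queue = SingleChronicleQueueBuilder.binary(new File("reader-queue")).build()) {
    final ExcerptTailer tailer = queue.createTailer();
    while (!Thread.currentThread().isInterrupted()) {
        try (DocumentContext dc = tailer.readingDocument()) {
            if (!dc.isPresent()) {
                break; // nothing more to read; execute() would pause (when tailing) and poll again
            }
            System.out.println("excerpt at index " + tailer.index());
        }
    }
}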
Use of net.openhft.chronicle.queue.impl.single.SingleChronicleQueue in the Chronicle-Queue project by OpenHFT.
From the class DumpQueueMainTest, the method shouldBeAbleToDumpReadOnlyQueueFile:
@Test
public void shouldBeAbleToDumpReadOnlyQueueFile() throws Exception {
    if (OS.isWindows())
        return;
    final File dataDir = DirectoryUtils.tempDir(DumpQueueMainTest.class.getSimpleName());
    try (final SingleChronicleQueue queue = SingleChronicleQueueBuilder.binary(dataDir).build()) {
        final ExcerptAppender excerptAppender = queue.acquireAppender();
        excerptAppender.writeText("first");
        excerptAppender.writeText("last");
        final Path queueFile = Files.list(dataDir.toPath())
                .filter(p -> p.toString().endsWith(SingleChronicleQueue.SUFFIX))
                .findFirst()
                .orElseThrow(() -> new AssertionError("Could not find queue file in directory " + dataDir));
        assertThat(queueFile.toFile().setWritable(false), is(true));
        final CountingOutputStream countingOutputStream = new CountingOutputStream();
        DumpQueueMain.dump(queueFile.toFile(), new PrintStream(countingOutputStream), Long.MAX_VALUE);
        assertThat(countingOutputStream.bytes, is(not(0L)));
    }
}
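The same DumpQueueMain.dump(File, PrintStream, long) overload used in the test can be called outside a test to print a queue file to the console. A small sketch follows; the method name and the idea of taking the first queue file in a directory are illustrative, and Long.MAX_VALUE is simply the same upper limit the test passes.

// Sketch: locate the first queue file (SingleChronicleQueue.SUFFIX) in a directory and dump it to stdout.
static void dumpFirstQueueFile(File dataDir) throws Exception {
    final Path queueFile = Files.list(dataDir.toPath())
            .filter(p -> p.toString().endsWith(SingleChronicleQueue.SUFFIX))
            .findFirst()
            .orElseThrow(() -> new IllegalStateException("no queue file found in " + dataDir));
    // same (file, output stream, upper limit) arguments as in the test above
    DumpQueueMain.dump(queueFile.toFile(), System.out, Long.MAX_VALUE);
}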
Use of net.openhft.chronicle.queue.impl.single.SingleChronicleQueue in the Chronicle-Queue project by OpenHFT.
From the class LastAppendedTest, the method testLastWritten:
@Test
public void testLastWritten() {
    SetTimeProvider timeProvider = new SetTimeProvider();
    try (SingleChronicleQueue outQueue = single(getTmpDir()).rollCycle(RollCycles.TEST_SECONDLY).sourceId(1).timeProvider(timeProvider).build()) {
        try (SingleChronicleQueue inQueue = single(getTmpDir()).rollCycle(RollCycles.TEST_SECONDLY).sourceId(2).timeProvider(timeProvider).build()) {
            // write some initial data to the inQueue
            final Msg msg = inQueue.acquireAppender().methodWriterBuilder(Msg.class).recordHistory(true).build();
            msg.msg("somedata-0");
            timeProvider.advanceMillis(1000);
            // write more data into the inQueue
            msg.msg("somedata-1");
            // read the first two messages from the inQueue and write them to the outQueue
            {
                Msg out = outQueue.acquireAppender().methodWriterBuilder(Msg.class).recordHistory(true).build();
                MethodReader methodReader = inQueue.createTailer().methodReader((Msg) out::msg);
                // reads somedata-0
                methodReader.readOne();
                // reads somedata-1
                methodReader.readOne();
            }
            // write more data into the inQueue
            msg.msg("somedata-2");
            timeProvider.advanceMillis(2000);
            msg.msg("somedata-3");
            msg.msg("somedata-4");
            System.out.println(inQueue.dump());
            AtomicReference<String> actualValue = new AtomicReference<>();
            // check that we are able to pick up from where we left off;
            // in other words, the next read should be somedata-2
            {
                ExcerptTailer excerptTailer = inQueue.createTailer().afterLastWritten(outQueue);
                MethodReader methodReader = excerptTailer.methodReader((Msg) actualValue::set);
                methodReader.readOne();
                Assert.assertEquals("somedata-2", actualValue.get());
                methodReader.readOne();
                Assert.assertEquals("somedata-3", actualValue.get());
                methodReader.readOne();
                Assert.assertEquals("somedata-4", actualValue.get());
            }
        }
    }
}
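The heart of the test is afterLastWritten(outQueue): because both method writers were built with recordHistory(true), it positions a fresh tailer on the first inQueue entry that has not yet been replayed into outQueue. A minimal sketch of just that resume step, assuming the same two queues and the same Msg interface as in the test:

// Resume sketch: the next message delivered is "somedata-2", exactly as asserted above.
ExcerptTailer resumedTailer = inQueue.createTailer().afterLastWritten(outQueue);
MethodReader resumedReader = resumedTailer.methodReader((Msg) text -> System.out.println("next unprocessed: " + text));
resumedReader.readOne();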