Use of net.openhft.affinity.AffinityLock in project Chronicle-Queue by OpenHFT.
The class ChronicleQueueTwoThreadsTest, method doTest.
void doTest(boolean buffered) throws InterruptedException {
    File name = getTmpDir();
    AtomicLong counter = new AtomicLong();
    Thread tailerThread = new Thread(() -> {
        AffinityLock rlock = AffinityLock.acquireLock();
        Bytes bytes = NativeBytes.nativeBytes(BYTES_LENGTH).unchecked(true);
        try (ChronicleQueue rqueue = SingleChronicleQueueBuilder.fieldlessBinary(name)
                .testBlockSize()
                .build()) {
            ExcerptTailer tailer = rqueue.createTailer();
            while (!Thread.interrupted()) {
                bytes.clear();
                if (tailer.readBytes(bytes)) {
                    counter.incrementAndGet();
                }
            }
        } finally {
            if (rlock != null) {
                rlock.release();
            }
            // System.out.printf("Read %,d messages", counter.intValue());
        }
    }, "tailer thread");
    long runs = 50_000;
    Thread appenderThread = new Thread(() -> {
        AffinityLock wlock = AffinityLock.acquireLock();
        try {
            ChronicleQueue wqueue = SingleChronicleQueueBuilder.fieldlessBinary(name)
                    .rollCycle(SMALL_DAILY)
                    .testBlockSize()
                    .writeBufferMode(buffered ? BufferMode.Asynchronous : BufferMode.None)
                    .build();
            ExcerptAppender appender = wqueue.acquireAppender();
            Bytes bytes = Bytes.allocateDirect(BYTES_LENGTH).unchecked(true);
            long next = System.nanoTime() + INTERVAL_US * 1000;
            for (int i = 0; i < runs; i++) {
                while (System.nanoTime() < next)
                    ; /* busy wait */
                long start = next;
                bytes.readPositionRemaining(0, BYTES_LENGTH);
                bytes.writeLong(0L, start);
                appender.writeBytes(bytes);
                next += INTERVAL_US * 1000;
            }
            wqueue.close();
        } finally {
            if (wlock != null) {
                wlock.release();
            }
        }
    }, "appender thread");
    tailerThread.start();
    Jvm.pause(100);
    appenderThread.start();
    appenderThread.join();
    // Pause to allow tailer to catch up (if needed)
    for (int i = 0; i < 10; i++) {
        if (runs != counter.get())
            Jvm.pause(Jvm.isDebug() ? 10000 : 100);
    }
    for (int i = 0; i < 10; i++) {
        tailerThread.interrupt();
        tailerThread.join(100);
    }
    assertEquals(runs, counter.get());
}
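A note on the pattern: AffinityLock is AutoCloseable (this test releases it in a finally block), so the same tailer can be written with try-with-resources, as the next example does. A minimal sketch of that variant, reusing the name, counter and BYTES_LENGTH definitions from the test above; it is not part of the original test:

// Sketch only: the lock-and-release pattern above rewritten with try-with-resources.
// Closing the lock releases the CPU binding even if the loop exits via an exception,
// mirroring the explicit finally block in the original tailer thread.
Thread tailerThread = new Thread(() -> {
    try (AffinityLock rlock = AffinityLock.acquireLock();
         ChronicleQueue rqueue = SingleChronicleQueueBuilder.fieldlessBinary(name)
                 .testBlockSize()
                 .build()) {
        Bytes bytes = NativeBytes.nativeBytes(BYTES_LENGTH).unchecked(true);
        ExcerptTailer tailer = rqueue.createTailer();
        while (!Thread.interrupted()) {
            bytes.clear();
            if (tailer.readBytes(bytes)) {
                counter.incrementAndGet();
            }
        }
    }
}, "tailer thread");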
Use of net.openhft.affinity.AffinityLock in project Chronicle-Queue by OpenHFT.
The class QueueMultiThreadedJLBHBenchmark, method init.
@Override
public void init(JLBH jlbh) {
    this.jlbh = jlbh;
    IOTools.deleteDirWithFiles(path, 10);
    sourceQueue = createQueueInstance();
    sinkQueue = useSingleQueueInstance ? sourceQueue : createQueueInstance();
    appender = sourceQueue.acquireAppender().disableThreadSafetyCheck(true);
    tailer = sinkQueue.createTailer().disableThreadSafetyCheck(true);
    NanoSampler readProbe = jlbh.addProbe("read");
    writeProbe = jlbh.addProbe("write");
    if (usePretoucher) {
        pretoucherExecutorService = Executors.newSingleThreadScheduledExecutor(
                new NamedThreadFactory("pretoucher", true));
        pretoucherExecutorService.scheduleAtFixedRate(
                () -> sourceQueue.acquireAppender().pretouch(), 1, 200, TimeUnit.MILLISECONDS);
    }
    tailerThread = new Thread(() -> {
        try (final AffinityLock affinityLock = AffinityLock.acquireLock(tailerAffinity)) {
            Datum datum2 = new Datum(messageSize);
            while (!stopped) {
                long beforeReadNs = System.nanoTime();
                try (DocumentContext dc = tailer.readingDocument()) {
                    if (!dc.isPresent())
                        continue;
                    datum2.readMarshallable(dc.wire().bytes());
                    long now = System.nanoTime();
                    jlbh.sample(now - datum2.ts);
                    readProbe.sampleNanos(now - beforeReadNs);
                }
            }
        }
    });
    tailerThread.start();
}
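Only init is shown here. Because it starts a tailer thread and, optionally, a pretoucher executor, the benchmark's teardown has to stop both. A rough sketch of what the matching complete() (part of the JLBHTask lifecycle) might look like, assuming stopped is a volatile flag and the other fields are those initialised above; the actual class may do this differently:

@Override
public void complete() {
    stopped = true;                       // ask the tailer loop to exit
    try {
        tailerThread.join();              // wait for the reader to finish
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
    if (pretoucherExecutorService != null)
        pretoucherExecutorService.shutdownNow();
    sinkQueue.close();                    // same instance as sourceQueue when useSingleQueueInstance is set
    if (sourceQueue != sinkQueue)
        sourceQueue.close();
}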
Use of net.openhft.affinity.AffinityLock in project Chronicle-Queue by OpenHFT.
The class LatencyDistributionMain, method runTest.
protected void runTest(@NotNull ChronicleQueue queue, @NotNull ChronicleQueue queue2) throws InterruptedException {
    Histogram histogramCo = new Histogram();
    Histogram histogramIn = new Histogram();
    Histogram histogramWr = new Histogram();
    Thread pretoucher = new Thread(() -> {
        ExcerptAppender appender = queue.acquireAppender();
        try {
            while (!Thread.currentThread().isInterrupted()) {
                appender.pretouch();
                Jvm.pause(50);
            }
        } catch (Exception e) {
            if (!appender.isClosed())
                e.printStackTrace();
        }
    });
    pretoucher.setDaemon(true);
    pretoucher.start();
    ExcerptAppender appender = queue.acquireAppender();
    // two queues, as the tailer would most likely be in a different process.
    ExcerptTailer tailer = queue2.createTailer();
    String name = getClass().getName();
    Thread tailerThread = new Thread(() -> {
        AffinityLock lock = null;
        try {
            if (Jvm.getBoolean("enableTailerAffinity") || !Jvm.getBoolean("disableAffinity")) {
                lock = Affinity.acquireLock();
            }
            int counter = 0;
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    // if (SAMPLING)
                    //     sampler.thread(Thread.currentThread());
                    // boolean found = tailer.readDocument(myReadMarshallable);
                    boolean found;
                    try (DocumentContext dc = tailer.readingDocument()) {
                        found = dc.isPresent();
                        if (found) {
                            int count = counter++;
                            if (count == WARMUP) {
                                histogramCo.reset();
                                histogramIn.reset();
                                histogramWr.reset();
                            }
                            Bytes<?> bytes = dc.wire().bytes();
                            long startCo = bytes.readLong();
                            long startIn = bytes.readLong();
                            long now = System.nanoTime();
                            histogramCo.sample(now - startCo);
                            histogramIn.sample(now - startIn);
                            if (count % INTLOG_INTERVAL == 0)
                                System.out.println("read " + count);
                        }
                    }
                    /*
                    if (SAMPLING) {
                        StackTraceElement[] stack = sampler.getAndReset();
                        if (stack != null) {
                            if (!stack[0].getClassName().equals(name) &&
                                    !stack[0].getClassName().equals("java.lang.Thread")) {
                                StringBuilder sb = new StringBuilder();
                                Jvm.trimStackTrace(sb, stack);
                                // System.out.println(sb);
                            }
                        } else if (!found) {
                            Thread.yield();
                        }
                    }
                    */
                } catch (Exception e) {
                    break;
                }
            }
        } finally {
            if (lock != null) {
                lock.release();
            }
        }
    });
    Thread appenderThread = new Thread(() -> {
        AffinityLock lock = null;
        try {
            if (Jvm.getBoolean("enableAppenderAffinity") || !Jvm.getBoolean("disableAffinity")) {
                lock = Affinity.acquireLock();
            }
            long next = System.nanoTime();
            long interval = 1_000_000_000 / throughput;
            Map<String, Integer> stackCount = new LinkedHashMap<>();
            BytesStore<?, ?> bytes24 = BytesStore.nativeStoreFrom(new byte[Main.size - 16]);
            for (int i = -WARMUP; i < iterations; i++) {
                long s0 = System.nanoTime();
                if (s0 < next) {
                    do ; while (System.nanoTime() < next);
                    // if we failed to come out of the spin loop on time, reset next.
                    next = System.nanoTime();
                }
                if (SAMPLING) {
                    sampler.thread(Thread.currentThread());
                }
                long start = System.nanoTime();
                try (@NotNull DocumentContext dc = appender.writingDocument(false)) {
                    Wire wire = dc.wire();
                    Bytes<?> bytes2 = wire.bytes();
                    // when it should have started
                    bytes2.writeLong(next);
                    // when it actually started.
                    bytes2.writeLong(start);
                    bytes2.write(bytes24);
                    ThroughputMain.addToEndOfCache(wire);
                }
                long time = System.nanoTime() - start;
                histogramWr.sample(start - next);
                if (SAMPLING && time > 1e3 && i > 0) {
                    StackTraceElement[] stack = sampler.getAndReset();
                    if (stack != null) {
                        if (!stack[0].getClassName().equals(name) && !stack[0].getClassName().equals("java.lang.Thread")) {
                            StringBuilder sb = new StringBuilder();
                            Jvm.trimStackTrace(sb, stack);
                            stackCount.compute(sb.toString(), (k, v) -> v == null ? 1 : v + 1);
                        }
                    }
                }
                next += interval;
                if (i % INTLOG_INTERVAL == 0)
                    System.out.println("wrote " + i);
            }
            stackCount.entrySet().stream().filter(e -> e.getValue() > 1).forEach(System.out::println);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (lock != null) {
                lock.release();
            }
        }
    });
    tailerThread.start();
    appenderThread.start();
    appenderThread.join();
    pretoucher.interrupt();
    pretoucher.join();
    // Pause to allow tailer to catch up (if needed)
    Jvm.pause(500);
    tailerThread.interrupt();
    tailerThread.join();
    System.out.println("wr: " + histogramWr.toLongMicrosFormat());
    System.out.println("in: " + histogramIn.toLongMicrosFormat());
    System.out.println("co: " + histogramCo.toLongMicrosFormat());
}
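The tailer and appender threads above duplicate the same conditional affinity logic guarded by system properties. A hypothetical helper (not part of LatencyDistributionMain) could centralise it; the property names mirror the Jvm.getBoolean(...) calls in the snippet:

// Hypothetical helper: returns an AffinityLock when the given enable flag is set
// or affinity is not globally disabled, otherwise null. Callers must null-check
// before calling release(), exactly as the finally blocks above do.
private static AffinityLock acquireLockIfEnabled(String enableProperty) {
    if (Jvm.getBoolean(enableProperty) || !Jvm.getBoolean("disableAffinity"))
        return Affinity.acquireLock();
    return null;
}

// Usage in the two worker threads:
// AffinityLock lock = acquireLockIfEnabled("enableTailerAffinity");
// AffinityLock lock = acquireLockIfEnabled("enableAppenderAffinity");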