Example use of net.openhft.chronicle.bytes.Bytes in the Chronicle-Queue project by OpenHFT: class ThreadedQueueTest, method testMultipleThreads.
// Verifies that a tailer running concurrently with an appender eventually reads
// every record written to the same queue directory. Each side runs on its own
// named single-thread executor; an AtomicInteger is the cross-thread progress counter.
@Test(timeout = 10000)
public void testMultipleThreads() throws InterruptedException, ExecutionException, TimeoutException {
    final File path = getTmpDir();
    final AtomicInteger counter = new AtomicInteger();
    ExecutorService tailerES = Executors.newSingleThreadExecutor(new NamedThreadFactory("tailer"));
    ExecutorService appenderES = Executors.newSingleThreadExecutor(new NamedThreadFactory("appender"));
    try {
        Future<?> tf = tailerES.submit(() -> {
            try (final ChronicleQueue rqueue = ChronicleQueue.singleBuilder(path).testBlockSize().build()) {
                final ExcerptTailer tailer = rqueue.createTailer();
                final Bytes<?> bytes = Bytes.elasticByteBuffer();
                try {
                    // Spin until all messages have been seen, or the thread is
                    // interrupted (e.g. by shutdownNow after a timeout below).
                    while (counter.get() < REQUIRED_COUNT && !Thread.interrupted()) {
                        bytes.clear();
                        if (tailer.readBytes(bytes))
                            counter.incrementAndGet();
                    }
                } finally {
                    // Release even if the read loop throws.
                    bytes.releaseLast();
                }
            } catch (Throwable t) {
                t.printStackTrace();
            }
        });
        Future<?> af = appenderES.submit(() -> {
            try (final ChronicleQueue wqueue = ChronicleQueue.singleBuilder(path).testBlockSize().build()) {
                final ExcerptAppender appender = wqueue.acquireAppender();
                final Bytes<?> message = Bytes.elasticByteBuffer();
                try {
                    for (int i = 0; i < REQUIRED_COUNT; i++) {
                        message.clear();
                        message.append(i);
                        appender.writeBytes(message);
                    }
                } finally {
                    message.releaseLast();
                }
            } catch (Throwable t) {
                t.printStackTrace();
            }
        });
        // Give the writer up to 9 s, then give the tailer whatever remains of
        // the same overall budget.
        long end = System.currentTimeMillis() + 9000;
        af.get(9000, TimeUnit.MILLISECONDS);
        tf.get(end - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
        assertEquals(REQUIRED_COUNT, counter.get());
    } finally {
        // shutdownNow() interrupts a still-spinning tailer if a get(...) above
        // timed out; plain shutdown() would leave that thread running forever.
        tailerES.shutdownNow();
        appenderES.shutdownNow();
    }
}
Example use of net.openhft.chronicle.bytes.Bytes in the Chronicle-Queue project by OpenHFT: class LatencyDistributionMain, method runTest.
// Runs a latency benchmark between an appender and a tailer on separate threads,
// using two queue handles over the same data (queue for writing, queue2 for
// reading) to mimic separate processes. Three histograms are collected:
//   histogramWr - writer scheduling delay (actual start - intended start)
//   histogramCo - read latency measured from the intended start (includes
//                 co-ordinated-omission correction)
//   histogramIn - read latency measured from the actual write start
protected void runTest(@NotNull ChronicleQueue queue, @NotNull ChronicleQueue queue2) throws InterruptedException {
Histogram histogramCo = new Histogram();
Histogram histogramIn = new Histogram();
Histogram histogramWr = new Histogram();
// Daemon thread that pre-touches upcoming queue pages every 50 ms so page
// faults do not show up in the measured write latencies.
Thread pretoucher = new Thread(() -> {
ExcerptAppender appender = queue.acquireAppender();
try {
while (!Thread.currentThread().isInterrupted()) {
appender.pretouch();
Jvm.pause(50);
}
} catch (Exception e) {
// Only report unexpected failures; exceptions after the appender has been
// closed are part of normal shutdown.
if (!appender.isClosed())
e.printStackTrace();
}
});
pretoucher.setDaemon(true);
pretoucher.start();
ExcerptAppender appender = queue.acquireAppender();
// two queues as most like in a different process.
ExcerptTailer tailer = queue2.createTailer();
String name = getClass().getName();
// Reader thread: consumes documents until interrupted, sampling latency for
// each one after the warm-up period.
Thread tailerThread = new Thread(() -> {
AffinityLock lock = null;
try {
// Pin the thread to a CPU unless affinity is explicitly disabled.
if (Jvm.getBoolean("enableTailerAffinity") || !Jvm.getBoolean("disableAffinity")) {
lock = Affinity.acquireLock();
}
int counter = 0;
while (!Thread.currentThread().isInterrupted()) {
try {
// if (SAMPLING)
// sampler.thread(Thread.currentThread());
// boolean found = tailer.readDocument(myReadMarshallable);
boolean found;
try (DocumentContext dc = tailer.readingDocument()) {
found = dc.isPresent();
if (found) {
int count = counter++;
// Discard warm-up measurements so JIT/page-cache effects
// do not pollute the reported distributions.
if (count == WARMUP) {
histogramCo.reset();
histogramIn.reset();
histogramWr.reset();
}
Bytes<?> bytes = dc.wire().bytes();
// Each message begins with two timestamps written by the
// appender: intended start (startCo) and actual start (startIn).
long startCo = bytes.readLong();
long startIn = bytes.readLong();
long now = System.nanoTime();
histogramCo.sample(now - startCo);
histogramIn.sample(now - startIn);
if (count % INTLOG_INTERVAL == 0)
System.out.println("read " + count);
}
}
/*
if (SAMPLING) {
StackTraceElement[] stack = sampler.getAndReset();
if (stack != null) {
if (!stack[0].getClassName().equals(name) &&
!stack[0].getClassName().equals("java.lang.Thread")) {
StringBuilder sb = new StringBuilder();
Jvm.trimStackTrace(sb, stack);
// System.out.println(sb);
}
} else if (!found) {
Thread.yield();
}
}
*/
} catch (Exception e) {
// Any read failure (e.g. the queue closing) terminates the reader.
break;
}
}
} finally {
if (lock != null) {
lock.release();
}
}
});
// Writer thread: paces writes to the target throughput and records how far
// behind schedule each write actually started.
Thread appenderThread = new Thread(() -> {
AffinityLock lock = null;
try {
if (Jvm.getBoolean("enableAppenderAffinity") || !Jvm.getBoolean("disableAffinity")) {
lock = Affinity.acquireLock();
}
long next = System.nanoTime();
// Nanoseconds between messages. NOTE(review): integer division — assumes
// throughput divides 1e9 acceptably; verify for very high rates.
long interval = 1_000_000_000 / throughput;
Map<String, Integer> stackCount = new LinkedHashMap<>();
// Fixed payload; 16 bytes are reserved for the two timestamp longs
// written ahead of it, giving Main.size bytes per message in total.
BytesStore<?, ?> bytes24 = BytesStore.nativeStoreFrom(new byte[Main.size - 16]);
// Negative indices are warm-up iterations (discarded by the reader's reset).
for (int i = -WARMUP; i < iterations; i++) {
long s0 = System.nanoTime();
if (s0 < next) {
// Busy-spin (not sleep) until the scheduled send time for precision.
do ; while (System.nanoTime() < next);
// if we failed to come out of the spin loop on time, reset next.
next = System.nanoTime();
}
if (SAMPLING) {
sampler.thread(Thread.currentThread());
}
long start = System.nanoTime();
try (@NotNull DocumentContext dc = appender.writingDocument(false)) {
Wire wire = dc.wire();
Bytes<?> bytes2 = wire.bytes();
// when it should have started
bytes2.writeLong(next);
// when it actually started.
bytes2.writeLong(start);
bytes2.write(bytes24);
ThroughputMain.addToEndOfCache(wire);
}
long time = System.nanoTime() - start;
histogramWr.sample(start - next);
// When sampling is on, collect stack traces of unusually slow writes
// (> 1 us) and count how often each distinct trace occurs.
if (SAMPLING && time > 1e3 && i > 0) {
StackTraceElement[] stack = sampler.getAndReset();
if (stack != null) {
if (!stack[0].getClassName().equals(name) && !stack[0].getClassName().equals("java.lang.Thread")) {
StringBuilder sb = new StringBuilder();
Jvm.trimStackTrace(sb, stack);
stackCount.compute(sb.toString(), (k, v) -> v == null ? 1 : v + 1);
}
}
}
next += interval;
if (i % INTLOG_INTERVAL == 0)
System.out.println("wrote " + i);
}
// Report only stacks seen more than once to cut noise.
stackCount.entrySet().stream().filter(e -> e.getValue() > 1).forEach(System.out::println);
} catch (Exception e) {
e.printStackTrace();
} finally {
if (lock != null) {
lock.release();
}
}
});
tailerThread.start();
appenderThread.start();
// Wait for all writes to complete, then stop the pretoucher.
appenderThread.join();
pretoucher.interrupt();
pretoucher.join();
// Pause to allow tailer to catch up (if needed)
Jvm.pause(500);
tailerThread.interrupt();
tailerThread.join();
System.out.println("wr: " + histogramWr.toLongMicrosFormat());
System.out.println("in: " + histogramIn.toLongMicrosFormat());
System.out.println("co: " + histogramCo.toLongMicrosFormat());
}
Example use of net.openhft.chronicle.bytes.Bytes in the Chronicle-Queue project by OpenHFT: class BytesRingBufferTest, method testPollWithNoData.
// Polling an empty ring buffer must return null (non-blocking) rather than wait.
@Test
public void testPollWithNoData() {
    try (NativeBytesStore<Void> nativeStore = NativeBytesStore.nativeStoreWithFixedCapacity(150)) {
        assert nativeStore.isNative();
        final BytesRingBuffer bytesRingBuffer = new BytesRingBuffer(nativeStore.bytes());
        // Nothing has been offered, so poll must yield null.
        Bytes<?> actual = bytesRingBuffer.poll(maxSize -> input.clear());
        assertEquals(null, actual);
    }
}
Example use of net.openhft.chronicle.bytes.Bytes in the Chronicle-Queue project by OpenHFT: class BytesRingBufferTest, method testFlowAroundSingleThreadedWriteDifferentSizeBuffers.
// Exercises buffer wrap-around: repeatedly offers and takes through a small
// fixed-capacity store so reads and writes cross the ring boundary many times,
// verifying each message round-trips intact.
@Test
public void testFlowAroundSingleThreadedWriteDifferentSizeBuffers() {
    try (NativeBytesStore<Void> nativeStore = NativeBytesStore.nativeStoreWithFixedCapacity(150)) {
        assert !nativeStore.isElastic();
        // j only controls how many fresh ring buffers are exercised (57..99);
        // its value is otherwise unused.
        for (int j = 23 + 34; j < 100; j++) {
            final BytesRingBuffer bytesRingBuffer = new BytesRingBuffer(nativeStore.bytes());
            for (int i = 0; i < 50; i++) {
                bytesRingBuffer.offer(data());
                String actual = bytesRingBuffer.take(maxSize -> input.clear()).readUTFΔ();
                assertEquals(EXPECTED, actual);
            }
        }
    }
}
Example use of net.openhft.chronicle.bytes.Bytes in the Chronicle-Queue project by OpenHFT: class BytesRingBufferTest, method testWriteAndRead.
// Round-trips a single message: what is offered must be taken back verbatim.
@Test
public void testWriteAndRead() {
    try (NativeBytesStore<Void> nativeStore = NativeBytesStore.nativeStoreWithFixedCapacity(150)) {
        assert nativeStore.isNative();
        final BytesRingBuffer bytesRingBuffer = new BytesRingBuffer(nativeStore.bytes());
        // NOTE(review): a stray `data();` call with its result discarded was
        // removed here — the only data() used is the one passed to offer().
        bytesRingBuffer.offer(data());
        Bytes<?> actual = bytesRingBuffer.take(maxSize -> input.clear());
        assertEquals(EXPECTED, actual.readUTFΔ());
    }
}
Aggregations