Use of net.openhft.chronicle.core.util.Histogram in project Chronicle-Queue by OpenHFT.
Example from the class ChronicleHistoryReaderTest, method doTest.
/**
 * Drives one message through a three-queue pipeline (source 1 -> source 2 -> final queue)
 * and verifies that {@code ChronicleHistoryReader} reconstructs the expected latency
 * histograms from the {@code MessageHistory} metadata recorded at each hop.
 *
 * @param recordHistoryFirst whether the very first writer also records history; if so,
 *                           every hop carries one extra timing entry and one extra
 *                           histogram ("startTo1") appears in the reader's output
 */
private void doTest(boolean recordHistoryFirst) {
    VanillaMessageHistory veh = new VanillaMessageHistory();
    veh.addSourceDetails(true);
    MessageHistory.set(veh);
    int extraTiming = recordHistoryFirst ? 1 : 0;
    // Unique directory names per run so repeated runs never collide on disk.
    long nanoTime = System.nanoTime();
    File queuePath = new File(OS.TARGET, "testWithQueueHistory-" + nanoTime);
    File queuePath2 = new File(OS.TARGET, "testWithQueueHistory2-" + nanoTime);
    File queuePath3 = new File(OS.TARGET, "testWithQueueHistory3-" + nanoTime);
    try {
        // Hop 1: write a single message into the first queue (source id 1).
        try (SingleChronicleQueue out = SingleChronicleQueueBuilder.binary(queuePath).testBlockSize().sourceId(1).build()) {
            DummyListener writer = out.acquireAppender().methodWriterBuilder(DummyListener.class).recordHistory(recordHistoryFirst).get();
            writer.say("hello");
        }
        // Hop 2: relay queue 1 -> queue 2, asserting the history visible at this point.
        try (SingleChronicleQueue in = SingleChronicleQueueBuilder.binary(queuePath).testBlockSize().sourceId(1).build();
             SingleChronicleQueue out = SingleChronicleQueueBuilder.binary(queuePath2).testBlockSize().build()) {
            DummyListener writer = out.acquireAppender().methodWriterBuilder(DummyListener.class).recordHistory(true).get();
            DummyListener dummy = msg -> {
                MessageHistory history = MessageHistory.get();
                Assert.assertEquals(1, history.sources());
                // written 1st then received by me
                Assert.assertEquals(1 + extraTiming, history.timings());
                writer.say(msg);
            };
            MethodReader reader = in.createTailer().methodReader(dummy);
            assertTrue(reader.readOne());
            assertFalse(reader.readOne());
        }
        // Hop 3: relay queue 2 -> queue 3; by now two sources and two more timings
        // (write + read of hop 2) are expected in the history.
        try (SingleChronicleQueue in = SingleChronicleQueueBuilder.binary(queuePath2).testBlockSize().sourceId(2).build();
             SingleChronicleQueue out = SingleChronicleQueueBuilder.binary(queuePath3).testBlockSize().build()) {
            DummyListener writer = out.acquireAppender().methodWriterBuilder(DummyListener.class).recordHistory(true).get();
            DummyListener dummy = msg -> {
                MessageHistory history = MessageHistory.get();
                Assert.assertEquals(2, history.sources());
                Assert.assertEquals(3 + extraTiming, history.timings());
                writer.say(msg);
            };
            MethodReader reader = in.createTailer().methodReader(dummy);
            assertTrue(reader.readOne());
            assertFalse(reader.readOne());
        }
        // Finally replay the last queue through ChronicleHistoryReader and check
        // exactly which histograms it produced.
        ChronicleHistoryReader chronicleHistoryReader = new ChronicleHistoryReader().withBasePath(queuePath3.toPath()).withTimeUnit(TimeUnit.MICROSECONDS);
        Map<String, Histogram> histos = chronicleHistoryReader.readChronicle();
        chronicleHistoryReader.withMessageSink(System.out::println);
        chronicleHistoryReader.outputData();
        if (recordHistoryFirst) {
            Assert.assertEquals(5, histos.size());
            Assert.assertEquals("[1, startTo1, 2, 1to2, endToEnd]", histos.keySet().toString());
        } else {
            Assert.assertEquals(4, histos.size());
            Assert.assertEquals("[1, 2, 1to2, endToEnd]", histos.keySet().toString());
        }
    } finally {
        try {
            IOTools.shallowDeleteDirWithFiles(queuePath);
            IOTools.shallowDeleteDirWithFiles(queuePath2);
            IOTools.shallowDeleteDirWithFiles(queuePath3);
        } catch (Exception ignored) {
            // Best-effort cleanup: deletion can fail on some platforms (e.g. Windows
            // file locking) and must not mask the test result.
        }
    }
}
Use of net.openhft.chronicle.core.util.Histogram in project Chronicle-Bytes by OpenHFT.
Example from the class BytesTest, method writeHistogram.
@Test
public void writeHistogram() {
    // Round-trips two histograms (one with a single sample, one well populated)
    // through Bytes and verifies they deserialize back equal.
    @NotNull Bytes bytes = alloc1.elasticBytes(0xFFFFF);
    try {
        @NotNull Histogram hist = new Histogram();
        hist.sample(10);
        @NotNull Histogram hist2 = new Histogram();
        for (int i = 0; i < 10000; i++)
            hist2.sample(i);
        bytes.writeHistogram(hist);
        bytes.writeHistogram(hist2);
        @NotNull Histogram histB = new Histogram();
        @NotNull Histogram histC = new Histogram();
        // Read back in the same order they were written.
        bytes.readHistogram(histB);
        bytes.readHistogram(histC);
        assertEquals(hist, histB);
        assertEquals(hist2, histC);
    } finally {
        // Release even when an assertion fails, so the backing memory is not leaked.
        bytes.release();
    }
}
Use of net.openhft.chronicle.core.util.Histogram in project Chronicle-Bytes by OpenHFT.
Example from the class NativeBytesStoreTest, method testCipherPerf.
@Test
public void testCipherPerf() throws NoSuchPaddingException, NoSuchAlgorithmException, InvalidKeyException {
    // AES round-trip micro-benchmark: encrypt then decrypt a ~100-character payload
    // repeatedly, recording the combined latency of both operations in a Histogram.
    byte[] keyBytes = new SecureRandom().generateSeed(16);
    SecretKeySpec key = new SecretKeySpec(keyBytes, "AES");
    Cipher encCipher = Cipher.getInstance("AES/ECB/PKCS5Padding");
    Cipher decCipher = Cipher.getInstance("AES/ECB/PKCS5Padding");
    encCipher.init(Cipher.ENCRYPT_MODE, key);
    decCipher.init(Cipher.DECRYPT_MODE, key);

    // Pad the plaintext to at least 100 characters without O(n^2) string concatenation.
    StringBuilder sb = new StringBuilder("Hello World!!");
    while (sb.length() < 100)
        sb.append(" 123456789");
    String expected = sb.toString();

    Bytes bytes = Bytes.allocateDirect(expected.getBytes());
    Bytes enc = Bytes.allocateElasticDirect();
    Bytes dec = Bytes.allocateElasticDirect();
    try {
        Histogram hist = new Histogram();
        // Increasing batch sizes; the histogram accumulates across batches and the
        // per-batch report shows how the distribution settles as the JIT warms up.
        for (int t = 1; t <= 4; t++) {
            for (int i = 0; i < t * 100000; i++) {
                enc.clear();
                dec.clear();
                long start = System.nanoTime();
                bytes.cipher(encCipher, enc);
                enc.cipher(decCipher, dec);
                long time = System.nanoTime() - start;
                hist.sampleNanos(time);
            }
            // Sanity check that decrypt(encrypt(x)) == x before trusting the timings.
            assertEquals(expected, dec.toString());
            System.out.println("Encrypt/Decrypt took " + hist.toMicrosFormat());
        }
    } finally {
        // Always free direct memory, even if an assertion fails mid-run.
        bytes.release();
        enc.release();
        dec.release();
    }
}
Use of net.openhft.chronicle.core.util.Histogram in project Chronicle-Queue by OpenHFT.
Example from the class StagedPerformanceMain, method runBenchmark.
/**
 * Executes one complete benchmark pass: a producer plus {@code STAGES} pipeline stages
 * all running concurrently over a fresh queue, then reports the latencies observed at
 * the final stage and removes the run's data directory.
 *
 * @param count    number of messages the producer should emit
 * @param interval nanoseconds between produced messages
 * @param warmup   whether this pass is a warm-up run (stored in the WARMUP flag)
 */
private static void runBenchmark(int count, int interval, boolean warmup) {
    // Base-36 timestamp keeps the per-run directory name short but unique.
    String runId = Long.toString(System.nanoTime(), 36);
    WARMUP = warmup;
    DIR = PATH + "/run-" + runId;
    new File(DIR).mkdirs();
    // Emit a report once per 60 seconds' worth of messages.
    REPORT_INTERVAL = (int) (60e9 / interval);
    try (ChronicleQueue queue = SingleChronicleQueueBuilder.binary(DIR + "/data").build()) {
        q0 = queue;
        List<Runnable> tasks = new ArrayList<>();
        tasks.add(() -> producer(count, interval));
        tasks.add(() -> firstStage());
        for (int stage = 2; stage <= STAGES; stage++) {
            final int stageNo = stage;
            tasks.add(() -> runStage(stageNo));
        }
        // Run the producer and every stage in parallel; blocks until all finish.
        tasks.parallelStream().forEach(Runnable::run);
        Histogram latencies = new Histogram();
        reportLatenciesForStage(STAGES, latencies);
    }
    IOTools.deleteDirWithFiles(DIR, 3);
}
Use of net.openhft.chronicle.core.util.Histogram in project Chronicle-Queue by OpenHFT.
Example from the class ChronicleQueueLatencyDistribution, method runTest.
/**
 * Latency-distribution benchmark: one appender thread writes small messages at a fixed
 * target {@code throughput} while one tailer thread reads them back. Three histograms
 * are collected:
 * <ul>
 *   <li>{@code histogramWr} - how late each write actually started relative to its
 *       scheduled start time (write-side jitter)</li>
 *   <li>{@code histogramIn} - read latency measured from the actual write start</li>
 *   <li>{@code histogramCo} - read latency measured from the scheduled write start,
 *       i.e. including co-ordinated-omission delay</li>
 * </ul>
 * All three are reset after {@code WARMUP} messages so warm-up noise is excluded.
 * NOTE(review): the tailer resets the histograms while the appender may still be
 * sampling histogramWr - benign for a benchmark, but worth confirming if reused.
 *
 * @param queue      the queue written to and read from (appender and tailer share it)
 * @param throughput target message rate, messages per second
 * @throws InterruptedException if interrupted while joining the worker threads
 */
protected void runTest(@NotNull ChronicleQueue queue, int throughput) throws InterruptedException {
    /*
    Jvm.setExceptionHandlers(PrintExceptionHandler.ERR,
            PrintExceptionHandler.OUT,
            PrintExceptionHandler.OUT);
    */
    Histogram histogramCo = new Histogram();
    Histogram histogramIn = new Histogram();
    Histogram histogramWr = new Histogram();
    if (PRETOUCH) {
        // Background daemon keeps pre-touching the queue's pages every 500 ms so that
        // page-fault cost does not distort the measured latencies.
        Thread pretoucher = new Thread(() -> {
            ExcerptAppender appender = queue.acquireAppender();
            while (!Thread.currentThread().isInterrupted()) {
                appender.pretouch();
                Jvm.pause(500);
            }
        });
        pretoucher.setName("pret");
        pretoucher.setDaemon(true);
        pretoucher.start();
    }
    ExcerptAppender appender = queue.acquireAppender();
    ExcerptTailer tailer = queue.createTailer();
    String name = getClass().getName();
    // Reader thread: pulls each message, recovers the two timestamps the appender wrote
    // (scheduled start, then actual start) and samples the two read-side histograms.
    Thread tailerThread = new Thread(() -> {
        AffinityLock lock = null;
        try {
            // Optionally pin this thread to a CPU to reduce scheduling jitter.
            if (Jvm.getBoolean("enableTailerAffinity") || Jvm.getBoolean("enableAffinity")) {
                lock = Affinity.acquireLock();
            }
            int counter = 0;
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    // if (SAMPLING)
                    // sampler.thread(Thread.currentThread());
                    // boolean found = tailer.readDocument(myReadMarshallable);
                    boolean found;
                    try (DocumentContext dc = tailer.readingDocument()) {
                        found = dc.isPresent();
                        if (found) {
                            int count = counter++;
                            if (count == WARMUP) {
                                // Drop warm-up samples once WARMUP messages have been read.
                                histogramCo.reset();
                                histogramIn.reset();
                                histogramWr.reset();
                            }
                            Bytes<?> bytes = dc.wire().bytes();
                            long startCo = bytes.readLong(); // scheduled write start (first long written)
                            long startIn = bytes.readLong(); // actual write start (second long written)
                            long now = System.nanoTime();
                            histogramCo.sample(now - startCo);
                            histogramIn.sample(now - startIn);
                            // if (count % 1_000_000 == 0) System.out.println("read " + count);
                        }
                    }
                    /*
                    if (SAMPLING) {
                        StackTraceElement[] stack = sampler.getAndReset();
                        if (stack != null) {
                            if (!stack[0].getClassName().equals(name) &&
                                    !stack[0].getClassName().equals("java.lang.Thread")) {
                                StringBuilder sb = new StringBuilder();
                                Jvm.trimStackTrace(sb, stack);
                                // System.out.println(sb);
                            }
                        } else if (!found) {
                            Thread.yield();
                        }
                    }
                    */
                } catch (Exception e) {
                    // Any read failure (including interruption surfacing as an exception)
                    // ends the reader loop.
                    break;
                }
            }
        } finally {
            if (lock != null) {
                lock.release();
            }
        }
    });
    // Writer thread: paces writes to one message every `interval` ns, recording for each
    // message both its scheduled and actual start times in the payload.
    Thread appenderThread = new Thread(() -> {
        AffinityLock lock = null;
        try {
            if (Jvm.getBoolean("enableAppenderAffinity") || Jvm.getBoolean("enableAffinity")) {
                lock = Affinity.acquireLock();
            }
            long next = System.nanoTime();
            long interval = 1_000_000_000 / throughput;
            Map<String, Integer> stackCount = new LinkedHashMap<>();
            // 24 bytes of filler payload appended after the two timestamps.
            BytesStore bytes24 = BytesStore.nativeStore(24);
            // Negative indices are warm-up iterations; WARMUP + ITERATIONS writes total.
            for (int i = -WARMUP; i < ITERATIONS; i++) {
                long s0 = System.nanoTime();
                if (s0 < next) {
                    // Busy-spin until the scheduled send time to avoid sleep granularity.
                    do ; while (System.nanoTime() < next);
                    // if we failed to come out of the spin loop on time, reset next.
                    next = System.nanoTime();
                }
                if (SAMPLING) {
                    sampler.thread(Thread.currentThread());
                }
                long start = System.nanoTime();
                try (@NotNull DocumentContext dc = appender.writingDocument(false)) {
                    Bytes<?> bytes2 = dc.wire().bytes();
                    // when it should have started
                    bytes2.writeLong(next);
                    // when it actually started.
                    bytes2.writeLong(start);
                    bytes2.write(bytes24);
                }
                long time = System.nanoTime() - start;
                // Sample how late this write began versus its schedule.
                histogramWr.sample(start - next);
                if (SAMPLING && time > SAMPLE_THRESHOLD_NS && i > 0) {
                    // A slow write: capture where the thread was and tally the stack trace.
                    StackTraceElement[] stack = sampler.getAndReset();
                    if (stack != null) {
                        if (!stack[0].getClassName().equals(name) && !stack[0].getClassName().equals("java.lang.Thread")) {
                            StringBuilder sb = new StringBuilder();
                            Jvm.trimStackTrace(sb, stack);
                            stackCount.compute(sb.toString(), (k, v) -> v == null ? 1 : v + 1);
                        }
                    }
                }
                next += interval;
                // if (i % 1_000_000 == 0) System.out.println("wrote " + i);
            }
            // Print recurring slow-path stacks, least frequent first.
            stackCount.entrySet().stream().filter(e -> e.getValue() > 1).sorted(Comparator.comparingInt(Map.Entry::getValue)).forEach(System.out::println);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (lock != null) {
                lock.release();
            }
        }
    });
    tailerThread.setName("tail");
    tailerThread.start();
    appenderThread.setName("appd");
    appenderThread.start();
    // Wait for all writes to complete first.
    appenderThread.join();
    // Pause to allow tailer to catch up (if needed)
    Jvm.pause(500);
    tailerThread.interrupt();
    tailerThread.join();
    // System.out.println("wr: " + histogramWr.toMicrosFormat());
    // System.out.println("in: " + histogramIn.toMicrosFormat());
    // System.out.println("co: " + histogramCo.toMicrosFormat());
}
Aggregations