Example usage of com.wavefront.common.Pair in the wavefrontHQ/java project — from class JsonMetricsGeneratorTest, method testTranslator:
/**
 * Verifies the optional metric-translator hook passed to {@code generate}: the translator
 * receives each (MetricName, Metric) pair, may rename the metric, and returning {@code null}
 * drops the metric from the JSON output entirely.
 */
@Test
public void testTranslator() throws IOException {
  MetricName sourceName = new MetricName("test", "foo", "bar");
  Counter counter = testRegistry.newCounter(sourceName);
  counter.inc();
  counter.inc();
  // The translator must see the original name and the counter with its current value (2),
  // and renames "foo"/"bar" to "baz"/"qux" while keeping the metric itself.
  String json = generate(false, false, false, pair -> {
    assertThat(pair._1).isEquivalentAccordingToCompareTo(sourceName);
    assertThat(pair._2).isInstanceOf(Counter.class);
    assertThat(((Counter) pair._2).count()).isEqualTo(2);
    return new Pair<>(new MetricName("test", "baz", "qux"), pair._2);
  });
  assertThat(json).isEqualTo("{\"test.qux\":2}");

  // A translator that returns null filters the metric out, yielding an empty JSON object.
  json = generate(false, false, false, pair -> null);
  assertThat(json).isEqualTo("{}");
}
Example usage of com.wavefront.common.Pair in the wavefrontHQ/java project — from class ConcurrentShardedQueueFileTest, method testConcurrency:
/**
 * Stress-tests ConcurrentShardedQueueFile with one producer and one consumer thread running
 * concurrently. The producer appends random tasks (recording each task's length and first byte
 * in a cheat sheet) while the consumer drains tasks and checks them against the cheat sheet.
 * Producer/consumer pacing is skewed (producer speeds down, consumer speeds up) so the queue
 * grows then shrinks, exercising shard creation and deletion. Fails on any data-integrity
 * mismatch or exception in either thread.
 */
@Test
public void testConcurrency() throws Exception {
  File file = new File(File.createTempFile("proxyConcurrencyTest", null).getPath() + ".spool");
  ConcurrentShardedQueueFile queueFile = new ConcurrentShardedQueueFile(
      file.getCanonicalPath(), ".spool", 1024 * 1024,
      s -> new TapeQueueFile(new QueueFile.Builder(new File(s)).build()));
  // FIX: this queue is mutated by both worker threads below. The original ArrayDeque is not
  // thread-safe (a data race that can corrupt the deque or lose elements); use a lock-free
  // concurrent queue instead. Fully qualified to avoid touching the import block.
  Queue<Pair<Integer, Byte>> taskCheatSheet = new java.util.concurrent.ConcurrentLinkedQueue<>();
  System.out.println(queueFile.shards.size());
  AtomicLong tasksGenerated = new AtomicLong();
  AtomicLong nanosAdd = new AtomicLong();
  AtomicLong nanosGet = new AtomicLong();
  // Pre-fill until the file has split into at least 4 shards so the consumer starts with work.
  while (queueFile.shards.size() < 4) {
    byte[] task = randomTask();
    queueFile.add(task);
    taskCheatSheet.add(Pair.of(task.length, task[0]));
    tasksGenerated.incrementAndGet();
  }
  AtomicBoolean done = new AtomicBoolean(false);
  AtomicBoolean fail = new AtomicBoolean(false);
  Runnable addTask = () -> {
    int delay = 0; // producer gradually slows down (sleeps delay/1000 ms)
    while (!done.get() && !fail.get()) {
      try {
        byte[] task = randomTask();
        long start = System.nanoTime();
        queueFile.add(task);
        nanosAdd.addAndGet(System.nanoTime() - start);
        taskCheatSheet.add(Pair.of(task.length, task[0]));
        tasksGenerated.incrementAndGet();
        Thread.sleep(delay / 1000);
        delay++;
      } catch (Exception e) {
        e.printStackTrace();
        fail.set(true);
      }
    }
  };
  Runnable getTask = () -> {
    int delay = 2000; // consumer starts slow (delay/500 ms) and speeds up so it drains the queue
    while (!taskCheatSheet.isEmpty() && !fail.get()) {
      try {
        long start = System.nanoTime();
        Pair<Integer, Byte> taskData = taskCheatSheet.remove();
        byte[] task = queueFile.peek();
        queueFile.remove();
        nanosGet.addAndGet(System.nanoTime() - start);
        // Each task is `length` copies of the same byte; verify both length and content.
        if (taskData._1 != task.length) {
          System.out.println("Data integrity fail! Expected: " + taskData._1 + " bytes, got "
              + task.length + " bytes");
          fail.set(true);
        }
        for (byte b : task) {
          if (taskData._2 != b) {
            System.out.println("Data integrity fail! Expected " + taskData._2 + ", got " + b);
            fail.set(true);
          }
        }
        Thread.sleep(delay / 500);
        if (delay > 0)
          delay--;
      } catch (Exception e) {
        e.printStackTrace();
        fail.set(true);
      }
    }
    // Consumer caught up with the producer: signal the producer to stop.
    done.set(true);
  };
  ExecutorService executor = Executors.newFixedThreadPool(2);
  try {
    long start = System.nanoTime();
    Future<?> addFuture = executor.submit(addTask);
    Future<?> getFuture = executor.submit(getTask);
    addFuture.get();
    getFuture.get();
    assertFalse(fail.get());
    System.out.println("Tasks generated: " + tasksGenerated.get());
    System.out.println("Real time (ms) = " + (System.nanoTime() - start) / 1_000_000);
    System.out.println("Add + remove time (ms) = "
        + (nanosGet.get() + nanosAdd.get()) / 1_000_000);
    System.out.println("Add time (ms) = " + nanosAdd.get() / 1_000_000);
    System.out.println("Remove time (ms) = " + nanosGet.get() / 1_000_000);
  } finally {
    // FIX: the original leaked the executor's worker threads; always shut it down.
    executor.shutdownNow();
  }
}
Example usage of com.wavefront.common.Pair in the wavefrontHQ/java project — from class JsonMetricsPortUnificationHandler, method handleHttpMessage:
/**
 * Handles an incoming HTTP request carrying JSON-formatted metrics. Recognized query
 * parameters: {@code p} (metric prefix, combined with the handler-level prefix), {@code h}
 * (source host, falls back to {@code defaultHost}), {@code d} (timestamp, falls back to
 * {@code Clock.now()}); all other non-empty parameters become point tags. Each parsed point
 * is run through the preprocessor (transform + filter) before being reported.
 *
 * @param ctx     channel context used to write the HTTP response
 * @param request full HTTP request whose body is the JSON metrics payload
 * @throws URISyntaxException if the request URI cannot be parsed
 */
@Override
protected void handleHttpMessage(final ChannelHandlerContext ctx, final FullHttpRequest request)
    throws URISyntaxException {
  StringBuilder output = new StringBuilder();
  try {
    URI uri = new URI(request.uri());
    // FIX: the original called uri.getRawQuery().split(...) and x.split("=")[1] directly,
    // which threw an uncaught NPE when the request had no query string and an uncaught
    // ArrayIndexOutOfBoundsException for valueless parameters ("?foo" or "?foo="), and
    // Collectors.toMap threw IllegalStateException on duplicate keys. Parse defensively:
    // skip malformed/valueless pairs; last occurrence of a duplicate key wins.
    Map<String, String> params = Maps.newHashMap();
    String rawQuery = uri.getRawQuery();
    if (rawQuery != null) {
      for (String kv : rawQuery.split("&")) {
        int eq = kv.indexOf('=');
        if (eq > 0 && eq < kv.length() - 1) {
          params.put(kv.substring(0, eq).trim().toLowerCase(), kv.substring(eq + 1));
        }
      }
    }
    String requestBody = request.content().toString(CharsetUtil.UTF_8);
    // Any non-standard, non-empty query parameter becomes a point tag.
    Map<String, String> tags = Maps.newHashMap();
    params.entrySet().stream()
        .filter(x -> !STANDARD_PARAMS.contains(x.getKey()) && x.getValue().length() > 0)
        .forEach(x -> tags.put(x.getKey(), x.getValue()));
    List<ReportPoint> points = new ArrayList<>();
    // "d" = explicit timestamp; fall back to wall-clock on absence or parse failure.
    long timestamp;
    if (params.get("d") == null) {
      timestamp = Clock.now();
    } else {
      try {
        timestamp = Long.parseLong(params.get("d"));
      } catch (NumberFormatException e) {
        timestamp = Clock.now();
      }
    }
    // Combine handler-level prefix with the per-request "p" prefix when both are present.
    String prefix = this.prefix == null
        ? params.get("p")
        : params.get("p") == null ? this.prefix : this.prefix + "." + params.get("p");
    String host = params.get("h") == null ? defaultHost : params.get("h");
    JsonNode metrics = jsonParser.readTree(requestBody);
    ReportableEntityPreprocessor preprocessor =
        preprocessorSupplier == null ? null : preprocessorSupplier.get();
    String[] messageHolder = new String[1]; // receives the preprocessor's rejection reason
    JsonMetricsParser.report("dummy", prefix, metrics, points, host, timestamp);
    for (ReportPoint point : points) {
      // Merge request-level tags in; per-point annotations take precedence on conflicts.
      if (point.getAnnotations().isEmpty()) {
        point.setAnnotations(tags);
      } else {
        Map<String, String> newAnnotations = Maps.newHashMap(tags);
        newAnnotations.putAll(point.getAnnotations());
        point.setAnnotations(newAnnotations);
      }
      if (preprocessor != null) {
        preprocessor.forReportPoint().transform(point);
        if (!preprocessor.forReportPoint().filter(point, messageHolder)) {
          // A populated message means "reject with reason"; otherwise silently block.
          if (messageHolder[0] != null) {
            pointHandler.reject(point, messageHolder[0]);
          } else {
            pointHandler.block(point);
          }
          continue;
        }
      }
      pointHandler.report(point);
    }
    writeHttpResponse(ctx, HttpResponseStatus.OK, output, request);
  } catch (IOException e) {
    logWarning("WF-300: Error processing incoming JSON request", e, ctx);
    writeHttpResponse(ctx, HttpResponseStatus.INTERNAL_SERVER_ERROR, output, request);
  }
}
Aggregations