Use of com.wavefront.agent.preprocessor.ReportableEntityPreprocessor in project java by wavefrontHQ.
The class WriteHttpJsonPortUnificationHandler, method reportMetrics.
private void reportMetrics(JsonNode metrics) {
  ReportableEntityPreprocessor preprocessor = preprocessorSupplier == null ?
      null : preprocessorSupplier.get();
  String[] messageHolder = new String[1];
  for (final JsonNode metric : metrics) {
    JsonNode host = metric.get("host");
    String hostName;
    if (host != null) {
      hostName = host.textValue();
      if (hostName == null || hostName.isEmpty()) {
        hostName = defaultHost;
      }
    } else {
      hostName = defaultHost;
    }
    JsonNode time = metric.get("time");
    long ts = 0;
    if (time != null) {
      ts = time.asLong() * 1000;
    }
    JsonNode values = metric.get("values");
    if (values == null) {
      pointHandler.reject((ReportPoint) null, "[values] missing in JSON object");
      logger.warning("Skipping - [values] missing in JSON object.");
      continue;
    }
    int index = 0;
    for (final JsonNode value : values) {
      String metricName = getMetricName(metric, index);
      ReportPoint.Builder builder = ReportPoint.newBuilder()
          .setMetric(metricName)
          .setTable("dummy")
          .setTimestamp(ts)
          .setHost(hostName);
      if (value.isDouble()) {
        builder.setValue(value.asDouble());
      } else {
        builder.setValue(value.asLong());
      }
      List<ReportPoint> parsedPoints = new ArrayList<>(1);
      ReportPoint point = builder.build();
      if (preprocessor != null && preprocessor.forPointLine().getTransformers().size() > 0) {
        // apply pointLine-level rules to the serialized point, then re-parse it
        String pointLine = ReportPointSerializer.pointToString(point);
        pointLine = preprocessor.forPointLine().transform(pointLine);
        recoder.decodeReportPoints(pointLine, parsedPoints, "dummy");
      } else {
        parsedPoints.add(point);
      }
      for (ReportPoint parsedPoint : parsedPoints) {
        if (preprocessor != null) {
          preprocessor.forReportPoint().transform(parsedPoint);
          if (!preprocessor.forReportPoint().filter(parsedPoint, messageHolder)) {
            if (messageHolder[0] != null) {
              pointHandler.reject(parsedPoint, messageHolder[0]);
            } else {
              pointHandler.block(parsedPoint);
            }
            continue;
          }
        }
        pointHandler.report(parsedPoint);
      }
      index++;
    }
  }
}
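For context, this method consumes the JSON array posted by collectd's write_http plugin. Only the host, time, and values fields are read directly above; the metric name for each value is derived by getMetricName, presumably from the plugin/type/dsnames fields. A representative payload, with the host, plugin, and values invented for illustration, might look like this:

[
  {
    "values": [197141504, 175136768],
    "dstypes": ["counter", "counter"],
    "dsnames": ["read", "write"],
    "time": 1532545800,
    "interval": 10,
    "host": "web01.example.com",
    "plugin": "disk",
    "plugin_instance": "sda",
    "type": "disk_octets",
    "type_instance": ""
  }
]

Each entry in values becomes one ReportPoint: a time of 1532545800 seconds is multiplied by 1000 into a millisecond timestamp, and a missing or empty host falls back to defaultHost.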
Use of com.wavefront.agent.preprocessor.ReportableEntityPreprocessor in project java by wavefrontHQ.
The class OpenTSDBPortUnificationHandler, method reportMetric.
/**
 * Parse an individual metric object and pass the metric on to the point handler.
 *
 * @param metric the JSON object representing a single metric
 * @param ctx    channel handler context (to retrieve the remote address)
 * @return true if the metric was reported successfully; false otherwise
 * @see <a href="http://opentsdb.net/docs/build/html/api_http/put.html">OpenTSDB /api/put documentation</a>
 */
private boolean reportMetric(final JsonNode metric, ChannelHandlerContext ctx) {
  try {
    String metricName = metric.get("metric").textValue();
    JsonNode tags = metric.get("tags");
    Map<String, String> wftags = JsonMetricsParser.makeTags(tags);
    String hostName;
    if (wftags.containsKey("host")) {
      hostName = wftags.get("host");
    } else if (wftags.containsKey("source")) {
      hostName = wftags.get("source");
    } else {
      hostName = resolver == null ? "unknown" : resolver.apply(getRemoteAddress(ctx));
    }
    // remove source/host from the tags list
    Map<String, String> wftags2 = new HashMap<>();
    for (Map.Entry<String, String> wftag : wftags.entrySet()) {
      if (wftag.getKey().equalsIgnoreCase("host") || wftag.getKey().equalsIgnoreCase("source")) {
        continue;
      }
      wftags2.put(wftag.getKey(), wftag.getValue());
    }
    ReportPoint.Builder builder = ReportPoint.newBuilder();
    builder.setMetric(metricName);
    JsonNode time = metric.get("timestamp");
    // if timestamp is not available, fall back to Clock.now()
    long ts = Clock.now();
    if (time != null) {
      int timestampSize = Long.toString(time.asLong()).length();
      if (timestampSize == 19) {
        // nanoseconds
        ts = time.asLong() / 1000000;
      } else if (timestampSize == 16) {
        // microseconds
        ts = time.asLong() / 1000;
      } else if (timestampSize == 13) {
        // milliseconds
        ts = time.asLong();
      } else {
        // seconds
        ts = time.asLong() * 1000;
      }
    }
    builder.setTimestamp(ts);
    JsonNode value = metric.get("value");
    if (value == null) {
      pointHandler.reject((ReportPoint) null, "Skipping. Missing 'value' in JSON node.");
      return false;
    }
    if (value.isDouble()) {
      builder.setValue(value.asDouble());
    } else {
      builder.setValue(value.asLong());
    }
    builder.setAnnotations(wftags2);
    builder.setTable("dummy");
    builder.setHost(hostName);
    ReportPoint point = builder.build();
    ReportableEntityPreprocessor preprocessor = preprocessorSupplier == null ?
        null : preprocessorSupplier.get();
    String[] messageHolder = new String[1];
    if (preprocessor != null) {
      preprocessor.forReportPoint().transform(point);
      if (!preprocessor.forReportPoint().filter(point, messageHolder)) {
        if (messageHolder[0] != null) {
          pointHandler.reject(point, messageHolder[0]);
          return false;
        } else {
          pointHandler.block(point);
          return true;
        }
      }
    }
    pointHandler.report(point);
    return true;
  } catch (final Exception e) {
    logWarning("WF-300: Failed to add metric", e, null);
    return false;
  }
}
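The OpenTSDB /api/put documentation linked above defines the shape of a single metric object handled here; a body along these lines (values illustrative) would be accepted:

{
  "metric": "sys.cpu.nice",
  "timestamp": 1346846400,
  "value": 18,
  "tags": { "host": "web01", "dc": "lga" }
}

The host tag is consumed as the point source and removed from the tag map, the 10-digit timestamp is treated as seconds and multiplied to milliseconds, and dc=lga survives as a point annotation.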
Use of com.wavefront.agent.preprocessor.ReportableEntityPreprocessor in project java by wavefrontHQ.
The class SpanUtils, method preprocessAndHandleSpan.
/**
* Preprocess and handle span.
*
* @param message encoded span data.
* @param decoder span decoder.
* @param handler span handler.
* @param spanReporter span reporter.
* @param preprocessorSupplier span preprocessor.
* @param ctx channel handler context.
* @param samplerFunc span sampler.
*/
public static void preprocessAndHandleSpan(
    String message, ReportableEntityDecoder<String, Span> decoder,
    ReportableEntityHandler<Span, String> handler, Consumer<Span> spanReporter,
    @Nullable Supplier<ReportableEntityPreprocessor> preprocessorSupplier,
    @Nullable ChannelHandlerContext ctx, Function<Span, Boolean> samplerFunc) {
  ReportableEntityPreprocessor preprocessor = preprocessorSupplier == null ?
      null : preprocessorSupplier.get();
  String[] messageHolder = new String[1];
  // transform the line if needed
  if (preprocessor != null) {
    message = preprocessor.forPointLine().transform(message);
    if (!preprocessor.forPointLine().filter(message, messageHolder)) {
      if (messageHolder[0] != null) {
        handler.reject((Span) null, messageHolder[0]);
      } else {
        handler.block(null, message);
      }
      return;
    }
  }
  List<Span> output = new ArrayList<>(1);
  try {
    decoder.decode(message, output, "dummy");
  } catch (Exception e) {
    handler.reject(message, formatErrorMessage(message, e, ctx));
    return;
  }
  for (Span object : output) {
    if (preprocessor != null) {
      preprocessor.forSpan().transform(object);
      if (!preprocessor.forSpan().filter(object, messageHolder)) {
        if (messageHolder[0] != null) {
          handler.reject(object, messageHolder[0]);
        } else {
          handler.block(object);
        }
        return;
      }
    }
    if (samplerFunc.apply(object)) {
      spanReporter.accept(object);
    }
  }
}
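The message argument is a span in Wavefront's line format. Assuming the standard tracing data format, a line such as the following (names and identifiers invented for illustration) would be transformed and filtered by the pointLine rules first, then decoded into a Span object before the span-level rules and samplerFunc run:

getAllProducts source=app-1.example.com traceId=7b3bf470-9456-11e8-9eb6-529269fb1459 spanId=0313bafe-9457-11e8-9eb6-529269fb1459 application=ecommerce service=inventory 1532012145123 343

The last two fields are the start timestamp in milliseconds and the duration in milliseconds.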
Use of com.wavefront.agent.preprocessor.ReportableEntityPreprocessor in project java by wavefrontHQ.
The class ZipkinPortUnificationHandler, method processZipkinSpan.
private void processZipkinSpan(zipkin2.Span zipkinSpan) {
  if (ZIPKIN_DATA_LOGGER.isLoggable(Level.FINEST)) {
    ZIPKIN_DATA_LOGGER.info("Inbound Zipkin span: " + zipkinSpan.toString());
  }
  // Add application tags, span references, span kind and http uri, responses etc.
  List<Annotation> annotations = new ArrayList<>();
  // Add the original Zipkin trace and span ids as tags to make finding them easier
  annotations.add(new Annotation("zipkinSpanId", zipkinSpan.id()));
  annotations.add(new Annotation("zipkinTraceId", zipkinSpan.traceId()));
  // Set the span's references.
  if (zipkinSpan.parentId() != null) {
    annotations.add(new Annotation(TraceConstants.PARENT_KEY,
        Utils.convertToUuidString(zipkinSpan.parentId())));
  }
  // Set the span kind.
  if (zipkinSpan.kind() != null) {
    String kind = zipkinSpan.kind().toString().toLowerCase();
    annotations.add(new Annotation("span.kind", kind));
    if (zipkinSpan.annotations() != null && !zipkinSpan.annotations().isEmpty()) {
      annotations.add(new Annotation("_spanSecondaryId", kind));
    }
  }
  // Set the span's service name.
  String serviceName = zipkinSpan.localServiceName() == null ?
      DEFAULT_SERVICE : zipkinSpan.localServiceName();
  annotations.add(new Annotation(SERVICE_TAG_KEY, serviceName));
  String applicationName = this.proxyLevelApplicationName;
  String cluster = NULL_TAG_VAL;
  String shard = NULL_TAG_VAL;
  String componentTagValue = NULL_TAG_VAL;
  boolean isError = false;
  boolean isDebugSpanTag = false;
  // Set all other span tags.
  Set<String> ignoreKeys = new HashSet<>(ImmutableSet.of(SOURCE_KEY));
  if (zipkinSpan.tags() != null && zipkinSpan.tags().size() > 0) {
    for (Map.Entry<String, String> tag : zipkinSpan.tags().entrySet()) {
      if (!ignoreKeys.contains(tag.getKey().toLowerCase()) && !StringUtils.isBlank(tag.getValue())) {
        Annotation annotation = new Annotation(tag.getKey(), tag.getValue());
        switch (annotation.getKey()) {
          case APPLICATION_TAG_KEY:
            applicationName = annotation.getValue();
            continue;
          case CLUSTER_TAG_KEY:
            cluster = annotation.getValue();
            continue;
          case SHARD_TAG_KEY:
            shard = annotation.getValue();
            continue;
          case COMPONENT_TAG_KEY:
            componentTagValue = annotation.getValue();
            break;
          case ERROR_SPAN_TAG_KEY:
            isError = true;
            // Ignore the original error value
            annotation.setValue(ERROR_SPAN_TAG_VAL);
            break;
          case DEBUG_TAG_KEY:
            isDebugSpanTag = annotation.getValue().equals(DEBUG_SPAN_TAG_VAL);
            break;
        }
        annotations.add(annotation);
      }
    }
  }
  // Add all Wavefront indexed tags. These are set based on the following hierarchy:
  // span level > proxy level > default.
  annotations.add(new Annotation(APPLICATION_TAG_KEY, applicationName));
  annotations.add(new Annotation(CLUSTER_TAG_KEY, cluster));
  annotations.add(new Annotation(SHARD_TAG_KEY, shard));
  // Add sampling-related annotations.
  // Add a debug span tag as needed to enable sampling of this span with intelligent sampling.
  boolean isDebug = zipkinSpan.debug() != null ? zipkinSpan.debug() : false;
  if (!isDebugSpanTag && isDebug) {
    annotations.add(new Annotation(DEBUG_SPAN_TAG_KEY, DEBUG_SPAN_TAG_VAL));
  }
  // Add additional annotations.
  if (zipkinSpan.localEndpoint() != null && zipkinSpan.localEndpoint().ipv4() != null) {
    annotations.add(new Annotation("ipv4", zipkinSpan.localEndpoint().ipv4()));
  }
  if (!spanLogsDisabled.get() && zipkinSpan.annotations() != null && !zipkinSpan.annotations().isEmpty()) {
    annotations.add(new Annotation("_spanLogs", "true"));
  }
  /* Set the source of the span as follows:
   * 1. If "source" is provided by the span tags, use it;
   * 2. otherwise, default "source" to "zipkin".
   */
  String sourceName = DEFAULT_SOURCE;
  if (zipkinSpan.tags() != null && zipkinSpan.tags().size() > 0) {
    if (zipkinSpan.tags().get(SOURCE_KEY) != null) {
      sourceName = zipkinSpan.tags().get(SOURCE_KEY);
    }
  }
  // Set the span name.
  String spanName = zipkinSpan.name() == null ? DEFAULT_SPAN_NAME : zipkinSpan.name();
  String spanId = Utils.convertToUuidString(zipkinSpan.id());
  String traceId = Utils.convertToUuidString(zipkinSpan.traceId());
  // Build the Wavefront span.
  Span wavefrontSpan = Span.newBuilder()
      .setCustomer("dummy")
      .setName(spanName)
      .setSource(sourceName)
      .setSpanId(spanId)
      .setTraceId(traceId)
      .setStartMillis(zipkinSpan.timestampAsLong() / 1000)
      .setDuration(zipkinSpan.durationAsLong() / 1000)
      .setAnnotations(annotations)
      .build();
  if (zipkinSpan.tags().containsKey(SPAN_TAG_ERROR)) {
    if (ZIPKIN_DATA_LOGGER.isLoggable(Level.FINER)) {
      ZIPKIN_DATA_LOGGER.info("Span id :: " + spanId + " with trace id :: " + traceId +
          " , includes error tag :: " + zipkinSpan.tags().get(SPAN_TAG_ERROR));
    }
  }
  // Log Zipkin spans as well as Wavefront spans for debugging purposes.
  if (ZIPKIN_DATA_LOGGER.isLoggable(Level.FINEST)) {
    ZIPKIN_DATA_LOGGER.info("Converted Wavefront span: " + wavefrontSpan.toString());
  }
  if (preprocessorSupplier != null) {
    ReportableEntityPreprocessor preprocessor = preprocessorSupplier.get();
    String[] messageHolder = new String[1];
    preprocessor.forSpan().transform(wavefrontSpan);
    if (!preprocessor.forSpan().filter(wavefrontSpan, messageHolder)) {
      if (messageHolder[0] != null) {
        spanHandler.reject(wavefrontSpan, messageHolder[0]);
      } else {
        spanHandler.block(wavefrontSpan);
      }
      return;
    }
  }
  if (sampler.sample(wavefrontSpan, discardedSpansBySampler)) {
    spanHandler.report(wavefrontSpan);
    if (zipkinSpan.annotations() != null && !zipkinSpan.annotations().isEmpty() &&
        !isFeatureDisabled(spanLogsDisabled, SPANLOGS_DISABLED, null)) {
      SpanLogs spanLogs = SpanLogs.newBuilder()
          .setCustomer("default")
          .setTraceId(wavefrontSpan.getTraceId())
          .setSpanId(wavefrontSpan.getSpanId())
          .setSpanSecondaryId(zipkinSpan.kind() != null ?
              zipkinSpan.kind().toString().toLowerCase() : null)
          .setLogs(zipkinSpan.annotations().stream()
              .map(x -> SpanLog.newBuilder()
                  .setTimestamp(x.timestamp())
                  .setFields(ImmutableMap.of("annotation", x.value()))
                  .build())
              .collect(Collectors.toList()))
          .build();
      spanLogsHandler.report(spanLogs);
    }
  }
  // Report stats irrespective of span sampling.
  if (wfInternalReporter != null) {
    // Pick up post-preprocessor rule values and report converted metrics/histograms from the span
    List<Annotation> processedAnnotations = wavefrontSpan.getAnnotations();
    for (Annotation processedAnnotation : processedAnnotations) {
      switch (processedAnnotation.getKey()) {
        case APPLICATION_TAG_KEY:
          applicationName = processedAnnotation.getValue();
          continue;
        case SERVICE_TAG_KEY:
          serviceName = processedAnnotation.getValue();
          continue;
        case CLUSTER_TAG_KEY:
          cluster = processedAnnotation.getValue();
          continue;
        case SHARD_TAG_KEY:
          shard = processedAnnotation.getValue();
          continue;
        case COMPONENT_TAG_KEY:
          componentTagValue = processedAnnotation.getValue();
          continue;
        case ERROR_TAG_KEY:
          isError = true;
          continue;
      }
    }
    List<Pair<String, String>> spanTags = processedAnnotations.stream()
        .map(a -> new Pair<>(a.getKey(), a.getValue()))
        .collect(Collectors.toList());
    discoveredHeartbeatMetrics.add(reportWavefrontGeneratedData(wfInternalReporter,
        wavefrontSpan.getName(), applicationName, serviceName, cluster, shard,
        wavefrontSpan.getSource(), componentTagValue, isError, zipkinSpan.durationAsLong(),
        traceDerivedCustomTagKeys, spanTags, true));
  }
}
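As a rough sketch of the input this method receives, a minimal Zipkin v2 span can be constructed with the zipkin2 library's builder; the service name, ids, tags, and timings below are made up. Zipkin timestamps and durations arrive in microseconds, which is why the code above divides by 1000 when building the Wavefront span.

import zipkin2.Endpoint;
import zipkin2.Span;

public class ZipkinSpanSample {
  public static void main(String[] args) {
    // A minimal Zipkin v2 span, roughly the shape processZipkinSpan receives after decoding.
    Span zipkinSpan = Span.newBuilder()
        .traceId("48485a3953bb6124")
        .id("863ac35c9f6413ad")
        .name("get /products")
        .kind(Span.Kind.SERVER)
        .timestamp(1532012145123000L)  // start time, microseconds since epoch
        .duration(343000L)             // duration, microseconds
        .localEndpoint(Endpoint.newBuilder().serviceName("inventory").ip("10.0.0.1").build())
        .putTag("application", "ecommerce")
        .putTag("http.method", "GET")
        .build();
    System.out.println(zipkinSpan);
  }
}

With these values the converted Wavefront span would start at 1532012145123 ms, last 343 ms, take service=inventory from the local endpoint, and take its application name from the application tag rather than the proxy-level default.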
Use of com.wavefront.agent.preprocessor.ReportableEntityPreprocessor in project java by wavefrontHQ.
The class JsonMetricsPortUnificationHandler, method handleHttpMessage.
@Override
protected void handleHttpMessage(final ChannelHandlerContext ctx, final FullHttpRequest request)
    throws URISyntaxException {
  StringBuilder output = new StringBuilder();
  try {
    URI uri = new URI(request.uri());
    Map<String, String> params = Arrays.stream(uri.getRawQuery().split("&"))
        .map(x -> new Pair<>(x.split("=")[0].trim().toLowerCase(), x.split("=")[1]))
        .collect(Collectors.toMap(k -> k._1, v -> v._2));
    String requestBody = request.content().toString(CharsetUtil.UTF_8);
    Map<String, String> tags = Maps.newHashMap();
    params.entrySet().stream()
        .filter(x -> !STANDARD_PARAMS.contains(x.getKey()) && x.getValue().length() > 0)
        .forEach(x -> tags.put(x.getKey(), x.getValue()));
    List<ReportPoint> points = new ArrayList<>();
    long timestamp;
    if (params.get("d") == null) {
      timestamp = Clock.now();
    } else {
      try {
        timestamp = Long.parseLong(params.get("d"));
      } catch (NumberFormatException e) {
        timestamp = Clock.now();
      }
    }
    String prefix = this.prefix == null ? params.get("p") :
        params.get("p") == null ? this.prefix : this.prefix + "." + params.get("p");
    String host = params.get("h") == null ? defaultHost : params.get("h");
    JsonNode metrics = jsonParser.readTree(requestBody);
    ReportableEntityPreprocessor preprocessor = preprocessorSupplier == null ?
        null : preprocessorSupplier.get();
    String[] messageHolder = new String[1];
    JsonMetricsParser.report("dummy", prefix, metrics, points, host, timestamp);
    for (ReportPoint point : points) {
      if (point.getAnnotations().isEmpty()) {
        point.setAnnotations(tags);
      } else {
        Map<String, String> newAnnotations = Maps.newHashMap(tags);
        newAnnotations.putAll(point.getAnnotations());
        point.setAnnotations(newAnnotations);
      }
      if (preprocessor != null) {
        preprocessor.forReportPoint().transform(point);
        if (!preprocessor.forReportPoint().filter(point, messageHolder)) {
          if (messageHolder[0] != null) {
            pointHandler.reject(point, messageHolder[0]);
          } else {
            pointHandler.block(point);
          }
          continue;
        }
      }
      pointHandler.report(point);
    }
    writeHttpResponse(ctx, HttpResponseStatus.OK, output, request);
  } catch (IOException e) {
    logWarning("WF-300: Error processing incoming JSON request", e, ctx);
    writeHttpResponse(ctx, HttpResponseStatus.INTERNAL_SERVER_ERROR, output, request);
  }
}
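To make the parameter handling concrete: assuming 'env' is not in STANDARD_PARAMS, a request along these lines (host, prefix, metric names, and values invented) would be accepted. The 'd' parameter overrides the timestamp, 'h' the host, 'p' is appended to any proxy-level prefix, and every remaining query parameter becomes a point tag:

POST /?p=myapp&h=app-server-01&d=1532012145123&env=prod HTTP/1.1
Content-Type: application/json

{
  "responses": { "ok": 1240, "error": 7 },
  "latency": 180.5
}

JsonMetricsParser.report is expected to flatten the nested object into dot-delimited metric names under the prefix (for example myapp.responses.ok), though the exact naming is up to that parser; each resulting point then carries env=prod, and the usual preprocessor transform/filter pass decides whether it is reported, rejected, or blocked.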