use of org.apache.druid.java.util.emitter.service.ServiceMetricEvent in project druid by druid-io.
the class InfluxdbEmitterTest method setUp.
@Before
public void setUp() {
  DateTime date = new DateTime(2017, 10, 30, 10, 0, DateTimeZone.UTC); // 10:00am on 30/10/2017 = 1509357600000000000 in epoch nanoseconds
  String metric = "metric/te/st/value";
  Number value = 1234;
  ImmutableMap<String, String> serviceDims =
      ImmutableMap.of("service", "druid/historical", "host", "localhost", "version", "0.10.0");
  ServiceMetricEvent.Builder builder = ServiceMetricEvent.builder();
  builder.setDimension("nonWhiteListedDim", "test");
  builder.setDimension("dataSource", "test_datasource");
  ServiceEventBuilder eventBuilder = builder.build(date, metric, value);
  event = (ServiceMetricEvent) eventBuilder.build(serviceDims);
}
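For orientation, here is a minimal sketch (not part of the test above) of how a fully built ServiceMetricEvent is typically handed to an emitter. The emitter variable and the dimension values are assumptions; the builder calls mirror the ones used in setUp.
ServiceMetricEvent.Builder builder = ServiceMetricEvent.builder();
builder.setDimension("dataSource", "test_datasource");
ServiceMetricEvent event = (ServiceMetricEvent) builder
    .build(DateTimes.nowUtc(), "metric/te/st/value", 1234)
    .build(ImmutableMap.of("service", "druid/historical", "host", "localhost", "version", "0.10.0"));
emitter.emit(event); // `emitter` is any configured Druid Emitter instance (assumption, not shown in the test)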
use of org.apache.druid.java.util.emitter.service.ServiceMetricEvent in project druid by druid-io.
the class InfluxdbEmitterTest method testMetricIsInDimensionWhitelist.
@Test
public void testMetricIsInDimensionWhitelist() {
  DateTime date = new DateTime(2017, 10, 30, 10, 0, DateTimeZone.UTC); // 10:00am on 30/10/2017 = 1509357600000000000 in epoch nanoseconds
  String metric = "metric/time";
  Number value = 1234;
  ImmutableMap<String, String> serviceDims =
      ImmutableMap.of("service", "druid/historical", "host", "localhost", "version", "0.10.0");
  ServiceMetricEvent.Builder builder = ServiceMetricEvent.builder();
  ServiceEventBuilder eventBuilder = builder.build(date, metric, value);
  builder.setDimension("dataSource", "wikipedia");
  builder.setDimension("taskType", "index");
  ServiceMetricEvent event = (ServiceMetricEvent) eventBuilder.build(serviceDims);
  InfluxdbEmitterConfig config = new InfluxdbEmitterConfig(
      "localhost", 8086, null, null, null, null, "dbname",
      10000, 15000, 30000, "adam", "password", ImmutableSet.of("dataSource"));
  InfluxdbEmitter influxdbEmitter = new InfluxdbEmitter(config);
  String expected =
      "druid_metric,service=druid/historical,hostname=localhost,dataSource=wikipedia druid_time=1234 1509357600000000000\n";
  String actual = influxdbEmitter.transformForInfluxSystems(event);
  Assert.assertEquals(expected, actual);
}
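By way of contrast, a hedged companion sketch (not the project's own test) of the same emitter dropping a dimension that is absent from the whitelist. If added inside the same test body it reuses date, serviceDims and influxdbEmitter from above; the expected output line is an assumption inferred from the whitelisted case.
ServiceMetricEvent.Builder builder2 = ServiceMetricEvent.builder();
ServiceEventBuilder eventBuilder2 = builder2.build(date, "metric/time", 1234);
builder2.setDimension("nonWhiteListedDim", "test"); // not in ImmutableSet.of("dataSource"), so it should be dropped
ServiceMetricEvent event2 = (ServiceMetricEvent) eventBuilder2.build(serviceDims);
// Expected line omits the non-whitelisted dimension (assumption):
String expectedWithoutDim = "druid_metric,service=druid/historical,hostname=localhost druid_time=1234 1509357600000000000\n";
Assert.assertEquals(expectedWithoutDim, influxdbEmitter.transformForInfluxSystems(event2));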
use of org.apache.druid.java.util.emitter.service.ServiceMetricEvent in project druid by druid-io.
the class KafkaEmitterTest method testKafkaEmitter.
// there is a 1 second wait in the Kafka emitter before it starts sending events to the broker, so set a 5 second timeout
@Test(timeout = 5_000)
public void testKafkaEmitter() throws InterruptedException {
  final List<ServiceMetricEvent> serviceMetricEvents = ImmutableList.of(
      ServiceMetricEvent.builder().build("m1", 1).build("service", "host"));
  final List<AlertEvent> alertEvents = ImmutableList.of(
      new AlertEvent("service", "host", "description"));
  final List<RequestLogEvent> requestLogEvents = ImmutableList.of(
      DefaultRequestLogEventBuilderFactory.instance()
          .createRequestLogEventBuilder(
              "requests",
              RequestLogLine.forSql("", null, DateTimes.nowUtc(), null, new QueryStats(ImmutableMap.of())))
          .build("service", "host"));
  int totalEvents = serviceMetricEvents.size() + alertEvents.size() + requestLogEvents.size();
  int totalEventsExcludingRequestLogEvents = totalEvents - requestLogEvents.size();
  final CountDownLatch countDownSentEvents = new CountDownLatch(
      requestTopic == null ? totalEventsExcludingRequestLogEvents : totalEvents);
  final KafkaProducer<String, String> producer = mock(KafkaProducer.class);
  final KafkaEmitter kafkaEmitter = new KafkaEmitter(
      new KafkaEmitterConfig("", "metrics", "alerts", requestTopic, "test-cluster", null),
      new ObjectMapper()) {
    @Override
    protected Producer<String, String> setKafkaProducer() {
      // override send interval to 1 second
      sendInterval = 1;
      return producer;
    }
  };
  when(producer.send(any(), any())).then((invocation) -> {
    countDownSentEvents.countDown();
    return null;
  });
  kafkaEmitter.start();
  for (Event event : serviceMetricEvents) {
    kafkaEmitter.emit(event);
  }
  for (Event event : alertEvents) {
    kafkaEmitter.emit(event);
  }
  for (Event event : requestLogEvents) {
    kafkaEmitter.emit(event);
  }
  countDownSentEvents.await();
  Assert.assertEquals(0, kafkaEmitter.getMetricLostCount());
  Assert.assertEquals(0, kafkaEmitter.getAlertLostCount());
  Assert.assertEquals(requestTopic == null ? requestLogEvents.size() : 0, kafkaEmitter.getRequestLostCount());
  Assert.assertEquals(0, kafkaEmitter.getInvalidLostCount());
}
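The moving part worth calling out is the CountDownLatch counted down from the mocked producer's send. Below is a small illustrative sketch of the same pattern with an explicit await timeout instead of an unbounded wait; the latch size and message are illustrative, not taken from the test.
CountDownLatch sent = new CountDownLatch(3);
when(producer.send(any(), any())).then(invocation -> {
  sent.countDown(); // one tick per record the emitter hands to the mocked Kafka producer
  return null;
});
// ... emit three events through the emitter under test ...
Assert.assertTrue("events were not sent within 5 seconds", sent.await(5, TimeUnit.SECONDS));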
use of org.apache.druid.java.util.emitter.service.ServiceMetricEvent in project druid by druid-io.
the class OpenTelemetryEmitter method emitQueryTimeEvent.
private void emitQueryTimeEvent(ServiceMetricEvent event) {
  Context opentelemetryContext = propagator.extract(Context.current(), event, DRUID_CONTEXT_TEXT_MAP_GETTER);
  try (Scope scope = opentelemetryContext.makeCurrent()) {
    // The metric value is the query duration in milliseconds, so the span starts that many millis before the event's created time.
    DateTime endTime = event.getCreatedTime();
    DateTime startTime = endTime.minusMillis(event.getValue().intValue());
    Span span = tracer.spanBuilder(event.getService())
                      .setStartTimestamp(startTime.getMillis(), TimeUnit.MILLISECONDS)
                      .startSpan();
    getContext(event).entrySet().stream()
        .filter(entry -> entry.getValue() != null)
        .filter(entry -> !TRACEPARENT_PROPAGATION_FIELDS.contains(entry.getKey()))
        .forEach(entry -> span.setAttribute(entry.getKey(), entry.getValue().toString()));
    // The "success" user dimension drives the span status.
    Object status = event.getUserDims().get("success");
    if (status == null) {
      span.setStatus(StatusCode.UNSET);
    } else if (status.toString().equals("true")) {
      span.setStatus(StatusCode.OK);
    } else {
      span.setStatus(StatusCode.ERROR);
    }
    span.end(endTime.getMillis(), TimeUnit.MILLISECONDS);
  }
}
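The method assumes that the tracer and propagator fields are already wired up elsewhere in the emitter. A minimal sketch, assuming an OpenTelemetry instance is available, of obtaining both from the OpenTelemetry API; the instrumentation scope name is an assumption.
OpenTelemetry openTelemetry = GlobalOpenTelemetry.get();
Tracer tracer = openTelemetry.getTracer("org.apache.druid.emitter.opentelemetry"); // scope name is an assumption
TextMapPropagator propagator = openTelemetry.getPropagators().getTextMapPropagator();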
use of org.apache.druid.java.util.emitter.service.ServiceMetricEvent in project druid by druid-io.
the class OpenTelemetryEmitter method emit.
@Override
public void emit(Event e) {
  if (!(e instanceof ServiceMetricEvent)) {
    return;
  }
  ServiceMetricEvent event = (ServiceMetricEvent) e;
  // only query/time metrics are turned into spans
  if (!event.getMetric().equals("query/time")) {
    return;
  }
  emitQueryTimeEvent(event);
}
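A hedged usage sketch of an event that passes this filter and reaches emitQueryTimeEvent; the emitter instance name and the dimension values are assumptions, while the builder calls match the ones shown in the test snippets above.
ServiceMetricEvent.Builder builder = ServiceMetricEvent.builder();
builder.setDimension("success", "true"); // maps to StatusCode.OK in emitQueryTimeEvent
ServiceMetricEvent queryTime = (ServiceMetricEvent) builder
    .build(DateTimes.nowUtc(), "query/time", 120)
    .build(ImmutableMap.of("service", "druid/broker", "host", "localhost"));
openTelemetryEmitter.emit(queryTime); // events with any other metric name are ignored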