Use of org.apache.druid.java.util.emitter.service.ServiceEventBuilder in project druid by druid-io.
From the class InfluxdbEmitterTest, method testTransformForInfluxWithShortMetric:
@Test
public void testTransformForInfluxWithShortMetric() {
  // 10:00am on 30/10/2017 = 1509357600000000000 in epoch nanoseconds
  DateTime date = new DateTime(2017, 10, 30, 10, 00, DateTimeZone.UTC);
  String metric = "metric/time";
  Number value = 1234;
  ImmutableMap<String, String> serviceDims = ImmutableMap.of("service", "druid/historical", "host", "localhost", "version", "0.10.0");
  ServiceMetricEvent.Builder builder = ServiceMetricEvent.builder();
  ServiceEventBuilder eventBuilder = builder.build(date, metric, value);
  ServiceMetricEvent event = (ServiceMetricEvent) eventBuilder.build(serviceDims);
  InfluxdbEmitterConfig config = new InfluxdbEmitterConfig("localhost", 8086, null, null, null, null, "dbname", 10000, 15000, 30000, "adam", "password", null);
  InfluxdbEmitter influxdbEmitter = new InfluxdbEmitter(config);
  String expected = "druid_metric,service=druid/historical,hostname=localhost druid_time=1234 1509357600000000000" + "\n";
  String actual = influxdbEmitter.transformForInfluxSystems(event);
  Assert.assertEquals(expected, actual);
}
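The expected string is InfluxDB line protocol: the first segment of the metric path contributes to the measurement name, the remaining segment becomes the field key, the "service" and "host" dimensions become tags, and the timestamp is rendered in epoch nanoseconds. Below is a minimal standalone sketch of that shape, assuming the split-on-'/' convention implied by the assertion; the toLine helper is hypothetical and is not the emitter's actual code.

import java.util.concurrent.TimeUnit;

public class InfluxLineSketch {
  // Hypothetical helper reproducing the line asserted above:
  // "druid_metric,service=druid/historical,hostname=localhost druid_time=1234 1509357600000000000\n"
  static String toLine(String service, String host, String metric, Number value, long timestampMillis) {
    String[] parts = metric.split("/", 2);         // "metric/time" -> ["metric", "time"]
    String measurement = "druid_" + parts[0];      // measurement: druid_metric
    String fieldKey = "druid_" + parts[1];         // field key: druid_time
    long nanos = TimeUnit.MILLISECONDS.toNanos(timestampMillis);
    return measurement + ",service=" + service + ",hostname=" + host
           + " " + fieldKey + "=" + value + " " + nanos + "\n";
  }

  public static void main(String[] args) {
    // Prints the same line the test expects for 2017-10-30T10:00:00Z.
    System.out.println(toLine("druid/historical", "localhost", "metric/time", 1234, 1509357600000L));
  }
}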
Use of org.apache.druid.java.util.emitter.service.ServiceEventBuilder in project druid by druid-io.
From the class KillCompactionConfigTest, method testRunRemoveInactiveDatasourceCompactionConfig:
@Test
public void testRunRemoveInactiveDatasourceCompactionConfig() {
  String inactiveDatasourceName = "inactive_datasource";
  String activeDatasourceName = "active_datasource";
  DataSourceCompactionConfig inactiveDatasourceConfig = new DataSourceCompactionConfig(inactiveDatasourceName, null, 500L, null, new Period(3600), null, new UserCompactionTaskGranularityConfig(Granularities.HOUR, null, null), null, null, null, null, ImmutableMap.of("key", "val"));
  DataSourceCompactionConfig activeDatasourceConfig = new DataSourceCompactionConfig(activeDatasourceName, null, 500L, null, new Period(3600), null, new UserCompactionTaskGranularityConfig(Granularities.HOUR, null, null), null, null, null, null, ImmutableMap.of("key", "val"));
  CoordinatorCompactionConfig originalCurrentConfig = CoordinatorCompactionConfig.from(ImmutableList.of(inactiveDatasourceConfig, activeDatasourceConfig));
  byte[] originalCurrentConfigBytes = { 1, 2, 3 };
  Mockito.when(mockConnector.lookup(ArgumentMatchers.anyString(), ArgumentMatchers.eq("name"), ArgumentMatchers.eq("payload"), ArgumentMatchers.eq(CoordinatorCompactionConfig.CONFIG_KEY))).thenReturn(originalCurrentConfigBytes);
  Mockito.when(mockJacksonConfigManager.convertByteToConfig(ArgumentMatchers.eq(originalCurrentConfigBytes), ArgumentMatchers.eq(CoordinatorCompactionConfig.class), ArgumentMatchers.eq(CoordinatorCompactionConfig.empty()))).thenReturn(originalCurrentConfig);
  Mockito.when(mockDruidCoordinatorRuntimeParams.getEmitter()).thenReturn(mockServiceEmitter);
  Mockito.when(mockSqlSegmentsMetadataManager.retrieveAllDataSourceNames()).thenReturn(ImmutableSet.of(activeDatasourceName));
  final ArgumentCaptor<byte[]> oldConfigCaptor = ArgumentCaptor.forClass(byte[].class);
  final ArgumentCaptor<CoordinatorCompactionConfig> newConfigCaptor = ArgumentCaptor.forClass(CoordinatorCompactionConfig.class);
  Mockito.when(mockJacksonConfigManager.set(ArgumentMatchers.eq(CoordinatorCompactionConfig.CONFIG_KEY), oldConfigCaptor.capture(), newConfigCaptor.capture(), ArgumentMatchers.any())).thenReturn(ConfigManager.SetResult.ok());
  TestDruidCoordinatorConfig druidCoordinatorConfig = new TestDruidCoordinatorConfig(null, null, null, new Duration("PT5S"), null, null, null, null, null, null, null, new Duration("PT6S"), null, null, null, null, 10, null);
  killCompactionConfig = new KillCompactionConfig(druidCoordinatorConfig, mockSqlSegmentsMetadataManager, mockJacksonConfigManager, mockConnector, mockConnectorConfig);
  killCompactionConfig.run(mockDruidCoordinatorRuntimeParams);
  // Verify and assert
  Assert.assertNotNull(oldConfigCaptor.getValue());
  Assert.assertEquals(oldConfigCaptor.getValue(), originalCurrentConfigBytes);
  Assert.assertNotNull(newConfigCaptor.getValue());
  // The updated config should contain only one compaction config, for the active datasource
  Assert.assertEquals(1, newConfigCaptor.getValue().getCompactionConfigs().size());
  Assert.assertEquals(activeDatasourceConfig, newConfigCaptor.getValue().getCompactionConfigs().get(0));
  final ArgumentCaptor<ServiceEventBuilder> emittedEventCaptor = ArgumentCaptor.forClass(ServiceEventBuilder.class);
  Mockito.verify(mockServiceEmitter).emit(emittedEventCaptor.capture());
  Assert.assertEquals(KillCompactionConfig.COUNT_METRIC, emittedEventCaptor.getValue().build(ImmutableMap.of()).toMap().get("metric"));
  // Should delete 1 config
  Assert.assertEquals(1, emittedEventCaptor.getValue().build(ImmutableMap.of()).toMap().get("value"));
  Mockito.verify(mockJacksonConfigManager).convertByteToConfig(ArgumentMatchers.eq(originalCurrentConfigBytes), ArgumentMatchers.eq(CoordinatorCompactionConfig.class), ArgumentMatchers.eq(CoordinatorCompactionConfig.empty()));
  Mockito.verify(mockConnector).lookup(ArgumentMatchers.anyString(), ArgumentMatchers.eq("name"), ArgumentMatchers.eq("payload"), ArgumentMatchers.eq(CoordinatorCompactionConfig.CONFIG_KEY));
  Mockito.verify(mockJacksonConfigManager).set(ArgumentMatchers.eq(CoordinatorCompactionConfig.CONFIG_KEY), ArgumentMatchers.any(byte[].class), ArgumentMatchers.any(CoordinatorCompactionConfig.class), ArgumentMatchers.any());
  Mockito.verifyNoMoreInteractions(mockJacksonConfigManager);
  Mockito.verify(mockSqlSegmentsMetadataManager).retrieveAllDataSourceNames();
  Mockito.verifyNoMoreInteractions(mockSqlSegmentsMetadataManager);
}
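In essence, the duty drops compaction configs whose datasource no longer exists and writes back the remainder. A minimal sketch of that filtering step is shown below, assuming DataSourceCompactionConfig.getDataSource() and plain Java streams; this is not the duty's actual implementation.

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.druid.server.coordinator.DataSourceCompactionConfig;

public class RemoveInactiveSketch {
  // Hypothetical helper: keep only configs whose datasource is still known to the metadata store.
  static List<DataSourceCompactionConfig> removeInactive(
      List<DataSourceCompactionConfig> currentConfigs,
      Set<String> activeDatasourceNames
  ) {
    return currentConfigs.stream()
                         .filter(config -> activeDatasourceNames.contains(config.getDataSource()))
                         .collect(Collectors.toList());
  }
}

Applied to the test above, this leaves only activeDatasourceConfig, which is exactly what the newConfigCaptor assertions check.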
Use of org.apache.druid.java.util.emitter.service.ServiceEventBuilder in project druid by druid-io.
From the class KillCompactionConfigTest, method testRunDoNothingIfCurrentConfigIsEmpty:
@Test
public void testRunDoNothingIfCurrentConfigIsEmpty() {
  Mockito.when(mockDruidCoordinatorRuntimeParams.getEmitter()).thenReturn(mockServiceEmitter);
  // Set current compaction config to an empty compaction config
  Mockito.when(mockConnector.lookup(ArgumentMatchers.anyString(), ArgumentMatchers.eq("name"), ArgumentMatchers.eq("payload"), ArgumentMatchers.eq(CoordinatorCompactionConfig.CONFIG_KEY))).thenReturn(null);
  Mockito.when(mockJacksonConfigManager.convertByteToConfig(ArgumentMatchers.eq(null), ArgumentMatchers.eq(CoordinatorCompactionConfig.class), ArgumentMatchers.eq(CoordinatorCompactionConfig.empty()))).thenReturn(CoordinatorCompactionConfig.empty());
  TestDruidCoordinatorConfig druidCoordinatorConfig = new TestDruidCoordinatorConfig(null, null, null, new Duration("PT5S"), null, null, null, null, null, null, null, new Duration("PT6S"), null, null, null, null, 10, null);
  killCompactionConfig = new KillCompactionConfig(druidCoordinatorConfig, mockSqlSegmentsMetadataManager, mockJacksonConfigManager, mockConnector, mockConnectorConfig);
  killCompactionConfig.run(mockDruidCoordinatorRuntimeParams);
  Mockito.verifyNoInteractions(mockSqlSegmentsMetadataManager);
  final ArgumentCaptor<ServiceEventBuilder> emittedEventCaptor = ArgumentCaptor.forClass(ServiceEventBuilder.class);
  Mockito.verify(mockServiceEmitter).emit(emittedEventCaptor.capture());
  Assert.assertEquals(KillCompactionConfig.COUNT_METRIC, emittedEventCaptor.getValue().build(ImmutableMap.of()).toMap().get("metric"));
  Assert.assertEquals(0, emittedEventCaptor.getValue().build(ImmutableMap.of()).toMap().get("value"));
  Mockito.verify(mockJacksonConfigManager).convertByteToConfig(ArgumentMatchers.eq(null), ArgumentMatchers.eq(CoordinatorCompactionConfig.class), ArgumentMatchers.eq(CoordinatorCompactionConfig.empty()));
  Mockito.verify(mockConnector).lookup(ArgumentMatchers.anyString(), ArgumentMatchers.eq("name"), ArgumentMatchers.eq("payload"), ArgumentMatchers.eq(CoordinatorCompactionConfig.CONFIG_KEY));
  Mockito.verifyNoMoreInteractions(mockJacksonConfigManager);
}
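Both KillCompactionConfig tests assert on the emitted count by rebuilding the captured ServiceEventBuilder with an empty dimension map. On the emitting side this is the same builder pattern shown in the InfluxdbEmitterTest above; a minimal sketch of how a coordinator duty could report such a count follows, where the emitCount helper and its metricName parameter are illustrative (the duty itself uses its KillCompactionConfig.COUNT_METRIC constant).

import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;

public class EmitCountSketch {
  // Hypothetical helper: emit how many compaction configs were removed (0 when the current config is empty).
  static void emitCount(ServiceEmitter emitter, String metricName, int removedCount) {
    emitter.emit(ServiceMetricEvent.builder().build(metricName, removedCount));
  }
}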
Use of org.apache.druid.java.util.emitter.service.ServiceEventBuilder in project druid by druid-io.
From the class HistoricalMetricsMonitorTest, method testSimple:
@Test
public void testSimple() {
  final long size = 5;
  final String dataSource = "dataSource";
  final DataSegment dataSegment = new DataSegment(dataSource, Intervals.of("2014/2015"), "version", ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), null, 1, size);
  final long maxSize = 10;
  final int priority = 111;
  final String tier = "tier";
  EasyMock.expect(druidServerConfig.getMaxSize()).andReturn(maxSize).once();
  EasyMock.expect(segmentLoadDropMgr.getPendingDeleteSnapshot()).andReturn(ImmutableList.of(dataSegment)).once();
  EasyMock.expect(druidServerConfig.getTier()).andReturn(tier).once();
  EasyMock.expect(druidServerConfig.getPriority()).andReturn(priority).once();
  EasyMock.expect(segmentManager.getDataSourceSizes()).andReturn(ImmutableMap.of(dataSource, size));
  EasyMock.expect(druidServerConfig.getTier()).andReturn(tier).once();
  EasyMock.expect(druidServerConfig.getPriority()).andReturn(priority).once();
  EasyMock.expect(druidServerConfig.getMaxSize()).andReturn(maxSize).times(2);
  EasyMock.expect(segmentManager.getDataSourceCounts()).andReturn(ImmutableMap.of(dataSource, 1L));
  EasyMock.expect(druidServerConfig.getTier()).andReturn(tier).once();
  EasyMock.expect(druidServerConfig.getPriority()).andReturn(priority).once();
  final HistoricalMetricsMonitor monitor = new HistoricalMetricsMonitor(druidServerConfig, segmentManager, segmentLoadDropMgr);
  final Capture<ServiceEventBuilder<ServiceMetricEvent>> eventCapture = EasyMock.newCapture(CaptureType.ALL);
  serviceEmitter.emit(EasyMock.capture(eventCapture));
  EasyMock.expectLastCall().times(5);
  EasyMock.replay(druidServerConfig, segmentManager, segmentLoadDropMgr, serviceEmitter);
  monitor.doMonitor(serviceEmitter);
  EasyMock.verify(druidServerConfig, segmentManager, segmentLoadDropMgr, serviceEmitter);
  final String host = "host";
  final String service = "service";
  Assert.assertTrue(eventCapture.hasCaptured());
  final List<Map<String, Object>> events = Lists.transform(eventCapture.getValues(), new Function<ServiceEventBuilder<ServiceMetricEvent>, Map<String, Object>>() {
    @Nullable
    @Override
    public Map<String, Object> apply(@Nullable ServiceEventBuilder<ServiceMetricEvent> input) {
      final HashMap<String, Object> map = new HashMap<>(input.build(service, host).toMap());
      Assert.assertNotNull(map.remove("feed"));
      Assert.assertNotNull(map.remove("timestamp"));
      Assert.assertNotNull(map.remove("service"));
      Assert.assertNotNull(map.remove("host"));
      return map;
    }
  });
  Assert.assertEquals(ImmutableMap.<String, Object>of("metric", "segment/max", "value", maxSize), events.get(0));
  Assert.assertEquals(ImmutableMap.<String, Object>of("dataSource", dataSource, "metric", "segment/pendingDelete", "priority", String.valueOf(priority), "tier", tier, "value", dataSegment.getSize()), events.get(1));
  Assert.assertEquals(ImmutableMap.<String, Object>of("metric", "segment/used", "value", dataSegment.getSize(), "tier", tier, "priority", String.valueOf(priority), "dataSource", dataSource), events.get(2));
  Assert.assertEquals(ImmutableMap.<String, Object>of("metric", "segment/usedPercent", "value", dataSegment.getSize() * 1.0D / maxSize, "tier", tier, "priority", String.valueOf(priority), "dataSource", dataSource), events.get(3));
  Assert.assertEquals(ImmutableMap.<String, Object>of("metric", "segment/count", "value", 1L, "tier", tier, "priority", String.valueOf(priority), "dataSource", dataSource), events.get(4));
}
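The monitor is expected to emit five metric events, most of them carrying the tier, priority, and dataSource dimensions that the transform above reads back out of toMap(). Here is a minimal sketch of building one such per-datasource event with ServiceMetricEvent.Builder, assuming its setDimension chaining; the buildUsedEvent helper is hypothetical and is not HistoricalMetricsMonitor's code.

import org.apache.druid.java.util.emitter.service.ServiceEventBuilder;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;

public class MonitorEventSketch {
  // Hypothetical helper: a "segment/used" event carrying the same dimensions the test asserts on.
  static ServiceEventBuilder<ServiceMetricEvent> buildUsedEvent(String dataSource, String tier, int priority, long usedBytes) {
    return ServiceMetricEvent.builder()
                             .setDimension("dataSource", dataSource)
                             .setDimension("tier", tier)
                             .setDimension("priority", String.valueOf(priority))
                             .build("segment/used", usedBytes);
  }
}

Rebuilding such a builder with input.build(service, host).toMap(), as the test's transform does, yields the flat map compared against events.get(2).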
Use of org.apache.druid.java.util.emitter.service.ServiceEventBuilder in project druid by druid-io.
From the class StorageLocationTest, method testMaybeReserve:
@Test
public void testMaybeReserve() throws IOException {
  ServiceEmitter emitter = Mockito.mock(ServiceEmitter.class);
  ArgumentCaptor<ServiceEventBuilder> argumentCaptor = ArgumentCaptor.forClass(ServiceEventBuilder.class);
  EmittingLogger.registerEmitter(emitter);
  File dir = temporaryFolder.newFolder();
  long expectedAvail = 1000L;
  StorageLocation loc = new StorageLocation(dir, expectedAvail, null);
  verifyLoc(expectedAvail, loc);
  final DataSegment secondSegment = makeSegment("2012-01-02/2012-01-03", 23);
  loc.maybeReserve("test1", makeSegment("2012-01-01/2012-01-02", 10));
  expectedAvail -= 10;
  verifyLoc(expectedAvail, loc);
  loc.maybeReserve("test1", makeSegment("2012-01-01/2012-01-02", 10));
  verifyLoc(expectedAvail, loc);
  loc.maybeReserve("test2", secondSegment);
  expectedAvail -= 23;
  verifyLoc(expectedAvail, loc);
  loc.removeSegmentDir(new File(dir, "test1"), makeSegment("2012-01-01/2012-01-02", 10));
  expectedAvail += 10;
  verifyLoc(expectedAvail, loc);
  loc.maybeReserve("test3", makeSegment("2012-01-01/2012-01-02", 999));
  expectedAvail -= 999;
  verifyLoc(expectedAvail, loc);
  Mockito.verify(emitter).emit(argumentCaptor.capture());
  AlertBuilder alertBuilder = (AlertBuilder) argumentCaptor.getValue();
  String description = alertBuilder.build(ImmutableMap.of()).getDescription();
  Assert.assertNotNull(description);
  Assert.assertTrue(description, description.contains("Please increase druid.segmentCache.locations maxSize param"));
}
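The sequence above pins down maybeReserve's accounting: reserving the same path twice does not double-count, freeing a segment directory returns its bytes, and a reservation that exceeds the remaining capacity still goes through but raises an alert, which EmittingLogger routes to the registered ServiceEmitter as an AlertBuilder. A minimal standalone sketch of that bookkeeping follows, under a simplified API; it is not StorageLocation's actual implementation.

import java.util.HashMap;
import java.util.Map;

public class StorageLocationSketch {
  private final Map<String, Long> reservations = new HashMap<>();  // path -> reserved bytes
  private long availableBytes;

  StorageLocationSketch(long maxSizeBytes) {
    this.availableBytes = maxSizeBytes;
  }

  // Hypothetical version of maybeReserve: idempotent per path, always reserves, warns when over capacity.
  void maybeReserve(String path, long segmentSizeBytes) {
    if (reservations.containsKey(path)) {
      return;  // already reserved for this path; do not double-count
    }
    reservations.put(path, segmentSizeBytes);
    availableBytes -= segmentSizeBytes;
    if (availableBytes < 0) {
      // In the real class this becomes an alert asking to increase druid.segmentCache.locations maxSize.
      System.err.println("Segment cache over capacity by " + (-availableBytes) + " bytes");
    }
  }

  void release(String path) {
    Long released = reservations.remove(path);
    if (released != null) {
      availableBytes += released;
    }
  }

  long availableSizeBytes() {
    return availableBytes;
  }
}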