Example usage of org.apache.druid.server.QueryStats in the druid project (druid-io):
class DefaultRequestLogEventTest, method testDefaultRequestLogEventToMapSQL.
@Test
public void testDefaultRequestLogEventToMapSQL() {
  // Fixed inputs for a SQL request log line.
  final String feedName = "test";
  final DateTime requestTime = DateTimes.of(2019, 12, 12, 3, 1);
  final String serviceName = "druid-service";
  final String remoteHost = "127.0.0.1";
  final String sqlText = "select * from 1337";
  final QueryStats stats = new QueryStats(
      ImmutableMap.of("sqlQuery/time", 13L, "sqlQuery/bytes", 10L, "success", true, "identity", "allowAll")
  );

  // Build the event from a SQL log line (empty SQL query context).
  final RequestLogLine sqlLine = RequestLogLine.forSql(sqlText, ImmutableMap.of(), requestTime, remoteHost, stats);
  final DefaultRequestLogEvent event = new DefaultRequestLogEvent(
      ImmutableMap.of("service", serviceName, "host", remoteHost),
      feedName,
      sqlLine
  );

  // The map form must expose every field of the log line plus the event metadata.
  final Map<String, Object> expectedMap = new HashMap<>();
  expectedMap.put("feed", feedName);
  expectedMap.put("timestamp", requestTime);
  expectedMap.put("service", serviceName);
  expectedMap.put("host", remoteHost);
  expectedMap.put("sql", sqlText);
  expectedMap.put("sqlQueryContext", ImmutableMap.of());
  expectedMap.put("remoteAddr", remoteHost);
  expectedMap.put("queryStats", stats);

  Assert.assertEquals(expectedMap, event.toMap());
}
Example usage of org.apache.druid.server.QueryStats in the druid project (druid-io):
class DefaultRequestLogEventTest, method testDefaultRequestLogEventToMap.
@Test
public void testDefaultRequestLogEventToMap() {
  // Fixed inputs for a native-query request log line.
  final String feed = "test";
  final DateTime timestamp = DateTimes.of(2019, 12, 12, 3, 1);
  final String service = "druid-service";
  final String host = "127.0.0.1";

  // Use the parameterized Query<?> type rather than the raw type to avoid
  // unchecked-generics warnings (the concrete query is a TimeseriesQuery).
  final Query<?> query = new TimeseriesQuery(
      new TableDataSource("dummy"),
      new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))),
      true,
      VirtualColumns.EMPTY,
      null,
      Granularities.ALL,
      ImmutableList.of(),
      ImmutableList.of(),
      5,
      ImmutableMap.of("key", "value")
  );
  final QueryStats queryStats = new QueryStats(
      ImmutableMap.of("query/time", 13L, "query/bytes", 10L, "success", true, "identity", "allowAll")
  );

  final RequestLogLine nativeLine = RequestLogLine.forNative(query, timestamp, host, queryStats);
  final DefaultRequestLogEvent defaultRequestLogEvent = new DefaultRequestLogEvent(
      ImmutableMap.of("service", service, "host", host),
      feed,
      nativeLine
  );

  // The map form must expose every field of the log line plus the event metadata.
  final Map<String, Object> expected = new HashMap<>();
  expected.put("feed", feed);
  expected.put("timestamp", timestamp);
  expected.put("service", service);
  expected.put("host", host);
  expected.put("query", query);
  expected.put("remoteAddr", host);
  expected.put("queryStats", queryStats);

  Assert.assertEquals(expected, defaultRequestLogEvent.toMap());
}
Example usage of org.apache.druid.server.QueryStats in the druid project (druid-io):
class FilteredRequestLoggerTest, method testNotFilterAboveThresholdSkipSegmentMetadata.
@Test
public void testNotFilterAboveThresholdSkipSegmentMetadata() throws IOException {
  // Strict delegate mock: if the filtered logger forwards either call, the
  // stubbed IOException is thrown and the test fails. Queries whose type is in
  // the skip list (segmentMetadata) must NOT be forwarded even above threshold.
  final RequestLogger innerLogger = EasyMock.createStrictMock(RequestLogger.class);
  innerLogger.logNativeQuery(EasyMock.anyObject());
  EasyMock.expectLastCall().andThrow(new IOException());
  innerLogger.logSqlQuery(EasyMock.anyObject());
  EasyMock.expectLastCall().andThrow(new IOException());

  final FilteredRequestLoggerProvider.FilteredRequestLogger filteredLogger =
      new FilteredRequestLoggerProvider.FilteredRequestLogger(
          innerLogger,
          1000,
          2000,
          ImmutableList.of(Query.SEGMENT_METADATA)
      );

  // Both lines report times well above the thresholds but carry a segmentMetadata query.
  final RequestLogLine nativeLine = EasyMock.createMock(RequestLogLine.class);
  EasyMock.expect(nativeLine.getQueryStats()).andReturn(new QueryStats(ImmutableMap.of("query/time", 10000))).once();
  EasyMock.expect(nativeLine.getQuery()).andReturn(testSegmentMetadataQuery).once();

  final RequestLogLine sqlLine = EasyMock.createMock(RequestLogLine.class);
  EasyMock.expect(sqlLine.getQueryStats()).andReturn(new QueryStats(ImmutableMap.of("sqlQuery/time", 10000))).once();
  EasyMock.expect(sqlLine.getQuery()).andReturn(testSegmentMetadataQuery).once();

  EasyMock.replay(nativeLine, sqlLine, innerLogger);

  // Must complete without the delegate's IOException being raised.
  filteredLogger.logNativeQuery(nativeLine);
  filteredLogger.logSqlQuery(sqlLine);
}
Aggregations