Use of com.google.common.collect.MapMaker in project druid by druid-io.
The class QueryResource, method doPost.
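All of the snippets on this page share one idiom: Guava's MapMaker as a builder for ConcurrentMap instances. A minimal standalone sketch of that idiom (the class name and keys here are illustrative, not taken from Druid):

import com.google.common.collect.MapMaker;
import java.util.concurrent.ConcurrentMap;

public class MapMakerSketch
{
  public static void main(String[] args)
  {
    // makeMap() returns a thread-safe ConcurrentMap; concurrent put/get
    // calls need no external synchronization.
    ConcurrentMap<String, Object> responseContext = new MapMaker().makeMap();
    responseContext.put("query/time", 42L);
    System.out.println(responseContext.get("query/time"));
  }
}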
@POST
@Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
@Consumes({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE, APPLICATION_SMILE})
public Response doPost(
    InputStream in,
    @QueryParam("pretty") String pretty,
    // used to get request content-type, remote address and AuthorizationInfo
    @Context final HttpServletRequest req
) throws IOException
{
  final long start = System.currentTimeMillis();
  Query query = null;
  QueryToolChest toolChest = null;
  String queryId = null;
  final ResponseContext context = createContext(req.getContentType(), pretty != null);
  final String currThreadName = Thread.currentThread().getName();
  try {
    query = context.getObjectMapper().readValue(in, Query.class);
    queryId = query.getId();
    if (queryId == null) {
      queryId = UUID.randomUUID().toString();
      query = query.withId(queryId);
    }
    if (query.getContextValue(QueryContextKeys.TIMEOUT) == null) {
      query = query.withOverriddenContext(
          ImmutableMap.of(QueryContextKeys.TIMEOUT, config.getMaxIdleTime().toStandardDuration().getMillis())
      );
    }
    toolChest = warehouse.getToolChest(query);
    Thread.currentThread().setName(
        String.format("%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource().getNames(), queryId)
    );
    if (log.isDebugEnabled()) {
      log.debug("Got query [%s]", query);
    }
    if (authConfig.isEnabled()) {
      // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424
      AuthorizationInfo authorizationInfo = (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN);
      if (authorizationInfo != null) {
        for (String dataSource : query.getDataSource().getNames()) {
          Access authResult = authorizationInfo.isAuthorized(new Resource(dataSource, ResourceType.DATASOURCE), Action.READ);
          if (!authResult.isAllowed()) {
            return Response.status(Response.Status.FORBIDDEN).header("Access-Check-Result", authResult).build();
          }
        }
      } else {
        throw new ISE("WTF?! Security is enabled but no authorization info found in the request");
      }
    }
    String prevEtag = req.getHeader(HDR_IF_NONE_MATCH);
    if (prevEtag != null) {
      query = query.withOverriddenContext(ImmutableMap.of(HDR_IF_NONE_MATCH, prevEtag));
    }
    final Map<String, Object> responseContext = new MapMaker().makeMap();
    final Sequence res = query.run(texasRanger, responseContext);
    if (prevEtag != null && prevEtag.equals(responseContext.get(HDR_ETAG))) {
      return Response.notModified().build();
    }
    final Sequence results;
    if (res == null) {
      results = Sequences.empty();
    } else {
      results = res;
    }
    final Yielder yielder = Yielders.each(results);
    try {
      final Query theQuery = query;
      final QueryToolChest theToolChest = toolChest;
      final ObjectWriter jsonWriter = context.newOutputWriter();
      Response.ResponseBuilder builder = Response.ok(
          new StreamingOutput()
          {
            @Override
            public void write(OutputStream outputStream) throws IOException, WebApplicationException
            {
              try {
                // json serializer will always close the yielder
                CountingOutputStream os = new CountingOutputStream(outputStream);
                jsonWriter.writeValue(os, yielder);
                // Some types of OutputStream suppress flush errors in the .close() method.
                os.flush();
                os.close();
                successfulQueryCount.incrementAndGet();
                final long queryTime = System.currentTimeMillis() - start;
                emitter.emit(
                    DruidMetrics.makeQueryTimeMetric(theToolChest, jsonMapper, theQuery, req.getRemoteAddr())
                                .setDimension("success", "true")
                                .build("query/time", queryTime)
                );
                emitter.emit(
                    DruidMetrics.makeQueryTimeMetric(theToolChest, jsonMapper, theQuery, req.getRemoteAddr())
                                .build("query/bytes", os.getCount())
                );
                requestLogger.log(
                    new RequestLogLine(
                        new DateTime(start),
                        req.getRemoteAddr(),
                        theQuery,
                        new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "query/bytes", os.getCount(), "success", true))
                    )
                );
              } finally {
                Thread.currentThread().setName(currThreadName);
              }
            }
          },
          context.getContentType()
      ).header("X-Druid-Query-Id", queryId);
      if (responseContext.get(HDR_ETAG) != null) {
        builder.header(HDR_ETAG, responseContext.get(HDR_ETAG));
        responseContext.remove(HDR_ETAG);
      }
      // Limit the response-context header, see https://github.com/druid-io/druid/issues/2331
      // Note that Response.ResponseBuilder.header(String key, Object value).build() calls value.toString()
      // and encodes the string using ASCII, so 1 char is = 1 byte
      String responseCtxString = jsonMapper.writeValueAsString(responseContext);
      if (responseCtxString.length() > RESPONSE_CTX_HEADER_LEN_LIMIT) {
        log.warn("Response Context truncated for id [%s] . Full context is [%s].", queryId, responseCtxString);
        responseCtxString = responseCtxString.substring(0, RESPONSE_CTX_HEADER_LEN_LIMIT);
      }
      return builder.header("X-Druid-Response-Context", responseCtxString).build();
    } catch (Exception e) {
      // make sure to close yielder if anything happened before starting to serialize the response.
      yielder.close();
      throw Throwables.propagate(e);
    } finally {
      // do not close yielder here, since we do not want to close the yielder prior to
      // StreamingOutput having iterated over all the results
    }
  } catch (QueryInterruptedException e) {
    try {
      log.warn(e, "Exception while processing queryId [%s]", queryId);
      interruptedQueryCount.incrementAndGet();
      final long queryTime = System.currentTimeMillis() - start;
      emitter.emit(
          DruidMetrics.makeQueryTimeMetric(toolChest, jsonMapper, query, req.getRemoteAddr())
                      .setDimension("success", "false")
                      .build("query/time", queryTime)
      );
      requestLogger.log(
          new RequestLogLine(
              new DateTime(start),
              req.getRemoteAddr(),
              query,
              new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "success", false, "interrupted", true, "reason", e.toString()))
          )
      );
    } catch (Exception e2) {
      log.error(e2, "Unable to log query [%s]!", query);
    }
    return context.gotError(e);
  } catch (Exception e) {
    // Input stream has already been consumed by the json object mapper if query == null
    final String queryString = query == null ? "unparsable query" : query.toString();
    log.warn(e, "Exception occurred on request [%s]", queryString);
    failedQueryCount.incrementAndGet();
    try {
      final long queryTime = System.currentTimeMillis() - start;
      emitter.emit(
          DruidMetrics.makeQueryTimeMetric(toolChest, jsonMapper, query, req.getRemoteAddr())
                      .setDimension("success", "false")
                      .build("query/time", queryTime)
      );
      requestLogger.log(
          new RequestLogLine(
              new DateTime(start),
              req.getRemoteAddr(),
              query,
              new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "success", false, "exception", e.toString()))
          )
      );
    } catch (Exception e2) {
      log.error(e2, "Unable to log query [%s]!", queryString);
    }
    log.makeAlert(e, "Exception handling request")
       .addData("exception", e.toString())
       .addData("query", queryString)
       .addData("peer", req.getRemoteAddr())
       .emit();
    return context.gotError(e);
  } finally {
    Thread.currentThread().setName(currThreadName);
  }
}
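Note that the response context is built with new MapMaker().makeMap() rather than a plain HashMap; presumably this is because the sequence returned by query.run(...) can be populated by downstream runners on other threads (ETag values, missing-segment reports) while this servlet thread reads the map and serializes it into the X-Druid-Response-Context header, so a concurrent map is the safe choice.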
Use of com.google.common.collect.MapMaker in project druid by druid-io.
The class RetryQueryRunnerTest, method testRunWithMissingSegments.
@Test
public void testRunWithMissingSegments() throws Exception
{
  Map<String, Object> context = new MapMaker().makeMap();
  context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
  RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(
      new QueryRunner<Result<TimeseriesResultValue>>()
      {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query query, Map context)
        {
          ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
          return Sequences.empty();
        }
      },
      (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      new RetryQueryRunnerConfig()
      {
        @Override
        public int getNumTries()
        {
          return 0;
        }

        @Override
        public boolean isReturnPartialResults()
        {
          return true;
        }
      },
      jsonMapper
  );
  Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TimeseriesResultValue>>newArrayList());
  Assert.assertTrue("Should have one entry in the list of missing segments", ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size() == 1);
  Assert.assertTrue("Should return an empty sequence as a result", ((List) actualResults).size() == 0);
}
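With getNumTries() returning 0 and partial results allowed, the runner gives up immediately: the missing segment stays recorded in the shared context map, and the caller receives an empty sequence rather than an exception.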
Use of com.google.common.collect.MapMaker in project druid by druid-io.
The class RetryQueryRunnerTest, method testException.
@Test(expected = SegmentMissingException.class)
public void testException() throws Exception
{
  Map<String, Object> context = new MapMaker().makeMap();
  context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
  RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(
      new QueryRunner<Result<TimeseriesResultValue>>()
      {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> context)
        {
          ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
          return Sequences.empty();
        }
      },
      (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      new RetryQueryRunnerConfig()
      {
        private int numTries = 1;
        private boolean returnPartialResults = false;

        public int getNumTries()
        {
          return numTries;
        }

        public boolean returnPartialResults()
        {
          return returnPartialResults;
        }
      },
      jsonMapper
  );
  Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TimeseriesResultValue>>newArrayList());
  Assert.assertTrue("Should have one entry in the list of missing segments", ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size() == 1);
}
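Unlike the previous test, this configuration does not allow partial results, so once the retry budget is exhausted with segments still reported missing, consuming the resulting sequence is expected to throw SegmentMissingException (hence the expected attribute on the @Test annotation).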
Use of com.google.common.collect.MapMaker in project druid by druid-io.
The class RetryQueryRunnerTest, method testRetry.
@Test
public void testRetry() throws Exception
{
  Map<String, Object> context = new MapMaker().makeMap();
  context.put("count", 0);
  context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
  RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(
      new QueryRunner<Result<TimeseriesResultValue>>()
      {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> context)
        {
          if ((int) context.get("count") == 0) {
            ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
            context.put("count", 1);
            return Sequences.empty();
          } else {
            return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
          }
        }
      },
      (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      new RetryQueryRunnerConfig()
      {
        private int numTries = 1;
        private boolean returnPartialResults = true;

        public int getNumTries()
        {
          return numTries;
        }

        public boolean returnPartialResults()
        {
          return returnPartialResults;
        }
      },
      jsonMapper
  );
  Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TimeseriesResultValue>>newArrayList());
  Assert.assertTrue("Should return a list with one element", ((List) actualResults).size() == 1);
  Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size() == 0);
}
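The three tests above exercise the same contract: the runner re-executes the query while the shared context map reports missing segments, and either tolerates or rejects a partial result. A simplified sketch of that retry loop follows; it is an illustration under assumed names (runWithRetries, MISSING_SEGMENTS_KEY as a plain string), not Druid's actual RetryQueryRunner, which also rewrites the retried query to target only the missing segments.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

public class RetrySketch
{
  static final String MISSING_SEGMENTS_KEY = "missingSegments";

  // Re-runs `attempt` until no segments are reported missing, up to `numTries` extra attempts.
  static <T> List<T> runWithRetries(
      Function<Map<String, Object>, List<T>> attempt,
      int numTries,
      boolean returnPartialResults
  )
  {
    Map<String, Object> context = new ConcurrentHashMap<>();
    context.put(MISSING_SEGMENTS_KEY, new ArrayList<String>());
    List<T> results = attempt.apply(context);
    for (int i = 0; i < numTries && !missing(context).isEmpty(); i++) {
      context.put(MISSING_SEGMENTS_KEY, new ArrayList<String>()); // reset before each retry
      results = attempt.apply(context);
    }
    if (!missing(context).isEmpty() && !returnPartialResults) {
      // Druid throws SegmentMissingException at this point
      throw new IllegalStateException("segments still missing after retries");
    }
    return results;
  }

  private static List<?> missing(Map<String, Object> context)
  {
    return (List<?>) context.get(MISSING_SEGMENTS_KEY);
  }
}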
Use of com.google.common.collect.MapMaker in project DirectMemory by raffaeleguidi.
The class MicroBenchmarks, method pumpOffHeap.
/*
 * Reference pattern kept here by the author: calling a blocking method with a timeout.
 *
 * ExecutorService executor = Executors.newCachedThreadPool();
 * Callable<Object> task = new Callable<Object>() {
 *   public Object call() {
 *     return something.blockingMethod();
 *   }
 * };
 * Future<Object> future = executor.submit(task);
 * try {
 *   Object result = future.get(5, TimeUnit.SECONDS);
 * } catch (TimeoutException ex) {
 *   // handle the timeout
 * } finally {
 *   future.cancel(true); // may or may not desire this; Future.cancel takes a boolean
 * }
 */
private void pumpOffHeap(int ops, byte[] payload) {
  ConcurrentMap<String, ByteBuffer> test = new MapMaker().concurrencyLevel(4).makeMap();
  logger.info(Ram.inMb(ops * payload.length) + " in " + ops + " slices to store");
  ByteBuffer bulk = ByteBuffer.allocateDirect(ops * payload.length);
  double started = System.currentTimeMillis();
  for (int i = 0; i < ops; i++) {
    bulk.position(i * payload.length);
    final ByteBuffer buf = bulk.duplicate();
    buf.put(payload);
    test.put("test-" + i, buf);
  }
  double finished = System.currentTimeMillis();
  logger.info("done in " + (finished - started) / 1000 + " seconds");
  for (ByteBuffer buf : test.values()) {
    buf.clear();
  }
}
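Each bulk.duplicate() creates a new ByteBuffer view over the same off-heap allocation, so the MapMaker-built map ends up holding ops slices of one direct buffer without copying the backing memory. The final buf.clear() loop only resets each buffer's position and limit; the direct memory itself is reclaimed only when the buffers become garbage-collected.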