Search in sources:

Example 1 with SqlLifecycle

use of org.apache.druid.sql.SqlLifecycle in project druid by druid-io.

In the class BaseCalciteQueryTest, the method analyzeResources:

public Set<ResourceAction> analyzeResources(PlannerConfig plannerConfig, String sql, AuthenticationResult authenticationResult) {
    SqlLifecycleFactory lifecycleFactory = getSqlLifecycleFactory(plannerConfig, createOperatorTable(), createMacroTable(), CalciteTests.TEST_AUTHORIZER_MAPPER, queryJsonMapper);
    SqlLifecycle lifecycle = lifecycleFactory.factorize();
    lifecycle.initialize(sql, ImmutableMap.of());
    return lifecycle.runAnalyzeResources(authenticationResult).getResourceActions();
}
Also used : SqlLifecycle(org.apache.druid.sql.SqlLifecycle) SqlLifecycleFactory(org.apache.druid.sql.SqlLifecycleFactory)
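
A quick usage sketch of the helper above, hedged: it assumes a test class extending BaseCalciteQueryTest with the usual JUnit, Guava, and org.apache.druid.server.security imports, and a hypothetical datasource named "foo"; the expected ResourceAction set is illustrative rather than copied from the Druid test suite.

@Test
public void testSelectRequiresReadOnDatasource() {
    // "SELECT COUNT(*) FROM foo" and the datasource name are illustrative assumptions.
    Set<ResourceAction> actions = analyzeResources(
        new PlannerConfig(),
        "SELECT COUNT(*) FROM foo",
        CalciteTests.REGULAR_USER_AUTH_RESULT
    );
    // The planner should require READ access on the queried datasource and nothing else.
    Assert.assertEquals(
        ImmutableSet.of(new ResourceAction(new Resource("foo", ResourceType.DATASOURCE), Action.READ)),
        actions
    );
}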

Example 2 with SqlLifecycle

use of org.apache.druid.sql.SqlLifecycle in project druid by druid-io.

In the class SqlResource, the method doPost:

@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response doPost(final SqlQuery sqlQuery, @Context final HttpServletRequest req) throws IOException {
    final SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
    final String sqlQueryId = lifecycle.initialize(sqlQuery.getQuery(), sqlQuery.getContext());
    final String remoteAddr = req.getRemoteAddr();
    final String currThreadName = Thread.currentThread().getName();
    try {
        Thread.currentThread().setName(StringUtils.format("sql[%s]", sqlQueryId));
        lifecycle.setParameters(sqlQuery.getParameterList());
        lifecycle.validateAndAuthorize(req);
        // must add after lifecycle is authorized
        sqlLifecycleManager.add(sqlQueryId, lifecycle);
        lifecycle.plan();
        final SqlRowTransformer rowTransformer = lifecycle.createRowTransformer();
        final Sequence<Object[]> sequence = lifecycle.execute();
        final Yielder<Object[]> yielder0 = Yielders.each(sequence);
        try {
            final Response.ResponseBuilder responseBuilder = Response.ok((StreamingOutput) outputStream -> {
                Exception e = null;
                CountingOutputStream os = new CountingOutputStream(outputStream);
                Yielder<Object[]> yielder = yielder0;
                try (final ResultFormat.Writer writer = sqlQuery.getResultFormat().createFormatter(os, jsonMapper)) {
                    writer.writeResponseStart();
                    if (sqlQuery.includeHeader()) {
                        writer.writeHeader(rowTransformer.getRowType(), sqlQuery.includeTypesHeader(), sqlQuery.includeSqlTypesHeader());
                    }
                    while (!yielder.isDone()) {
                        final Object[] row = yielder.get();
                        writer.writeRowStart();
                        for (int i = 0; i < rowTransformer.getFieldList().size(); i++) {
                            final Object value = rowTransformer.transform(row, i);
                            writer.writeRowField(rowTransformer.getFieldList().get(i), value);
                        }
                        writer.writeRowEnd();
                        yielder = yielder.next(null);
                    }
                    writer.writeResponseEnd();
                } catch (Exception ex) {
                    e = ex;
                    log.error(ex, "Unable to send SQL response [%s]", sqlQueryId);
                    throw new RuntimeException(ex);
                } finally {
                    yielder.close();
                    endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, os.getCount());
                }
            }).header(SQL_QUERY_ID_RESPONSE_HEADER, sqlQueryId);
            if (sqlQuery.includeHeader()) {
                responseBuilder.header(SQL_HEADER_RESPONSE_HEADER, SQL_HEADER_VALUE);
            }
            return responseBuilder.build();
        } catch (Throwable e) {
            // make sure to close yielder if anything happened before starting to serialize the response.
            yielder0.close();
            throw new RuntimeException(e);
        }
    } catch (QueryCapacityExceededException cap) {
        endLifecycle(sqlQueryId, lifecycle, cap, remoteAddr, -1);
        return buildNonOkResponse(QueryCapacityExceededException.STATUS_CODE, cap, sqlQueryId);
    } catch (QueryUnsupportedException unsupported) {
        endLifecycle(sqlQueryId, lifecycle, unsupported, remoteAddr, -1);
        return buildNonOkResponse(QueryUnsupportedException.STATUS_CODE, unsupported, sqlQueryId);
    } catch (QueryTimeoutException timeout) {
        endLifecycle(sqlQueryId, lifecycle, timeout, remoteAddr, -1);
        return buildNonOkResponse(QueryTimeoutException.STATUS_CODE, timeout, sqlQueryId);
    } catch (SqlPlanningException | ResourceLimitExceededException e) {
        endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1);
        return buildNonOkResponse(BadQueryException.STATUS_CODE, e, sqlQueryId);
    } catch (ForbiddenException e) {
        endLifecycleWithoutEmittingMetrics(sqlQueryId, lifecycle);
        // let ForbiddenExceptionMapper handle this
        throw (ForbiddenException) serverConfig.getErrorResponseTransformStrategy().transformIfNeeded(e);
    } catch (RelOptPlanner.CannotPlanException e) {
        endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1);
        SqlPlanningException spe = new SqlPlanningException(SqlPlanningException.PlanningError.UNSUPPORTED_SQL_ERROR, e.getMessage());
        return buildNonOkResponse(BadQueryException.STATUS_CODE, spe, sqlQueryId);
    }
    // Calcite throws a java.lang.AssertionError, which is an Error rather than an Exception,
    // so catch Throwable here to cover everything else.
    catch (Throwable e) {
        log.warn(e, "Failed to handle query: %s", sqlQuery);
        endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1);
        return buildNonOkResponse(Status.INTERNAL_SERVER_ERROR.getStatusCode(), QueryInterruptedException.wrapIfNeeded(e), sqlQueryId);
    } finally {
        Thread.currentThread().setName(currThreadName);
    }
}
Also used : SqlRowTransformer(org.apache.druid.sql.SqlRowTransformer) StreamingOutput(javax.ws.rs.core.StreamingOutput) RelOptPlanner(org.apache.calcite.plan.RelOptPlanner) QueryTimeoutException(org.apache.druid.query.QueryTimeoutException) CountingOutputStream(com.google.common.io.CountingOutputStream) SqlPlanningException(org.apache.druid.sql.SqlPlanningException) ForbiddenException(org.apache.druid.server.security.ForbiddenException) QueryCapacityExceededException(org.apache.druid.query.QueryCapacityExceededException) QueryUnsupportedException(org.apache.druid.query.QueryUnsupportedException) SqlLifecycle(org.apache.druid.sql.SqlLifecycle) BadQueryException(org.apache.druid.query.BadQueryException) QueryCapacityExceededException(org.apache.druid.query.QueryCapacityExceededException) SqlPlanningException(org.apache.druid.sql.SqlPlanningException) ForbiddenException(org.apache.druid.server.security.ForbiddenException) SanitizableException(org.apache.druid.common.exception.SanitizableException) QueryInterruptedException(org.apache.druid.query.QueryInterruptedException) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) IOException(java.io.IOException) QueryTimeoutException(org.apache.druid.query.QueryTimeoutException) ResourceLimitExceededException(org.apache.druid.query.ResourceLimitExceededException) QueryUnsupportedException(org.apache.druid.query.QueryUnsupportedException) Response(javax.ws.rs.core.Response) ResourceLimitExceededException(org.apache.druid.query.ResourceLimitExceededException) POST(javax.ws.rs.POST) Produces(javax.ws.rs.Produces) Consumes(javax.ws.rs.Consumes)
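
For orientation, a client-side sketch of calling this endpoint over HTTP. It assumes the resource is mounted at /druid/v2/sql/ on a Broker or Router at localhost:8888 and that SQL_QUERY_ID_RESPONSE_HEADER resolves to "X-Druid-SQL-Query-Id"; neither detail is shown in the snippet above, so treat both as assumptions.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SqlPostExample {
    public static void main(String[] args) throws Exception {
        // Endpoint URL and header name are assumptions for illustration only.
        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8888/druid/v2/sql/"))
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(
                "{\"query\": \"SELECT COUNT(*) FROM foo\", \"resultFormat\": \"object\"}"))
            .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        // The id echoed in this header is what cancelQuery (Example 4) looks up via SqlLifecycleManager.
        String sqlQueryId = response.headers().firstValue("X-Druid-SQL-Query-Id").orElse(null);
        System.out.println(response.statusCode() + " " + sqlQueryId);
    }
}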

Example 3 with SqlLifecycle

use of org.apache.druid.sql.SqlLifecycle in project druid by druid-io.

In the class SqlResourceTest, the method setUp:

@Before
public void setUp() throws Exception {
    final QueryScheduler scheduler = new QueryScheduler(5, ManualQueryPrioritizationStrategy.INSTANCE, new HiLoQueryLaningStrategy(40), new ServerConfig()) {

        @Override
        public <T> Sequence<T> run(Query<?> query, Sequence<T> resultSequence) {
            return super.run(query, new LazySequence<T>(() -> {
                if (sleep) {
                    try {
                        // pretend to be a query that is waiting on results
                        Thread.sleep(500);
                    } catch (InterruptedException ignored) {
                    }
                }
                return resultSequence;
            }));
        }
    };
    executorService = MoreExecutors.listeningDecorator(Execs.multiThreaded(8, "test_sql_resource_%s"));
    walker = CalciteTests.createMockWalker(conglomerate, temporaryFolder.newFolder(), scheduler);
    final PlannerConfig plannerConfig = new PlannerConfig() {

        @Override
        public boolean shouldSerializeComplexValues() {
            return false;
        }
    };
    final DruidSchemaCatalog rootSchema = CalciteTests.createMockRootSchema(conglomerate, walker, plannerConfig, CalciteTests.TEST_AUTHORIZER_MAPPER);
    final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable();
    final ExprMacroTable macroTable = CalciteTests.createExprMacroTable();
    req = EasyMock.createStrictMock(HttpServletRequest.class);
    EasyMock.expect(req.getRemoteAddr()).andReturn(null).once();
    EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).anyTimes();
    EasyMock.expect(req.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes();
    EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).anyTimes();
    EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).anyTimes();
    req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
    EasyMock.expectLastCall().anyTimes();
    EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).anyTimes();
    EasyMock.replay(req);
    testRequestLogger = new TestRequestLogger();
    final PlannerFactory plannerFactory = new PlannerFactory(rootSchema, CalciteTests.createMockQueryMakerFactory(walker, conglomerate), operatorTable, macroTable, plannerConfig, CalciteTests.TEST_AUTHORIZER_MAPPER, CalciteTests.getJsonMapper(), CalciteTests.DRUID_SCHEMA_NAME);
    lifecycleManager = new SqlLifecycleManager() {

        @Override
        public void add(String sqlQueryId, SqlLifecycle lifecycle) {
            super.add(sqlQueryId, lifecycle);
            if (lifecycleAddLatch != null) {
                lifecycleAddLatch.countDown();
            }
        }
    };
    final ServiceEmitter emitter = new NoopServiceEmitter();
    sqlLifecycleFactory = new SqlLifecycleFactory(plannerFactory, emitter, testRequestLogger, scheduler) {

        @Override
        public SqlLifecycle factorize() {
            return new TestSqlLifecycle(plannerFactory, emitter, testRequestLogger, scheduler, System.currentTimeMillis(), System.nanoTime(), validateAndAuthorizeLatchSupplier, planLatchSupplier, executeLatchSupplier, sequenceMapFnSupplier);
        }
    };
    resource = new SqlResource(JSON_MAPPER, CalciteTests.TEST_AUTHORIZER_MAPPER, sqlLifecycleFactory, lifecycleManager, new ServerConfig());
}
Also used : SqlLifecycleManager(org.apache.druid.sql.SqlLifecycleManager) ServiceEmitter(org.apache.druid.java.util.emitter.service.ServiceEmitter) NoopServiceEmitter(org.apache.druid.server.metrics.NoopServiceEmitter) QueryScheduler(org.apache.druid.server.QueryScheduler) BaseQuery(org.apache.druid.query.BaseQuery) Query(org.apache.druid.query.Query) HiLoQueryLaningStrategy(org.apache.druid.server.scheduling.HiLoQueryLaningStrategy) SqlLifecycle(org.apache.druid.sql.SqlLifecycle) NoopServiceEmitter(org.apache.druid.server.metrics.NoopServiceEmitter) Sequence(org.apache.druid.java.util.common.guava.Sequence) LazySequence(org.apache.druid.java.util.common.guava.LazySequence) QueryInterruptedException(org.apache.druid.query.QueryInterruptedException) DruidOperatorTable(org.apache.druid.sql.calcite.planner.DruidOperatorTable) ExprMacroTable(org.apache.druid.math.expr.ExprMacroTable) TestRequestLogger(org.apache.druid.server.log.TestRequestLogger) HttpServletRequest(javax.servlet.http.HttpServletRequest) ServerConfig(org.apache.druid.server.initialization.ServerConfig) PlannerConfig(org.apache.druid.sql.calcite.planner.PlannerConfig) DruidSchemaCatalog(org.apache.druid.sql.calcite.schema.DruidSchemaCatalog) PlannerFactory(org.apache.druid.sql.calcite.planner.PlannerFactory) SqlLifecycleFactory(org.apache.druid.sql.SqlLifecycleFactory) Before(org.junit.Before)
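
The request mocking above follows EasyMock's record/replay/verify pattern. A stripped-down, standalone sketch of that pattern (the attribute key "some.attribute" is hypothetical and used only for illustration):

import javax.servlet.http.HttpServletRequest;
import org.easymock.EasyMock;

public class EasyMockRequestSketch {
    public static void main(String[] args) {
        // Record phase: declare the calls the code under test is allowed to make, in order.
        HttpServletRequest req = EasyMock.createStrictMock(HttpServletRequest.class);
        EasyMock.expect(req.getRemoteAddr()).andReturn("127.0.0.1").once();
        EasyMock.expect(req.getAttribute("some.attribute")).andReturn(null).anyTimes();

        // Replay phase: switch the mock into playback mode before handing it to the code under test.
        EasyMock.replay(req);
        System.out.println(req.getRemoteAddr());

        // Verify phase: fail if any mandatory expectation (here, the single getRemoteAddr call) was not met.
        EasyMock.verify(req);
    }
}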

Example 4 with SqlLifecycle

use of org.apache.druid.sql.SqlLifecycle in project druid by druid-io.

In the class SqlResource, the method cancelQuery:

@DELETE
@Path("{id}")
@Produces(MediaType.APPLICATION_JSON)
public Response cancelQuery(@PathParam("id") String sqlQueryId, @Context final HttpServletRequest req) {
    log.debug("Received cancel request for query [%s]", sqlQueryId);
    List<SqlLifecycle> lifecycles = sqlLifecycleManager.getAll(sqlQueryId);
    if (lifecycles.isEmpty()) {
        return Response.status(Status.NOT_FOUND).build();
    }
    Set<ResourceAction> resources = lifecycles.stream().flatMap(lifecycle -> lifecycle.getRequiredResourceActions().stream()).collect(Collectors.toSet());
    Access access = AuthorizationUtils.authorizeAllResourceActions(req, resources, authorizerMapper);
    if (access.isAllowed()) {
        // should remove only the lifecycles in the snapshot.
        sqlLifecycleManager.removeAll(sqlQueryId, lifecycles);
        lifecycles.forEach(SqlLifecycle::cancel);
        return Response.status(Status.ACCEPTED).build();
    } else {
        return Response.status(Status.FORBIDDEN).build();
    }
}
Also used : Logger(org.apache.druid.java.util.common.logger.Logger) SqlLifecycle(org.apache.druid.sql.SqlLifecycle) PathParam(javax.ws.rs.PathParam) Produces(javax.ws.rs.Produces) AuthorizerMapper(org.apache.druid.server.security.AuthorizerMapper) Inject(com.google.inject.Inject) CountingOutputStream(com.google.common.io.CountingOutputStream) BadQueryException(org.apache.druid.query.BadQueryException) Path(javax.ws.rs.Path) Yielders(org.apache.druid.java.util.common.guava.Yielders) QueryCapacityExceededException(org.apache.druid.query.QueryCapacityExceededException) HttpServletRequest(javax.servlet.http.HttpServletRequest) MediaType(javax.ws.rs.core.MediaType) Consumes(javax.ws.rs.Consumes) SqlPlanningException(org.apache.druid.sql.SqlPlanningException) ForbiddenException(org.apache.druid.server.security.ForbiddenException) Yielder(org.apache.druid.java.util.common.guava.Yielder) Status(javax.ws.rs.core.Response.Status) Nullable(javax.annotation.Nullable) DELETE(javax.ws.rs.DELETE) SanitizableException(org.apache.druid.common.exception.SanitizableException) Sequence(org.apache.druid.java.util.common.guava.Sequence) Access(org.apache.druid.server.security.Access) POST(javax.ws.rs.POST) Context(javax.ws.rs.core.Context) ServerConfig(org.apache.druid.server.initialization.ServerConfig) QueryInterruptedException(org.apache.druid.query.QueryInterruptedException) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) SqlLifecycleFactory(org.apache.druid.sql.SqlLifecycleFactory) StringUtils(org.apache.druid.java.util.common.StringUtils) Set(java.util.Set) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) SqlRowTransformer(org.apache.druid.sql.SqlRowTransformer) StreamingOutput(javax.ws.rs.core.StreamingOutput) IOException(java.io.IOException) Json(org.apache.druid.guice.annotations.Json) AuthorizationUtils(org.apache.druid.server.security.AuthorizationUtils) SqlLifecycleManager(org.apache.druid.sql.SqlLifecycleManager) Collectors(java.util.stream.Collectors) List(java.util.List) QueryTimeoutException(org.apache.druid.query.QueryTimeoutException) Response(javax.ws.rs.core.Response) ResourceAction(org.apache.druid.server.security.ResourceAction) ResourceLimitExceededException(org.apache.druid.query.ResourceLimitExceededException) Preconditions(com.google.common.base.Preconditions) RelOptPlanner(org.apache.calcite.plan.RelOptPlanner) QueryUnsupportedException(org.apache.druid.query.QueryUnsupportedException) SqlLifecycle(org.apache.druid.sql.SqlLifecycle) Access(org.apache.druid.server.security.Access) ResourceAction(org.apache.druid.server.security.ResourceAction) Path(javax.ws.rs.Path) DELETE(javax.ws.rs.DELETE) Produces(javax.ws.rs.Produces)
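
A matching client-side cancel sketch: it assumes the same /druid/v2/sql/ mount point as above, with the {id} path parameter set to the sqlQueryId returned by doPost; the id value below is a placeholder.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SqlCancelExample {
    public static void main(String[] args) throws Exception {
        // Placeholder id; in practice use the X-Druid-SQL-Query-Id returned when the query was submitted.
        String sqlQueryId = "hypothetical-query-id";
        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8888/druid/v2/sql/" + sqlQueryId))
            .DELETE()
            .build();
        HttpResponse<Void> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.discarding());
        // Per cancelQuery above: 202 Accepted on success, 404 if no lifecycle is found, 403 if not authorized.
        System.out.println(response.statusCode());
    }
}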

Aggregations

QueryInterruptedException (org.apache.druid.query.QueryInterruptedException): 3
SqlLifecycle (org.apache.druid.sql.SqlLifecycle): 3
JsonProcessingException (com.fasterxml.jackson.core.JsonProcessingException): 2
CountingOutputStream (com.google.common.io.CountingOutputStream): 2
IOException (java.io.IOException): 2
HttpServletRequest (javax.servlet.http.HttpServletRequest): 2
Consumes (javax.ws.rs.Consumes): 2
POST (javax.ws.rs.POST): 2
Produces (javax.ws.rs.Produces): 2
Response (javax.ws.rs.core.Response): 2
StreamingOutput (javax.ws.rs.core.StreamingOutput): 2
RelOptPlanner (org.apache.calcite.plan.RelOptPlanner): 2
SanitizableException (org.apache.druid.common.exception.SanitizableException): 2
Sequence (org.apache.druid.java.util.common.guava.Sequence): 2
BadQueryException (org.apache.druid.query.BadQueryException): 2
QueryCapacityExceededException (org.apache.druid.query.QueryCapacityExceededException): 2
QueryTimeoutException (org.apache.druid.query.QueryTimeoutException): 2
QueryUnsupportedException (org.apache.druid.query.QueryUnsupportedException): 2
ResourceLimitExceededException (org.apache.druid.query.ResourceLimitExceededException): 2
ServerConfig (org.apache.druid.server.initialization.ServerConfig): 2