Search in sources :

Example 11 with Bind

use of org.skife.jdbi.v2.sqlobject.Bind in project killbill by killbill.

In class EntitySqlDaoWrapperInvocationHandler, the method invokeWithAuditAndHistory:

/**
 * Executes the raw JDBC call for an {@code @Audited}-annotated Sql method, then records the
 * matching entries in the history and audit tables inside the same transaction.
 *
 * @param auditedAnnotation the {@code @Audited} annotation on the Sql method (carries the ChangeType)
 * @param method            the Sql method being invoked
 * @param args              the invocation arguments; must contain the entity ids ({@code @Bind("id")})
 *                          and an InternalCallContext
 * @return {@code null} for void methods, the raw JDBC result for batch queries, or the re-hydrated
 *         entity for single-row inserts/updates
 * @throws Throwable whatever the underlying JDBC call throws
 */
private Object invokeWithAuditAndHistory(final Audited auditedAnnotation, final Method method, final Object[] args) throws Throwable {
    final InternalCallContext contextMaybeWithoutAccountRecordId = retrieveContextFromArguments(args);
    final List<String> entityIds = retrieveEntityIdsFromArguments(method, args);
    // Guarantees entityIds is non-empty for the rest of the method
    Preconditions.checkState(!entityIds.isEmpty(), "@Audited Sql method must have entities (@Bind(\"id\")) as arguments");
    // We cannot always infer the TableName from the signature
    TableName tableName = retrieveTableNameFromArgumentsIfPossible(Arrays.asList(args));
    final ChangeType changeType = auditedAnnotation.value();
    final boolean isBatchQuery = method.getAnnotation(SqlBatch.class) != null;
    // Get the current state before deletion for the history tables
    final Map<Long, M> deletedAndUpdatedEntities = new HashMap<Long, M>();
    // Real jdbc call
    final Object obj = prof.executeWithProfiling(ProfilingFeatureType.DAO_DETAILS, getProfilingId("raw", method), new WithProfilingCallback<Object, Throwable>() {

        @Override
        public Object execute() throws Throwable {
            return executeJDBCCall(method, args);
        }
    });
    InternalCallContext context = null;
    // Retrieve record_id(s) for audit and history tables
    final List<Long> entityRecordIds = new LinkedList<Long>();
    if (changeType == ChangeType.INSERT) {
        Preconditions.checkNotNull(tableName, "Insert query should have an EntityModelDao as argument: %s", args);
        // For inserts the JDBC call itself returns the generated record id(s)
        if (isBatchQuery) {
            entityRecordIds.addAll((Collection<? extends Long>) obj);
        } else {
            entityRecordIds.add((Long) obj);
        }
        // Snowflake: account creation needs a context carrying the freshly generated accountRecordId
        if (TableName.ACCOUNT.equals(tableName)) {
            Preconditions.checkState(entityIds.size() == 1, "Bulk insert of accounts isn't supported");
            // AccountModelDao in practice
            final TimeZoneAwareEntity accountModelDao = retrieveTimeZoneAwareEntityFromArguments(args);
            context = internalCallContextFactory.createInternalCallContext(accountModelDao, entityRecordIds.get(0), contextMaybeWithoutAccountRecordId);
        }
    } else {
        // Rehydrate entry with latest state
        final List<M> retrievedEntities = sqlDao.getByIdsIncludedDeleted(entityIds, contextMaybeWithoutAccountRecordId);
        printSQLWarnings();
        for (final M entity : retrievedEntities) {
            deletedAndUpdatedEntities.put(entity.getRecordId(), entity);
            entityRecordIds.add(entity.getRecordId());
            // All entities of one call must live in the same table
            if (tableName == null) {
                tableName = entity.getTableName();
            } else {
                Preconditions.checkState(tableName == entity.getTableName(), "Entities with different TableName");
            }
        }
    }
    Preconditions.checkState(entityIds.size() == entityRecordIds.size(), "SqlDao method has %s as ids but found %s as recordIds", entityIds, entityRecordIds);
    // Context validations
    if (context != null) {
        // context was already updated, see above (createAccount code path). Just make sure we don't attempt to bulk create
        Preconditions.checkState(entityIds.size() == 1, "Bulk insert of accounts isn't supported");
    } else {
        context = contextMaybeWithoutAccountRecordId;
        // A few tables are tenant-level (or global) and legitimately have no accountRecordId
        final boolean tableWithoutAccountRecordId = tableName == TableName.TENANT || tableName == TableName.TENANT_BROADCASTS || tableName == TableName.TENANT_KVS || tableName == TableName.TAG_DEFINITIONS || tableName == TableName.SERVICE_BRODCASTS || tableName == TableName.NODE_INFOS;
        Preconditions.checkState(context.getAccountRecordId() != null || tableWithoutAccountRecordId, "accountRecordId should be set for tableName=%s and changeType=%s", tableName, changeType);
    }
    final Collection<M> reHydratedEntities = updateHistoryAndAudit(entityRecordIds, deletedAndUpdatedEntities, tableName, changeType, context);
    if (method.getReturnType().equals(Void.TYPE)) {
        // Return early
        return null;
    } else if (isBatchQuery) {
        // Return the raw jdbc response (generated keys)
        return obj;
    } else {
        // PERF: override the return value with the reHydrated entity to avoid an extra 'get' in the transaction,
        // (see EntityDaoBase#createAndRefresh for an example, but it works for updates as well).
        Preconditions.checkState(entityRecordIds.size() == 1, "Invalid number of entityRecordIds: %s", entityRecordIds);
        if (!reHydratedEntities.isEmpty()) {
            Preconditions.checkState(reHydratedEntities.size() == 1, "Invalid number of entities: %s", reHydratedEntities);
            return Iterables.<M>getFirst(reHydratedEntities, null);
        } else {
            // Updated entity not retrieved yet, we have to go back to the database
            final M entity = sqlDao.getByRecordId(entityRecordIds.get(0), context);
            printSQLWarnings();
            return entity;
        }
    }
}
Also used : HashMap(java.util.HashMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) InternalCallContext(org.killbill.billing.callcontext.InternalCallContext) LinkedList(java.util.LinkedList) SqlBatch(org.skife.jdbi.v2.sqlobject.SqlBatch) TableName(org.killbill.billing.util.dao.TableName) ChangeType(org.killbill.billing.util.audit.ChangeType)

Example 12 with Bind

use of org.skife.jdbi.v2.sqlobject.Bind in project killbill by killbill.

In class TestPermissionAnnotationMethodInterceptor, the method testAOPForInterface:

/**
 * Verifies the permission-annotation interceptor for an interface-bound implementation:
 * first runs the call without Guice/Shiro to prove the plain implementation works,
 * then runs the same call through an injector so the AOP interception is exercised.
 */
@Test(groups = "fast")
public void testAOPForInterface() throws Exception {
    // Make sure it works as expected without any AOP magic
    final IAopTester simpleTester = new AopTesterImpl();
    try {
        simpleTester.createRefund();
    } catch (Exception e) {
        Assert.fail(e.getLocalizedMessage());
    }
    // Now, verify the interception works
    configureShiro();
    final Injector injector = Guice.createInjector(Stage.PRODUCTION, new ShiroModuleNoDB(configSource), new KillBillShiroAopModule(configSource), new TestSecurityModuleNoDB(configSource), new CacheModule(configSource), new AbstractModule() {

        @Override
        // protected (not public) for consistency with the class-based test and Guice's AbstractModule#configure
        protected void configure() {
            // Mock out all DB-touching collaborators; this test only cares about the AOP layer
            bind(IDBI.class).toInstance(Mockito.mock(IDBI.class));
            bind(IDBI.class).annotatedWith(Names.named(MAIN_RO_DATA_SOURCE_ID)).toInstance(Mockito.mock(IDBI.class));
            bind(IAopTester.class).to(AopTesterImpl.class).asEagerSingleton();
            bind(TenantInternalApi.class).toInstance(Mockito.mock(TenantInternalApi.class));
            bind(NonEntityDao.class).toInstance(Mockito.mock(NonEntityDao.class));
        }
    });
    final IAopTester aopedTester = injector.getInstance(IAopTester.class);
    verifyAopedTester(aopedTester);
}
Also used : TestSecurityModuleNoDB(org.killbill.billing.util.glue.TestSecurityModuleNoDB) IDBI(org.skife.jdbi.v2.IDBI) KillBillShiroAopModule(org.killbill.billing.util.glue.KillBillShiroAopModule) Injector(com.google.inject.Injector) CacheModule(org.killbill.billing.util.glue.CacheModule) ShiroModuleNoDB(org.killbill.billing.util.glue.TestUtilModuleNoDB.ShiroModuleNoDB) UnauthenticatedException(org.apache.shiro.authz.UnauthenticatedException) AuthorizationException(org.apache.shiro.authz.AuthorizationException) AbstractModule(com.google.inject.AbstractModule) Test(org.testng.annotations.Test)

Example 13 with Bind

use of org.skife.jdbi.v2.sqlobject.Bind in project killbill by killbill.

In class TestPermissionAnnotationMethodInterceptor, the method testAOPForClass:

/**
 * Verifies the permission-annotation interceptor for a concrete (class-bound) tester:
 * a sanity run without any injection, followed by the same call through a Guice
 * injector so the interceptor is actually applied.
 */
@Test(groups = "fast")
public void testAOPForClass() throws Exception {
    // Sanity check: the plain, non-injected implementation must not throw.
    final IAopTester plainTester = new AopTester();
    try {
        plainTester.createRefund();
    } catch (Exception e) {
        Assert.fail(e.getLocalizedMessage());
    }
    // Exercise the same call through Guice so the permission interception kicks in.
    configureShiro();
    // DB-touching collaborators are mocked; only the AOP layer is under test here.
    final AbstractModule mocksModule = new AbstractModule() {

        @Override
        protected void configure() {
            bind(IDBI.class).toInstance(Mockito.mock(IDBI.class));
            bind(IDBI.class).annotatedWith(Names.named(MAIN_RO_DATA_SOURCE_ID)).toInstance(Mockito.mock(IDBI.class));
            bind(TenantInternalApi.class).toInstance(Mockito.mock(TenantInternalApi.class));
            bind(NonEntityDao.class).toInstance(Mockito.mock(NonEntityDao.class));
        }
    };
    final Injector injector = Guice.createInjector(Stage.PRODUCTION,
                                                   new ShiroModuleNoDB(configSource),
                                                   new KillBillShiroAopModule(configSource),
                                                   new TestSecurityModuleNoDB(configSource),
                                                   new CacheModule(configSource),
                                                   mocksModule);
    verifyAopedTester(injector.getInstance(AopTester.class));
}
Also used : TestSecurityModuleNoDB(org.killbill.billing.util.glue.TestSecurityModuleNoDB) IDBI(org.skife.jdbi.v2.IDBI) KillBillShiroAopModule(org.killbill.billing.util.glue.KillBillShiroAopModule) Injector(com.google.inject.Injector) CacheModule(org.killbill.billing.util.glue.CacheModule) ShiroModuleNoDB(org.killbill.billing.util.glue.TestUtilModuleNoDB.ShiroModuleNoDB) UnauthenticatedException(org.apache.shiro.authz.UnauthenticatedException) AuthorizationException(org.apache.shiro.authz.AuthorizationException) AbstractModule(com.google.inject.AbstractModule) Test(org.testng.annotations.Test)

Example 14 with Bind

use of org.skife.jdbi.v2.sqlobject.Bind in project druid by druid-io.

In class SqlSegmentsMetadataQuery, the method retrieveSegments:

/**
 * Streams segments of {@code dataSource} with the given {@code used} flag whose intervals
 * match {@code intervals} under {@code matchMode}.
 *
 * When every interval's endpoints can be compared as strings, the interval conditions are
 * baked into the SQL; otherwise (or additionally, as a safety net) the match is re-checked
 * in Java on the streamed results.
 *
 * @return a closeable iterator over the matching segments; caller must close it
 */
private CloseableIterator<DataSegment> retrieveSegments(final String dataSource, final Collection<Interval> intervals, final IntervalMode matchMode, final boolean used) {
    // Check if the intervals all support comparing as strings. If so, bake them into the SQL.
    final boolean compareAsString = intervals.stream().allMatch(Intervals::canCompareEndpointsAsStrings);
    final StringBuilder sb = new StringBuilder();
    // %s is filled in later with the segments table name via StringUtils.format
    sb.append("SELECT payload FROM %s WHERE used = :used AND dataSource = :dataSource");
    if (compareAsString && !intervals.isEmpty()) {
        // OR together one condition per interval; the :startN/:endN named parameters are bound below
        sb.append(" AND (");
        for (int i = 0; i < intervals.size(); i++) {
            sb.append(matchMode.makeSqlCondition(connector.getQuoteString(), StringUtils.format(":start%d", i), StringUtils.format(":end%d", i)));
            if (i == intervals.size() - 1) {
                sb.append(")");
            } else {
                sb.append(" OR ");
            }
        }
    }
    final Query<Map<String, Object>> sql = handle.createQuery(StringUtils.format(sb.toString(), dbTables.getSegmentsTable())).setFetchSize(connector.getStreamingFetchSize()).bind("used", used).bind("dataSource", dataSource);
    if (compareAsString) {
        // Bind the interval endpoints; a no-op when intervals is empty, mirroring the SQL built above
        final Iterator<Interval> iterator = intervals.iterator();
        for (int i = 0; iterator.hasNext(); i++) {
            Interval interval = iterator.next();
            sql.bind(StringUtils.format("start%d", i), interval.getStart().toString()).bind(StringUtils.format("end%d", i), interval.getEnd().toString());
        }
    }
    // Column 1 is the payload blob; deserialize each row into a DataSegment
    final ResultIterator<DataSegment> resultIterator = sql.map((index, r, ctx) -> JacksonUtils.readValue(jsonMapper, r.getBytes(1), DataSegment.class)).iterator();
    return CloseableIterators.wrap(Iterators.filter(resultIterator, dataSegment -> {
        if (intervals.isEmpty()) {
            return true;
        } else {
            // Re-check the match in Java: when endpoints are not string-comparable the SQL
            // filter was skipped entirely, and string comparison can over-match endpoints
            // of unusual lengths (e.g. a
            // segment interval like "20010/20011".)
            for (Interval interval : intervals) {
                if (matchMode.apply(interval, dataSegment.getInterval())) {
                    return true;
                }
            }
            return false;
        }
    }), resultIterator);
}
Also used : Intervals(org.apache.druid.java.util.common.Intervals) Logger(org.apache.druid.java.util.common.logger.Logger) JacksonUtils(org.apache.druid.java.util.common.jackson.JacksonUtils) Iterator(java.util.Iterator) Intervals(org.apache.druid.java.util.common.Intervals) Collection(java.util.Collection) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) StringUtils(org.apache.druid.java.util.common.StringUtils) Query(org.skife.jdbi.v2.Query) Collectors(java.util.stream.Collectors) Iterators(com.google.common.collect.Iterators) Interval(org.joda.time.Interval) List(java.util.List) CloseableIterators(org.apache.druid.java.util.common.CloseableIterators) Handle(org.skife.jdbi.v2.Handle) ImmutableList(com.google.common.collect.ImmutableList) Map(java.util.Map) DataSegment(org.apache.druid.timeline.DataSegment) ResultIterator(org.skife.jdbi.v2.ResultIterator) PreparedBatch(org.skife.jdbi.v2.PreparedBatch) IAE(org.apache.druid.java.util.common.IAE) SegmentId(org.apache.druid.timeline.SegmentId) CloseableIterator(org.apache.druid.java.util.common.parsers.CloseableIterator) Collections(java.util.Collections) Map(java.util.Map) DataSegment(org.apache.druid.timeline.DataSegment) Interval(org.joda.time.Interval)

Example 15 with Bind

use of org.skife.jdbi.v2.sqlobject.Bind in project druid by druid-io.

In class SQLMetadataSegmentPublisher, the method publishSegment:

@VisibleForTesting
void publishSegment(final String segmentId, final String dataSource, final String createdDate, final String start, final String end, final boolean partitioned, final String version, final boolean used, final byte[] payload) {
    try {
        final DBI dbi = connector.getDBI();
        List<Map<String, Object>> exists = dbi.withHandle(new HandleCallback<List<Map<String, Object>>>() {

            @Override
            public List<Map<String, Object>> withHandle(Handle handle) {
                return handle.createQuery(StringUtils.format("SELECT id FROM %s WHERE id=:id", config.getSegmentsTable())).bind("id", segmentId).list();
            }
        });
        if (!exists.isEmpty()) {
            log.info("Found [%s] in DB, not updating DB", segmentId);
            return;
        }
        dbi.withHandle(new HandleCallback<Void>() {

            @Override
            public Void withHandle(Handle handle) {
                handle.createStatement(statement).bind("id", segmentId).bind("dataSource", dataSource).bind("created_date", createdDate).bind("start", start).bind("end", end).bind("partitioned", partitioned).bind("version", version).bind("used", used).bind("payload", payload).execute();
                return null;
            }
        });
    } catch (Exception e) {
        log.error(e, "Exception inserting into DB");
        throw new RuntimeException(e);
    }
}
Also used : DBI(org.skife.jdbi.v2.DBI) IOException(java.io.IOException) Handle(org.skife.jdbi.v2.Handle) List(java.util.List) Map(java.util.Map) VisibleForTesting(com.google.common.annotations.VisibleForTesting)

Aggregations

Handle (org.skife.jdbi.v2.Handle)23 IOException (java.io.IOException)12 SQLException (java.sql.SQLException)7 ArrayList (java.util.ArrayList)7 Map (java.util.Map)7 List (java.util.List)6 DataSegment (org.apache.druid.timeline.DataSegment)5 CallbackFailedException (org.skife.jdbi.v2.exceptions.CallbackFailedException)5 Test (org.junit.Test)4 JsonProcessingException (com.fasterxml.jackson.core.JsonProcessingException)3 ImmutableList (com.google.common.collect.ImmutableList)3 ResultSet (java.sql.ResultSet)3 Interval (org.joda.time.Interval)3 DBI (org.skife.jdbi.v2.DBI)3 IDBI (org.skife.jdbi.v2.IDBI)3 Query (org.skife.jdbi.v2.Query)3 StatementContext (org.skife.jdbi.v2.StatementContext)3 ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)2 VisibleForTesting (com.google.common.annotations.VisibleForTesting)2 AbstractModule (com.google.inject.AbstractModule)2