Example 6 with HandleCallback

Use of org.skife.jdbi.v2.tweak.HandleCallback in project killbill by killbill.

From the class TestInvoiceDispatcher, the method testWithParking:

@Test(groups = "slow")
public void testWithParking() throws InvoiceApiException, AccountApiException, CatalogApiException, SubscriptionBaseApiException, TagDefinitionApiException {
    final UUID accountId = account.getId();
    final BillingEventSet events = new MockBillingEventSet();
    final Plan plan = MockPlan.createBicycleNoTrialEvergreen1USD();
    final PlanPhase planPhase = MockPlanPhase.create1USDMonthlyEvergreen();
    final DateTime effectiveDate = clock.getUTCNow().minusDays(1);
    final Currency currency = Currency.USD;
    final BigDecimal fixedPrice = null;
    events.add(invoiceUtil.createMockBillingEvent(account, subscription, effectiveDate, plan, planPhase, fixedPrice, BigDecimal.ONE, currency, BillingPeriod.MONTHLY, 1, BillingMode.IN_ADVANCE, "", 1L, SubscriptionBaseTransitionType.CREATE));
    Mockito.when(billingApi.getBillingEventsForAccountAndUpdateAccountBCD(Mockito.<UUID>any(), Mockito.<DryRunArguments>any(), Mockito.<InternalCallContext>any())).thenReturn(events);
    final LocalDate target = internalCallContext.toLocalDate(effectiveDate);
    final InvoiceNotifier invoiceNotifier = new NullInvoiceNotifier();
    final InvoiceDispatcher dispatcher = new InvoiceDispatcher(generator, accountApi, billingApi, subscriptionApi, invoiceDao, internalCallContextFactory, invoiceNotifier, invoicePluginDispatcher, locker, busService.getBus(), null, invoiceConfig, clock, parkedAccountsManager);
    // Verify initial tags state for account
    Assert.assertTrue(tagUserApi.getTagsForAccount(accountId, true, callContext).isEmpty());
    // Create chaos on disk
    final InvoiceModelDao invoiceModelDao = new InvoiceModelDao(accountId, target, target, currency, false);
    final InvoiceItemModelDao invoiceItemModelDao1 = new InvoiceItemModelDao(clock.getUTCNow(), InvoiceItemType.RECURRING, invoiceModelDao.getId(), accountId, subscription.getBundleId(), subscription.getId(), "Bad data", plan.getName(), planPhase.getName(), null, effectiveDate.toLocalDate(), effectiveDate.plusMonths(1).toLocalDate(), BigDecimal.TEN, BigDecimal.ONE, currency, null);
    final InvoiceItemModelDao invoiceItemModelDao2 = new InvoiceItemModelDao(clock.getUTCNow(), InvoiceItemType.RECURRING, invoiceModelDao.getId(), accountId, subscription.getBundleId(), subscription.getId(), "Bad data", plan.getName(), planPhase.getName(), null, effectiveDate.plusDays(1).toLocalDate(), effectiveDate.plusMonths(1).toLocalDate(), BigDecimal.TEN, BigDecimal.ONE, currency, null);
    invoiceModelDao.addInvoiceItem(invoiceItemModelDao1);
    invoiceModelDao.addInvoiceItem(invoiceItemModelDao2);
    invoiceDao.createInvoices(ImmutableList.<InvoiceModelDao>of(invoiceModelDao), context);
    try {
        dispatcher.processAccountFromNotificationOrBusEvent(accountId, target, new DryRunFutureDateArguments(), context);
        Assert.fail();
    } catch (final InvoiceApiException e) {
        Assert.assertEquals(e.getCode(), ErrorCode.UNEXPECTED_ERROR.getCode());
        Assert.assertTrue(e.getCause().getMessage().startsWith("Double billing detected"));
    }
    // Dry-run: no side effect on disk
    Assert.assertEquals(invoiceDao.getInvoicesByAccount(context).size(), 1);
    Assert.assertTrue(tagUserApi.getTagsForAccount(accountId, true, callContext).isEmpty());
    try {
        dispatcher.processAccountFromNotificationOrBusEvent(accountId, target, null, context);
        Assert.fail();
    } catch (final InvoiceApiException e) {
        Assert.assertEquals(e.getCode(), ErrorCode.UNEXPECTED_ERROR.getCode());
        Assert.assertTrue(e.getCause().getMessage().startsWith("Double billing detected"));
    }
    Assert.assertEquals(invoiceDao.getInvoicesByAccount(context).size(), 1);
    // No dry-run: account is parked
    final List<Tag> tags = tagUserApi.getTagsForAccount(accountId, false, callContext);
    Assert.assertEquals(tags.size(), 1);
    Assert.assertEquals(tags.get(0).getTagDefinitionId(), SystemTags.PARK_TAG_DEFINITION_ID);
    // isApiCall=false
    final Invoice nullInvoice1 = dispatcher.processAccountFromNotificationOrBusEvent(accountId, target, null, context);
    Assert.assertNull(nullInvoice1);
    // No dry-run and isApiCall=true
    try {
        dispatcher.processAccount(true, accountId, target, null, context);
        Assert.fail();
    } catch (final InvoiceApiException e) {
        Assert.assertEquals(e.getCode(), ErrorCode.UNEXPECTED_ERROR.getCode());
        Assert.assertTrue(e.getCause().getMessage().startsWith("Double billing detected"));
    }
    // Idempotency
    Assert.assertEquals(invoiceDao.getInvoicesByAccount(context).size(), 1);
    Assert.assertEquals(tagUserApi.getTagsForAccount(accountId, false, callContext), tags);
    // Fix state
    dbi.withHandle(new HandleCallback<Void>() {

        @Override
        public Void withHandle(final Handle handle) throws Exception {
            handle.execute("delete from invoices");
            handle.execute("delete from invoice_items");
            return null;
        }
    });
    // Dry-run and isApiCall=false: still parked
    final Invoice nullInvoice2 = dispatcher.processAccountFromNotificationOrBusEvent(accountId, target, new DryRunFutureDateArguments(), context);
    Assert.assertNull(nullInvoice2);
    // Dry-run and isApiCall=true: call goes through
    final Invoice invoice1 = dispatcher.processAccount(true, accountId, target, new DryRunFutureDateArguments(), context);
    Assert.assertNotNull(invoice1);
    Assert.assertEquals(invoiceDao.getInvoicesByAccount(context).size(), 0);
    // Dry-run: still parked
    Assert.assertEquals(tagUserApi.getTagsForAccount(accountId, false, callContext).size(), 1);
    // No dry-run and isApiCall=true: call goes through
    final Invoice invoice2 = dispatcher.processAccount(true, accountId, target, null, context);
    Assert.assertNotNull(invoice2);
    Assert.assertEquals(invoiceDao.getInvoicesByAccount(context).size(), 1);
    // No dry-run: now unparked
    Assert.assertEquals(tagUserApi.getTagsForAccount(accountId, false, callContext).size(), 0);
    Assert.assertEquals(tagUserApi.getTagsForAccount(accountId, true, callContext).size(), 1);
}
Also used: Invoice(org.killbill.billing.invoice.api.Invoice) NullInvoiceNotifier(org.killbill.billing.invoice.notification.NullInvoiceNotifier) InvoiceNotifier(org.killbill.billing.invoice.api.InvoiceNotifier) InvoiceModelDao(org.killbill.billing.invoice.dao.InvoiceModelDao) MockPlan(org.killbill.billing.catalog.MockPlan) Plan(org.killbill.billing.catalog.api.Plan) LocalDate(org.joda.time.LocalDate) DateTime(org.joda.time.DateTime) BigDecimal(java.math.BigDecimal) CatalogApiException(org.killbill.billing.catalog.api.CatalogApiException) AccountApiException(org.killbill.billing.account.api.AccountApiException) InvoiceApiException(org.killbill.billing.invoice.api.InvoiceApiException) TagDefinitionApiException(org.killbill.billing.util.api.TagDefinitionApiException) SubscriptionBaseApiException(org.killbill.billing.subscription.api.user.SubscriptionBaseApiException) Handle(org.skife.jdbi.v2.Handle) InvoiceItemModelDao(org.killbill.billing.invoice.dao.InvoiceItemModelDao) BillingEventSet(org.killbill.billing.junction.BillingEventSet) Currency(org.killbill.billing.catalog.api.Currency) DryRunFutureDateArguments(org.killbill.billing.invoice.TestInvoiceHelper.DryRunFutureDateArguments) PlanPhase(org.killbill.billing.catalog.api.PlanPhase) MockPlanPhase(org.killbill.billing.catalog.MockPlanPhase) Tag(org.killbill.billing.util.tag.Tag) UUID(java.util.UUID) Test(org.testng.annotations.Test)
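
The interesting HandleCallback usage here is the "Fix state" step near the end, where the test borrows a raw Handle to wipe two tables. Below is a minimal, self-contained sketch of that same shape, assuming only JDBI v2 and an H2 driver on the classpath; the jdbc:h2:mem URL and the invoices/invoice_items tables it creates are illustrative, not Kill Bill's real schema.

import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.tweak.HandleCallback;

public class HandleCallbackCleanupSketch {

    public static void main(final String[] args) {
        // Hypothetical in-memory H2 database; any JDBC URL works as long as its driver is on the classpath.
        final DBI dbi = new DBI("jdbc:h2:mem:invoices;DB_CLOSE_DELAY=-1");

        // Create two illustrative tables (not Kill Bill's real schema) so the cleanup below has something to clear.
        dbi.withHandle(new HandleCallback<Void>() {
            @Override
            public Void withHandle(final Handle handle) throws Exception {
                handle.execute("create table invoices (id varchar(36) primary key)");
                handle.execute("create table invoice_items (id varchar(36) primary key)");
                return null;
            }
        });

        // Same shape as the "Fix state" step in the test: borrow a handle, run raw SQL, return null.
        dbi.withHandle(new HandleCallback<Void>() {
            @Override
            public Void withHandle(final Handle handle) throws Exception {
                handle.execute("delete from invoices");
                handle.execute("delete from invoice_items");
                return null;
            }
        });
    }
}

withHandle opens the Handle before the callback runs and closes it afterwards, so the callback body only has to worry about the SQL it executes.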

Example 7 with HandleCallback

Use of org.skife.jdbi.v2.tweak.HandleCallback in project druid by druid-io.

From the class SQLMetadataSegmentManager, the method removeSegment:

@Override
public boolean removeSegment(String ds, final String segmentID) {
    try {
        connector.getDBI().withHandle(new HandleCallback<Void>() {

            @Override
            public Void withHandle(Handle handle) throws Exception {
                handle.createStatement(String.format("UPDATE %s SET used=false WHERE id = :segmentID", getSegmentsTable())).bind("segmentID", segmentID).execute();
                return null;
            }
        });
        ConcurrentHashMap<String, DruidDataSource> dataSourceMap = dataSources.get();
        if (!dataSourceMap.containsKey(ds)) {
            log.warn("Cannot find datasource %s", ds);
            return false;
        }
        DruidDataSource dataSource = dataSourceMap.get(ds);
        dataSource.removePartition(segmentID);
        if (dataSource.isEmpty()) {
            dataSourceMap.remove(ds);
        }
    } catch (Exception e) {
        log.error(e, e.toString());
        return false;
    }
    return true;
}
Also used: DruidDataSource(io.druid.client.DruidDataSource) SQLException(java.sql.SQLException) IOException(java.io.IOException) Handle(org.skife.jdbi.v2.Handle)
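
Stripped of the Druid-specific cache bookkeeping, removeSegment is one parameterized UPDATE run through withHandle. Here is a hedged sketch of just that part, assuming JDBI v2; the segments table, its used column, and the markUnused method are invented for illustration.

import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.tweak.HandleCallback;

public class SoftDeleteSketch {

    private final DBI dbi;

    public SoftDeleteSketch(final DBI dbi) {
        this.dbi = dbi;
    }

    // Flags one row of a hypothetical "segments" table as unused, mirroring removeSegment above.
    public boolean markUnused(final String table, final String id) {
        final int updated = dbi.withHandle(new HandleCallback<Integer>() {
            @Override
            public Integer withHandle(final Handle handle) throws Exception {
                // Table names cannot be bound as parameters, hence String.format; the id value is bound by name.
                return handle.createStatement(String.format("UPDATE %s SET used = false WHERE id = :id", table))
                             .bind("id", id)
                             .execute();
            }
        });
        // execute() returns the affected row count, so the caller learns whether the id matched anything.
        return updated > 0;
    }
}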

Example 8 with HandleCallback

Use of org.skife.jdbi.v2.tweak.HandleCallback in project druid by druid-io.

From the class SQLMetadataSegmentManager, the method enableDatasource:

@Override
public boolean enableDatasource(final String ds) {
    try {
        final IDBI dbi = connector.getDBI();
        VersionedIntervalTimeline<String, DataSegment> segmentTimeline = connector.inReadOnlyTransaction(new TransactionCallback<VersionedIntervalTimeline<String, DataSegment>>() {

            @Override
            public VersionedIntervalTimeline<String, DataSegment> inTransaction(Handle handle, TransactionStatus status) throws Exception {
                return handle.createQuery(String.format("SELECT payload FROM %s WHERE dataSource = :dataSource", getSegmentsTable())).setFetchSize(connector.getStreamingFetchSize()).bind("dataSource", ds).map(ByteArrayMapper.FIRST).fold(new VersionedIntervalTimeline<String, DataSegment>(Ordering.natural()), new Folder3<VersionedIntervalTimeline<String, DataSegment>, byte[]>() {

                    @Override
                    public VersionedIntervalTimeline<String, DataSegment> fold(VersionedIntervalTimeline<String, DataSegment> timeline, byte[] payload, FoldController foldController, StatementContext statementContext) throws SQLException {
                        try {
                            final DataSegment segment = DATA_SEGMENT_INTERNER.intern(jsonMapper.readValue(payload, DataSegment.class));
                            timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segment));
                            return timeline;
                        } catch (Exception e) {
                            throw new SQLException(e.toString());
                        }
                    }
                });
            }
        });
        final List<DataSegment> segments = Lists.newArrayList();
        for (TimelineObjectHolder<String, DataSegment> objectHolder : segmentTimeline.lookup(new Interval("0000-01-01/3000-01-01"))) {
            for (PartitionChunk<DataSegment> partitionChunk : objectHolder.getObject()) {
                segments.add(partitionChunk.getObject());
            }
        }
        if (segments.isEmpty()) {
            log.warn("No segments found in the database!");
            return false;
        }
        dbi.withHandle(new HandleCallback<Void>() {

            @Override
            public Void withHandle(Handle handle) throws Exception {
                Batch batch = handle.createBatch();
                for (DataSegment segment : segments) {
                    batch.add(String.format("UPDATE %s SET used=true WHERE id = '%s'", getSegmentsTable(), segment.getIdentifier()));
                }
                batch.execute();
                return null;
            }
        });
    } catch (Exception e) {
        log.error(e, "Exception enabling datasource %s", ds);
        return false;
    }
    return true;
}
Also used: IDBI(org.skife.jdbi.v2.IDBI) SQLException(java.sql.SQLException) TransactionStatus(org.skife.jdbi.v2.TransactionStatus) DataSegment(io.druid.timeline.DataSegment) IOException(java.io.IOException) Handle(org.skife.jdbi.v2.Handle) StatementContext(org.skife.jdbi.v2.StatementContext) FoldController(org.skife.jdbi.v2.FoldController) Batch(org.skife.jdbi.v2.Batch) VersionedIntervalTimeline(io.druid.timeline.VersionedIntervalTimeline) Folder3(org.skife.jdbi.v2.Folder3) Interval(org.joda.time.Interval)
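
enableDatasource shows two distinct JDBI v2 idioms back to back: fold, which streams mapped rows into a single accumulator, and Batch, which replays a list of plain SQL statements in one round trip. The sketch below keeps that two-phase shape but folds into an ArrayList instead of a VersionedIntervalTimeline; it assumes only JDBI v2, and the segments table and enableAll method are made up for illustration.

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.skife.jdbi.v2.Batch;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.FoldController;
import org.skife.jdbi.v2.Folder3;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.tweak.HandleCallback;
import org.skife.jdbi.v2.util.StringMapper;

public class EnableAllSketch {

    private final DBI dbi;

    public EnableAllSketch(final DBI dbi) {
        this.dbi = dbi;
    }

    // Re-enables every disabled row of a hypothetical "segments" table, keeping the two-phase shape above.
    public boolean enableAll() {
        // Phase 1: fold the matching ids into an accumulator, here a plain list instead of a timeline.
        final List<String> ids = dbi.withHandle(new HandleCallback<List<String>>() {
            @Override
            public List<String> withHandle(final Handle handle) throws Exception {
                return handle.createQuery("SELECT id FROM segments WHERE used = false")
                             .map(StringMapper.FIRST)
                             .fold(new ArrayList<String>(), new Folder3<ArrayList<String>, String>() {
                                 @Override
                                 public ArrayList<String> fold(final ArrayList<String> acc, final String id, final FoldController control, final StatementContext ctx) throws SQLException {
                                     acc.add(id);
                                     return acc;
                                 }
                             });
            }
        });
        if (ids.isEmpty()) {
            return false;
        }
        // Phase 2: flip them back in a single Batch, as the original does with its UPDATE statements.
        dbi.withHandle(new HandleCallback<Void>() {
            @Override
            public Void withHandle(final Handle handle) throws Exception {
                final Batch batch = handle.createBatch();
                for (final String id : ids) {
                    batch.add(String.format("UPDATE segments SET used = true WHERE id = '%s'", id));
                }
                batch.execute();
                return null;
            }
        });
        return true;
    }
}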

Example 9 with HandleCallback

Use of org.skife.jdbi.v2.tweak.HandleCallback in project druid by druid-io.

From the class SQLMetadataSegmentPublisher, the method publishSegment:

@VisibleForTesting
void publishSegment(final String identifier, final String dataSource, final String createdDate, final String start, final String end, final boolean partitioned, final String version, final boolean used, final byte[] payload) {
    try {
        final DBI dbi = connector.getDBI();
        List<Map<String, Object>> exists = dbi.withHandle(new HandleCallback<List<Map<String, Object>>>() {

            @Override
            public List<Map<String, Object>> withHandle(Handle handle) throws Exception {
                return handle.createQuery(String.format("SELECT id FROM %s WHERE id=:id", config.getSegmentsTable())).bind("id", identifier).list();
            }
        });
        if (!exists.isEmpty()) {
            log.info("Found [%s] in DB, not updating DB", identifier);
            return;
        }
        dbi.withHandle(new HandleCallback<Void>() {

            @Override
            public Void withHandle(Handle handle) throws Exception {
                handle.createStatement(statement).bind("id", identifier).bind("dataSource", dataSource).bind("created_date", createdDate).bind("start", start).bind("end", end).bind("partitioned", partitioned).bind("version", version).bind("used", used).bind("payload", payload).execute();
                return null;
            }
        });
    } catch (Exception e) {
        log.error(e, "Exception inserting into DB");
        throw new RuntimeException(e);
    }
}
Also used: DBI(org.skife.jdbi.v2.DBI) IOException(java.io.IOException) Handle(org.skife.jdbi.v2.Handle) List(java.util.List) Map(java.util.Map) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
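
publishSegment runs its existence check and its insert as two separate withHandle calls, so each borrows its own connection. A possible variation, sketched below under the assumption of JDBI v2's DBI.inTransaction and a hypothetical segments(id, payload) table, keeps both steps on one handle inside a single transaction, which narrows the window in which a concurrent writer could slip a duplicate in between the check and the insert.

import java.util.List;
import java.util.Map;

import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.TransactionCallback;
import org.skife.jdbi.v2.TransactionStatus;

public class InsertIfAbsentSketch {

    private final DBI dbi;

    public InsertIfAbsentSketch(final DBI dbi) {
        this.dbi = dbi;
    }

    // Check-then-insert against a hypothetical segments(id, payload) table, done inside one transaction.
    public boolean insertIfAbsent(final String id, final byte[] payload) {
        return dbi.inTransaction(new TransactionCallback<Boolean>() {
            @Override
            public Boolean inTransaction(final Handle handle, final TransactionStatus status) throws Exception {
                final List<Map<String, Object>> exists = handle.createQuery("SELECT id FROM segments WHERE id = :id")
                                                               .bind("id", id)
                                                               .list();
                if (!exists.isEmpty()) {
                    // Row already published; nothing to do.
                    return false;
                }
                handle.createStatement("INSERT INTO segments (id, payload) VALUES (:id, :payload)")
                      .bind("id", id)
                      .bind("payload", payload)
                      .execute();
                return true;
            }
        });
    }
}

The original handles the racy case through its catch block, logging the failure and rethrowing it as a RuntimeException.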

Example 10 with HandleCallback

Use of org.skife.jdbi.v2.tweak.HandleCallback in project killbill by killbill.

From the class DatabaseExportDao, the method exportDataForAccountAndTable:

private void exportDataForAccountAndTable(final DatabaseExportOutputStream out, final List<ColumnInfo> columnsForTable, final InternalTenantContext context) {
    TableType tableType = TableType.OTHER;
    final String tableName = columnsForTable.get(0).getTableName();
    if (TableName.ACCOUNT.getTableName().equals(tableName)) {
        tableType = TableType.KB_ACCOUNT;
    } else if (TableName.ACCOUNT_HISTORY.getTableName().equals(tableName)) {
        tableType = TableType.KB_ACCOUNT_HISTORY;
    }
    boolean firstColumn = true;
    final StringBuilder queryBuilder = new StringBuilder("select ");
    for (final ColumnInfo column : columnsForTable) {
        if (!firstColumn) {
            queryBuilder.append(", ");
        } else {
            firstColumn = false;
        }
        queryBuilder.append(column.getColumnName());
        if (tableType == TableType.OTHER) {
            if (column.getColumnName().equals(TableType.KB_PER_ACCOUNT.getAccountRecordIdColumnName())) {
                tableType = TableType.KB_PER_ACCOUNT;
            } else if (column.getColumnName().equals(TableType.NOTIFICATION.getAccountRecordIdColumnName())) {
                tableType = TableType.NOTIFICATION;
            }
        }
    }
    // Don't export non-account specific tables
    if (tableType == TableType.OTHER) {
        return;
    }
    // Build the query - make sure to filter by account and tenant!
    queryBuilder.append(" from ").append(tableName).append(" where ").append(tableType.getAccountRecordIdColumnName()).append(" = :accountRecordId and ").append(tableType.getTenantRecordIdColumnName()).append("  = :tenantRecordId");
    // Notify the stream that we're about to write data for a different table
    out.newTable(tableName, columnsForTable);
    dbi.withHandle(new HandleCallback<Void>() {

        @Override
        public Void withHandle(final Handle handle) throws Exception {
            final ResultIterator<Map<String, Object>> iterator = handle.createQuery(queryBuilder.toString()).bind("accountRecordId", context.getAccountRecordId()).bind("tenantRecordId", context.getTenantRecordId()).iterator();
            try {
                while (iterator.hasNext()) {
                    final Map<String, Object> row = iterator.next();
                    out.write(row);
                }
            } finally {
                iterator.close();
            }
            return null;
        }
    });
}
Also used: ResultIterator(org.skife.jdbi.v2.ResultIterator) ColumnInfo(org.killbill.billing.util.api.ColumnInfo) DefaultColumnInfo(org.killbill.billing.util.validation.DefaultColumnInfo) Handle(org.skife.jdbi.v2.Handle) Map(java.util.Map)
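
exportDataForAccountAndTable avoids loading the whole result set by walking a ResultIterator and closing it in a finally block, which is what releases the underlying JDBC resources. A minimal sketch of that streaming shape, assuming JDBI v2 and an invented payments table keyed by account_record_id:

import java.util.Map;

import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.ResultIterator;
import org.skife.jdbi.v2.tweak.HandleCallback;

public class StreamingExportSketch {

    private final DBI dbi;

    public StreamingExportSketch(final DBI dbi) {
        this.dbi = dbi;
    }

    // Walks the rows of a hypothetical "payments" table one at a time instead of materializing a List.
    public long countRowsForAccount(final long accountRecordId) {
        return dbi.withHandle(new HandleCallback<Long>() {
            @Override
            public Long withHandle(final Handle handle) throws Exception {
                final ResultIterator<Map<String, Object>> iterator =
                        handle.createQuery("SELECT * FROM payments WHERE account_record_id = :accountRecordId")
                              .bind("accountRecordId", accountRecordId)
                              .iterator();
                long rows = 0;
                try {
                    while (iterator.hasNext()) {
                        // Each row is a column-name-to-value map; a real exporter would write it out here.
                        iterator.next();
                        rows++;
                    }
                } finally {
                    // The iterator keeps the underlying JDBC ResultSet open until it is closed explicitly.
                    iterator.close();
                }
                return rows;
            }
        });
    }
}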

Aggregations

Handle (org.skife.jdbi.v2.Handle): 15
Test (org.testng.annotations.Test): 8
List (java.util.List): 5
UUID (java.util.UUID): 5
IOException (java.io.IOException): 4
SQLException (java.sql.SQLException): 4
Date (java.util.Date): 4
ImmutableList (com.google.common.collect.ImmutableList): 3
BigDecimal (java.math.BigDecimal): 3
Map (java.util.Map): 3
DateTime (org.joda.time.DateTime): 3
LocalDate (org.joda.time.LocalDate): 3
ArrayList (java.util.ArrayList): 2
Interval (org.joda.time.Interval): 2
Account (org.killbill.billing.account.api.Account): 2
AccountData (org.killbill.billing.account.api.AccountData): 2
ExpectedInvoiceItemCheck (org.killbill.billing.beatrix.util.InvoiceChecker.ExpectedInvoiceItemCheck): 2
InternalCallContext (org.killbill.billing.callcontext.InternalCallContext): 2
DefaultEntitlement (org.killbill.billing.entitlement.api.DefaultEntitlement): 2
Invoice (org.killbill.billing.invoice.api.Invoice): 2