
Example 11 with HandleCallback

Use of org.skife.jdbi.v2.tweak.HandleCallback in project druid by druid-io.

From class JDBCExtractionNamespaceCacheFactory, method populateCache.

@Override
@Nullable
public CacheScheduler.VersionedCache populateCache(final JDBCExtractionNamespace namespace, final CacheScheduler.EntryImpl<JDBCExtractionNamespace> entryId, final String lastVersion, final CacheScheduler scheduler) {
    final long lastCheck = lastVersion == null ? JodaUtils.MIN_INSTANT : Long.parseLong(lastVersion);
    final Long lastDBUpdate = lastUpdates(entryId, namespace);
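    // No change since the version we last recorded: return null so the caller keeps
    // the existing cache (hence the @Nullable return type).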
    if (lastDBUpdate != null && lastDBUpdate <= lastCheck) {
        return null;
    }
    final long dbQueryStart = System.currentTimeMillis();
    final DBI dbi = ensureDBI(entryId, namespace);
    final String table = namespace.getTable();
    final String valueColumn = namespace.getValueColumn();
    final String keyColumn = namespace.getKeyColumn();
    LOG.debug("Updating %s", entryId);
    final List<Pair<String, String>> pairs = dbi.withHandle(new HandleCallback<List<Pair<String, String>>>() {

        @Override
        public List<Pair<String, String>> withHandle(Handle handle) throws Exception {
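            // Table and column identifiers cannot be bound as JDBC parameters,
            // so they are interpolated into the SQL text via String.format.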
            final String query = String.format("SELECT %s, %s FROM %s", keyColumn, valueColumn, table);
            return handle.createQuery(query).map(new ResultSetMapper<Pair<String, String>>() {

                @Override
                public Pair<String, String> map(final int index, final ResultSet r, final StatementContext ctx) throws SQLException {
                    return new Pair<>(r.getString(keyColumn), r.getString(valueColumn));
                }
            }).list();
        }
    });
    final String newVersion;
    if (lastDBUpdate != null) {
        newVersion = lastDBUpdate.toString();
    } else {
        newVersion = String.format("%d", dbQueryStart);
    }
    final CacheScheduler.VersionedCache versionedCache = scheduler.createVersionedCache(entryId, newVersion);
    try {
        final Map<String, String> cache = versionedCache.getCache();
        for (Pair<String, String> pair : pairs) {
            cache.put(pair.lhs, pair.rhs);
        }
        LOG.info("Finished loading %d values for %s", cache.size(), entryId);
        return versionedCache;
    } catch (Throwable t) {
        try {
            versionedCache.close();
        } catch (Exception e) {
            t.addSuppressed(e);
        }
        throw t;
    }
}
Also used: SQLException(java.sql.SQLException) DBI(org.skife.jdbi.v2.DBI) Handle(org.skife.jdbi.v2.Handle) StatementContext(org.skife.jdbi.v2.StatementContext) ResultSet(java.sql.ResultSet) List(java.util.List) CacheScheduler(io.druid.server.lookup.namespace.cache.CacheScheduler) Pair(io.druid.java.util.common.Pair) Nullable(javax.annotation.Nullable)
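Every example on this page follows the same JDBI v2 contract: DBI.withHandle(HandleCallback&lt;T&gt;) opens a Handle, invokes the callback, closes the Handle, and returns the callback's result, wrapping anything thrown inside in an unchecked CallbackFailedException (Example 15 below catches it). A minimal, self-contained sketch of that contract; the in-memory H2 URL and the lookup table with columns k and v are placeholders, not part of the Druid example above:

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.tweak.HandleCallback;
import org.skife.jdbi.v2.tweak.ResultSetMapper;

public class WithHandleSketch {

    public static void main(String[] args) {
        // Placeholder JDBC URL; any driver on the classpath works the same way.
        final DBI dbi = new DBI("jdbc:h2:mem:example");
        final List<String> values = dbi.withHandle(new HandleCallback<List<String>>() {

            @Override
            public List<String> withHandle(Handle handle) throws Exception {
                // The Handle (and its connection) lives only for the duration of this method.
                handle.execute("CREATE TABLE lookup (k VARCHAR(32), v VARCHAR(32))");
                handle.execute("INSERT INTO lookup (k, v) VALUES (?, ?)", "key", "value");
                return handle.createQuery("SELECT v FROM lookup WHERE k = :k").bind("k", "key").map(new ResultSetMapper<String>() {

                    @Override
                    public String map(int index, ResultSet r, StatementContext ctx) throws SQLException {
                        return r.getString("v");
                    }
                }).list();
            }
        });
        // Prints [value]; the handle is already closed by the time we get here.
        System.out.println(values);
    }
}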

Example 12 with HandleCallback

Use of org.skife.jdbi.v2.tweak.HandleCallback in project killbill by killbill.

From class TestInternalCallContextFactory, method testCreateInternalCallContextWithAccountRecordIdFromAccountObjectType.

@Test(groups = "slow")
public void testCreateInternalCallContextWithAccountRecordIdFromAccountObjectType() throws Exception {
    final UUID accountId = UUID.randomUUID();
    final Long accountRecordId = 19384012L;
    dbi.withHandle(new HandleCallback<Void>() {

        @Override
        public Void withHandle(final Handle handle) throws Exception {
            // Note: we always create an accounts table, see MysqlTestingHelper
            handle.execute("insert into accounts (record_id, id, email, name, first_name_length, is_notified_for_invoices, created_date, created_by, updated_date, updated_by) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", accountRecordId, accountId.toString(), "yo@t.com", "toto", 4, false, new Date(), "i", new Date(), "j");
            return null;
        }
    });
    final InternalCallContext context = internalCallContextFactory.createInternalCallContext(accountId, ObjectType.ACCOUNT, callContext);
    // The account record id should have been looked up in the accounts table
    Assert.assertEquals(context.getAccountRecordId(), accountRecordId);
    verifyInternalCallContext(context);
}
Also used: InternalCallContext(org.killbill.billing.callcontext.InternalCallContext) UUID(java.util.UUID) Date(java.util.Date) Handle(org.skife.jdbi.v2.Handle) Test(org.testng.annotations.Test)
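The positional handle.execute(...) call above binds ten "?" placeholders by position, which is easy to get out of step with a long column list. JDBI v2 also supports named parameters via Handle.createStatement; a hedged sketch of the same insert in that style, intended to run inside the same withHandle callback against the same test schema and values:

handle.createStatement("insert into accounts (record_id, id, email, name, first_name_length, is_notified_for_invoices, created_date, created_by, updated_date, updated_by) " + "values (:recordId, :id, :email, :name, :firstNameLength, :notified, :createdDate, :createdBy, :updatedDate, :updatedBy)")
      .bind("recordId", accountRecordId)
      .bind("id", accountId.toString())
      .bind("email", "yo@t.com")
      .bind("name", "toto")
      .bind("firstNameLength", 4)
      .bind("notified", false)
      // java.util.Date binds as a SQL timestamp in JDBI v2
      .bind("createdDate", new Date())
      .bind("createdBy", "i")
      .bind("updatedDate", new Date())
      .bind("updatedBy", "j")
      .execute();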

Example 13 with HandleCallback

Use of org.skife.jdbi.v2.tweak.HandleCallback in project killbill by killbill.

From class TestDefaultCustomFieldUserApi, method testSaveCustomFieldWithAccountRecordId.

@Test(groups = "slow")
public void testSaveCustomFieldWithAccountRecordId() throws Exception {
    final UUID accountId = UUID.randomUUID();
    final Long accountRecordId = 19384012L;
    dbi.withHandle(new HandleCallback<Void>() {

        @Override
        public Void withHandle(final Handle handle) throws Exception {
            // Note: we always create an accounts table, see MysqlTestingHelper
            handle.execute("insert into accounts (record_id, id, email, name, first_name_length, is_notified_for_invoices, created_date, created_by, updated_date, updated_by) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", accountRecordId, accountId.toString(), "yo@t.com", "toto", 4, false, new Date(), "i", new Date(), "j");
            return null;
        }
    });
    checkPagination(0);
    final String cfName = UUID.randomUUID().toString().substring(1, 4);
    final String cfValue = UUID.randomUUID().toString().substring(1, 4);
    final CustomField customField = new StringCustomField(cfName, cfValue, ObjectType.ACCOUNT, accountId, callContext.getCreatedDate());
    eventsListener.pushExpectedEvent(NextEvent.CUSTOM_FIELD);
    customFieldUserApi.addCustomFields(ImmutableList.<CustomField>of(customField), callContext);
    assertListenerStatus();
    checkPagination(1);
    // Verify the field was saved
    final List<CustomField> customFields = customFieldUserApi.getCustomFieldsForObject(accountId, ObjectType.ACCOUNT, callContext);
    Assert.assertEquals(customFields.size(), 1);
    Assert.assertEquals(customFields.get(0).getFieldName(), customField.getFieldName());
    Assert.assertEquals(customFields.get(0).getFieldValue(), customField.getFieldValue());
    Assert.assertEquals(customFields.get(0).getObjectId(), customField.getObjectId());
    Assert.assertEquals(customFields.get(0).getObjectType(), customField.getObjectType());
    // Verify the account_record_id was populated
    dbi.withHandle(new HandleCallback<Void>() {

        @Override
        public Void withHandle(final Handle handle) throws Exception {
            final List<Map<String, Object>> values = handle.select("select account_record_id from custom_fields where object_id = ?", accountId.toString());
            Assert.assertEquals(values.size(), 1);
            Assert.assertEquals(values.get(0).keySet().size(), 1);
            Assert.assertEquals(Long.valueOf(values.get(0).get("account_record_id").toString()), accountRecordId);
            return null;
        }
    });
    eventsListener.pushExpectedEvent(NextEvent.CUSTOM_FIELD);
    customFieldUserApi.removeCustomFields(customFields, callContext);
    assertListenerStatus();
    List<CustomField> remainingCustomFields = customFieldUserApi.getCustomFieldsForObject(accountId, ObjectType.ACCOUNT, callContext);
    Assert.assertEquals(remainingCustomFields.size(), 0);
    checkPagination(0);
    // Add again the custom field
    final CustomField newCustomField = new StringCustomField(cfName, cfValue, ObjectType.ACCOUNT, accountId, callContext.getCreatedDate());
    eventsListener.pushExpectedEvent(NextEvent.CUSTOM_FIELD);
    customFieldUserApi.addCustomFields(ImmutableList.<CustomField>of(newCustomField), callContext);
    assertListenerStatus();
    remainingCustomFields = customFieldUserApi.getCustomFieldsForObject(accountId, ObjectType.ACCOUNT, callContext);
    Assert.assertEquals(remainingCustomFields.size(), 1);
    checkPagination(1);
    // Delete again
    eventsListener.pushExpectedEvent(NextEvent.CUSTOM_FIELD);
    customFieldUserApi.removeCustomFields(remainingCustomFields, callContext);
    assertListenerStatus();
    remainingCustomFields = customFieldUserApi.getCustomFieldsForObject(accountId, ObjectType.ACCOUNT, callContext);
    Assert.assertEquals(remainingCustomFields.size(), 0);
    checkPagination(0);
}
Also used: StringCustomField(org.killbill.billing.util.customfield.StringCustomField) Date(java.util.Date) Handle(org.skife.jdbi.v2.Handle) CustomField(org.killbill.billing.util.customfield.CustomField) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) UUID(java.util.UUID) Test(org.testng.annotations.Test)
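The second callback above unpacks a List&lt;Map&lt;String, Object&gt;&gt; by hand to check a single column. JDBI v2 ships typed row mappers that make this more direct; a hedged sketch of the same assertion using org.skife.jdbi.v2.util.LongMapper (an extra import relative to the listing above):

dbi.withHandle(new HandleCallback<Void>() {

    @Override
    public Void withHandle(final Handle handle) throws Exception {
        // LongMapper.FIRST maps the first column of each row to a Long.
        final Long recordId = handle.createQuery("select account_record_id from custom_fields where object_id = :id").bind("id", accountId.toString()).map(LongMapper.FIRST).first();
        Assert.assertEquals(recordId, accountRecordId);
        return null;
    }
});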

Example 14 with HandleCallback

Use of org.skife.jdbi.v2.tweak.HandleCallback in project killbill by killbill.

From class TestDefaultTagUserApi, method testSaveTagWithAccountRecordId.

@Test(groups = "slow")
public void testSaveTagWithAccountRecordId() throws Exception {
    final UUID accountId = UUID.randomUUID();
    final Long accountRecordId = 19384012L;
    dbi.withHandle(new HandleCallback<Void>() {

        @Override
        public Void withHandle(final Handle handle) throws Exception {
            // Note: we always create an accounts table, see MysqlTestingHelper
            handle.execute("insert into accounts (record_id, id, email, name, first_name_length, is_notified_for_invoices, created_date, created_by, updated_date, updated_by) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", accountRecordId, accountId.toString(), "yo@t.com", "toto", 4, false, new Date(), "i", new Date(), "j");
            return null;
        }
    });
    checkPagination(0);
    eventsListener.pushExpectedEvent(NextEvent.TAG);
    tagUserApi.addTags(accountId, ObjectType.ACCOUNT, ImmutableList.<UUID>of(ControlTagType.WRITTEN_OFF.getId()), callContext);
    assertListenerStatus();
    checkPagination(1);
    // Verify the tag was saved
    final List<Tag> tags = tagUserApi.getTagsForObject(accountId, ObjectType.ACCOUNT, true, callContext);
    Assert.assertEquals(tags.size(), 1);
    Assert.assertEquals(tags.get(0).getTagDefinitionId(), ControlTagType.WRITTEN_OFF.getId());
    Assert.assertEquals(tags.get(0).getObjectId(), accountId);
    Assert.assertEquals(tags.get(0).getObjectType(), ObjectType.ACCOUNT);
    // Verify the account_record_id was populated
    dbi.withHandle(new HandleCallback<Void>() {

        @Override
        public Void withHandle(final Handle handle) throws Exception {
            final List<Map<String, Object>> values = handle.select("select account_record_id from tags where object_id = ?", accountId.toString());
            Assert.assertEquals(values.size(), 1);
            Assert.assertEquals(values.get(0).keySet().size(), 1);
            Assert.assertEquals(Long.valueOf(values.get(0).get("account_record_id").toString()), accountRecordId);
            return null;
        }
    });
    eventsListener.pushExpectedEvent(NextEvent.TAG);
    tagUserApi.removeTags(accountId, ObjectType.ACCOUNT, ImmutableList.<UUID>of(ControlTagType.WRITTEN_OFF.getId()), callContext);
    assertListenerStatus();
    List<Tag> remainingTags = tagUserApi.getTagsForObject(accountId, ObjectType.ACCOUNT, false, callContext);
    Assert.assertEquals(remainingTags.size(), 0);
    checkPagination(0);
    // Add again the tag
    eventsListener.pushExpectedEvent(NextEvent.TAG);
    tagUserApi.addTags(accountId, ObjectType.ACCOUNT, ImmutableList.<UUID>of(ControlTagType.WRITTEN_OFF.getId()), callContext);
    assertListenerStatus();
    remainingTags = tagUserApi.getTagsForObject(accountId, ObjectType.ACCOUNT, false, callContext);
    Assert.assertEquals(remainingTags.size(), 1);
    checkPagination(1);
    // Delete again
    eventsListener.pushExpectedEvent(NextEvent.TAG);
    tagUserApi.removeTags(accountId, ObjectType.ACCOUNT, ImmutableList.<UUID>of(ControlTagType.WRITTEN_OFF.getId()), callContext);
    assertListenerStatus();
    remainingTags = tagUserApi.getTagsForObject(accountId, ObjectType.ACCOUNT, false, callContext);
    Assert.assertEquals(remainingTags.size(), 0);
    checkPagination(0);
}
Also used: Date(java.util.Date) Handle(org.skife.jdbi.v2.Handle) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) Tag(org.killbill.billing.util.tag.Tag) UUID(java.util.UUID) Test(org.testng.annotations.Test)
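HandleCallback has a single abstract method, so under Java 8 the anonymous classes repeated throughout these tests collapse to lambdas with no change in behavior. A sketch of the insert helper above in that form; the explicit &lt;Void&gt; type witness keeps the compiler from widening the inferred return type:

dbi.<Void>withHandle(handle -> {
    // Same insert as above; the accounts table comes from MysqlTestingHelper.
    handle.execute("insert into accounts (record_id, id, email, name, first_name_length, is_notified_for_invoices, created_date, created_by, updated_date, updated_by) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", accountRecordId, accountId.toString(), "yo@t.com", "toto", 4, false, new Date(), "i", new Date(), "j");
    return null;
});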

Example 15 with HandleCallback

Use of org.skife.jdbi.v2.tweak.HandleCallback in project druid by druid-io.

From class HadoopConverterJobTest, method setUp.

@Before
public void setUp() throws Exception {
    final MetadataStorageUpdaterJobSpec metadataStorageUpdaterJobSpec = new MetadataStorageUpdaterJobSpec() {

        @Override
        public String getSegmentTable() {
            return derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable();
        }

        @Override
        public MetadataStorageConnectorConfig get() {
            return derbyConnectorRule.getMetadataConnectorConfig();
        }
    };
    final File scratchFileDir = temporaryFolder.newFolder();
    storageLocProperty = System.getProperty(STORAGE_PROPERTY_KEY);
    tmpSegmentDir = temporaryFolder.newFolder();
    System.setProperty(STORAGE_PROPERTY_KEY, tmpSegmentDir.getAbsolutePath());
    final URL url = Preconditions.checkNotNull(Query.class.getClassLoader().getResource("druid.sample.tsv"));
    final File tmpInputFile = temporaryFolder.newFile();
    FileUtils.retryCopy(new ByteSource() {

        @Override
        public InputStream openStream() throws IOException {
            return url.openStream();
        }
    }, tmpInputFile, FileUtils.IS_EXCEPTION, 3);
    final HadoopDruidIndexerConfig hadoopDruidIndexerConfig = new HadoopDruidIndexerConfig(new HadoopIngestionSpec(
            new DataSchema(
                    DATASOURCE,
                    HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
                            new StringInputRowParser(
                                    new DelimitedParseSpec(
                                            new TimestampSpec("ts", "iso", null),
                                            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList(TestIndex.DIMENSIONS)), null, null),
                                            "\t",
                                            "",
                                            Arrays.asList(TestIndex.COLUMNS)
                                    ),
                                    null
                            ),
                            Map.class
                    ),
                    new AggregatorFactory[] { new DoubleSumAggregatorFactory(TestIndex.METRICS[0], TestIndex.METRICS[0]), new HyperUniquesAggregatorFactory("quality_uniques", "quality") },
                    new UniformGranularitySpec(Granularities.MONTH, Granularities.DAY, ImmutableList.<Interval>of(interval)),
                    HadoopDruidIndexerConfig.JSON_MAPPER
            ),
            new HadoopIOConfig(
                    ImmutableMap.<String, Object>of("type", "static", "paths", tmpInputFile.getAbsolutePath()),
                    metadataStorageUpdaterJobSpec,
                    tmpSegmentDir.getAbsolutePath()
            ),
            new HadoopTuningConfig(scratchFileDir.getAbsolutePath(), null, null, null, null, null, false, false, false, false, null, false, false, null, null, null, false, false)
    ));
    metadataStorageTablesConfigSupplier = derbyConnectorRule.metadataTablesConfigSupplier();
    connector = derbyConnectorRule.getConnector();
    try {
        connector.getDBI().withHandle(new HandleCallback<Void>() {

            @Override
            public Void withHandle(Handle handle) throws Exception {
                handle.execute("DROP TABLE druid_segments");
                return null;
            }
        });
    } catch (CallbackFailedException e) {
        // Who cares
    }
    List<Jobby> jobs = ImmutableList.of(new Jobby() {

        @Override
        public boolean run() {
            connector.createSegmentTable(metadataStorageUpdaterJobSpec.getSegmentTable());
            return true;
        }
    }, new HadoopDruidDetermineConfigurationJob(hadoopDruidIndexerConfig), new HadoopDruidIndexerJob(hadoopDruidIndexerConfig, new SQLMetadataStorageUpdaterJobHandler(connector)));
    JobHelper.runJobs(jobs, hadoopDruidIndexerConfig);
}
Also used: HadoopIngestionSpec(io.druid.indexer.HadoopIngestionSpec) HadoopTuningConfig(io.druid.indexer.HadoopTuningConfig) URL(java.net.URL) HadoopIOConfig(io.druid.indexer.HadoopIOConfig) UniformGranularitySpec(io.druid.segment.indexing.granularity.UniformGranularitySpec) TimestampSpec(io.druid.data.input.impl.TimestampSpec) SQLMetadataStorageUpdaterJobHandler(io.druid.indexer.SQLMetadataStorageUpdaterJobHandler) DoubleSumAggregatorFactory(io.druid.query.aggregation.DoubleSumAggregatorFactory) InputStream(java.io.InputStream) DelimitedParseSpec(io.druid.data.input.impl.DelimitedParseSpec) IOException(java.io.IOException) HadoopDruidIndexerConfig(io.druid.indexer.HadoopDruidIndexerConfig) CallbackFailedException(org.skife.jdbi.v2.exceptions.CallbackFailedException) Handle(org.skife.jdbi.v2.Handle) DataSchema(io.druid.segment.indexing.DataSchema) Jobby(io.druid.indexer.Jobby) HadoopDruidIndexerJob(io.druid.indexer.HadoopDruidIndexerJob) StringInputRowParser(io.druid.data.input.impl.StringInputRowParser) HyperUniquesAggregatorFactory(io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory) ByteSource(com.google.common.io.ByteSource) DimensionsSpec(io.druid.data.input.impl.DimensionsSpec) File(java.io.File) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) HadoopDruidDetermineConfigurationJob(io.druid.indexer.HadoopDruidDetermineConfigurationJob) Interval(org.joda.time.Interval) Before(org.junit.Before)
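The empty catch block in setUp works because JDBI v2 wraps whatever the callback throws in an unchecked CallbackFailedException. A hedged sketch of a variant that inspects the wrapped cause instead of silently discarding it:

try {
    connector.getDBI().withHandle(new HandleCallback<Void>() {

        @Override
        public Void withHandle(Handle handle) throws Exception {
            handle.execute("DROP TABLE druid_segments");
            return null;
        }
    });
} catch (CallbackFailedException e) {
    // getCause() returns the exception thrown inside the callback, typically a
    // SQLException on the first run when the table does not exist yet.
    System.err.println("Ignoring failed DROP TABLE: " + e.getCause());
}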

Aggregations

Handle (org.skife.jdbi.v2.Handle): 15 usages
Test (org.testng.annotations.Test): 8 usages
List (java.util.List): 5 usages
UUID (java.util.UUID): 5 usages
IOException (java.io.IOException): 4 usages
SQLException (java.sql.SQLException): 4 usages
Date (java.util.Date): 4 usages
ImmutableList (com.google.common.collect.ImmutableList): 3 usages
BigDecimal (java.math.BigDecimal): 3 usages
Map (java.util.Map): 3 usages
DateTime (org.joda.time.DateTime): 3 usages
LocalDate (org.joda.time.LocalDate): 3 usages
ArrayList (java.util.ArrayList): 2 usages
Interval (org.joda.time.Interval): 2 usages
Account (org.killbill.billing.account.api.Account): 2 usages
AccountData (org.killbill.billing.account.api.AccountData): 2 usages
ExpectedInvoiceItemCheck (org.killbill.billing.beatrix.util.InvoiceChecker.ExpectedInvoiceItemCheck): 2 usages
InternalCallContext (org.killbill.billing.callcontext.InternalCallContext): 2 usages
DefaultEntitlement (org.killbill.billing.entitlement.api.DefaultEntitlement): 2 usages
Invoice (org.killbill.billing.invoice.api.Invoice): 2 usages