Use of org.skife.jdbi.v2.tweak.HandleCallback in project druid by druid-io.
The class JDBCExtractionNamespaceCacheFactory, method populateCache.
@Override
@Nullable
public CacheScheduler.VersionedCache populateCache(final JDBCExtractionNamespace namespace, final CacheScheduler.EntryImpl<JDBCExtractionNamespace> entryId, final String lastVersion, final CacheScheduler scheduler) {
  final long lastCheck = lastVersion == null ? JodaUtils.MIN_INSTANT : Long.parseLong(lastVersion);
  final Long lastDBUpdate = lastUpdates(entryId, namespace);
  if (lastDBUpdate != null && lastDBUpdate <= lastCheck) {
    return null;
  }
  final long dbQueryStart = System.currentTimeMillis();
  final DBI dbi = ensureDBI(entryId, namespace);
  final String table = namespace.getTable();
  final String valueColumn = namespace.getValueColumn();
  final String keyColumn = namespace.getKeyColumn();
  LOG.debug("Updating %s", entryId);
  final List<Pair<String, String>> pairs = dbi.withHandle(new HandleCallback<List<Pair<String, String>>>() {
    @Override
    public List<Pair<String, String>> withHandle(Handle handle) throws Exception {
      final String query = String.format("SELECT %s, %s FROM %s", keyColumn, valueColumn, table);
      return handle.createQuery(query)
                   .map(new ResultSetMapper<Pair<String, String>>() {
                     @Override
                     public Pair<String, String> map(final int index, final ResultSet r, final StatementContext ctx) throws SQLException {
                       return new Pair<>(r.getString(keyColumn), r.getString(valueColumn));
                     }
                   })
                   .list();
    }
  });
  final String newVersion;
  if (lastDBUpdate != null) {
    newVersion = lastDBUpdate.toString();
  } else {
    newVersion = String.format("%d", dbQueryStart);
  }
  final CacheScheduler.VersionedCache versionedCache = scheduler.createVersionedCache(entryId, newVersion);
  try {
    final Map<String, String> cache = versionedCache.getCache();
    for (Pair<String, String> pair : pairs) {
      cache.put(pair.lhs, pair.rhs);
    }
    LOG.info("Finished loading %d values for %s", cache.size(), entryId);
    return versionedCache;
  } catch (Throwable t) {
    try {
      versionedCache.close();
    } catch (Exception e) {
      t.addSuppressed(e);
    }
    throw t;
  }
}
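The core pattern above is dbi.withHandle(...) with an anonymous HandleCallback: the withHandle method runs against an open Handle, and its return value is handed back to the caller while JDBI takes care of closing the handle. A minimal, self-contained sketch of that pattern, assuming a hypothetical lookup_table with columns k and v (none of these names come from Druid), could look like this:

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.AbstractMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.tweak.HandleCallback;
import org.skife.jdbi.v2.tweak.ResultSetMapper;

public class LookupLoader {
    // Loads a hypothetical two-column table into a map, using the same
    // withHandle + ResultSetMapper shape as populateCache above.
    public static Map<String, String> load(final DBI dbi) {
        final List<Map.Entry<String, String>> rows = dbi.withHandle(new HandleCallback<List<Map.Entry<String, String>>>() {
            @Override
            public List<Map.Entry<String, String>> withHandle(final Handle handle) throws Exception {
                return handle.createQuery("SELECT k, v FROM lookup_table")
                             .map(new ResultSetMapper<Map.Entry<String, String>>() {
                                 @Override
                                 public Map.Entry<String, String> map(final int index, final ResultSet r, final StatementContext ctx) throws SQLException {
                                     return new AbstractMap.SimpleEntry<>(r.getString("k"), r.getString("v"));
                                 }
                             })
                             .list();
            }
        });
        final Map<String, String> cache = new HashMap<>();
        for (final Map.Entry<String, String> row : rows) {
            cache.put(row.getKey(), row.getValue());
        }
        return cache;
    }
}

If the query or the mapper throws, withHandle surfaces the failure as a CallbackFailedException and still closes the handle, so the caller does not need its own finally block for the connection.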
Use of org.skife.jdbi.v2.tweak.HandleCallback in project killbill by killbill.
The class TestInternalCallContextFactory, method testCreateInternalCallContextWithAccountRecordIdFromAccountObjectType.
@Test(groups = "slow")
public void testCreateInternalCallContextWithAccountRecordIdFromAccountObjectType() throws Exception {
    final UUID accountId = UUID.randomUUID();
    final Long accountRecordId = 19384012L;
    dbi.withHandle(new HandleCallback<Void>() {
        @Override
        public Void withHandle(final Handle handle) throws Exception {
            // Note: we always create an accounts table, see MysqlTestingHelper
            handle.execute("insert into accounts (record_id, id, email, name, first_name_length, is_notified_for_invoices, created_date, created_by, updated_date, updated_by) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                           accountRecordId, accountId.toString(), "yo@t.com", "toto", 4, false, new Date(), "i", new Date(), "j");
            return null;
        }
    });

    final InternalCallContext context = internalCallContextFactory.createInternalCallContext(accountId, ObjectType.ACCOUNT, callContext);

    // The account record id should have been looked up in the accounts table
    Assert.assertEquals(context.getAccountRecordId(), accountRecordId);
    verifyInternalCallContext(context);
}
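The assertion on getAccountRecordId() relies on a lookup of record_id in the accounts table. A hedged sketch of what such a lookup can look like with the same HandleCallback pattern follows; the helper is not part of the Kill Bill test, the table and column names simply mirror the insert above, and LongMapper comes from org.skife.jdbi.v2.util:

// Hypothetical read-side counterpart to the insert above; not taken from Kill Bill.
final Long recordId = dbi.withHandle(new HandleCallback<Long>() {
    @Override
    public Long withHandle(final Handle handle) throws Exception {
        return handle.createQuery("select record_id from accounts where id = :id")
                     .bind("id", accountId.toString())
                     .map(LongMapper.FIRST)
                     .first();
    }
});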
Use of org.skife.jdbi.v2.tweak.HandleCallback in project killbill by killbill.
The class TestDefaultCustomFieldUserApi, method testSaveCustomFieldWithAccountRecordId.
@Test(groups = "slow")
public void testSaveCustomFieldWithAccountRecordId() throws Exception {
    final UUID accountId = UUID.randomUUID();
    final Long accountRecordId = 19384012L;
    dbi.withHandle(new HandleCallback<Void>() {
        @Override
        public Void withHandle(final Handle handle) throws Exception {
            // Note: we always create an accounts table, see MysqlTestingHelper
            handle.execute("insert into accounts (record_id, id, email, name, first_name_length, is_notified_for_invoices, created_date, created_by, updated_date, updated_by) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                           accountRecordId, accountId.toString(), "yo@t.com", "toto", 4, false, new Date(), "i", new Date(), "j");
            return null;
        }
    });

    checkPagination(0);

    final String cfName = UUID.randomUUID().toString().substring(1, 4);
    final String cfValue = UUID.randomUUID().toString().substring(1, 4);
    final CustomField customField = new StringCustomField(cfName, cfValue, ObjectType.ACCOUNT, accountId, callContext.getCreatedDate());
    eventsListener.pushExpectedEvent(NextEvent.CUSTOM_FIELD);
    customFieldUserApi.addCustomFields(ImmutableList.<CustomField>of(customField), callContext);
    assertListenerStatus();

    checkPagination(1);

    // Verify the field was saved
    final List<CustomField> customFields = customFieldUserApi.getCustomFieldsForObject(accountId, ObjectType.ACCOUNT, callContext);
    Assert.assertEquals(customFields.size(), 1);
    Assert.assertEquals(customFields.get(0).getFieldName(), customField.getFieldName());
    Assert.assertEquals(customFields.get(0).getFieldValue(), customField.getFieldValue());
    Assert.assertEquals(customFields.get(0).getObjectId(), customField.getObjectId());
    Assert.assertEquals(customFields.get(0).getObjectType(), customField.getObjectType());

    // Verify the account_record_id was populated
    dbi.withHandle(new HandleCallback<Void>() {
        @Override
        public Void withHandle(final Handle handle) throws Exception {
            final List<Map<String, Object>> values = handle.select("select account_record_id from custom_fields where object_id = ?", accountId.toString());
            Assert.assertEquals(values.size(), 1);
            Assert.assertEquals(values.get(0).keySet().size(), 1);
            Assert.assertEquals(Long.valueOf(values.get(0).get("account_record_id").toString()), accountRecordId);
            return null;
        }
    });

    eventsListener.pushExpectedEvent(NextEvent.CUSTOM_FIELD);
    customFieldUserApi.removeCustomFields(customFields, callContext);
    assertListenerStatus();
    List<CustomField> remainingCustomFields = customFieldUserApi.getCustomFieldsForObject(accountId, ObjectType.ACCOUNT, callContext);
    Assert.assertEquals(remainingCustomFields.size(), 0);

    checkPagination(0);

    // Add the custom field again
    final CustomField newCustomField = new StringCustomField(cfName, cfValue, ObjectType.ACCOUNT, accountId, callContext.getCreatedDate());
    eventsListener.pushExpectedEvent(NextEvent.CUSTOM_FIELD);
    customFieldUserApi.addCustomFields(ImmutableList.<CustomField>of(newCustomField), callContext);
    assertListenerStatus();
    remainingCustomFields = customFieldUserApi.getCustomFieldsForObject(accountId, ObjectType.ACCOUNT, callContext);
    Assert.assertEquals(remainingCustomFields.size(), 1);

    checkPagination(1);

    // Delete again
    eventsListener.pushExpectedEvent(NextEvent.CUSTOM_FIELD);
    customFieldUserApi.removeCustomFields(remainingCustomFields, callContext);
    assertListenerStatus();
    remainingCustomFields = customFieldUserApi.getCustomFieldsForObject(accountId, ObjectType.ACCOUNT, callContext);
    Assert.assertEquals(remainingCustomFields.size(), 0);

    checkPagination(0);
}
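Each withHandle block above opens its own handle, which is fine for independent setup and verification steps. When several statements need to share one transaction, JDBI v2 also offers dbi.inTransaction with a TransactionCallback. A minimal sketch, not taken from this test (the second statement and its values are purely illustrative; the column list mirrors the insert above):

dbi.inTransaction(new TransactionCallback<Void>() {
    @Override
    public Void inTransaction(final Handle handle, final TransactionStatus status) throws Exception {
        handle.execute("insert into accounts (record_id, id, email, name, first_name_length, is_notified_for_invoices, created_date, created_by, updated_date, updated_by) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                       accountRecordId, accountId.toString(), "yo@t.com", "toto", 4, false, new Date(), "i", new Date(), "j");
        // Both statements run on the same handle and commit (or roll back) together.
        handle.execute("update accounts set name = ? where record_id = ?", "tata", accountRecordId);
        return null;
    }
});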
Use of org.skife.jdbi.v2.tweak.HandleCallback in project killbill by killbill.
The class TestDefaultTagUserApi, method testSaveTagWithAccountRecordId.
@Test(groups = "slow")
public void testSaveTagWithAccountRecordId() throws Exception {
    final UUID accountId = UUID.randomUUID();
    final Long accountRecordId = 19384012L;
    dbi.withHandle(new HandleCallback<Void>() {
        @Override
        public Void withHandle(final Handle handle) throws Exception {
            // Note: we always create an accounts table, see MysqlTestingHelper
            handle.execute("insert into accounts (record_id, id, email, name, first_name_length, is_notified_for_invoices, created_date, created_by, updated_date, updated_by) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                           accountRecordId, accountId.toString(), "yo@t.com", "toto", 4, false, new Date(), "i", new Date(), "j");
            return null;
        }
    });

    checkPagination(0);

    eventsListener.pushExpectedEvent(NextEvent.TAG);
    tagUserApi.addTags(accountId, ObjectType.ACCOUNT, ImmutableList.<UUID>of(ControlTagType.WRITTEN_OFF.getId()), callContext);
    assertListenerStatus();

    checkPagination(1);

    // Verify the tag was saved
    final List<Tag> tags = tagUserApi.getTagsForObject(accountId, ObjectType.ACCOUNT, true, callContext);
    Assert.assertEquals(tags.size(), 1);
    Assert.assertEquals(tags.get(0).getTagDefinitionId(), ControlTagType.WRITTEN_OFF.getId());
    Assert.assertEquals(tags.get(0).getObjectId(), accountId);
    Assert.assertEquals(tags.get(0).getObjectType(), ObjectType.ACCOUNT);

    // Verify the account_record_id was populated
    dbi.withHandle(new HandleCallback<Void>() {
        @Override
        public Void withHandle(final Handle handle) throws Exception {
            final List<Map<String, Object>> values = handle.select("select account_record_id from tags where object_id = ?", accountId.toString());
            Assert.assertEquals(values.size(), 1);
            Assert.assertEquals(values.get(0).keySet().size(), 1);
            Assert.assertEquals(Long.valueOf(values.get(0).get("account_record_id").toString()), accountRecordId);
            return null;
        }
    });

    eventsListener.pushExpectedEvent(NextEvent.TAG);
    tagUserApi.removeTags(accountId, ObjectType.ACCOUNT, ImmutableList.<UUID>of(ControlTagType.WRITTEN_OFF.getId()), callContext);
    assertListenerStatus();
    List<Tag> remainingTags = tagUserApi.getTagsForObject(accountId, ObjectType.ACCOUNT, false, callContext);
    Assert.assertEquals(remainingTags.size(), 0);

    checkPagination(0);

    // Add the tag again
    eventsListener.pushExpectedEvent(NextEvent.TAG);
    tagUserApi.addTags(accountId, ObjectType.ACCOUNT, ImmutableList.<UUID>of(ControlTagType.WRITTEN_OFF.getId()), callContext);
    assertListenerStatus();
    remainingTags = tagUserApi.getTagsForObject(accountId, ObjectType.ACCOUNT, false, callContext);
    Assert.assertEquals(remainingTags.size(), 1);

    checkPagination(1);

    // Delete again
    eventsListener.pushExpectedEvent(NextEvent.TAG);
    tagUserApi.removeTags(accountId, ObjectType.ACCOUNT, ImmutableList.<UUID>of(ControlTagType.WRITTEN_OFF.getId()), callContext);
    assertListenerStatus();
    remainingTags = tagUserApi.getTagsForObject(accountId, ObjectType.ACCOUNT, false, callContext);
    Assert.assertEquals(remainingTags.size(), 0);

    checkPagination(0);
}
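All three Kill Bill tests above open with the same anonymous HandleCallback that seeds a test account. A hypothetical helper (not present in Kill Bill) that factors that setup out of the test bodies could look like this, assuming the usual dbi field from the test base class:

// Hypothetical helper, not part of the Kill Bill test classes.
private void insertTestAccount(final Long accountRecordId, final UUID accountId) {
    dbi.withHandle(new HandleCallback<Void>() {
        @Override
        public Void withHandle(final Handle handle) throws Exception {
            handle.execute("insert into accounts (record_id, id, email, name, first_name_length, is_notified_for_invoices, created_date, created_by, updated_date, updated_by) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                           accountRecordId, accountId.toString(), "yo@t.com", "toto", 4, false, new Date(), "i", new Date(), "j");
            return null;
        }
    });
}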
Use of org.skife.jdbi.v2.tweak.HandleCallback in project druid by druid-io.
The class HadoopConverterJobTest, method setUp.
@Before
public void setUp() throws Exception {
  final MetadataStorageUpdaterJobSpec metadataStorageUpdaterJobSpec = new MetadataStorageUpdaterJobSpec() {
    @Override
    public String getSegmentTable() {
      return derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable();
    }

    @Override
    public MetadataStorageConnectorConfig get() {
      return derbyConnectorRule.getMetadataConnectorConfig();
    }
  };
  final File scratchFileDir = temporaryFolder.newFolder();
  storageLocProperty = System.getProperty(STORAGE_PROPERTY_KEY);
  tmpSegmentDir = temporaryFolder.newFolder();
  System.setProperty(STORAGE_PROPERTY_KEY, tmpSegmentDir.getAbsolutePath());
  final URL url = Preconditions.checkNotNull(Query.class.getClassLoader().getResource("druid.sample.tsv"));
  final File tmpInputFile = temporaryFolder.newFile();
  FileUtils.retryCopy(
      new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
          return url.openStream();
        }
      },
      tmpInputFile, FileUtils.IS_EXCEPTION, 3
  );
  final HadoopDruidIndexerConfig hadoopDruidIndexerConfig = new HadoopDruidIndexerConfig(
      new HadoopIngestionSpec(
          new DataSchema(
              DATASOURCE,
              HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
                  new StringInputRowParser(
                      new DelimitedParseSpec(
                          new TimestampSpec("ts", "iso", null),
                          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList(TestIndex.DIMENSIONS)), null, null),
                          "\t", "", Arrays.asList(TestIndex.COLUMNS)
                      ),
                      null
                  ),
                  Map.class
              ),
              new AggregatorFactory[]{
                  new DoubleSumAggregatorFactory(TestIndex.METRICS[0], TestIndex.METRICS[0]),
                  new HyperUniquesAggregatorFactory("quality_uniques", "quality")
              },
              new UniformGranularitySpec(Granularities.MONTH, Granularities.DAY, ImmutableList.<Interval>of(interval)),
              HadoopDruidIndexerConfig.JSON_MAPPER
          ),
          new HadoopIOConfig(
              ImmutableMap.<String, Object>of("type", "static", "paths", tmpInputFile.getAbsolutePath()),
              metadataStorageUpdaterJobSpec,
              tmpSegmentDir.getAbsolutePath()
          ),
          new HadoopTuningConfig(
              scratchFileDir.getAbsolutePath(),
              null, null, null, null, null,
              false, false, false, false,
              null, false, false, null, null, null, false, false
          )
      )
  );
  metadataStorageTablesConfigSupplier = derbyConnectorRule.metadataTablesConfigSupplier();
  connector = derbyConnectorRule.getConnector();
  try {
    connector.getDBI().withHandle(new HandleCallback<Void>() {
      @Override
      public Void withHandle(Handle handle) throws Exception {
        handle.execute("DROP TABLE druid_segments");
        return null;
      }
    });
  } catch (CallbackFailedException e) {
    // Who cares
  }
  List<Jobby> jobs = ImmutableList.of(
      new Jobby() {
        @Override
        public boolean run() {
          connector.createSegmentTable(metadataStorageUpdaterJobSpec.getSegmentTable());
          return true;
        }
      },
      new HadoopDruidDetermineConfigurationJob(hadoopDruidIndexerConfig),
      new HadoopDruidIndexerJob(hadoopDruidIndexerConfig, new SQLMetadataStorageUpdaterJobHandler(connector))
  );
  JobHelper.runJobs(jobs, hadoopDruidIndexerConfig);
}
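Note how the DROP TABLE call is wrapped in a try/catch: any exception thrown inside the callback is rethrown by withHandle as a CallbackFailedException, so that is the type the test setup catches when the table may not exist yet. A small sketch of the same drop-if-present pattern, assuming a hypothetical dbi instance and table name "my_table":

// Minimal sketch of the drop-if-present pattern used above; names are hypothetical.
try {
    dbi.withHandle(new HandleCallback<Void>() {
        @Override
        public Void withHandle(final Handle handle) throws Exception {
            handle.execute("DROP TABLE my_table"); // fails if the table does not exist
            return null;
        }
    });
} catch (CallbackFailedException e) {
    // The table was not there yet; safe to ignore in test setup.
}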