Use of org.skife.jdbi.v2.sqlobject.Bind in project SpinalTap by airbnb.
The class MysqlSchemaStore, method getLatest.
@Override
public MysqlTableSchema getLatest(@NotNull final String database, @NotNull final String table) {
    try (Handle handle = jdbi.open()) {
        String schemaInfo = MysqlSchemaUtil.STRING_RETRYER.call(
                () -> handle.createQuery(String.format(GET_LATEST_SCHEMA_QUERY, source))
                            .bind("database", database)
                            .bind("table", table)
                            .map(StringColumnMapper.INSTANCE)
                            .first());
        metrics.schemaStoreGetSuccess(database, table);
        return deserializeSchemaInfo(schemaInfo);
    } catch (Exception ex) {
        log.error(String.format("Failed to get latest schema of database: %s table: %s. Does it exist?", database, table), ex);
        metrics.schemaStoreGetFailure(database, table, ex);
        Throwables.throwIfUnchecked(ex);
        throw new RuntimeException(ex);
    }
}
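Note that this example binds parameters fluently on the query object rather than through the @Bind annotation itself. For contrast, a minimal sketch of the annotation-based SQL Object form; the DAO name, table, and column names here are hypothetical, not SpinalTap's:

import org.skife.jdbi.v2.sqlobject.Bind;
import org.skife.jdbi.v2.sqlobject.SqlQuery;

// Hypothetical DAO; jdbi v2 generates the implementation at runtime.
public interface SchemaDao {
    // :database and :table are bound from the @Bind-annotated parameters.
    @SqlQuery("SELECT schema_info FROM schema_store" +
              " WHERE database_name = :database AND table_name = :table" +
              " ORDER BY version DESC LIMIT 1")
    String getLatestSchemaInfo(@Bind("database") String database, @Bind("table") String table);
}

A DAO instance would then be obtained with, for example, jdbi.onDemand(SchemaDao.class).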
Use of org.skife.jdbi.v2.sqlobject.Bind in project providence by morimekta.
The class MessageInserterTest, method testDefaultMapping.
@Test
public void testDefaultMapping() {
    generator.setFillRate(1.0).setMaxCollectionItems(16);
    OptionalFields expected = generator.generate(OptionalFields.kDescriptor)
                                       .mutate()
                                       .setId(1234)
                                       .setTimestampS((int) clock.instant().getEpochSecond())
                                       .setTimestampMs(clock.instant().getEpochSecond() * 1000)
                                       .build();
    OptionalFields empty = OptionalFields.builder().setId(2345).build();
    try (Handle handle = db.getDBI().open()) {
        INSERTER.execute(handle, expected, empty);
        OptionalFields val = handle.createQuery("SELECT * FROM mappings.default_mappings WHERE id = :id")
                                   .bind("id", expected.getId())
                                   .map(ProvidenceJdbi.toMessage(OptionalFields.kDescriptor,
                                                                 ProvidenceJdbi.columnsFromAllFields(),
                                                                 ProvidenceJdbi.withColumn("compact", MESSAGE),
                                                                 ProvidenceJdbi.withColumn("other_message", CLOB_MESSAGE)))
                                   .first();
        OptionalFields val2 = handle.createQuery("SELECT * FROM mappings.default_mappings WHERE id = :id")
                                    .bind("id", empty.getId())
                                    .map(ProvidenceJdbi.toMessage(OptionalFields.kDescriptor,
                                                                  ProvidenceJdbi.columnsFromAllFields(),
                                                                  ProvidenceJdbi.withColumn("compact", MESSAGE),
                                                                  ProvidenceJdbi.withColumn("other_message", CLOB_MESSAGE)))
                                    .first();
        assertThat(val, is(equalToMessage(expected)));
        assertThat(val2, is(equalToMessage(empty)));
    }
}
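INSERTER here encapsulates the actual INSERT statement. As a rough idea of what such a call amounts to in plain jdbi v2 (the column list below is an assumption for illustration, not the MessageInserter's real mapping):

// Minimal sketch, assuming only id and name columns; the real inserter
// derives the full column list from the message descriptor.
handle.createStatement("INSERT INTO mappings.default_mappings (id, name) VALUES (:id, :name)")
      .bind("id", expected.getId())
      .bind("name", expected.getName())
      .execute();

Also worth noting: first() returns the first mapped row, or null when the query matches nothing.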
Use of org.skife.jdbi.v2.sqlobject.Bind in project providence by morimekta.
The class MessageRowMapperTest, method testDefaultMapping.
@Test
public void testDefaultMapping() {
    generator.setFillRate(1.0)
             .setMaxCollectionItems(16)
             .withGenerator(CompactFields.kDescriptor, g -> {
                 g.setValueGenerator(CompactFields._Field.NAME, ctx -> ctx.getFairy().textProducer().latinWord());
                 g.setValueGenerator(CompactFields._Field.LABEL, ctx -> ctx.getFairy().textProducer().word());
             })
             .withGenerator(NormalFields.kDescriptor, g -> {
                 g.setValueGenerator(NormalFields._Field.NAME, ctx -> ctx.getFairy().textProducer().latinWord());
                 g.setValueGenerator(NormalFields._Field.LABEL, ctx -> ctx.getFairy().textProducer().word());
             });
    OptionalFields expected = generator.generate(OptionalFields.kDescriptor)
                                       .mutate()
                                       .setId(1234)
                                       .setTimestampS((int) clock.instant().getEpochSecond())
                                       .setTimestampMs(clock.instant().getEpochSecond() * 1000)
                                       .build();
    OptionalFields empty = OptionalFields.builder().setId(2345).build();
    try (Handle handle = db.getDBI().open()) {
        handle.createStatement("INSERT INTO mappings.default_mappings (" +
                               " id, present, tiny, small, medium, large, real," +
                               " fib, name, data, compact," +
                               " timestamp_s, timestamp_ms," +
                               " binary_message, blob_message, other_message," +
                               " blob_data, base64_data, int_bool" +
                               ") VALUES (" +
                               " :e.id, :e.present, :e.tiny, :e.small, :e.medium, :e.large, :e.real," +
                               " :e.fib, :e.name, :e.data, :e.message," +
                               " :timestamp_s, :e.timestamp_ms," +
                               " :e.binary_message, :e.blob_message, :e.clob_message," +
                               " :e.blob_data, :e.base64_data, :e.int_bool" +
                               ")")
              .bind("timestamp_s", ProvidenceJdbi.toField(expected, TIMESTAMP_S, Types.TIMESTAMP))
              .bindNamedArgumentFinder(ProvidenceJdbi.forMessage("e", expected,
                                                                 ProvidenceJdbi.withType(TIMESTAMP_MS, Types.TIMESTAMP),
                                                                 ProvidenceJdbi.withType(BINARY_MESSAGE, Types.BINARY),
                                                                 ProvidenceJdbi.withType(BLOB_MESSAGE, Types.BLOB),
                                                                 ProvidenceJdbi.withType(CLOB_MESSAGE, Types.CLOB),
                                                                 ProvidenceJdbi.withType(BLOB_DATA, Types.BLOB),
                                                                 ProvidenceJdbi.withType(BASE64_DATA, Types.VARCHAR)))
              .execute();
        handle.createStatement("INSERT INTO mappings.default_mappings (" +
                               " id, present, tiny, small, medium, large, real," +
                               " fib, name, data, compact," +
                               " timestamp_s, timestamp_ms," +
                               " binary_message, blob_message, other_message," +
                               " blob_data, base64_data, int_bool" +
                               ") VALUES (" +
                               " :id, :present, :tiny, :small, :medium, :large, :real," +
                               " :fib, :name, :data, :message," +
                               " :timestamp_s, :timestamp_ms," +
                               " :binary_message, :blob_message, :clob_message," +
                               " :blob_data, :base64_data, :int_bool" +
                               ")")
              .bind("timestamp_s", ProvidenceJdbi.toField(empty, TIMESTAMP_S, Types.TIMESTAMP))
              .bindNamedArgumentFinder(ProvidenceJdbi.forMessage(empty,
                                                                 ProvidenceJdbi.withType(TIMESTAMP_MS, Types.TIMESTAMP),
                                                                 ProvidenceJdbi.withType(BINARY_MESSAGE, Types.BINARY),
                                                                 ProvidenceJdbi.withType(BLOB_MESSAGE, Types.BLOB),
                                                                 ProvidenceJdbi.withType(CLOB_MESSAGE, Types.CLOB),
                                                                 ProvidenceJdbi.withType(BLOB_DATA, Types.BLOB),
                                                                 ProvidenceJdbi.withType(BASE64_DATA, Types.VARCHAR),
                                                                 ProvidenceJdbi.withType(INT_BOOL, Types.INTEGER)))
              .execute();
        OptionalFields val = handle.createQuery("SELECT m.* FROM mappings.default_mappings m WHERE id = :id")
                                   .bind("id", ProvidenceJdbi.toField(expected, ID))
                                   .map(ProvidenceJdbi.toMessage("default_mappings", OptionalFields.kDescriptor,
                                                                 ProvidenceJdbi.columnsFromAllFields(),
                                                                 ProvidenceJdbi.withColumn("compact", MESSAGE),
                                                                 ProvidenceJdbi.withColumn("other_message", CLOB_MESSAGE)))
                                   .first();
        OptionalFields val2 = handle.createQuery("SELECT * FROM mappings.default_mappings WHERE id = :id")
                                    .bind("id", ProvidenceJdbi.toField(empty, ID))
                                    .map(ProvidenceJdbi.toMessage(OptionalFields.kDescriptor,
                                                                  ProvidenceJdbi.columnsFromAllFields(),
                                                                  ProvidenceJdbi.withColumn("compact", MESSAGE),
                                                                  ProvidenceJdbi.withColumn("other_message", CLOB_MESSAGE)))
                                    .first();
        assertThat(val, is(equalToMessage(expected)));
        assertThat(val2, is(equalToMessage(empty)));
    }
}
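ProvidenceJdbi.forMessage supplies a NamedArgumentFinder, which lets a single object answer for many named placeholders at bind time. A minimal hand-rolled finder backed by a Map (a sketch, not the providence implementation) could look like this:

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Map;
import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.tweak.Argument;
import org.skife.jdbi.v2.tweak.NamedArgumentFinder;

// Resolves :name placeholders from a Map; returning null defers to other binders.
public class MapArgumentFinder implements NamedArgumentFinder {
    private final Map<String, Object> values;

    public MapArgumentFinder(Map<String, Object> values) {
        this.values = values;
    }

    @Override
    public Argument find(final String name) {
        if (!values.containsKey(name)) {
            return null;
        }
        final Object value = values.get(name);
        return new Argument() {
            @Override
            public void apply(int position, PreparedStatement statement, StatementContext ctx) throws SQLException {
                statement.setObject(position, value);
            }
        };
    }
}

It would be attached the same way as above: createStatement(...).bindNamedArgumentFinder(new MapArgumentFinder(values)).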
Use of org.skife.jdbi.v2.sqlobject.Bind in project druid by druid-io.
The class SqlSegmentsMetadataManager, method retrieveUnusedSegments.
private List<DataSegment> retrieveUnusedSegments(final String dataSource, final Set<String> segmentIds, final Handle handle)
        throws UnknownSegmentIdsException {
    List<String> unknownSegmentIds = new ArrayList<>();
    List<DataSegment> segments = segmentIds.stream().map(segmentId -> {
        Iterator<DataSegment> segmentResultIterator = handle
                .createQuery(StringUtils.format("SELECT used, payload FROM %1$s WHERE dataSource = :dataSource AND id = :id", getSegmentsTable()))
                .bind("dataSource", dataSource)
                .bind("id", segmentId)
                .map((int index, ResultSet resultSet, StatementContext context) -> {
                    try {
                        if (!resultSet.getBoolean("used")) {
                            return jsonMapper.readValue(resultSet.getBytes("payload"), DataSegment.class);
                        } else {
                            // We emit nulls for used segments. They are filtered out below in this method.
                            return null;
                        }
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                })
                .iterator();
        if (!segmentResultIterator.hasNext()) {
            unknownSegmentIds.add(segmentId);
            return null;
        } else {
            @Nullable DataSegment segment = segmentResultIterator.next();
            if (segmentResultIterator.hasNext()) {
                log.error("There is more than one row corresponding to segment id [%s] in data source [%s] in the database", segmentId, dataSource);
            }
            return segment;
        }
    }).filter(Objects::nonNull) // Filter nulls corresponding to used segments.
      .collect(Collectors.toList());
    if (!unknownSegmentIds.isEmpty()) {
        throw new UnknownSegmentIdsException(unknownSegmentIds);
    }
    return segments;
}
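The lambda passed to map(...) above is an inline jdbi v2 ResultSetMapper. The same mapping as a standalone class, sketched under the same used/payload column assumptions (druid injects a configured jsonMapper; a plain ObjectMapper stands in here):

import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.tweak.ResultSetMapper;

// Maps a (used, payload) row to a DataSegment; used rows map to null and are
// expected to be filtered out by the caller.
public class UnusedSegmentMapper implements ResultSetMapper<DataSegment> {
    private final ObjectMapper jsonMapper = new ObjectMapper();

    @Override
    public DataSegment map(int index, ResultSet r, StatementContext ctx) throws SQLException {
        if (r.getBoolean("used")) {
            return null;
        }
        try {
            return jsonMapper.readValue(r.getBytes("payload"), DataSegment.class);
        } catch (IOException e) {
            throw new SQLException(e);
        }
    }
}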
Use of org.skife.jdbi.v2.sqlobject.Bind in project killbill by killbill.
The class DatabaseExportDao, method exportDataForAccountAndTable.
private void exportDataForAccountAndTable(final DatabaseExportOutputStream out, final List<ColumnInfo> columnsForTable, final InternalTenantContext context) {
    TableType tableType = TableType.OTHER;
    final String tableName = columnsForTable.get(0).getTableName();
    // Ignore casing (for H2)
    if (TableName.ACCOUNT.getTableName().equalsIgnoreCase(tableName)) {
        tableType = TableType.KB_ACCOUNT;
    } else if (TableName.ACCOUNT_HISTORY.getTableName().equalsIgnoreCase(tableName)) {
        tableType = TableType.KB_ACCOUNT_HISTORY;
    }
    boolean firstColumn = true;
    final StringBuilder queryBuilder = new StringBuilder("select ");
    for (final ColumnInfo column : columnsForTable) {
        if (!firstColumn) {
            queryBuilder.append(", ");
        } else {
            firstColumn = false;
        }
        queryBuilder.append(column.getColumnName());
        if (tableType == TableType.OTHER) {
            // Ignore casing (for H2)
            if (column.getColumnName().equalsIgnoreCase(TableType.KB_PER_ACCOUNT.getAccountRecordIdColumnName())) {
                tableType = TableType.KB_PER_ACCOUNT;
            } else if (column.getColumnName().equalsIgnoreCase(TableType.NOTIFICATION.getAccountRecordIdColumnName())) {
                tableType = TableType.NOTIFICATION;
            }
        }
    }
    // Don't export non-account-specific tables
    if (tableType == TableType.OTHER) {
        return;
    }
    // Build the query - make sure to filter by account and tenant!
    queryBuilder.append(" from ").append(tableName)
                .append(" where ").append(tableType.getAccountRecordIdColumnName()).append(" = :accountRecordId")
                .append(" and ").append(tableType.getTenantRecordIdColumnName()).append(" = :tenantRecordId");
    // Notify the stream that we're about to write data for a different table
    out.newTable(tableName, columnsForTable);
    dbi.withHandle(new HandleCallback<Void>() {
        @Override
        public Void withHandle(final Handle handle) throws Exception {
            final ResultIterator<Map<String, Object>> iterator = handle.createQuery(queryBuilder.toString())
                                                                       .bind("accountRecordId", context.getAccountRecordId())
                                                                       .bind("tenantRecordId", context.getTenantRecordId())
                                                                       .iterator();
            try {
                while (iterator.hasNext()) {
                    final Map<String, Object> row = iterator.next();
                    for (final String k : row.keySet()) {
                        final Object value = row.get(k);
                        // See also LowerToCamelBeanMapper
                        if (value instanceof Blob) {
                            final Blob blob = (Blob) value;
                            // JDBC Blob/Clob offsets are 1-based
                            row.put(k, blob.getBytes(1, (int) blob.length()));
                        } else if (value instanceof Clob) {
                            // TODO Update LowerToCamelBeanMapper?
                            final Clob clob = (Clob) value;
                            row.put(k, clob.getSubString(1, (int) clob.length()));
                        }
                    }
                    try {
                        out.write(row);
                    } catch (final IOException e) {
                        logger.warn("Unable to write row: {}", row, e);
                        throw e;
                    }
                }
            } finally {
                iterator.close();
            }
            return null;
        }
    });
}
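A note on the dynamic query above: because the table and column names are assembled at runtime, they cannot be supplied as bound parameters; only the values (:accountRecordId, :tenantRecordId) are. Expressed as a static SQL Object method with @Bind, the same read might look like this sketch (DAO name and table are hypothetical):

import java.util.List;
import java.util.Map;
import org.skife.jdbi.v2.sqlobject.Bind;
import org.skife.jdbi.v2.sqlobject.SqlQuery;

// Hypothetical static equivalent; @Bind can only supply values, never identifiers,
// which is why the code above concatenates the table and column names instead.
public interface ExportDao {
    @SqlQuery("select * from account where account_record_id = :accountRecordId and tenant_record_id = :tenantRecordId")
    List<Map<String, Object>> readAccountRows(@Bind("accountRecordId") Long accountRecordId,
                                              @Bind("tenantRecordId") Long tenantRecordId);
}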