Use of io.debezium.config.Configuration in project debezium by debezium.

From the class MySqlConnectorReplicaIT, method beforeAll:
@BeforeClass
public static void beforeAll() throws InterruptedException {
    Testing.Print.enable();
    // We need to wait for the replica to catch up to the master
    Configuration system = Configuration.fromSystemProperties("");
    Configuration master = Configuration.create()
            .with("database.hostname", system.getString("database.hostname", "localhost"))
            .with("database.port", system.getInteger("database.port", 3306))
            .with("database.user", system.getString("database.user", "mysqluser"))
            .with("database.password", system.getString("database.password", "mysqlpw"))
            .build();
    Configuration replica = Configuration.create()
            .with("database.hostname", system.getString("database.hostname", "localhost"))
            .with("database.port", system.getInteger("database.replica.port", 4306))
            .with("database.user", system.getString("database.replica.user", "mysqlreplica"))
            .with("database.password", system.getString("database.replica.password", "mysqlpw"))
            .build();
    waitForGtidSetsToMatch(master, replica, 10, TimeUnit.SECONDS);
}
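The test above layers system-property overrides over hard-coded defaults: each with(...) call reads a system property and falls back to a literal when it is absent. A minimal standalone sketch of the same idiom, assuming only the Configuration API used above (the property names and default values here are illustrative):

import io.debezium.config.Configuration;

public class ConfigLayeringSketch {
    public static void main(String[] args) {
        // Read all JVM system properties; the empty prefix keeps the full key names.
        Configuration system = Configuration.fromSystemProperties("");
        // Running with -Ddatabase.hostname=db.example.com overrides the "localhost" fallback.
        Configuration db = Configuration.create()
                .with("database.hostname", system.getString("database.hostname", "localhost"))
                .with("database.port", system.getInteger("database.port", 3306))
                .build();
        db.forEach((key, value) -> System.out.println(key + " = " + value));
    }
}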
From the class BaseSourceTask, method start:
@Override
public final void start(Map<String, String> props) {
    if (context == null) {
        throw new ConnectException("Unexpected null context");
    }
    Configuration config = Configuration.from(props);
    if (!config.validateAndRecord(getAllConfigurationFields(), LOGGER::error)) {
        throw new ConnectException("Error configuring an instance of " + getClass().getSimpleName() + "; check the logs for details");
    }
    LOGGER.info("Starting " + getClass().getSimpleName() + " with configuration:");
    config.forEach((propName, propValue) -> {
        LOGGER.info(" {} = {}", propName, propValue);
    });
    start(config);
}
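Here validateAndRecord runs every Field's validators against the configuration and routes each failure message to the supplied consumer (LOGGER::error above), returning false if any validator failed. A minimal sketch of the same flow outside a connector, assuming Debezium's Field builder API; the option name "poll.interval.ms" and its constraints are hypothetical:

import java.util.HashMap;
import java.util.Map;

import io.debezium.config.Configuration;
import io.debezium.config.Field;

public class ValidationSketch {
    // Hypothetical option that must be a positive integer.
    private static final Field POLL_INTERVAL = Field.create("poll.interval.ms")
            .withDescription("How often to poll, in milliseconds")
            .withDefault(500)
            .withValidation(Field::isPositiveInteger);

    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("poll.interval.ms", "not-a-number"); // fails validation
        Configuration config = Configuration.from(props);
        if (!config.validateAndRecord(Field.setOf(POLL_INTERVAL), System.err::println)) {
            System.err.println("Invalid configuration; see messages above");
        }
    }
}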
From the class ByLogicalTableRouter, method configure:
@Override
public void configure(Map<String, ?> props) {
    Configuration config = Configuration.from(props);
    final Field.Set configFields = Field.setOf(TOPIC_REGEX, TOPIC_REPLACEMENT, KEY_FIELD_REGEX, KEY_FIELD_REPLACEMENT);
    if (!config.validateAndRecord(configFields, logger::error)) {
        throw new ConnectException("Unable to validate config.");
    }
    topicRegex = Pattern.compile(config.getString(TOPIC_REGEX));
    topicReplacement = config.getString(TOPIC_REPLACEMENT);
    String keyFieldRegexString = config.getString(KEY_FIELD_REGEX);
    if (keyFieldRegexString != null) {
        keyFieldRegexString = keyFieldRegexString.trim();
    }
    if (keyFieldRegexString != null && !keyFieldRegexString.isEmpty()) {
        // Compile the trimmed value rather than re-reading the raw config string
        keyFieldRegex = Pattern.compile(keyFieldRegexString);
        keyFieldReplacement = config.getString(KEY_FIELD_REPLACEMENT);
    }
    keyFieldName = config.getString(KEY_FIELD_NAME);
}
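Because configure(Map) takes plain string keys, the router can be exercised directly without a Connect worker. A sketch, assuming the TOPIC_REGEX and TOPIC_REPLACEMENT constants above resolve to Debezium's documented "topic.regex" and "topic.replacement" property names (the topic values are hypothetical):

import java.util.HashMap;
import java.util.Map;

import io.debezium.transforms.ByLogicalTableRouter;
import org.apache.kafka.connect.source.SourceRecord;

public class RouterConfigSketch {
    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        // Collapse all shard topics for one table into a single logical topic.
        props.put("topic.regex", "dbserver1\\.inventory\\.customers_shard_.*");
        props.put("topic.replacement", "dbserver1.inventory.customers");

        ByLogicalTableRouter<SourceRecord> router = new ByLogicalTableRouter<>();
        router.configure(props);
        // router.apply(record) now rewrites matching topic names.
    }
}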
From the class UnwrapFromEnvelope, method configure:
@Override
public void configure(final Map<String, ?> configs) {
    final Configuration config = Configuration.from(configs);
    final Field.Set configFields = Field.setOf(DROP_TOMBSTONES, DROP_DELETES);
    if (!config.validateAndRecord(configFields, logger::error)) {
        throw new ConnectException("Unable to validate config.");
    }
    dropTombstones = config.getBoolean(DROP_TOMBSTONES);
    dropDeletes = config.getBoolean(DROP_DELETES);
    // Delegate extraction of the "after" portion of the Debezium envelope
    // to the wrapped transformation.
    final Map<String, String> delegateConfig = new HashMap<>();
    delegateConfig.put("field", "after");
    delegate.configure(delegateConfig);
}
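The same configure path accepts worker-style string properties, so the transform can be set up in code. A minimal sketch, assuming the DROP_TOMBSTONES and DROP_DELETES constants resolve to the documented "drop.tombstones" and "drop.deletes" keys (the values are illustrative):

import java.util.HashMap;
import java.util.Map;

import io.debezium.transforms.UnwrapFromEnvelope;
import org.apache.kafka.connect.source.SourceRecord;

public class UnwrapConfigSketch {
    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("drop.tombstones", "true");  // discard null-valued tombstone records
        props.put("drop.deletes", "false");    // keep (unwrapped) delete events

        UnwrapFromEnvelope<SourceRecord> unwrap = new UnwrapFromEnvelope<>();
        unwrap.configure(props);
        // unwrap.apply(record) now returns only the "after" state of each envelope.
    }
}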
From the class KafkaDatabaseHistoryTest, method shouldIgnoreUnparseableMessages:
@Test
public void shouldIgnoreUnparseableMessages() throws Exception {
    // Create the empty topic ...
    kafka.createTopic(topicName, 1, 1);
    // Create invalid records
    final ProducerRecord<String, String> nullRecord = new ProducerRecord<>(topicName, PARTITION_NO, null, null);
    final ProducerRecord<String, String> emptyRecord = new ProducerRecord<>(topicName, PARTITION_NO, null, "");
    final ProducerRecord<String, String> noSourceRecord = new ProducerRecord<>(topicName, PARTITION_NO, null,
            "{\"position\":{\"filename\":\"my-txn-file.log\",\"position\":39},\"databaseName\":\"db1\",\"ddl\":\"DROP TABLE foo;\"}");
    final ProducerRecord<String, String> noPositionRecord = new ProducerRecord<>(topicName, PARTITION_NO, null,
            "{\"source\":{\"server\":\"my-server\"},\"databaseName\":\"db1\",\"ddl\":\"DROP TABLE foo;\"}");
    final ProducerRecord<String, String> invalidJSONRecord1 = new ProducerRecord<>(topicName, PARTITION_NO, null,
            "{\"source\":{\"server\":\"my-server\"},\"position\":{\"filename\":\"my-txn-file.log\",\"position\":39},\"databaseName\":\"db1\",\"ddl\":\"DROP TABLE foo;\"");
    final ProducerRecord<String, String> invalidJSONRecord2 = new ProducerRecord<>(topicName, PARTITION_NO, null,
            "\"source\":{\"server\":\"my-server\"},\"position\":{\"filename\":\"my-txn-file.log\",\"position\":39},\"databaseName\":\"db1\",\"ddl\":\"DROP TABLE foo;\"}");
    final ProducerRecord<String, String> invalidSQL = new ProducerRecord<>(topicName, PARTITION_NO, null,
            "{\"source\":{\"server\":\"my-server\"},\"position\":{\"filename\":\"my-txn-file.log\",\"position\":39},\"databaseName\":\"db1\",\"ddl\":\"xxxDROP TABLE foo;\"}");
    final Configuration intruderConfig = Configuration.create()
            .withDefault(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.brokerList())
            .withDefault(ProducerConfig.CLIENT_ID_CONFIG, "intruder")
            .withDefault(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
            .withDefault(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
            .build();
    try (final KafkaProducer<String, String> producer = new KafkaProducer<>(intruderConfig.asProperties())) {
        producer.send(nullRecord).get();
        producer.send(emptyRecord).get();
        producer.send(noSourceRecord).get();
        producer.send(noPositionRecord).get();
        producer.send(invalidJSONRecord1).get();
        producer.send(invalidJSONRecord2).get();
        producer.send(invalidSQL).get();
    }
    testHistoryTopicContent(true);
}
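The intruder producer is built through the same Configuration builder: withDefault only sets a key when no value is already present, and asProperties() converts the immutable Configuration into the java.util.Properties form the Kafka client constructors expect. A minimal sketch of that bridge, assuming a reachable broker at localhost:9092 (the address and topic name are hypothetical):

import java.util.Properties;

import io.debezium.config.Configuration;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class ProducerBridgeSketch {
    public static void main(String[] args) throws Exception {
        Configuration config = Configuration.create()
                .withDefault(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
                .withDefault(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
                .withDefault(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
                .build();
        // Bridge from Debezium's Configuration to the Kafka client API.
        Properties props = config.asProperties();
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            producer.send(new ProducerRecord<>("my-topic", "key", "value")).get();
        }
    }
}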