Use of org.apache.kafka.clients.admin.Config in project ksql by confluentinc.
The class KafkaTopicClientImpl, method topicConfig:
private Map<String, String> topicConfig(final String topicName, final boolean includeDefaults) {
  final ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, topicName);
  final List<ConfigResource> request = Collections.singletonList(resource);
  try {
    // Describe the topic's configuration, retrying transient failures.
    final Config config =
        executeWithRetries(() -> adminClient.describeConfigs(request).all()).get(resource);
    // Keep only dynamically set topic configs unless defaults were requested.
    return config.entries().stream()
        .filter(e -> includeDefaults
            || e.source().equals(ConfigEntry.ConfigSource.DYNAMIC_TOPIC_CONFIG))
        .collect(Collectors.toMap(ConfigEntry::name, ConfigEntry::value));
  } catch (final Exception e) {
    throw new KafkaResponseGetFailedException(
        "Failed to get config for Kafka Topic " + topicName, e);
  }
}
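The retry helper executeWithRetries is not part of this excerpt. A minimal sketch of what such a helper could look like, assuming a fixed number of attempts and retrying only on Kafka's RetriableException; the name matches the call above, but the body here is illustrative, not ksql's actual implementation:

import java.util.concurrent.ExecutionException;
import java.util.function.Supplier;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.errors.RetriableException;

// Illustrative retry wrapper: resolves the future and retries a few times
// when the failure is a transient (retriable) Kafka error.
private static <T> T executeWithRetries(final Supplier<KafkaFuture<T>> supplier) throws Exception {
  final int maxAttempts = 5; // assumed attempt limit
  Exception lastException = null;
  for (int attempt = 0; attempt < maxAttempts; attempt++) {
    try {
      return supplier.get().get();
    } catch (final ExecutionException e) {
      if (!(e.getCause() instanceof RetriableException)) {
        throw e;
      }
      lastException = e;
      Thread.sleep(100); // brief back-off before the next attempt
    }
  }
  throw lastException;
}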
Use of org.apache.kafka.clients.admin.Config in project debezium by debezium.
The class KafkaDatabaseHistory, method getKafkaBrokerConfig:
private Config getKafkaBrokerConfig(AdminClient admin) throws Exception {
  final Collection<Node> nodes = admin.describeCluster().nodes()
      .get(KAFKA_QUERY_TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
  if (nodes.isEmpty()) {
    throw new ConnectException("No brokers available to obtain default settings");
  }
  // Use the first broker in the cluster as the source of default settings.
  String nodeId = nodes.iterator().next().idString();
  Set<ConfigResource> resources =
      Collections.singleton(new ConfigResource(ConfigResource.Type.BROKER, nodeId));
  final Map<ConfigResource, Config> configs = admin.describeConfigs(resources).all()
      .get(KAFKA_QUERY_TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
  if (configs.isEmpty()) {
    throw new ConnectException("No configs have been received");
  }
  return configs.values().iterator().next();
}
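For context, the Config object returned here is a collection of ConfigEntry values keyed by name. A hedged, standalone usage sketch that reads one broker setting; the bootstrap address, the chosen setting, and the way the private helper is called here are assumptions for illustration, and the surrounding method is assumed to declare throws Exception:

import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.Config;
import org.apache.kafka.clients.admin.ConfigEntry;

// Illustrative only: connect to a local broker and print one default setting.
final Properties props = new Properties();
props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed address
try (AdminClient admin = AdminClient.create(props)) {
  final Config brokerConfig = getKafkaBrokerConfig(admin);
  final ConfigEntry entry = brokerConfig.get("default.replication.factor");
  System.out.printf("%s = %s (source: %s)%n", entry.name(), entry.value(), entry.source());
}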
Use of org.apache.kafka.clients.admin.Config in project debezium by debezium.
The class KafkaDatabaseHistory, method initializeStorage:
@Override
public void initializeStorage() {
  super.initializeStorage();
  try (AdminClient admin = AdminClient.create(this.producerConfig.asProperties())) {
    // Find default replication factor
    Config brokerConfig = getKafkaBrokerConfig(admin);
    final short replicationFactor =
        Short.parseShort(brokerConfig.get(DEFAULT_TOPIC_REPLICATION_FACTOR_PROP_NAME).value());
    // Create topic
    final NewTopic topic = new NewTopic(topicName, (short) 1, replicationFactor);
    topic.configs(Collect.hashMapOf(
        "cleanup.policy", "delete",
        "retention.ms", Long.toString(Long.MAX_VALUE),
        "retention.bytes", "-1"));
    admin.createTopics(Collections.singleton(topic));
    logger.info("Database history topic '{}' created", topic);
  } catch (Exception e) {
    throw new ConnectException(
        "Creation of database history topic failed, please create the topic manually", e);
  }
}
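Note that createTopics is asynchronous and the snippet above does not wait on its result, so the log line can print before the broker has actually created the topic. A hedged variation, not Debezium's actual code, that blocks until creation completes and tolerates an already-existing topic; the helper name and the 30-second timeout are illustrative:

import java.util.Collections;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.errors.TopicExistsException;

// Illustrative helper: block until the broker confirms the topic, treating
// "already exists" as success.
static void createTopicAndWait(final AdminClient admin, final NewTopic topic) throws Exception {
  try {
    admin.createTopics(Collections.singleton(topic)).all().get(30, TimeUnit.SECONDS);
  } catch (final ExecutionException e) {
    if (!(e.getCause() instanceof TopicExistsException)) {
      throw e;
    }
    // Topic already present; nothing more to do.
  }
}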