Usage example of org.apache.kafka.common.config.ConfigException in the Apache Kafka project.
From class SslFactoryTest, method verifyKeystoreVerifiableUsingTruststore.
/**
 * Configures an {@link SslFactory} server-side with one truststore, then checks that
 * reconfiguring it against a freshly generated, unrelated truststore is rejected
 * with a {@link ConfigException}.
 *
 * @param usePem      when {@code true}, PEM-based configs are used and no keystore files are created
 * @param tlsProtocol the TLS protocol under test (not referenced directly in this helper)
 */
private void verifyKeystoreVerifiableUsingTruststore(boolean usePem, String tlsProtocol) throws Exception {
    File originalTrustStore = usePem ? null : File.createTempFile("truststore1", ".jks");
    Map<String, Object> originalConfig = sslConfigsBuilder(Mode.SERVER).createNewTrustStore(originalTrustStore).usePem(usePem).build();
    SslFactory factory = new SslFactory(Mode.SERVER, null, true);
    factory.configure(originalConfig);
    File replacementTrustStore = usePem ? null : File.createTempFile("truststore2", ".jks");
    Map<String, Object> replacementConfig = sslConfigsBuilder(Mode.SERVER).createNewTrustStore(replacementTrustStore).usePem(usePem).build();
    // A reconfiguration whose stores cannot be verified against the current truststore must be
    // rejected — it may not work with other brokers where the update hasn't yet been performed.
    try {
        factory.validateReconfiguration(replacementConfig);
        fail("ValidateReconfiguration did not fail as expected");
    } catch (ConfigException e) {
        // Expected exception
    }
}
Usage example of org.apache.kafka.common.config.ConfigException in the Apache Kafka project.
From class KafkaAdminClientTest, method testDefaultApiTimeoutAndRequestTimeoutConflicts.
/**
 * Verifies that creating a KafkaAdminClient with {@code default.api.timeout.ms} set to 500
 * fails: {@code createInternal} throws a {@link KafkaException} whose cause is a
 * {@link ConfigException} (presumably because the value conflicts with the request timeout —
 * the conflict itself is validated inside {@code createInternal}).
 */
@Test
public void testDefaultApiTimeoutAndRequestTimeoutConflicts() {
    final AdminClientConfig conflictingConfig = newConfMap(AdminClientConfig.DEFAULT_API_TIMEOUT_MS_CONFIG, "500");
    final KafkaException thrown = assertThrows(KafkaException.class, () -> KafkaAdminClient.createInternal(conflictingConfig, null));
    assertTrue(thrown.getCause() instanceof ConfigException);
}
Usage example of org.apache.kafka.common.config.ConfigException in the Apache Kafka project.
From class WorkerConfig, method validateHttpResponseHeaderConfig.
/**
 * Validates a single HTTP response header config directive of the form
 * {@code "[action] [header name]:[header value]"} (e.g. {@code "add Cache-Control:no-cache"}).
 * Visible for testing.
 *
 * @param config one header config directive to validate
 * @throws ConfigException if the directive is malformed, the action is unsupported,
 *         or the header name is empty or contains whitespace
 */
static void validateHttpResponseHeaderConfig(String config) {
    try {
        // validate format: an action token, then a "name:value" pair, separated by whitespace
        String[] configTokens = config.trim().split("\\s+", 2);
        if (configTokens.length != 2) {
            // BUG FIX: the expected-format hint previously read "[ation]" instead of "[action]".
            throw new ConfigException(String.format("Invalid format of header config '%s'. "
                    + "Expected: '[action] [header name]:[header value]'", config));
        }
        // validate action (delegated to the shared action validator)
        String method = configTokens[0].trim();
        validateHeaderConfigAction(method);
        // validate header name and header value pair
        String header = configTokens[1];
        // NOTE(review): split without a limit rejects header values that themselves contain a
        // colon (e.g. URLs); confirm whether such values need to be supported before changing.
        String[] headerTokens = header.trim().split(":");
        if (headerTokens.length != 2) {
            throw new ConfigException(String.format("Invalid format of header name and header value pair '%s'. "
                    + "Expected: '[header name]:[header value]'", header));
        }
        // validate header name: must be non-empty and contain no embedded whitespace
        String headerName = headerTokens[0].trim();
        if (headerName.isEmpty() || headerName.matches(".*\\s+.*")) {
            throw new ConfigException(String.format("Invalid header name '%s'. "
                    + "The '[header name]' cannot contain whitespace", headerName));
        }
    } catch (ArrayIndexOutOfBoundsException e) {
        // Defensive catch-all so any unexpected indexing error surfaces as a config error
        throw new ConfigException(String.format("Invalid header config '%s'.", config), e);
    }
}
Usage example of org.apache.kafka.common.config.ConfigException in the Apache Kafka project.
From class SinkConnectorConfig, method validate.
/**
 * Throw an exception if the passed-in properties do not constitute a valid sink.
 * <p>
 * Exactly one of {@code topics} or {@code topics.regex} must be set, and the DLQ topic
 * (when configured) must not be among the topics the connector consumes — neither listed
 * explicitly nor matched by the regex.
 *
 * @param props sink configuration properties
 * @throws ConfigException if the configuration is not a valid sink configuration
 */
public static void validate(Map<String, String> props) {
    final boolean hasTopicsConfig = hasTopicsConfig(props);
    final boolean hasTopicsRegexConfig = hasTopicsRegexConfig(props);
    final boolean hasDlqTopicConfig = hasDlqTopicConfig(props);
    if (hasTopicsConfig && hasTopicsRegexConfig) {
        throw new ConfigException(SinkTask.TOPICS_CONFIG + " and " + SinkTask.TOPICS_REGEX_CONFIG + " are mutually exclusive options, but both are set.");
    }
    if (!hasTopicsConfig && !hasTopicsRegexConfig) {
        throw new ConfigException("Must configure one of " + SinkTask.TOPICS_CONFIG + " or " + SinkTask.TOPICS_REGEX_CONFIG);
    }
    if (hasDlqTopicConfig) {
        String dlqTopic = props.get(DLQ_TOPIC_NAME_CONFIG).trim();
        if (hasTopicsConfig) {
            List<String> topics = parseTopicsList(props);
            if (topics.contains(dlqTopic)) {
                // BUG FIX: this branch validates the explicit topics list, but the message
                // previously named TOPICS_REGEX_CONFIG instead of TOPICS_CONFIG.
                throw new ConfigException(String.format("The DLQ topic '%s' may not be included in the list of "
                        + "topics ('%s=%s') consumed by the connector", dlqTopic, SinkTask.TOPICS_CONFIG, topics));
            }
        }
        if (hasTopicsRegexConfig) {
            String topicsRegexStr = props.get(SinkTask.TOPICS_REGEX_CONFIG);
            Pattern pattern = Pattern.compile(topicsRegexStr);
            if (pattern.matcher(dlqTopic).matches()) {
                throw new ConfigException(String.format("The DLQ topic '%s' may not be included in the regex matching the "
                        + "topics ('%s=%s') consumed by the connector", dlqTopic, SinkTask.TOPICS_REGEX_CONFIG, topicsRegexStr));
            }
        }
    }
}
Usage example of org.apache.kafka.common.config.ConfigException in the Apache Kafka project.
From class JmxReporter, method compilePredicate.
/**
 * Builds a predicate deciding whether a metric name is exposed via JMX, based on the
 * include/exclude regular expressions in the supplied configuration. Deprecated alias
 * keys are translated to the current include/exclude keys first, and missing values
 * fall back to {@code DEFAULT_INCLUDE}/{@code DEFAULT_EXCLUDE}.
 *
 * @param originalConfig reporter configuration, possibly using deprecated alias keys
 * @return predicate that is true for names matching the include pattern and not the exclude pattern
 * @throws ConfigException if either configured value is not a valid regular expression
 */
public static Predicate<String> compilePredicate(Map<String, ?> originalConfig) {
    // Map deprecated alias keys onto the current include/exclude keys before reading them.
    Map<String, ?> configs = ConfigUtils.translateDeprecatedConfigs(originalConfig, new String[][] {
            { INCLUDE_CONFIG, INCLUDE_CONFIG_ALIAS },
            { EXCLUDE_CONFIG, EXCLUDE_CONFIG_ALIAS } });
    String include = (String) configs.get(INCLUDE_CONFIG);
    String exclude = (String) configs.get(EXCLUDE_CONFIG);
    if (include == null) {
        include = DEFAULT_INCLUDE;
    }
    if (exclude == null) {
        exclude = DEFAULT_EXCLUDE;
    }
    try {
        Pattern includePattern = Pattern.compile(include);
        Pattern excludePattern = Pattern.compile(exclude);
        return s -> includePattern.matcher(s).matches() && !excludePattern.matcher(s).matches();
    } catch (PatternSyntaxException e) {
        // BUG FIX: a space was missing after "configuration", which concatenated the word
        // directly onto the config prefix and produced a garbled error message.
        throw new ConfigException("JMX filter for configuration " + METRICS_CONFIG_PREFIX
                + ".(include/exclude) is not a valid regular expression");
    }
}
Aggregations