
Example 51 with ConfigException

use of org.apache.kafka.common.config.ConfigException in project kafka by apache.

the class InsertField method configure.

@Override
public void configure(Map<String, ?> props) {
    final SimpleConfig config = new SimpleConfig(CONFIG_DEF, props);
    topicField = InsertionSpec.parse(config.getString(ConfigName.TOPIC_FIELD));
    partitionField = InsertionSpec.parse(config.getString(ConfigName.PARTITION_FIELD));
    offsetField = InsertionSpec.parse(config.getString(ConfigName.OFFSET_FIELD));
    timestampField = InsertionSpec.parse(config.getString(ConfigName.TIMESTAMP_FIELD));
    staticField = InsertionSpec.parse(config.getString(ConfigName.STATIC_FIELD));
    staticValue = config.getString(ConfigName.STATIC_VALUE);
    if (topicField == null && partitionField == null && offsetField == null && timestampField == null && staticField == null) {
        throw new ConfigException("No field insertion configured");
    }
    if (staticField != null && staticValue == null) {
        throw new ConfigException(ConfigName.STATIC_VALUE, null, "No value specified for static field: " + staticField);
    }
    schemaUpdateCache = new SynchronizedCache<>(new LRUCache<>(16));
}
Also used : SimpleConfig(org.apache.kafka.connect.transforms.util.SimpleConfig) LRUCache(org.apache.kafka.common.cache.LRUCache) ConfigException(org.apache.kafka.common.config.ConfigException)
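
For context, a minimal usage sketch (assumptions: the public InsertField.Value variant of the SMT and its documented config keys topic.field, static.field and static.value) that exercises the configure method above:

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.connect.sink.SinkRecord;
import org.apache.kafka.connect.transforms.InsertField;

public class InsertFieldConfigureSketch {
    public static void main(String[] args) {
        // Insert the record's topic under a "source_topic" field; at least one insertion must be configured.
        Map<String, Object> props = new HashMap<>();
        props.put("topic.field", "source_topic");
        InsertField.Value<SinkRecord> transform = new InsertField.Value<>();
        transform.configure(props);
        // An empty props map would instead throw ConfigException("No field insertion configured"),
        // and setting "static.field" without "static.value" trips the second check shown above.
        transform.close();
    }
}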

Example 52 with ConfigException

use of org.apache.kafka.common.config.ConfigException in project kafka by apache.

the class SetSchemaMetadata method configure.

@Override
public void configure(Map<String, ?> configs) {
    final SimpleConfig config = new SimpleConfig(CONFIG_DEF, configs);
    schemaName = config.getString(ConfigName.SCHEMA_NAME);
    schemaVersion = config.getInt(ConfigName.SCHEMA_VERSION);
    if (schemaName == null && schemaVersion == null) {
        throw new ConfigException("Neither schema name nor version configured");
    }
}
Also used : SimpleConfig(org.apache.kafka.connect.transforms.util.SimpleConfig) ConfigException(org.apache.kafka.common.config.ConfigException)
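
A similar hedged sketch (assuming the SMT's documented keys schema.name and schema.version) showing the configure contract: at least one of the two must be set.

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.connect.sink.SinkRecord;
import org.apache.kafka.connect.transforms.SetSchemaMetadata;

public class SetSchemaMetadataConfigureSketch {
    public static void main(String[] args) {
        Map<String, Object> props = new HashMap<>();
        props.put("schema.name", "com.example.Order");  // rename the value schema
        props.put("schema.version", 2);                  // and bump its version
        SetSchemaMetadata.Value<SinkRecord> transform = new SetSchemaMetadata.Value<>();
        transform.configure(props);
        // Passing an empty map would throw ConfigException("Neither schema name nor version configured").
        transform.close();
    }
}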

Example 53 with ConfigException

use of org.apache.kafka.common.config.ConfigException in project kafka by apache.

the class StreamsConfigTest method shouldThrowConfigExceptionIfMaxInFlightRequestsPerConnectionIsInvalidStringIfEosEnabled.

private void shouldThrowConfigExceptionIfMaxInFlightRequestsPerConnectionIsInvalidStringIfEosEnabled() {
    props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "not-a-number");
    try {
        new StreamsConfig(props).getProducerConfigs(clientId);
        fail("Should throw ConfigException when EOS is enabled and maxInFlight cannot be paresed into an integer");
    } catch (final ConfigException e) {
        assertEquals("Invalid value not-a-number for configuration max.in.flight.requests.per.connection:" + " String value could not be parsed as 32-bit integer", e.getMessage());
    }
}
Also used : ConfigException(org.apache.kafka.common.config.ConfigException) StreamsTestUtils.getStreamsConfig(org.apache.kafka.test.StreamsTestUtils.getStreamsConfig)
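
Outside the test, the same failure can be reproduced directly. A sketch, assuming a Streams version where the EXACTLY_ONCE_V2 constant exists and where application id and bootstrap servers are the only other required settings:

import java.util.Properties;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.streams.StreamsConfig;

public class MaxInFlightValidationSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "demo-app");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE_V2);
        props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "not-a-number");
        try {
            new StreamsConfig(props).getProducerConfigs("demo-client");
        } catch (ConfigException e) {
            // "Invalid value not-a-number for configuration max.in.flight.requests.per.connection: ..."
            System.out.println(e.getMessage());
        }
    }
}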

Example 54 with ConfigException

use of org.apache.kafka.common.config.ConfigException in project kafka by apache.

the class TopicAdminTest method verifyingTopicCleanupPolicyShouldFailWhenTopicHasDeletePolicy.

@Test
public void verifyingTopicCleanupPolicyShouldFailWhenTopicHasDeletePolicy() {
    String topicName = "myTopic";
    Map<String, String> topicConfigs = Collections.singletonMap("cleanup.policy", "delete");
    Cluster cluster = createCluster(1);
    try (MockAdminClient mockAdminClient = new MockAdminClient(cluster.nodes(), cluster.nodeById(0))) {
        TopicPartitionInfo topicPartitionInfo = new TopicPartitionInfo(0, cluster.nodeById(0), cluster.nodes(), Collections.emptyList());
        mockAdminClient.addTopic(false, topicName, Collections.singletonList(topicPartitionInfo), topicConfigs);
        TopicAdmin admin = new TopicAdmin(null, mockAdminClient);
        ConfigException e = assertThrows(ConfigException.class, () -> admin.verifyTopicCleanupPolicyOnlyCompact("myTopic", "worker.topic", "purpose"));
        assertTrue(e.getMessage().contains("to guarantee consistency and durability"));
    }
}
Also used : TopicPartitionInfo(org.apache.kafka.common.TopicPartitionInfo) Cluster(org.apache.kafka.common.Cluster) MockAdminClient(org.apache.kafka.clients.admin.MockAdminClient) ConfigException(org.apache.kafka.common.config.ConfigException) Test(org.junit.Test)
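
The check enforces that Connect's internal topics use log compaction only. A hedged sketch of creating such a topic with the required policy via the public Admin API (topic name, partition count, and replication factor are illustrative):

import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.config.TopicConfig;

public class CompactedWorkerTopicSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        try (Admin admin = Admin.create(props)) {
            // cleanup.policy=compact is what verifyTopicCleanupPolicyOnlyCompact expects;
            // "delete" (the broker default) makes the verification throw ConfigException.
            Map<String, String> configs =
                    Collections.singletonMap(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT);
            NewTopic topic = new NewTopic("connect-configs", 1, (short) 1).configs(configs);
            admin.createTopics(Collections.singleton(topic)).all().get();
        }
    }
}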

Example 55 with ConfigException

use of org.apache.kafka.common.config.ConfigException in project kafka by apache.

the class KafkaLog4jAppender method activateOptions.

@Override
public void activateOptions() {
    // check for config parameter validity
    Properties props = new Properties();
    if (brokerList != null)
        props.put(BOOTSTRAP_SERVERS_CONFIG, brokerList);
    if (props.isEmpty())
        throw new ConfigException("The bootstrap servers property should be specified");
    if (topic == null)
        throw new ConfigException("Topic must be specified by the Kafka log4j appender");
    if (compressionType != null)
        props.put(COMPRESSION_TYPE_CONFIG, compressionType);
    props.put(ACKS_CONFIG, Integer.toString(requiredNumAcks));
    props.put(RETRIES_CONFIG, retries);
    props.put(DELIVERY_TIMEOUT_MS_CONFIG, deliveryTimeoutMs);
    props.put(LINGER_MS_CONFIG, lingerMs);
    props.put(BATCH_SIZE_CONFIG, batchSize);
    if (securityProtocol != null) {
        props.put(SECURITY_PROTOCOL_CONFIG, securityProtocol);
    }
    if (securityProtocol != null && (securityProtocol.contains("SSL") || securityProtocol.contains("SASL"))) {
        if (sslEngineFactoryClass != null) {
            props.put(SSL_ENGINE_FACTORY_CLASS_CONFIG, sslEngineFactoryClass);
        }
    }
    if (securityProtocol != null && securityProtocol.contains("SSL") && sslTruststoreLocation != null && sslTruststorePassword != null) {
        props.put(SSL_TRUSTSTORE_LOCATION_CONFIG, sslTruststoreLocation);
        props.put(SSL_TRUSTSTORE_PASSWORD_CONFIG, sslTruststorePassword);
        if (sslKeystoreType != null && sslKeystoreLocation != null && sslKeystorePassword != null) {
            props.put(SSL_KEYSTORE_TYPE_CONFIG, sslKeystoreType);
            props.put(SSL_KEYSTORE_LOCATION_CONFIG, sslKeystoreLocation);
            props.put(SSL_KEYSTORE_PASSWORD_CONFIG, sslKeystorePassword);
        }
    }
    if (securityProtocol != null && securityProtocol.contains("SASL") && saslKerberosServiceName != null && clientJaasConfPath != null) {
        props.put(SASL_KERBEROS_SERVICE_NAME, saslKerberosServiceName);
        System.setProperty("java.security.auth.login.config", clientJaasConfPath);
    }
    if (kerb5ConfPath != null) {
        System.setProperty("java.security.krb5.conf", kerb5ConfPath);
    }
    if (saslMechanism != null) {
        props.put(SASL_MECHANISM, saslMechanism);
    }
    if (clientJaasConf != null) {
        props.put(SASL_JAAS_CONFIG, clientJaasConf);
    }
    if (maxBlockMs != null) {
        props.put(MAX_BLOCK_MS_CONFIG, maxBlockMs);
    }
    props.put(KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
    props.put(VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
    this.producer = getKafkaProducer(props);
    LogLog.debug("Kafka producer connected to " + brokerList);
    LogLog.debug("Logging for topic: " + topic);
}
Also used : ConfigException(org.apache.kafka.common.config.ConfigException) Properties(java.util.Properties) ByteArraySerializer(org.apache.kafka.common.serialization.ByteArraySerializer)
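
A minimal programmatic setup sketch, assuming log4j 1.x on the classpath and that the appender exposes setters mirroring the fields read above; leaving the topic unset reproduces the second ConfigException:

import org.apache.kafka.log4jappender.KafkaLog4jAppender;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

public class KafkaAppenderSetupSketch {
    public static void main(String[] args) {
        KafkaLog4jAppender appender = new KafkaLog4jAppender();
        appender.setBrokerList("localhost:9092");            // becomes bootstrap.servers
        appender.setTopic("app-logs");                        // required, otherwise ConfigException
        appender.setCompressionType("gzip");                  // optional
        appender.setLayout(new PatternLayout("%d %p %m%n"));
        appender.activateOptions();                           // builds the internal KafkaProducer
        Logger logger = Logger.getLogger(KafkaAppenderSetupSketch.class);
        logger.addAppender(appender);
        logger.info("hello from the Kafka appender");
    }
}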

Aggregations

ConfigException (org.apache.kafka.common.config.ConfigException) 136
HashMap (java.util.HashMap) 29
Test (org.junit.jupiter.api.Test) 28
Test (org.junit.Test) 20
Properties (java.util.Properties) 10
KafkaException (org.apache.kafka.common.KafkaException) 10
ArrayList (java.util.ArrayList) 9
List (java.util.List) 9
Pattern (java.util.regex.Pattern) 9
Serde (org.apache.kafka.common.serialization.Serde) 8
SimpleConfig (org.apache.kafka.connect.transforms.util.SimpleConfig) 8
File (java.io.File) 7
SSLContext (javax.net.ssl.SSLContext) 7
Map (java.util.Map) 6
ByteArraySerializer (org.apache.kafka.common.serialization.ByteArraySerializer) 6
KeyStore (java.security.KeyStore) 5
KeyManagerFactory (javax.net.ssl.KeyManagerFactory) 5
TrustManagerFactory (javax.net.ssl.TrustManagerFactory) 5
ConfigDef (org.apache.kafka.common.config.ConfigDef) 5
IOException (java.io.IOException) 4