Search in sources :

Example 1 with TopicStatus

Use of org.apache.kafka.connect.runtime.TopicStatus in the Apache Kafka project.

From class KafkaStatusBackingStore, method parseTopicStatus.

/**
 * Deserializes the raw value of a topic status record read from the status topic.
 * <p>
 * The value is expected to be a map containing a nested map under {@code TOPIC_STATE_KEY}
 * with the topic name, connector name, task id, and discovery timestamp.
 *
 * @param data the serialized record value
 * @return the parsed {@link TopicStatus}, or {@code null} if the value is malformed
 *         or deserialization fails (the error is logged, never propagated)
 */
protected TopicStatus parseTopicStatus(byte[] data) {
    try {
        SchemaAndValue deserialized = converter.toConnectData(statusTopic, data);
        Object outerValue = deserialized.value();
        if (!(outerValue instanceof Map)) {
            log.error("Invalid topic status value {}", outerValue);
            return null;
        }
        @SuppressWarnings("unchecked")
        Object stateValue = ((Map<String, Object>) outerValue).get(TOPIC_STATE_KEY);
        if (!(stateValue instanceof Map)) {
            log.error("Invalid topic status value {} for field {}", stateValue, TOPIC_STATE_KEY);
            return null;
        }
        @SuppressWarnings("unchecked")
        Map<String, Object> metadata = (Map<String, Object>) stateValue;
        String topicName = (String) metadata.get(TOPIC_NAME_KEY);
        String connectorName = (String) metadata.get(TOPIC_CONNECTOR_KEY);
        // Task id is stored as a Long but TopicStatus takes an int.
        int taskId = ((Long) metadata.get(TOPIC_TASK_KEY)).intValue();
        long discoveredAt = (long) metadata.get(TOPIC_DISCOVER_TIMESTAMP_KEY);
        return new TopicStatus(topicName, connectorName, taskId, discoveredAt);
    } catch (Exception e) {
        // Any cast/conversion failure is treated as a corrupt record, not a fatal error.
        log.error("Failed to deserialize topic status", e);
        return null;
    }
}
Also used : TopicStatus(org.apache.kafka.connect.runtime.TopicStatus) HashMap(java.util.HashMap) ConcurrentMap(java.util.concurrent.ConcurrentMap) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) RetriableException(org.apache.kafka.common.errors.RetriableException) ConfigException(org.apache.kafka.common.config.ConfigException) SchemaAndValue(org.apache.kafka.connect.data.SchemaAndValue)

Example 2 with TopicStatus

Use of org.apache.kafka.connect.runtime.TopicStatus in the Apache Kafka project.

From class KafkaStatusBackingStore, method readTopicStatus.

/**
 * Handles a record read from the status topic whose key identifies a topic/connector pair.
 * Expected key format: {@code TOPIC_STATUS_PREFIX + topic + TOPIC_STATUS_SEPARATOR + connector}.
 * Malformed keys are logged and discarded; a {@code null} value removes the stored status.
 *
 * @param key   the record key, containing the topic and connector names
 * @param value the serialized status, or {@code null} for a deletion (tombstone)
 */
private void readTopicStatus(String key, byte[] value) {
    // Locate the separator between the topic and connector components of the key.
    int delimiter = key.indexOf(':');
    int topicStart = TOPIC_STATUS_PREFIX.length();
    // Covers both a missing separator (indexOf == -1) and one inside the prefix.
    if (topicStart > delimiter) {
        log.warn("Discarding record with invalid topic status key {}", key);
        return;
    }
    String topic = key.substring(topicStart, delimiter);
    if (topic.isEmpty()) {
        log.warn("Discarding record with invalid topic status key containing empty topic {}", key);
        return;
    }
    int connectorStart = delimiter + TOPIC_STATUS_SEPARATOR.length();
    if (connectorStart > key.length()) {
        log.warn("Discarding record with invalid topic status key {}", key);
        return;
    }
    String connector = key.substring(connectorStart);
    if (connector.isEmpty()) {
        log.warn("Discarding record with invalid topic status key containing empty connector {}", key);
        return;
    }
    // A null value is a tombstone: drop any stored status for this pair.
    if (value == null) {
        log.trace("Removing status for topic {} and connector {}", topic, connector);
        removeTopic(topic, connector);
        return;
    }
    TopicStatus status = parseTopicStatus(value);
    if (status == null) {
        log.warn("Failed to parse topic status with key {}", key);
        return;
    }
    log.trace("Received topic status update {}", status);
    // Index by connector first, then topic.
    topics.computeIfAbsent(connector, unused -> new ConcurrentHashMap<>()).put(topic, status);
}
Also used : WorkerConfig(org.apache.kafka.connect.runtime.WorkerConfig) KafkaBasedLog(org.apache.kafka.connect.util.KafkaBasedLog) SharedTopicAdmin(org.apache.kafka.connect.util.SharedTopicAdmin) Arrays(java.util.Arrays) ConnectorTaskId(org.apache.kafka.connect.util.ConnectorTaskId) TopicAdmin(org.apache.kafka.connect.util.TopicAdmin) LoggerFactory(org.slf4j.LoggerFactory) HashMap(java.util.HashMap) RetriableException(org.apache.kafka.common.errors.RetriableException) Supplier(java.util.function.Supplier) Schema(org.apache.kafka.connect.data.Schema) ArrayList(java.util.ArrayList) ConcurrentMap(java.util.concurrent.ConcurrentMap) HashSet(java.util.HashSet) ByteArraySerializer(org.apache.kafka.common.serialization.ByteArraySerializer) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) TopicStatus(org.apache.kafka.connect.runtime.TopicStatus) Map(java.util.Map) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) TopicConfig(org.apache.kafka.common.config.TopicConfig) Utils(org.apache.kafka.common.utils.Utils) ByteArrayDeserializer(org.apache.kafka.common.serialization.ByteArrayDeserializer) Callback(org.apache.kafka.connect.util.Callback) ConnectUtils(org.apache.kafka.connect.util.ConnectUtils) Logger(org.slf4j.Logger) Time(org.apache.kafka.common.utils.Time) AbstractStatus(org.apache.kafka.connect.runtime.AbstractStatus) SchemaAndValue(org.apache.kafka.connect.data.SchemaAndValue) Collection(java.util.Collection) NewTopic(org.apache.kafka.clients.admin.NewTopic) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) ConfigException(org.apache.kafka.common.config.ConfigException) DistributedConfig(org.apache.kafka.connect.runtime.distributed.DistributedConfig) Table(org.apache.kafka.connect.util.Table) 
Objects(java.util.Objects) ConnectorStatus(org.apache.kafka.connect.runtime.ConnectorStatus) List(java.util.List) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Struct(org.apache.kafka.connect.data.Struct) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) Collections(java.util.Collections) TaskStatus(org.apache.kafka.connect.runtime.TaskStatus) TopicStatus(org.apache.kafka.connect.runtime.TopicStatus) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap)

Example 3 with TopicStatus

Use of org.apache.kafka.connect.runtime.TopicStatus in the Apache Kafka project.

From class DistributedHerderTest, method testDestroyConnector.

@Test
public void testDestroyConnector() throws Exception {
    // Verifies that deleting a connector removes its config, revokes its assignment on
    // rebalance, and deletes its recorded active topics from the status backing store.
    EasyMock.expect(member.memberId()).andStubReturn("leader");
    EasyMock.expect(member.currentProtocolVersion()).andStubReturn(CONNECT_PROTOCOL_V0);
    // Start with one connector
    EasyMock.expect(worker.getPlugins()).andReturn(plugins);
    expectRebalance(1, Arrays.asList(CONN1), Collections.emptyList());
    expectPostRebalanceCatchup(SNAPSHOT);
    // Capture the start callback so the test can complete connector startup synchronously.
    Capture<Callback<TargetState>> onStart = newCapture();
    worker.startConnector(EasyMock.eq(CONN1), EasyMock.anyObject(), EasyMock.anyObject(), EasyMock.eq(herder), EasyMock.eq(TargetState.STARTED), capture(onStart));
    PowerMock.expectLastCall().andAnswer(() -> {
        onStart.getValue().onCompletion(null, TargetState.STARTED);
        return true;
    });
    EasyMock.expect(worker.isRunning(CONN1)).andReturn(true);
    EasyMock.expect(worker.connectorTaskConfigs(CONN1, conn1SinkConfig)).andReturn(TASK_CONFIGS);
    // And delete the connector
    member.wakeup();
    PowerMock.expectLastCall();
    configBackingStore.removeConnectorConfig(CONN1);
    PowerMock.expectLastCall();
    // Deletion completes with Created(false, null): nothing was created, the connector was removed.
    putConnectorCallback.onCompletion(null, new Herder.Created<>(false, null));
    PowerMock.expectLastCall();
    member.poll(EasyMock.anyInt());
    PowerMock.expectLastCall();
    // The change eventually is reflected to the config topic and the deleted connector and
    // tasks are revoked
    member.wakeup();
    PowerMock.expectLastCall();
    // Two active topics recorded for the connector; both must be deleted on destroy.
    TopicStatus fooStatus = new TopicStatus(FOO_TOPIC, CONN1, 0, time.milliseconds());
    TopicStatus barStatus = new TopicStatus(BAR_TOPIC, CONN1, 0, time.milliseconds());
    // times(2): topic cleanup runs on both the revocation and the follow-up tick below.
    EasyMock.expect(statusBackingStore.getAllTopics(EasyMock.eq(CONN1))).andReturn(new HashSet<>(Arrays.asList(fooStatus, barStatus))).times(2);
    statusBackingStore.deleteTopic(EasyMock.eq(CONN1), EasyMock.eq(FOO_TOPIC));
    PowerMock.expectLastCall().times(2);
    statusBackingStore.deleteTopic(EasyMock.eq(CONN1), EasyMock.eq(BAR_TOPIC));
    PowerMock.expectLastCall().times(2);
    // Second rebalance revokes CONN1 and TASK1 now that the connector is gone.
    expectRebalance(Arrays.asList(CONN1), Arrays.asList(TASK1), ConnectProtocol.Assignment.NO_ERROR, 2, Collections.emptyList(), Collections.emptyList(), 0);
    expectPostRebalanceCatchup(ClusterConfigState.EMPTY);
    member.requestRejoin();
    PowerMock.expectLastCall();
    PowerMock.replayAll();
    herder.deleteConnectorConfig(CONN1, putConnectorCallback);
    herder.tick();
    time.sleep(1000L);
    assertStatistics("leaderUrl", false, 3, 1, 100, 1000L);
    // read updated config that removes the connector
    configUpdateListener.onConnectorConfigRemove(CONN1);
    herder.configState = ClusterConfigState.EMPTY;
    herder.tick();
    time.sleep(1000L);
    assertStatistics("leaderUrl", true, 3, 1, 100, 2100L);
    PowerMock.verifyAll();
}
Also used : FutureCallback(org.apache.kafka.connect.util.FutureCallback) Callback(org.apache.kafka.connect.util.Callback) TopicStatus(org.apache.kafka.connect.runtime.TopicStatus) Herder(org.apache.kafka.connect.runtime.Herder) HashSet(java.util.HashSet) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)

Example 4 with TopicStatus

Use of org.apache.kafka.connect.runtime.TopicStatus in the Apache Kafka project.

From class KafkaStatusBackingStoreFormatTest, method putTopicStateRetriableFailure.

@Test
public void putTopicStateRetriableFailure() {
    // Verifies that a retriable send failure (TimeoutException) causes the store to
    // resend the topic status record, and that the retried payload round-trips correctly.
    TopicStatus topicStatus = new TopicStatus(FOO_TOPIC, new ConnectorTaskId(FOO_CONNECTOR, 0), time.milliseconds());
    String key = TOPIC_STATUS_PREFIX + FOO_TOPIC + TOPIC_STATUS_SEPARATOR + FOO_CONNECTOR;
    ArgumentCaptor<byte[]> valueCaptor = ArgumentCaptor.forClass(byte[].class);
    // First send completes with a TimeoutException (retriable); second send succeeds.
    doAnswer(invocation -> {
        ((Callback) invocation.getArgument(2)).onCompletion(null, new TimeoutException());
        return null;
    }).doAnswer(invocation -> {
        ((Callback) invocation.getArgument(2)).onCompletion(null, null);
        return null;
    }).when(kafkaBasedLog).send(eq(key), valueCaptor.capture(), any(Callback.class));
    store.put(topicStatus);
    // The retriable failure must trigger exactly one retry: two sends total.
    verify(kafkaBasedLog, times(2)).send(any(), any(), any());
    // check capture state
    assertEquals(topicStatus, store.parseTopicStatus(valueCaptor.getValue()));
    // state is not visible until read back from the log
    assertNull(store.getTopic(FOO_CONNECTOR, FOO_TOPIC));
}
Also used : KafkaBasedLog(org.apache.kafka.connect.util.KafkaBasedLog) MockTime(org.apache.kafka.common.utils.MockTime) Arrays(java.util.Arrays) TOPIC_STATUS_SEPARATOR(org.apache.kafka.connect.storage.KafkaStatusBackingStore.TOPIC_STATUS_SEPARATOR) ConnectorTaskId(org.apache.kafka.connect.util.ConnectorTaskId) UnknownServerException(org.apache.kafka.common.errors.UnknownServerException) HashSet(java.util.HashSet) ArgumentCaptor(org.mockito.ArgumentCaptor) TopicStatus(org.apache.kafka.connect.runtime.TopicStatus) Mockito.doAnswer(org.mockito.Mockito.doAnswer) Before(org.junit.Before) TimeoutException(org.apache.kafka.common.errors.TimeoutException) Time(org.apache.kafka.common.utils.Time) SCHEMAS_ENABLE_CONFIG(org.apache.kafka.connect.json.JsonConverterConfig.SCHEMAS_ENABLE_CONFIG) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) Mockito.times(org.mockito.Mockito.times) CONNECTOR_STATUS_PREFIX(org.apache.kafka.connect.storage.KafkaStatusBackingStore.CONNECTOR_STATUS_PREFIX) TASK_STATUS_PREFIX(org.apache.kafka.connect.storage.KafkaStatusBackingStore.TASK_STATUS_PREFIX) Mockito.verify(org.mockito.Mockito.verify) Assert.assertNull(org.junit.Assert.assertNull) TOPIC_STATUS_PREFIX(org.apache.kafka.connect.storage.KafkaStatusBackingStore.TOPIC_STATUS_PREFIX) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Assert.assertFalse(org.junit.Assert.assertFalse) JsonConverter(org.apache.kafka.connect.json.JsonConverter) Mockito.any(org.mockito.Mockito.any) Callback(org.apache.kafka.clients.producer.Callback) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) Mockito.eq(org.mockito.Mockito.eq) Mockito.mock(org.mockito.Mockito.mock) Callback(org.apache.kafka.clients.producer.Callback) ConnectorTaskId(org.apache.kafka.connect.util.ConnectorTaskId) TopicStatus(org.apache.kafka.connect.runtime.TopicStatus) 
TimeoutException(org.apache.kafka.common.errors.TimeoutException) Test(org.junit.Test)

Example 5 with TopicStatus

Use of org.apache.kafka.connect.runtime.TopicStatus in the Apache Kafka project.

From class KafkaStatusBackingStoreFormatTest, method readTopicStatus.

@Test
public void readTopicStatus() {
    // Round-trip a topic status through serialization and the store's read path.
    TopicStatus status = new TopicStatus(FOO_TOPIC, new ConnectorTaskId(FOO_CONNECTOR, 0), Time.SYSTEM.milliseconds());
    String statusKey = TOPIC_STATUS_PREFIX + FOO_TOPIC + TOPIC_STATUS_SEPARATOR + FOO_CONNECTOR;
    byte[] serialized = store.serializeTopicStatus(status);
    ConsumerRecord<String, byte[]> record = new ConsumerRecord<>(STATUS_TOPIC, 0, 0, statusKey, serialized);
    store.read(record);
    // The deserialized status must be indexed by connector first, then by topic.
    assertTrue(store.topics.containsKey(FOO_CONNECTOR));
    assertTrue(store.topics.get(FOO_CONNECTOR).containsKey(FOO_TOPIC));
    assertEquals(status, store.topics.get(FOO_CONNECTOR).get(FOO_TOPIC));
}
Also used : ConnectorTaskId(org.apache.kafka.connect.util.ConnectorTaskId) TopicStatus(org.apache.kafka.connect.runtime.TopicStatus) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Test(org.junit.Test)

Aggregations

TopicStatus (org.apache.kafka.connect.runtime.TopicStatus)9 ConnectorTaskId (org.apache.kafka.connect.util.ConnectorTaskId)7 Test (org.junit.Test)7 ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord)6 Callback (org.apache.kafka.clients.producer.Callback)5 HashSet (java.util.HashSet)4 ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap)4 Arrays (java.util.Arrays)3 Collections (java.util.Collections)3 UnknownServerException (org.apache.kafka.common.errors.UnknownServerException)3 Time (org.apache.kafka.common.utils.Time)3 KafkaBasedLog (org.apache.kafka.connect.util.KafkaBasedLog)3 HashMap (java.util.HashMap)2 Map (java.util.Map)2 ConcurrentMap (java.util.concurrent.ConcurrentMap)2 ConfigException (org.apache.kafka.common.config.ConfigException)2 RetriableException (org.apache.kafka.common.errors.RetriableException)2 TimeoutException (org.apache.kafka.common.errors.TimeoutException)2 MockTime (org.apache.kafka.common.utils.MockTime)2 SchemaAndValue (org.apache.kafka.connect.data.SchemaAndValue)2