
Example 1 with InternalTopicConfig

Use of org.apache.kafka.streams.processor.internals.InternalTopicConfig in project kafka by apache.

In class TopologyBuilder, method topicGroups().

/**
 * Returns the map of topic groups keyed by the group id.
 * A topic group is a group of topics in the same task.
 *
 * @return groups of topic names
 */
public synchronized Map<Integer, TopicsInfo> topicGroups() {
    Map<Integer, TopicsInfo> topicGroups = new LinkedHashMap<>();
    if (nodeGroups == null)
        nodeGroups = makeNodeGroups();
    for (Map.Entry<Integer, Set<String>> entry : nodeGroups.entrySet()) {
        Set<String> sinkTopics = new HashSet<>();
        Set<String> sourceTopics = new HashSet<>();
        Map<String, InternalTopicConfig> internalSourceTopics = new HashMap<>();
        Map<String, InternalTopicConfig> stateChangelogTopics = new HashMap<>();
        for (String node : entry.getValue()) {
            // if the node is a source node, add to the source topics
            List<String> topics = nodeToSourceTopics.get(node);
            if (topics != null) {
                // if some of the topics are internal, add them to the internal topics
                for (String topic : topics) {
                    // skip global topic as they don't need partition assignment
                    if (globalTopics.contains(topic)) {
                        continue;
                    }
                    if (this.internalTopicNames.contains(topic)) {
                        // prefix the internal topic name with the application id
                        String internalTopic = decorateTopic(topic);
                        internalSourceTopics.put(internalTopic, new InternalTopicConfig(internalTopic, Collections.singleton(InternalTopicConfig.CleanupPolicy.delete), Collections.<String, String>emptyMap()));
                        sourceTopics.add(internalTopic);
                    } else {
                        sourceTopics.add(topic);
                    }
                }
            }
            // if the node is a sink node, add to the sink topics
            String topic = nodeToSinkTopic.get(node);
            if (topic != null) {
                if (internalTopicNames.contains(topic)) {
                    // prefix the change log topic name with the application id
                    sinkTopics.add(decorateTopic(topic));
                } else {
                    sinkTopics.add(topic);
                }
            }
            // if the node is connected to a state, add to the state topics
            for (StateStoreFactory stateFactory : stateFactories.values()) {
                final StateStoreSupplier supplier = stateFactory.supplier;
                if (supplier.loggingEnabled() && stateFactory.users.contains(node)) {
                    final String name = ProcessorStateManager.storeChangelogTopic(applicationId, supplier.name());
                    final InternalTopicConfig internalTopicConfig = createInternalTopicConfig(supplier, name);
                    stateChangelogTopics.put(name, internalTopicConfig);
                }
            }
        }
        if (!sourceTopics.isEmpty()) {
            topicGroups.put(entry.getKey(), new TopicsInfo(Collections.unmodifiableSet(sinkTopics), Collections.unmodifiableSet(sourceTopics), Collections.unmodifiableMap(internalSourceTopics), Collections.unmodifiableMap(stateChangelogTopics)));
        }
    }
    return Collections.unmodifiableMap(topicGroups);
}
Also used : HashSet(java.util.HashSet) Set(java.util.Set) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) InternalTopicConfig(org.apache.kafka.streams.processor.internals.InternalTopicConfig) Map(java.util.Map)
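As a follow-up to Example 1, the sketch below builds the same kind of config by hand for an internal (repartition) source topic, using only the constructor and the name() accessor seen above. The application id and topic name are hypothetical, chosen for illustration only.

import java.util.Collections;

import org.apache.kafka.streams.processor.internals.InternalTopicConfig;

public class RepartitionTopicConfigSketch {
    public static void main(final String[] args) {
        // Hypothetical application id and internal topic name, for illustration only.
        final String applicationId = "my-app";
        final String internalTopic = applicationId + "-some-repartition-topic";
        // Internal source (repartition) topics get the "delete" cleanup policy and no
        // extra log config, mirroring what topicGroups() does above.
        final InternalTopicConfig config = new InternalTopicConfig(
            internalTopic,
            Collections.singleton(InternalTopicConfig.CleanupPolicy.delete),
            Collections.<String, String>emptyMap());
        System.out.println(config.name()); // my-app-some-repartition-topic
    }
}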

Example 2 with InternalTopicConfig

Use of org.apache.kafka.streams.processor.internals.InternalTopicConfig in project kafka by apache.

In class TopologyBuilder, method createInternalTopicConfig().

private InternalTopicConfig createInternalTopicConfig(final StateStoreSupplier<?> supplier, final String name) {
    if (!(supplier instanceof WindowStoreSupplier)) {
        return new InternalTopicConfig(name, Collections.singleton(InternalTopicConfig.CleanupPolicy.compact), supplier.logConfig());
    }
    final WindowStoreSupplier windowStoreSupplier = (WindowStoreSupplier) supplier;
    final InternalTopicConfig config = new InternalTopicConfig(name, Utils.mkSet(InternalTopicConfig.CleanupPolicy.compact, InternalTopicConfig.CleanupPolicy.delete), supplier.logConfig());
    config.setRetentionMs(windowStoreSupplier.retentionPeriod());
    return config;
}
Also used : WindowStoreSupplier(org.apache.kafka.streams.state.internals.WindowStoreSupplier) InternalTopicConfig(org.apache.kafka.streams.processor.internals.InternalTopicConfig)
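To see what createInternalTopicConfig() produces for a window store, here is a minimal standalone sketch under the same assumptions: the changelog name and retention period are made up, and only the constructor, Utils.mkSet, setRetentionMs() and toProperties() shown in these examples are used.

import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.processor.internals.InternalTopicConfig;

public class WindowedChangelogConfigSketch {
    public static void main(final String[] args) {
        // Hypothetical changelog topic name and retention period, for illustration only.
        final String changelogTopic = "appId-window-store-changelog";
        final long retentionPeriodMs = 30000L;
        // Windowed changelogs combine "compact" and "delete" and carry the store's
        // retention period, as in createInternalTopicConfig() above.
        final InternalTopicConfig config = new InternalTopicConfig(
            changelogTopic,
            Utils.mkSet(InternalTopicConfig.CleanupPolicy.compact,
                        InternalTopicConfig.CleanupPolicy.delete),
            Collections.<String, String>emptyMap());
        config.setRetentionMs(retentionPeriodMs);
        // Materialize the topic-level properties (no additional retention here);
        // the result carries both cleanup policies and the retention setting.
        final Properties props = config.toProperties(0);
        System.out.println(props);
    }
}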

Example 3 with InternalTopicConfig

Use of org.apache.kafka.streams.processor.internals.InternalTopicConfig in project kafka by apache.

In class TopologyBuilderTest, method shouldAddInternalTopicConfigWithCompactAndDeleteSetForWindowStores().

@SuppressWarnings("unchecked")
@Test
public void shouldAddInternalTopicConfigWithCompactAndDeleteSetForWindowStores() throws Exception {
    final TopologyBuilder builder = new TopologyBuilder();
    builder.setApplicationId("appId");
    builder.addSource("source", "topic");
    builder.addProcessor("processor", new MockProcessorSupplier(), "source");
    builder.addStateStore(new RocksDBWindowStoreSupplier("store", 30000, 3, false, null, null, 10000, true, Collections.<String, String>emptyMap(), false), "processor");
    final Map<Integer, TopicsInfo> topicGroups = builder.topicGroups();
    final TopicsInfo topicsInfo = topicGroups.values().iterator().next();
    final InternalTopicConfig topicConfig = topicsInfo.stateChangelogTopics.get("appId-store-changelog");
    final Properties properties = topicConfig.toProperties(0);
    final List<String> policies = Arrays.asList(properties.getProperty(InternalTopicManager.CLEANUP_POLICY_PROP).split(","));
    assertEquals("appId-store-changelog", topicConfig.name());
    assertTrue(policies.contains("compact"));
    assertTrue(policies.contains("delete"));
    assertEquals(2, policies.size());
    assertEquals("30000", properties.getProperty(InternalTopicManager.RETENTION_MS));
    assertEquals(2, properties.size());
}
Also used : MockProcessorSupplier(org.apache.kafka.test.MockProcessorSupplier) InternalTopicConfig(org.apache.kafka.streams.processor.internals.InternalTopicConfig) TopicsInfo(org.apache.kafka.streams.processor.TopologyBuilder.TopicsInfo) Properties(java.util.Properties) RocksDBWindowStoreSupplier(org.apache.kafka.streams.state.internals.RocksDBWindowStoreSupplier) Test(org.junit.Test)
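The test passes 0 as the additionalRetentionMs argument of toProperties(). Assuming that argument is simply added on top of the configured retention, which is consistent with the "30000" assertion above, a non-zero value would show up in the retention.ms property. A small sketch of that assumption, with arbitrary numbers:

import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.processor.internals.InternalTopicConfig;
import org.apache.kafka.streams.processor.internals.InternalTopicManager;

public class AdditionalRetentionSketch {
    public static void main(final String[] args) {
        // Hypothetical changelog config with a 30000 ms retention period.
        final InternalTopicConfig config = new InternalTopicConfig(
            "appId-store-changelog",
            Utils.mkSet(InternalTopicConfig.CleanupPolicy.compact,
                        InternalTopicConfig.CleanupPolicy.delete),
            Collections.<String, String>emptyMap());
        config.setRetentionMs(30000L);
        // Assumption: the extra 10000 ms is added to the configured retention,
        // so retention.ms would come out as "40000" here.
        final Properties props = config.toProperties(10000L);
        System.out.println(props.getProperty(InternalTopicManager.RETENTION_MS));
    }
}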

Example 4 with InternalTopicConfig

Use of org.apache.kafka.streams.processor.internals.InternalTopicConfig in project kafka by apache.

In class MockInternalTopicManager, method makeReady().

@Override
public void makeReady(final Map<InternalTopicConfig, Integer> topics) {
    for (Map.Entry<InternalTopicConfig, Integer> entry : topics.entrySet()) {
        readyTopics.put(entry.getKey().name(), entry.getValue());
        final List<PartitionInfo> partitions = new ArrayList<>();
        for (int i = 0; i < entry.getValue(); i++) {
            partitions.add(new PartitionInfo(entry.getKey().name(), i, null, null, null));
        }
        restoreConsumer.updatePartitions(entry.getKey().name(), partitions);
    }
}
Also used : ArrayList(java.util.ArrayList) InternalTopicConfig(org.apache.kafka.streams.processor.internals.InternalTopicConfig) PartitionInfo(org.apache.kafka.common.PartitionInfo) Map(java.util.Map) HashMap(java.util.HashMap)
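The map makeReady() receives is keyed by InternalTopicConfig, with the value giving the partition count each internal topic should be created with. A small sketch of building that argument, with a hypothetical changelog topic:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.streams.processor.internals.InternalTopicConfig;

public class MakeReadyArgumentSketch {
    public static void main(final String[] args) {
        // Hypothetical compacted changelog topic with no extra log config.
        final InternalTopicConfig changelog = new InternalTopicConfig(
            "appId-store-changelog",
            Collections.singleton(InternalTopicConfig.CleanupPolicy.compact),
            Collections.<String, String>emptyMap());
        final Map<InternalTopicConfig, Integer> topics = new HashMap<>();
        topics.put(changelog, 4); // create the changelog with 4 partitions
        // Passing "topics" to makeReady() would register four PartitionInfo entries
        // for "appId-store-changelog" in the mock above.
        System.out.println(topics);
    }
}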

Example 5 with InternalTopicConfig

Use of org.apache.kafka.streams.processor.internals.InternalTopicConfig in project kafka by apache.

In class TopologyBuilderTest, method testTopicGroupsByStateStore().

@Test
public void testTopicGroupsByStateStore() {
    final TopologyBuilder builder = new TopologyBuilder();
    builder.setApplicationId("X");
    builder.addSource("source-1", "topic-1", "topic-1x");
    builder.addSource("source-2", "topic-2");
    builder.addSource("source-3", "topic-3");
    builder.addSource("source-4", "topic-4");
    builder.addSource("source-5", "topic-5");
    builder.addProcessor("processor-1", new MockProcessorSupplier(), "source-1");
    builder.addProcessor("processor-2", new MockProcessorSupplier(), "source-2");
    builder.addStateStore(new MockStateStoreSupplier("store-1", false), "processor-1", "processor-2");
    builder.addProcessor("processor-3", new MockProcessorSupplier(), "source-3");
    builder.addProcessor("processor-4", new MockProcessorSupplier(), "source-4");
    builder.addStateStore(new MockStateStoreSupplier("store-2", false), "processor-3", "processor-4");
    builder.addProcessor("processor-5", new MockProcessorSupplier(), "source-5");
    StateStoreSupplier supplier = new MockStateStoreSupplier("store-3", false);
    builder.addStateStore(supplier);
    builder.connectProcessorAndStateStores("processor-5", "store-3");
    Map<Integer, TopicsInfo> topicGroups = builder.topicGroups();
    Map<Integer, TopicsInfo> expectedTopicGroups = new HashMap<>();
    final String store1 = ProcessorStateManager.storeChangelogTopic("X", "store-1");
    final String store2 = ProcessorStateManager.storeChangelogTopic("X", "store-2");
    final String store3 = ProcessorStateManager.storeChangelogTopic("X", "store-3");
    expectedTopicGroups.put(0, new TopicsInfo(Collections.<String>emptySet(), mkSet("topic-1", "topic-1x", "topic-2"), Collections.<String, InternalTopicConfig>emptyMap(), Collections.singletonMap(store1, new InternalTopicConfig(store1, Collections.singleton(InternalTopicConfig.CleanupPolicy.compact), Collections.<String, String>emptyMap()))));
    expectedTopicGroups.put(1, new TopicsInfo(Collections.<String>emptySet(), mkSet("topic-3", "topic-4"), Collections.<String, InternalTopicConfig>emptyMap(), Collections.singletonMap(store2, new InternalTopicConfig(store2, Collections.singleton(InternalTopicConfig.CleanupPolicy.compact), Collections.<String, String>emptyMap()))));
    expectedTopicGroups.put(2, new TopicsInfo(Collections.<String>emptySet(), mkSet("topic-5"), Collections.<String, InternalTopicConfig>emptyMap(), Collections.singletonMap(store3, new InternalTopicConfig(store3, Collections.singleton(InternalTopicConfig.CleanupPolicy.compact), Collections.<String, String>emptyMap()))));
    assertEquals(3, topicGroups.size());
    assertEquals(expectedTopicGroups, topicGroups);
}
Also used : MockProcessorSupplier(org.apache.kafka.test.MockProcessorSupplier) HashMap(java.util.HashMap) MockStateStoreSupplier(org.apache.kafka.test.MockStateStoreSupplier) InternalTopicConfig(org.apache.kafka.streams.processor.internals.InternalTopicConfig) TopicsInfo(org.apache.kafka.streams.processor.TopologyBuilder.TopicsInfo) Test(org.junit.Test)
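The expected changelog names in this test come from ProcessorStateManager.storeChangelogTopic(), which joins the application id and store name into "<applicationId>-<storeName>-changelog" (compare "appId-store-changelog" in Example 3). A quick sketch with the same values as the test:

import org.apache.kafka.streams.processor.internals.ProcessorStateManager;

public class ChangelogNamingSketch {
    public static void main(final String[] args) {
        // Expected output: X-store-1-changelog
        System.out.println(ProcessorStateManager.storeChangelogTopic("X", "store-1"));
    }
}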

Aggregations

InternalTopicConfig (org.apache.kafka.streams.processor.internals.InternalTopicConfig): 8 usages
TopicsInfo (org.apache.kafka.streams.processor.TopologyBuilder.TopicsInfo): 5 usages
Test (org.junit.Test): 5 usages
HashMap (java.util.HashMap): 4 usages
MockProcessorSupplier (org.apache.kafka.test.MockProcessorSupplier): 4 usages
Properties (java.util.Properties): 3 usages
HashSet (java.util.HashSet): 2 usages
Map (java.util.Map): 2 usages
Set (java.util.Set): 2 usages
MockStateStoreSupplier (org.apache.kafka.test.MockStateStoreSupplier): 2 usages
ArrayList (java.util.ArrayList): 1 usage
LinkedHashMap (java.util.LinkedHashMap): 1 usage
PartitionInfo (org.apache.kafka.common.PartitionInfo): 1 usage
Utils.mkSet (org.apache.kafka.common.utils.Utils.mkSet): 1 usage
RocksDBWindowStoreSupplier (org.apache.kafka.streams.state.internals.RocksDBWindowStoreSupplier): 1 usage
WindowStoreSupplier (org.apache.kafka.streams.state.internals.WindowStoreSupplier): 1 usage