Use of org.apache.kafka.streams.processor.internals.InternalTopicConfig in project kafka by apache.
The class TopologyBuilder, method topicGroups().
/**
 * Returns the map of topic groups keyed by the group id.
 * A topic group is a group of topics in the same task.
 *
 * @return groups of topic names
 */
public synchronized Map<Integer, TopicsInfo> topicGroups() {
    Map<Integer, TopicsInfo> topicGroups = new LinkedHashMap<>();
    if (nodeGroups == null)
        nodeGroups = makeNodeGroups();
    for (Map.Entry<Integer, Set<String>> entry : nodeGroups.entrySet()) {
        Set<String> sinkTopics = new HashSet<>();
        Set<String> sourceTopics = new HashSet<>();
        Map<String, InternalTopicConfig> internalSourceTopics = new HashMap<>();
        Map<String, InternalTopicConfig> stateChangelogTopics = new HashMap<>();
        for (String node : entry.getValue()) {
            // if the node is a source node, add to the source topics
            List<String> topics = nodeToSourceTopics.get(node);
            if (topics != null) {
                // if some of the topics are internal, add them to the internal topics
                for (String topic : topics) {
                    // skip global topics as they don't need partition assignment
                    if (globalTopics.contains(topic)) {
                        continue;
                    }
                    if (this.internalTopicNames.contains(topic)) {
                        // prefix the internal topic name with the application id
                        String internalTopic = decorateTopic(topic);
                        internalSourceTopics.put(internalTopic, new InternalTopicConfig(internalTopic,
                                Collections.singleton(InternalTopicConfig.CleanupPolicy.delete),
                                Collections.<String, String>emptyMap()));
                        sourceTopics.add(internalTopic);
                    } else {
                        sourceTopics.add(topic);
                    }
                }
            }
            // if the node is a sink node, add to the sink topics
            String topic = nodeToSinkTopic.get(node);
            if (topic != null) {
                if (internalTopicNames.contains(topic)) {
                    // prefix the internal topic name with the application id
                    sinkTopics.add(decorateTopic(topic));
                } else {
                    sinkTopics.add(topic);
                }
            }
            // if the node uses a state store with logging enabled, add the store's changelog topic
            for (StateStoreFactory stateFactory : stateFactories.values()) {
                final StateStoreSupplier supplier = stateFactory.supplier;
                if (supplier.loggingEnabled() && stateFactory.users.contains(node)) {
                    final String name = ProcessorStateManager.storeChangelogTopic(applicationId, supplier.name());
                    final InternalTopicConfig internalTopicConfig = createInternalTopicConfig(supplier, name);
                    stateChangelogTopics.put(name, internalTopicConfig);
                }
            }
        }
        if (!sourceTopics.isEmpty()) {
            topicGroups.put(entry.getKey(), new TopicsInfo(
                    Collections.unmodifiableSet(sinkTopics),
                    Collections.unmodifiableSet(sourceTopics),
                    Collections.unmodifiableMap(internalSourceTopics),
                    Collections.unmodifiableMap(stateChangelogTopics)));
        }
    }
    return Collections.unmodifiableMap(topicGroups);
}
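As a quick illustration of the method above, here is a minimal sketch of calling topicGroups() on a hand-built topology and reading the resulting TopicsInfo entries. The class and topology names ("TopicGroupsSketch", "my-app", "my-source", and so on) are made up for this example, and the import paths for the mock helpers used in the tests further below are assumed.

import java.util.Map;
import org.apache.kafka.streams.processor.TopologyBuilder;
import org.apache.kafka.streams.processor.TopologyBuilder.TopicsInfo;
import org.apache.kafka.test.MockProcessorSupplier;
import org.apache.kafka.test.MockStateStoreSupplier;

public class TopicGroupsSketch {
    public static void main(String[] args) {
        // source -> processor -> state store, all connected, so they land in one topic group
        final TopologyBuilder builder = new TopologyBuilder();
        builder.setApplicationId("my-app");
        builder.addSource("my-source", "my-input-topic");
        builder.addProcessor("my-processor", new MockProcessorSupplier(), "my-source");
        builder.addStateStore(new MockStateStoreSupplier("my-store", false), "my-processor");

        // one entry per group of connected processor nodes (i.e. per task)
        final Map<Integer, TopicsInfo> groups = builder.topicGroups();
        for (Map.Entry<Integer, TopicsInfo> entry : groups.entrySet()) {
            System.out.println("group " + entry.getKey()
                    + " changelog topics: " + entry.getValue().stateChangelogTopics.keySet());
        }
    }
}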
Use of org.apache.kafka.streams.processor.internals.InternalTopicConfig in project kafka by apache.
The class TopologyBuilder, method createInternalTopicConfig().
private InternalTopicConfig createInternalTopicConfig(final StateStoreSupplier<?> supplier, final String name) {
    if (!(supplier instanceof WindowStoreSupplier)) {
        return new InternalTopicConfig(name,
                Collections.singleton(InternalTopicConfig.CleanupPolicy.compact),
                supplier.logConfig());
    }
    final WindowStoreSupplier windowStoreSupplier = (WindowStoreSupplier) supplier;
    final InternalTopicConfig config = new InternalTopicConfig(name,
            Utils.mkSet(InternalTopicConfig.CleanupPolicy.compact, InternalTopicConfig.CleanupPolicy.delete),
            supplier.logConfig());
    config.setRetentionMs(windowStoreSupplier.retentionPeriod());
    return config;
}
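The effect of the two branches can be observed by building the corresponding InternalTopicConfig objects directly and dumping them with toProperties(), the same call the window-store test below relies on. This is only a sketch: the topic names and the retention value are invented, and it uses only the members that appear elsewhere on this page (CleanupPolicy, setRetentionMs, toProperties, CLEANUP_POLICY_PROP, RETENTION_MS, Utils.mkSet).

import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.processor.internals.InternalTopicConfig;
import org.apache.kafka.streams.processor.internals.InternalTopicManager;

public class ChangelogConfigSketch {
    public static void main(String[] args) {
        // non-windowed store: changelog is compacted only
        final InternalTopicConfig kvChangelog = new InternalTopicConfig("app-kv-store-changelog",
                Collections.singleton(InternalTopicConfig.CleanupPolicy.compact),
                Collections.<String, String>emptyMap());

        // windowed store: compact + delete, with retention taken from the window retention period
        final InternalTopicConfig windowChangelog = new InternalTopicConfig("app-window-store-changelog",
                Utils.mkSet(InternalTopicConfig.CleanupPolicy.compact, InternalTopicConfig.CleanupPolicy.delete),
                Collections.<String, String>emptyMap());
        windowChangelog.setRetentionMs(30000);

        final Properties kvProps = kvChangelog.toProperties(0);
        final Properties windowProps = windowChangelog.toProperties(0);
        // expected: "compact" for the first, "compact,delete" (order not guaranteed) plus retention.ms for the second
        System.out.println(kvProps.getProperty(InternalTopicManager.CLEANUP_POLICY_PROP));
        System.out.println(windowProps.getProperty(InternalTopicManager.CLEANUP_POLICY_PROP));
        System.out.println(windowProps.getProperty(InternalTopicManager.RETENTION_MS));
    }
}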
Use of org.apache.kafka.streams.processor.internals.InternalTopicConfig in project kafka by apache.
The class TopologyBuilderTest, method shouldAddInternalTopicConfigWithCompactAndDeleteSetForWindowStores().
@SuppressWarnings("unchecked")
@Test
public void shouldAddInternalTopicConfigWithCompactAndDeleteSetForWindowStores() throws Exception {
    final TopologyBuilder builder = new TopologyBuilder();
    builder.setApplicationId("appId");
    builder.addSource("source", "topic");
    builder.addProcessor("processor", new MockProcessorSupplier(), "source");
    builder.addStateStore(new RocksDBWindowStoreSupplier("store", 30000, 3, false, null, null, 10000, true,
            Collections.<String, String>emptyMap(), false), "processor");
    final Map<Integer, TopicsInfo> topicGroups = builder.topicGroups();
    final TopicsInfo topicsInfo = topicGroups.values().iterator().next();
    final InternalTopicConfig topicConfig = topicsInfo.stateChangelogTopics.get("appId-store-changelog");
    final Properties properties = topicConfig.toProperties(0);
    final List<String> policies = Arrays.asList(properties.getProperty(InternalTopicManager.CLEANUP_POLICY_PROP).split(","));
    assertEquals("appId-store-changelog", topicConfig.name());
    assertTrue(policies.contains("compact"));
    assertTrue(policies.contains("delete"));
    assertEquals(2, policies.size());
    assertEquals("30000", properties.getProperty(InternalTopicManager.RETENTION_MS));
    assertEquals(2, properties.size());
}
Use of org.apache.kafka.streams.processor.internals.InternalTopicConfig in project kafka by apache.
The class MockInternalTopicManager, method makeReady().
@Override
public void makeReady(final Map<InternalTopicConfig, Integer> topics) {
    for (Map.Entry<InternalTopicConfig, Integer> entry : topics.entrySet()) {
        readyTopics.put(entry.getKey().name(), entry.getValue());
        final List<PartitionInfo> partitions = new ArrayList<>();
        for (int i = 0; i < entry.getValue(); i++) {
            partitions.add(new PartitionInfo(entry.getKey().name(), i, null, null, null));
        }
        restoreConsumer.updatePartitions(entry.getKey().name(), partitions);
    }
}
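A hedged sketch of how this mock might be exercised from a test: build a map from InternalTopicConfig to the desired partition count and pass it to makeReady(), after which the mock records the topic as ready and registers that many partitions on its restore consumer. How the MockInternalTopicManager instance is constructed is not shown on this page, so the helper below simply takes one as a parameter; its import path and the topic name are assumptions.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.streams.processor.internals.InternalTopicConfig;
import org.apache.kafka.test.MockInternalTopicManager;

public class MakeReadySketch {
    // the manager is assumed to be created elsewhere; its constructor is not part of this page
    static void prepareRepartitionTopic(final MockInternalTopicManager topicManager) {
        final Map<InternalTopicConfig, Integer> topics = new HashMap<>();
        topics.put(new InternalTopicConfig("app-repartition-topic",
                Collections.singleton(InternalTopicConfig.CleanupPolicy.delete),
                Collections.<String, String>emptyMap()), 4);
        // after this call the mock reports "app-repartition-topic" as ready with 4 partitions
        topicManager.makeReady(topics);
    }
}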
Use of org.apache.kafka.streams.processor.internals.InternalTopicConfig in project kafka by apache.
The class TopologyBuilderTest, method testTopicGroupsByStateStore().
@Test
public void testTopicGroupsByStateStore() {
    final TopologyBuilder builder = new TopologyBuilder();
    builder.setApplicationId("X");
    builder.addSource("source-1", "topic-1", "topic-1x");
    builder.addSource("source-2", "topic-2");
    builder.addSource("source-3", "topic-3");
    builder.addSource("source-4", "topic-4");
    builder.addSource("source-5", "topic-5");
    builder.addProcessor("processor-1", new MockProcessorSupplier(), "source-1");
    builder.addProcessor("processor-2", new MockProcessorSupplier(), "source-2");
    builder.addStateStore(new MockStateStoreSupplier("store-1", false), "processor-1", "processor-2");
    builder.addProcessor("processor-3", new MockProcessorSupplier(), "source-3");
    builder.addProcessor("processor-4", new MockProcessorSupplier(), "source-4");
    builder.addStateStore(new MockStateStoreSupplier("store-2", false), "processor-3", "processor-4");
    builder.addProcessor("processor-5", new MockProcessorSupplier(), "source-5");
    StateStoreSupplier supplier = new MockStateStoreSupplier("store-3", false);
    builder.addStateStore(supplier);
    builder.connectProcessorAndStateStores("processor-5", "store-3");
    Map<Integer, TopicsInfo> topicGroups = builder.topicGroups();
    Map<Integer, TopicsInfo> expectedTopicGroups = new HashMap<>();
    final String store1 = ProcessorStateManager.storeChangelogTopic("X", "store-1");
    final String store2 = ProcessorStateManager.storeChangelogTopic("X", "store-2");
    final String store3 = ProcessorStateManager.storeChangelogTopic("X", "store-3");
    expectedTopicGroups.put(0, new TopicsInfo(
            Collections.<String>emptySet(),
            mkSet("topic-1", "topic-1x", "topic-2"),
            Collections.<String, InternalTopicConfig>emptyMap(),
            Collections.singletonMap(store1, new InternalTopicConfig(store1,
                    Collections.singleton(InternalTopicConfig.CleanupPolicy.compact),
                    Collections.<String, String>emptyMap()))));
    expectedTopicGroups.put(1, new TopicsInfo(
            Collections.<String>emptySet(),
            mkSet("topic-3", "topic-4"),
            Collections.<String, InternalTopicConfig>emptyMap(),
            Collections.singletonMap(store2, new InternalTopicConfig(store2,
                    Collections.singleton(InternalTopicConfig.CleanupPolicy.compact),
                    Collections.<String, String>emptyMap()))));
    expectedTopicGroups.put(2, new TopicsInfo(
            Collections.<String>emptySet(),
            mkSet("topic-5"),
            Collections.<String, InternalTopicConfig>emptyMap(),
            Collections.singletonMap(store3, new InternalTopicConfig(store3,
                    Collections.singleton(InternalTopicConfig.CleanupPolicy.compact),
                    Collections.<String, String>emptyMap()))));
    assertEquals(3, topicGroups.size());
    assertEquals(expectedTopicGroups, topicGroups);
}
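The expected changelog names in this test come from ProcessorStateManager.storeChangelogTopic(), which, judging from the "appId-store-changelog" key asserted in the window-store test above, joins the application id, the store name, and a "-changelog" suffix. A one-line sketch (the wrapper class name is invented):

import org.apache.kafka.streams.processor.internals.ProcessorStateManager;

public class ChangelogNameSketch {
    public static void main(String[] args) {
        // prints "X-store-1-changelog", the key used for topic group 0 above
        System.out.println(ProcessorStateManager.storeChangelogTopic("X", "store-1"));
    }
}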