
Example 91 with Metrics

Use of org.apache.kafka.common.metrics.Metrics in project kafka by apache.

The class StreamPartitionAssignorTest, method testAssignWithPartialTopology.

@Test
public void testAssignWithPartialTopology() throws Exception {
    Properties props = configProps();
    props.put(StreamsConfig.PARTITION_GROUPER_CLASS_CONFIG, SingleGroupPartitionGrouperStub.class);
    StreamsConfig config = new StreamsConfig(props);
    builder.addSource("source1", "topic1");
    builder.addProcessor("processor1", new MockProcessorSupplier(), "source1");
    builder.addStateStore(new MockStateStoreSupplier("store1", false), "processor1");
    builder.addSource("source2", "topic2");
    builder.addProcessor("processor2", new MockProcessorSupplier(), "source2");
    builder.addStateStore(new MockStateStoreSupplier("store2", false), "processor2");
    List<String> topics = Utils.mkList("topic1", "topic2");
    Set<TaskId> allTasks = Utils.mkSet(task0, task1, task2);
    UUID uuid1 = UUID.randomUUID();
    String client1 = "client1";
    StreamThread thread10 = new StreamThread(builder, config, mockClientSupplier, "test", client1, uuid1, new Metrics(), Time.SYSTEM, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0);
    partitionAssignor.configure(config.getConsumerConfigs(thread10, "test", client1));
    partitionAssignor.setInternalTopicManager(new MockInternalTopicManager(thread10.config, mockClientSupplier.restoreConsumer));
    Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
    subscriptions.put("consumer10", new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, Collections.<TaskId>emptySet(), Collections.<TaskId>emptySet(), userEndPoint).encode()));
    // will throw exception if it fails
    Map<String, PartitionAssignor.Assignment> assignments = partitionAssignor.assign(metadata, subscriptions);
    // check assignment info
    Set<TaskId> allActiveTasks = new HashSet<>();
    AssignmentInfo info10 = checkAssignment(Utils.mkSet("topic1"), assignments.get("consumer10"));
    allActiveTasks.addAll(info10.activeTasks);
    assertEquals(3, allActiveTasks.size());
    assertEquals(allTasks, new HashSet<>(allActiveTasks));
}
Also used : TaskId(org.apache.kafka.streams.processor.TaskId) HashMap(java.util.HashMap) MockInternalTopicManager(org.apache.kafka.test.MockInternalTopicManager) SubscriptionInfo(org.apache.kafka.streams.processor.internals.assignment.SubscriptionInfo) Properties(java.util.Properties) AssignmentInfo(org.apache.kafka.streams.processor.internals.assignment.AssignmentInfo) Metrics(org.apache.kafka.common.metrics.Metrics) MockProcessorSupplier(org.apache.kafka.test.MockProcessorSupplier) MockStateStoreSupplier(org.apache.kafka.test.MockStateStoreSupplier) PartitionAssignor(org.apache.kafka.clients.consumer.internals.PartitionAssignor) UUID(java.util.UUID) StreamsConfig(org.apache.kafka.streams.StreamsConfig) HashSet(java.util.HashSet) Test(org.junit.Test)
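
The test above only hands a fresh new Metrics() to the StreamThread constructor as a registry. For readers unfamiliar with the class, a minimal sketch of the core sensor API follows; the sensor, metric and group names such as "example-group" are illustrative, and the usual imports from org.apache.kafka.common.metrics and org.apache.kafka.common.metrics.stats are assumed.

@Test
public void exampleBasicSensorUsage() {
    // A registry with the default config; the tests on this page pass instances like this into Streams internals.
    Metrics metrics = new Metrics();
    // A sensor aggregates recorded values into one or more stats.
    Sensor latency = metrics.sensor("assign-latency");
    latency.add(metrics.metricName("assign-latency-avg", "example-group"), new Avg());
    latency.add(metrics.metricName("assign-latency-max", "example-group"), new Max());
    // Record a couple of observations; Avg and Max are updated incrementally.
    latency.record(12.0);
    latency.record(20.0);
    // Release the registry (and any attached reporters) when done.
    metrics.close();
}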

Example 92 with Metrics

Use of org.apache.kafka.common.metrics.Metrics in project kafka by apache.

The class StreamPartitionAssignorTest, method testAssignWithInternalTopicThatsSourceIsAnotherInternalTopic.

@Test
public void testAssignWithInternalTopicThatsSourceIsAnotherInternalTopic() throws Exception {
    String applicationId = "test";
    builder.setApplicationId(applicationId);
    builder.addInternalTopic("topicX");
    builder.addSource("source1", "topic1");
    builder.addProcessor("processor1", new MockProcessorSupplier(), "source1");
    builder.addSink("sink1", "topicX", "processor1");
    builder.addSource("source2", "topicX");
    builder.addInternalTopic("topicZ");
    builder.addProcessor("processor2", new MockProcessorSupplier(), "source2");
    builder.addSink("sink2", "topicZ", "processor2");
    builder.addSource("source3", "topicZ");
    List<String> topics = Utils.mkList("topic1", "test-topicX", "test-topicZ");
    Set<TaskId> allTasks = Utils.mkSet(task0, task1, task2);
    UUID uuid1 = UUID.randomUUID();
    String client1 = "client1";
    StreamThread thread10 = new StreamThread(builder, config, mockClientSupplier, applicationId, client1, uuid1, new Metrics(), Time.SYSTEM, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0);
    partitionAssignor.configure(config.getConsumerConfigs(thread10, applicationId, client1));
    MockInternalTopicManager internalTopicManager = new MockInternalTopicManager(thread10.config, mockClientSupplier.restoreConsumer);
    partitionAssignor.setInternalTopicManager(internalTopicManager);
    Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
    Set<TaskId> emptyTasks = Collections.emptySet();
    subscriptions.put("consumer10", new PartitionAssignor.Subscription(topics, new SubscriptionInfo(uuid1, emptyTasks, emptyTasks, userEndPoint).encode()));
    partitionAssignor.assign(metadata, subscriptions);
    // check prepared internal topics
    assertEquals(2, internalTopicManager.readyTopics.size());
    assertEquals(allTasks.size(), (long) internalTopicManager.readyTopics.get("test-topicZ"));
}
Also used : TaskId(org.apache.kafka.streams.processor.TaskId) HashMap(java.util.HashMap) MockInternalTopicManager(org.apache.kafka.test.MockInternalTopicManager) SubscriptionInfo(org.apache.kafka.streams.processor.internals.assignment.SubscriptionInfo) Metrics(org.apache.kafka.common.metrics.Metrics) MockProcessorSupplier(org.apache.kafka.test.MockProcessorSupplier) PartitionAssignor(org.apache.kafka.clients.consumer.internals.PartitionAssignor) UUID(java.util.UUID) Test(org.junit.Test)
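
Every example on this page builds the registry with the no-argument constructor, which is fine for tests. As a hedged sketch of what an application might do instead, Metrics also has a constructor that accepts an explicit MetricConfig, a list of reporters and a clock; the JMX prefix and window settings below are illustrative, not taken from the tests, and imports from java.util, java.util.concurrent and org.apache.kafka.common.metrics are assumed.

public Metrics createMetricsWithJmxReporter() {
    // Two samples of 30 seconds each for windowed stats.
    MetricConfig metricConfig = new MetricConfig().samples(2).timeWindow(30, TimeUnit.SECONDS);
    // Reporters are notified as metrics are added, changed and removed.
    List<MetricsReporter> reporters = Collections.<MetricsReporter>singletonList(new JmxReporter("kafka.example"));
    // The three-argument constructor wires the config, the reporters and the system clock together.
    return new Metrics(metricConfig, reporters, Time.SYSTEM);
}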

Example 93 with Metrics

Use of org.apache.kafka.common.metrics.Metrics in project kafka by apache.

The class StandbyTaskTest, method shouldNotThrowUnsupportedOperationExceptionWhenInitializingStateStores.

@Test
public void shouldNotThrowUnsupportedOperationExceptionWhenInitializingStateStores() throws Exception {
    final String changelogName = "test-application-my-store-changelog";
    final List<TopicPartition> partitions = Utils.mkList(new TopicPartition(changelogName, 0));
    consumer.assign(partitions);
    final Map<TopicPartition, OffsetAndMetadata> committedOffsets = new HashMap<>();
    committedOffsets.put(new TopicPartition(changelogName, 0), new OffsetAndMetadata(0L));
    consumer.commitSync(committedOffsets);
    restoreStateConsumer.updatePartitions(changelogName, Utils.mkList(new PartitionInfo(changelogName, 0, Node.noNode(), new Node[0], new Node[0])));
    final KStreamBuilder builder = new KStreamBuilder();
    builder.stream("topic").groupByKey().count("my-store");
    final ProcessorTopology topology = builder.setApplicationId(applicationId).build(0);
    StreamsConfig config = createConfig(baseDir);
    new StandbyTask(taskId, applicationId, partitions, topology, consumer, changelogReader, config, new MockStreamsMetrics(new Metrics()), stateDirectory);
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder) HashMap(java.util.HashMap) Metrics(org.apache.kafka.common.metrics.Metrics) TopicPartition(org.apache.kafka.common.TopicPartition) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata) PartitionInfo(org.apache.kafka.common.PartitionInfo) StreamsConfig(org.apache.kafka.streams.StreamsConfig) Test(org.junit.Test)
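
StandbyTask receives its metrics through MockStreamsMetrics, which wraps the same Metrics registry. Apart from sensors, a registry can also expose a gauge-style value directly via addMetric with a Measurable. The sketch below is illustrative only; registerGauge and the standbyTasks parameter are hypothetical names, not part of the test above.

public void registerGauge(final Metrics metrics, final Set<TaskId> standbyTasks) {
    // "standby-task-count" and "example-group" are illustrative names.
    MetricName name = metrics.metricName("standby-task-count", "example-group", "illustrative gauge");
    metrics.addMetric(name, new Measurable() {
        @Override
        public double measure(final MetricConfig config, final long now) {
            // Evaluated each time the metric is read or reported.
            return standbyTasks.size();
        }
    });
}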

Example 94 with Metrics

Use of org.apache.kafka.common.metrics.Metrics in project kafka by apache.

The class CachingWindowStoreTest, method setUp.

@Before
public void setUp() throws Exception {
    keySchema = new WindowKeySchema();
    underlying = new RocksDBSegmentedBytesStore("test", 30000, 3, keySchema);
    final RocksDBWindowStore<Bytes, byte[]> windowStore = new RocksDBWindowStore<>(underlying, Serdes.Bytes(), Serdes.ByteArray(), false);
    cacheListener = new CachingKeyValueStoreTest.CacheFlushListenerStub<>();
    cachingStore = new CachingWindowStore<>(windowStore, Serdes.String(), Serdes.String(), WINDOW_SIZE);
    cachingStore.setFlushListener(cacheListener);
    cache = new ThreadCache("testCache", MAX_CACHE_SIZE_BYTES, new MockStreamsMetrics(new Metrics()));
    topic = "topic";
    final MockProcessorContext context = new MockProcessorContext(TestUtils.tempDirectory(), null, null, (RecordCollector) null, cache);
    context.setRecordContext(new ProcessorRecordContext(DEFAULT_TIMESTAMP, 0, 0, topic));
    cachingStore.init(context, cachingStore);
}
Also used : MockStreamsMetrics(org.apache.kafka.streams.processor.internals.MockStreamsMetrics) MockProcessorContext(org.apache.kafka.test.MockProcessorContext) Bytes(org.apache.kafka.common.utils.Bytes) Metrics(org.apache.kafka.common.metrics.Metrics) ProcessorRecordContext(org.apache.kafka.streams.processor.internals.ProcessorRecordContext) Before(org.junit.Before)
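
The ThreadCache above reports into Streams metrics backed by the same Metrics registry. One capability the metrics library offers, and which Streams builds on, is sensor hierarchies: a sensor may declare parent sensors, and recording on the child also updates the parents. A minimal sketch with illustrative sensor and metric names follows.

@Test
public void exampleParentChildSensors() {
    Metrics metrics = new Metrics();
    // An aggregate sensor covering all caches.
    Sensor allCacheHits = metrics.sensor("all-cache-hits");
    allCacheHits.add(metrics.metricName("hit-total", "example-group"), new Count());
    // The varargs parameter declares parents; records on this sensor propagate upwards.
    Sensor testCacheHits = metrics.sensor("testCache-hits", allCacheHits);
    testCacheHits.add(metrics.metricName("testCache-hit-total", "example-group"), new Count());
    // Updates both the per-cache count and the aggregate count.
    testCacheHits.record();
    metrics.close();
}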

Example 95 with Metrics

Use of org.apache.kafka.common.metrics.Metrics in project kafka by apache.

The class ChangeLoggingSegmentedBytesStoreTest, method setUp.

@SuppressWarnings("unchecked")
@Before
public void setUp() throws Exception {
    final NoOpRecordCollector collector = new NoOpRecordCollector() {

        @Override
        public <K, V> void send(final String topic, K key, V value, Integer partition, Long timestamp, Serializer<K> keySerializer, Serializer<V> valueSerializer) {
            sent.put(key, value);
        }
    };
    final MockProcessorContext context = new MockProcessorContext(TestUtils.tempDirectory(), Serdes.String(), Serdes.Long(), collector, new ThreadCache("testCache", 0, new MockStreamsMetrics(new Metrics())));
    context.setTime(0);
    store.init(context, store);
}
Also used : MockStreamsMetrics(org.apache.kafka.streams.processor.internals.MockStreamsMetrics) Metrics(org.apache.kafka.common.metrics.Metrics) NoOpRecordCollector(org.apache.kafka.test.NoOpRecordCollector) MockProcessorContext(org.apache.kafka.test.MockProcessorContext) Serializer(org.apache.kafka.common.serialization.Serializer) Before(org.junit.Before)
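
When a test needs to verify what was recorded, the registry can be read back through metrics(), which maps each MetricName to a KafkaMetric. A hedged sketch with illustrative names; in the Kafka version these tests target, KafkaMetric exposes the current value via value(), while newer releases favor metricValue().

@Test
public void exampleReadMetricValueBack() {
    Metrics metrics = new Metrics();
    Sensor sensor = metrics.sensor("example-sensor");
    MetricName name = metrics.metricName("example-avg", "example-group");
    sensor.add(name, new Avg());
    sensor.record(10.0);
    sensor.record(30.0);
    // metrics() exposes all registered metrics keyed by MetricName.
    KafkaMetric metric = metrics.metrics().get(name);
    assertEquals(20.0, metric.value(), 0.0);
    metrics.close();
}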

Aggregations

Metrics (org.apache.kafka.common.metrics.Metrics): 103
Test (org.junit.Test): 76
MockStreamsMetrics (org.apache.kafka.streams.processor.internals.MockStreamsMetrics): 41
HashMap (java.util.HashMap): 31
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 28
TaskId (org.apache.kafka.streams.processor.TaskId): 27
Before (org.junit.Before): 22
MockTime (org.apache.kafka.common.utils.MockTime): 21
TopicPartition (org.apache.kafka.common.TopicPartition): 20
HashSet (java.util.HashSet): 19
StreamsMetrics (org.apache.kafka.streams.StreamsMetrics): 17
MockClientSupplier (org.apache.kafka.test.MockClientSupplier): 17
UUID (java.util.UUID): 16
PartitionAssignor (org.apache.kafka.clients.consumer.internals.PartitionAssignor): 15
Bytes (org.apache.kafka.common.utils.Bytes): 14
MockProcessorSupplier (org.apache.kafka.test.MockProcessorSupplier): 14
KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder): 13
SubscriptionInfo (org.apache.kafka.streams.processor.internals.assignment.SubscriptionInfo): 13
MockProcessorContext (org.apache.kafka.test.MockProcessorContext): 13
MockInternalTopicManager (org.apache.kafka.test.MockInternalTopicManager): 11