Use of org.apache.kafka.test.MockProcessorSupplier in project kafka by apache.
Class StreamPartitionAssignorTest, method testAssignWithInternalTopicThatsSourceIsAnotherInternalTopic:
@Test
public void testAssignWithInternalTopicThatsSourceIsAnotherInternalTopic() throws Exception {
    String applicationId = "test";
    builder.setApplicationId(applicationId);
    builder.addInternalTopic("topicX");
    builder.addSource("source1", "topic1");
    builder.addProcessor("processor1", new MockProcessorSupplier(), "source1");
    builder.addSink("sink1", "topicX", "processor1");
    builder.addSource("source2", "topicX");
    builder.addInternalTopic("topicZ");
    builder.addProcessor("processor2", new MockProcessorSupplier(), "source2");
    builder.addSink("sink2", "topicZ", "processor2");
    builder.addSource("source3", "topicZ");

    List<String> topics = Utils.mkList("topic1", "test-topicX", "test-topicZ");
    Set<TaskId> allTasks = Utils.mkSet(task0, task1, task2);

    UUID uuid1 = UUID.randomUUID();
    String client1 = "client1";
    StreamThread thread10 = new StreamThread(builder, config, mockClientSupplier, applicationId, client1, uuid1,
        new Metrics(), Time.SYSTEM, new StreamsMetadataState(builder, StreamsMetadataState.UNKNOWN_HOST), 0);

    partitionAssignor.configure(config.getConsumerConfigs(thread10, applicationId, client1));
    MockInternalTopicManager internalTopicManager = new MockInternalTopicManager(thread10.config, mockClientSupplier.restoreConsumer);
    partitionAssignor.setInternalTopicManager(internalTopicManager);

    Map<String, PartitionAssignor.Subscription> subscriptions = new HashMap<>();
    Set<TaskId> emptyTasks = Collections.emptySet();
    subscriptions.put("consumer10", new PartitionAssignor.Subscription(topics,
        new SubscriptionInfo(uuid1, emptyTasks, emptyTasks, userEndPoint).encode()));

    partitionAssignor.assign(metadata, subscriptions);

    // check prepared internal topics
    assertEquals(2, internalTopicManager.readyTopics.size());
    assertEquals(allTasks.size(), (long) internalTopicManager.readyTopics.get("test-topicZ"));
}
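The closing assertions check that both internal topics were prepared and that the entry recorded for "test-topicZ" matches the task count. The expected names in the topics list follow the convention of prefixing an internal topic with the application ID. A minimal sketch of that naming rule, using a hypothetical helper (decorateTopic is not part of the test above), assuming the "<applicationId>-<topicName>" pattern visible in the expected values:

// Hypothetical helper illustrating the naming pattern behind "test-topicX" and "test-topicZ".
static String decorateTopic(final String applicationId, final String topicName) {
    return applicationId + "-" + topicName;
}

// decorateTopic("test", "topicX") -> "test-topicX"
// decorateTopic("test", "topicZ") -> "test-topicZ"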
Use of org.apache.kafka.test.MockProcessorSupplier in project apache-kafka-on-k8s by banzaicloud.
Class StreamThreadStateStoreProviderTest, method before:
@SuppressWarnings("deprecation")
@Before
public void before() throws IOException {
    final TopologyBuilder builder = new TopologyBuilder();
    builder.addSource("the-source", topicName);
    builder.addProcessor("the-processor", new MockProcessorSupplier(), "the-source");
    builder.addStateStore(
        Stores.create("kv-store").withStringKeys().withStringValues().inMemory().build(),
        "the-processor");
    builder.addStateStore(
        Stores.create("window-store").withStringKeys().withStringValues().persistent().windowed(10, 10, 2, false).build(),
        "the-processor");

    final Properties properties = new Properties();
    final String applicationId = "applicationId";
    properties.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
    properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    stateDir = TestUtils.tempDirectory();
    properties.put(StreamsConfig.STATE_DIR_CONFIG, stateDir.getPath());
    final StreamsConfig streamsConfig = new StreamsConfig(properties);

    final MockClientSupplier clientSupplier = new MockClientSupplier();
    configureRestoreConsumer(clientSupplier, "applicationId-kv-store-changelog");
    configureRestoreConsumer(clientSupplier, "applicationId-window-store-changelog");
    builder.setApplicationId(applicationId);
    final ProcessorTopology topology = builder.build(null);

    tasks = new HashMap<>();
    stateDirectory = new StateDirectory(streamsConfig, new MockTime());
    taskOne = createStreamsTask(applicationId, streamsConfig, clientSupplier, topology, new TaskId(0, 0));
    taskOne.initializeStateStores();
    tasks.put(new TaskId(0, 0), taskOne);
    taskTwo = createStreamsTask(applicationId, streamsConfig, clientSupplier, topology, new TaskId(0, 1));
    taskTwo.initializeStateStores();
    tasks.put(new TaskId(0, 1), taskTwo);

    threadMock = EasyMock.createNiceMock(StreamThread.class);
    provider = new StreamThreadStateStoreProvider(threadMock);
}
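With this setup in place, tests can ask the provider for the stores registered above. A minimal usage sketch, assuming the same imports and fixtures as the test and assuming threadMock has been stubbed (with EasyMock expectations appropriate to this StreamThread version) to expose the two tasks created in before():

// Sketch: fetch every "kv-store" instance owned by the thread's tasks and read a key from each.
final List<ReadOnlyKeyValueStore<String, String>> kvStores =
    provider.stores("kv-store", QueryableStoreTypes.<String, String>keyValueStore());
for (final ReadOnlyKeyValueStore<String, String> store : kvStores) {
    store.get("some-key");   // null until a value has been written to the store
}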
Use of org.apache.kafka.test.MockProcessorSupplier in project apache-kafka-on-k8s by banzaicloud.
Class KStreamMapValuesTest, method testMapValuesWithKeys:
@Test
public void testMapValuesWithKeys() {
    StreamsBuilder builder = new StreamsBuilder();
    ValueMapperWithKey<Integer, CharSequence, Integer> mapper = new ValueMapperWithKey<Integer, CharSequence, Integer>() {
        @Override
        public Integer apply(final Integer readOnlyKey, final CharSequence value) {
            return value.length() + readOnlyKey;
        }
    };

    final int[] expectedKeys = { 1, 10, 100, 1000 };

    KStream<Integer, String> stream;
    MockProcessorSupplier<Integer, Integer> processor = new MockProcessorSupplier<>();
    stream = builder.stream(topicName, Consumed.with(intSerde, stringSerde));
    stream.mapValues(mapper).process(processor);

    driver.setUp(builder);
    for (int expectedKey : expectedKeys) {
        driver.process(topicName, expectedKey, Integer.toString(expectedKey));
    }

    String[] expected = { "1:2", "10:12", "100:103", "1000:1004" };
    assertArrayEquals(expected, processor.processed.toArray());
}
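Since ValueMapperWithKey is a single-method interface, the anonymous class above can also be written as a lambda on Java 8+. A sketch with identical behaviour to the test's mapper:

// Lambda form of the mapper: new value = length of the string value plus the read-only key.
final ValueMapperWithKey<Integer, CharSequence, Integer> mapper =
    (readOnlyKey, value) -> value.length() + readOnlyKey;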
Use of org.apache.kafka.test.MockProcessorSupplier in project apache-kafka-on-k8s by banzaicloud.
Class KStreamMapValuesTest, method testFlatMapValues:
@Test
public void testFlatMapValues() {
    StreamsBuilder builder = new StreamsBuilder();
    ValueMapper<CharSequence, Integer> mapper = new ValueMapper<CharSequence, Integer>() {
        @Override
        public Integer apply(CharSequence value) {
            return value.length();
        }
    };

    final int[] expectedKeys = { 1, 10, 100, 1000 };

    KStream<Integer, String> stream;
    MockProcessorSupplier<Integer, Integer> processor = new MockProcessorSupplier<>();
    stream = builder.stream(topicName, Consumed.with(intSerde, stringSerde));
    stream.mapValues(mapper).process(processor);

    driver.setUp(builder);
    for (int expectedKey : expectedKeys) {
        driver.process(topicName, expectedKey, Integer.toString(expectedKey));
    }

    String[] expected = { "1:1", "10:2", "100:3", "1000:4" };
    assertArrayEquals(expected, processor.processed.toArray());
}
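Each of these tests reads its results from processor.processed, which in this version of the test utilities records one "key:value" string per forwarded record (hence expectations such as "1:1" and "1000:4"). A purely illustrative sketch of such a recording processor; the class and field names below are hypothetical and not the actual MockProcessorSupplier source:

// Illustrative record-capturing processor in the style of MockProcessorSupplier.
public class RecordingProcessor<K, V> extends AbstractProcessor<K, V> {
    public final List<String> records = new ArrayList<>();

    @Override
    public void process(final K key, final V value) {
        records.add(key + ":" + value);
    }
}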
Use of org.apache.kafka.test.MockProcessorSupplier in project apache-kafka-on-k8s by banzaicloud.
Class KStreamSelectKeyTest, method testSelectKey:
@Test
public void testSelectKey() {
    StreamsBuilder builder = new StreamsBuilder();

    final Map<Number, String> keyMap = new HashMap<>();
    keyMap.put(1, "ONE");
    keyMap.put(2, "TWO");
    keyMap.put(3, "THREE");

    KeyValueMapper<Object, Number, String> selector = new KeyValueMapper<Object, Number, String>() {
        @Override
        public String apply(Object key, Number value) {
            return keyMap.get(value);
        }
    };

    final String[] expected = new String[] { "ONE:1", "TWO:2", "THREE:3" };
    final int[] expectedValues = new int[] { 1, 2, 3 };

    KStream<String, Integer> stream = builder.stream(topicName, Consumed.with(stringSerde, integerSerde));
    MockProcessorSupplier<String, Integer> processor = new MockProcessorSupplier<>();
    stream.selectKey(selector).process(processor);

    driver.setUp(builder);
    for (int expectedValue : expectedValues) {
        driver.process(topicName, null, expectedValue);
    }

    assertEquals(3, processor.processed.size());
    for (int i = 0; i < expected.length; i++) {
        assertEquals(expected[i], processor.processed.get(i));
    }
}
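As with the mappers above, the KeyValueMapper can be expressed as a lambda: the new key is looked up from the record value and the value itself passes through unchanged, which is why the expectations read "ONE:1", "TWO:2", "THREE:3". A sketch equivalent to the anonymous selector:

// Lambda form of the selector used in the test.
final KeyValueMapper<Object, Number, String> selector = (key, value) -> keyMap.get(value);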