Use of org.apache.kafka.common.serialization.StringDeserializer in the Apache Kafka project: class ClientAuthenticationFailureTest, method testConsumerWithInvalidCredentials.
@Test
public void testConsumerWithInvalidCredentials() {
    Map<String, Object> props = new HashMap<>(saslClientConfigs);
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + server.port());
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "");
    StringDeserializer deserializer = new StringDeserializer();
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props, deserializer, deserializer)) {
        assertThrows(SaslAuthenticationException.class, () -> {
            consumer.subscribe(Collections.singleton(topic));
            consumer.poll(Duration.ofSeconds(10));
        });
    }
}
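The assertion above depends on the consumer being configured for SASL through saslClientConfigs, which the test harness builds elsewhere. A minimal sketch of the client-side SASL/PLAIN properties involved (the broker address, class name, and credentials below are placeholders, not values from the test):

import java.util.Properties;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.config.SaslConfigs;

public class SaslClientProps {

    public static Properties invalidCredentialProps() {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder broker
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
        props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
        // Hypothetical bad credentials; a broker that validates them fails the
        // handshake, and the client surfaces SaslAuthenticationException on poll().
        props.put(SaslConfigs.SASL_JAAS_CONFIG,
            "org.apache.kafka.common.security.plain.PlainLoginModule required "
                + "username=\"badUser\" password=\"badPassword\";");
        return props;
    }
}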
Use of org.apache.kafka.common.serialization.StringDeserializer in the Apache Kafka project: class FetcherTest, method testReadCommittedWithCompactedTopic.
@Test
public void testReadCommittedWithCompactedTopic() {
    buildFetcher(OffsetResetStrategy.EARLIEST, new StringDeserializer(), new StringDeserializer(),
        Integer.MAX_VALUE, IsolationLevel.READ_COMMITTED);
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    long pid1 = 1L;
    long pid2 = 2L;
    long pid3 = 3L;
    appendTransactionalRecords(buffer, pid3, 3L,
        new SimpleRecord("3".getBytes(), "value".getBytes()),
        new SimpleRecord("4".getBytes(), "value".getBytes()));
    appendTransactionalRecords(buffer, pid2, 15L,
        new SimpleRecord("15".getBytes(), "value".getBytes()),
        new SimpleRecord("16".getBytes(), "value".getBytes()),
        new SimpleRecord("17".getBytes(), "value".getBytes()));
    appendTransactionalRecords(buffer, pid1, 22L,
        new SimpleRecord("22".getBytes(), "value".getBytes()),
        new SimpleRecord("23".getBytes(), "value".getBytes()));
    abortTransaction(buffer, pid2, 28L);
    appendTransactionalRecords(buffer, pid3, 30L,
        new SimpleRecord("30".getBytes(), "value".getBytes()),
        new SimpleRecord("31".getBytes(), "value".getBytes()),
        new SimpleRecord("32".getBytes(), "value".getBytes()));
    commitTransaction(buffer, pid3, 35L);
    appendTransactionalRecords(buffer, pid1, 39L,
        new SimpleRecord("39".getBytes(), "value".getBytes()),
        new SimpleRecord("40".getBytes(), "value".getBytes()));
    // transaction from pid1 is aborted, but the marker is not included in the fetch
    buffer.flip();
    // send the fetch
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    assertEquals(1, fetcher.sendFetches());
    // prepare the response. the aborted transactions begin at offsets which are no longer in the log
    List<FetchResponseData.AbortedTransaction> abortedTransactions = Arrays.asList(
        new FetchResponseData.AbortedTransaction().setProducerId(pid2).setFirstOffset(6),
        new FetchResponseData.AbortedTransaction().setProducerId(pid1).setFirstOffset(0));
    client.prepareResponse(fullFetchResponseWithAbortedTransactions(
        MemoryRecords.readableRecords(buffer), abortedTransactions, Errors.NONE, 100L, 100L, 0));
    consumerClient.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());
    Map<TopicPartition, List<ConsumerRecord<String, String>>> allFetchedRecords = fetchedRecords();
    assertTrue(allFetchedRecords.containsKey(tp0));
    List<ConsumerRecord<String, String>> fetchedRecords = allFetchedRecords.get(tp0);
    assertEquals(5, fetchedRecords.size());
    assertEquals(Arrays.asList(3L, 4L, 30L, 31L, 32L), collectRecordOffsets(fetchedRecords));
}
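The test helper buildFetcher wires READ_COMMITTED directly into the fixture. In application code the same isolation level is set through consumer configuration; a minimal sketch (the bootstrap servers and group id below are placeholders):

import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class ReadCommittedConsumer {

    public static KafkaConsumer<String, String> create() {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "read-committed-group");    // placeholder
        // Only records from committed transactions are returned; records from
        // aborted transactions (like pid1 and pid2 above) are filtered out.
        props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
        return new KafkaConsumer<>(props, new StringDeserializer(), new StringDeserializer());
    }
}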
Use of org.apache.kafka.common.serialization.StringDeserializer in the Apache Kafka project: class KafkaConsumerTest, method testMetricsReporterAutoGeneratedClientId.
@Test
public void testMetricsReporterAutoGeneratedClientId() {
    Properties props = new Properties();
    props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
    props.setProperty(ConsumerConfig.METRIC_REPORTER_CLASSES_CONFIG, MockMetricsReporter.class.getName());
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props, new StringDeserializer(), new StringDeserializer());
    MockMetricsReporter mockMetricsReporter = (MockMetricsReporter) consumer.metrics.reporters().get(0);
    assertEquals(consumer.getClientId(), mockMetricsReporter.clientId);
    consumer.close();
}
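MockMetricsReporter is a test-only helper that records the client id it is configured with. A reporter with the same observable behavior can be written against the public MetricsReporter interface; a sketch, with an illustrative class name:

import java.util.List;
import java.util.Map;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.metrics.KafkaMetric;
import org.apache.kafka.common.metrics.MetricsReporter;

public class ClientIdCapturingReporter implements MetricsReporter {

    public volatile String clientId;

    @Override
    public void configure(Map<String, ?> configs) {
        // The consumer passes its resolved client.id here, including the
        // auto-generated one used when the application sets none.
        clientId = (String) configs.get(CommonClientConfigs.CLIENT_ID_CONFIG);
    }

    @Override
    public void init(List<KafkaMetric> metrics) {
    }

    @Override
    public void metricChange(KafkaMetric metric) {
    }

    @Override
    public void metricRemoval(KafkaMetric metric) {
    }

    @Override
    public void close() {
    }
}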
Use of org.apache.kafka.common.serialization.StringDeserializer in the Apache Kafka project: class KStreamRepartitionTest, method shouldInvokePartitionerWhenSet.
@Test
public void shouldInvokePartitionerWhenSet() {
    final int[] expectedKeys = new int[] { 0, 1 };
    final StreamPartitioner<Integer, String> streamPartitionerMock = EasyMock.mock(StreamPartitioner.class);
    expect(streamPartitionerMock.partition(anyString(), eq(0), eq("X0"), anyInt())).andReturn(1).times(1);
    expect(streamPartitionerMock.partition(anyString(), eq(1), eq("X1"), anyInt())).andReturn(1).times(1);
    replay(streamPartitionerMock);
    final String repartitionOperationName = "test";
    final Repartitioned<Integer, String> repartitioned = Repartitioned
        .streamPartitioner(streamPartitionerMock)
        .withName(repartitionOperationName);
    builder.<Integer, String>stream(inputTopic).repartition(repartitioned);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> testInputTopic =
            driver.createInputTopic(inputTopic, new IntegerSerializer(), new StringSerializer());
        final String topicName = repartitionOutputTopic(props, repartitionOperationName);
        final TestOutputTopic<Integer, String> testOutputTopic =
            driver.createOutputTopic(topicName, new IntegerDeserializer(), new StringDeserializer());
        for (int i = 0; i < 2; i++) {
            testInputTopic.pipeInput(expectedKeys[i], "X" + expectedKeys[i], i + 10);
        }
        assertThat(testOutputTopic.readRecord(), equalTo(new TestRecord<>(0, "X0", Instant.ofEpochMilli(10))));
        assertThat(testOutputTopic.readRecord(), equalTo(new TestRecord<>(1, "X1", Instant.ofEpochMilli(11))));
        assertTrue(testOutputTopic.readRecordsToList().isEmpty());
    }
    verify(streamPartitionerMock);
}
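The EasyMock stub only verifies that the partitioner is invoked once per record and that both records land on partition 1. A real StreamPartitioner with the same routing behavior would look like this sketch (the class name is illustrative):

import org.apache.kafka.streams.processor.StreamPartitioner;

public class FixedPartitioner implements StreamPartitioner<Integer, String> {

    @Override
    public Integer partition(String topic, Integer key, String value, int numPartitions) {
        // Send every record to partition 1, matching what the mock returns.
        return 1;
    }
}

It plugs in the same way: Repartitioned.streamPartitioner(new FixedPartitioner()).withName("test").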
Use of org.apache.kafka.common.serialization.StringDeserializer in the Apache Kafka project: class CogroupedKStreamImplTest, method shouldCogroupAndAggregateTwoKStreamsWithSharedKeys.
@Test
public void shouldCogroupAndAggregateTwoKStreamsWithSharedKeys() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> stream1 = builder.stream("one", stringConsumed);
    final KStream<String, String> stream2 = builder.stream("two", stringConsumed);
    final KGroupedStream<String, String> grouped1 = stream1.groupByKey();
    final KGroupedStream<String, String> grouped2 = stream2.groupByKey();
    final KTable<String, String> customers = grouped1
        .cogroup(STRING_AGGREGATOR)
        .cogroup(grouped2, STRING_AGGREGATOR)
        .aggregate(STRING_INITIALIZER);
    customers.toStream().to(OUTPUT);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> testInputTopic =
            driver.createInputTopic("one", new StringSerializer(), new StringSerializer());
        final TestInputTopic<String, String> testInputTopic2 =
            driver.createInputTopic("two", new StringSerializer(), new StringSerializer());
        final TestOutputTopic<String, String> testOutputTopic =
            driver.createOutputTopic(OUTPUT, new StringDeserializer(), new StringDeserializer());
        testInputTopic.pipeInput("k1", "A", 0L);
        testInputTopic.pipeInput("k2", "A", 1L);
        testInputTopic.pipeInput("k1", "A", 10L);
        testInputTopic.pipeInput("k2", "A", 100L);
        testInputTopic2.pipeInput("k2", "B", 100L);
        testInputTopic2.pipeInput("k2", "B", 200L);
        testInputTopic2.pipeInput("k1", "B", 1L);
        testInputTopic2.pipeInput("k2", "B", 500L);
        testInputTopic2.pipeInput("k1", "B", 500L);
        testInputTopic2.pipeInput("k2", "B", 500L);
        testInputTopic2.pipeInput("k3", "B", 500L);
        testInputTopic2.pipeInput("k2", "B", 100L);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "A", 0);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "A", 1);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "AA", 10);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "AA", 100);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "AAB", 100);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "AABB", 200);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "AAB", 10);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "AABBB", 500);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "AABB", 500);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "AABBBB", 500);
    }
}
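STRING_INITIALIZER and STRING_AGGREGATOR are constants defined elsewhere in the test class. Definitions consistent with the asserted output (aggregates start from the empty string and append each value in arrival order, producing "A", "AA", "AAB", ...) would be the following sketch:

import org.apache.kafka.streams.kstream.Aggregator;
import org.apache.kafka.streams.kstream.Initializer;

public class CogroupTestHelpers {

    // Every key starts from an empty aggregate.
    public static final Initializer<String> STRING_INITIALIZER = () -> "";

    // Append the incoming value to the running aggregate for the key.
    public static final Aggregator<String, String, String> STRING_AGGREGATOR =
        (key, value, aggregate) -> aggregate + value;
}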