Example 31 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in project kafka by apache.

From the class ClientAuthenticationFailureTest, the method testConsumerWithInvalidCredentials verifies that a consumer configured with invalid SASL credentials fails with a SaslAuthenticationException when it subscribes and polls:

@Test
public void testConsumerWithInvalidCredentials() {
    Map<String, Object> props = new HashMap<>(saslClientConfigs);
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + server.port());
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "");
    StringDeserializer deserializer = new StringDeserializer();
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props, deserializer, deserializer)) {
        assertThrows(SaslAuthenticationException.class, () -> {
            consumer.subscribe(Collections.singleton(topic));
            consumer.poll(Duration.ofSeconds(10));
        });
    }
}
Also used: HashMap (java.util.HashMap), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer), Test (org.junit.jupiter.api.Test)
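
For reference, StringDeserializer can also be exercised directly, outside any consumer. A minimal sketch, assuming UTF-8 payloads (the topic name "demo" and the sample bytes are illustrative only):

import java.nio.charset.StandardCharsets;
import org.apache.kafka.common.serialization.StringDeserializer;

public class StringDeserializerSketch {
    public static void main(String[] args) {
        StringDeserializer deserializer = new StringDeserializer();
        byte[] payload = "hello".getBytes(StandardCharsets.UTF_8);
        // deserialize(topic, data) decodes the bytes with the configured
        // encoding (UTF-8 by default); this implementation ignores the topic.
        String value = deserializer.deserialize("demo", payload);
        System.out.println(value); // prints "hello"
        deserializer.close();
    }
}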

Example 32 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in project kafka by apache.

From the class FetcherTest, the method testReadCommittedWithCompactedTopic verifies that a READ_COMMITTED fetcher filters out aborted transactional records even when compaction has removed the offsets at which those transactions began:

@Test
public void testReadCommittedWithCompactedTopic() {
    buildFetcher(OffsetResetStrategy.EARLIEST, new StringDeserializer(), new StringDeserializer(), Integer.MAX_VALUE, IsolationLevel.READ_COMMITTED);
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    long pid1 = 1L;
    long pid2 = 2L;
    long pid3 = 3L;
    appendTransactionalRecords(buffer, pid3, 3L, new SimpleRecord("3".getBytes(), "value".getBytes()), new SimpleRecord("4".getBytes(), "value".getBytes()));
    appendTransactionalRecords(buffer, pid2, 15L, new SimpleRecord("15".getBytes(), "value".getBytes()), new SimpleRecord("16".getBytes(), "value".getBytes()), new SimpleRecord("17".getBytes(), "value".getBytes()));
    appendTransactionalRecords(buffer, pid1, 22L, new SimpleRecord("22".getBytes(), "value".getBytes()), new SimpleRecord("23".getBytes(), "value".getBytes()));
    abortTransaction(buffer, pid2, 28L);
    appendTransactionalRecords(buffer, pid3, 30L, new SimpleRecord("30".getBytes(), "value".getBytes()), new SimpleRecord("31".getBytes(), "value".getBytes()), new SimpleRecord("32".getBytes(), "value".getBytes()));
    commitTransaction(buffer, pid3, 35L);
    appendTransactionalRecords(buffer, pid1, 39L, new SimpleRecord("39".getBytes(), "value".getBytes()), new SimpleRecord("40".getBytes(), "value".getBytes()));
    // transaction from pid1 is aborted, but the marker is not included in the fetch
    buffer.flip();
    // send the fetch
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    assertEquals(1, fetcher.sendFetches());
    // prepare the response. the aborted transactions begin at offsets which are no longer in the log
    List<FetchResponseData.AbortedTransaction> abortedTransactions = Arrays.asList(new FetchResponseData.AbortedTransaction().setProducerId(pid2).setFirstOffset(6), new FetchResponseData.AbortedTransaction().setProducerId(pid1).setFirstOffset(0));
    client.prepareResponse(fullFetchResponseWithAbortedTransactions(MemoryRecords.readableRecords(buffer), abortedTransactions, Errors.NONE, 100L, 100L, 0));
    consumerClient.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());
    Map<TopicPartition, List<ConsumerRecord<String, String>>> allFetchedRecords = fetchedRecords();
    assertTrue(allFetchedRecords.containsKey(tp0));
    List<ConsumerRecord<String, String>> fetchedRecords = allFetchedRecords.get(tp0);
    assertEquals(5, fetchedRecords.size());
    assertEquals(Arrays.asList(3L, 4L, 30L, 31L, 32L), collectRecordOffsets(fetchedRecords));
}
Also used: StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), ByteBuffer (java.nio.ByteBuffer), ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord), FetchResponseData (org.apache.kafka.common.message.FetchResponseData), TopicPartition (org.apache.kafka.common.TopicPartition), SimpleRecord (org.apache.kafka.common.record.SimpleRecord), Collections.singletonList (java.util.Collections.singletonList), Arrays.asList (java.util.Arrays.asList), ArrayList (java.util.ArrayList), Collections.emptyList (java.util.Collections.emptyList), List (java.util.List), Test (org.junit.jupiter.api.Test)
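
The READ_COMMITTED isolation level exercised above is what an application consumer opts into through configuration. A minimal sketch, assuming a broker at localhost:9092 and a transactional topic named "tx-topic" (both hypothetical):

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class ReadCommittedSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "read-committed-demo");
        // Only non-transactional records and records from committed
        // transactions are returned; aborted records are filtered out.
        props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
        try (KafkaConsumer<String, String> consumer =
                new KafkaConsumer<>(props, new StringDeserializer(), new StringDeserializer())) {
            consumer.subscribe(Collections.singleton("tx-topic"));
            for (ConsumerRecord<String, String> record : consumer.poll(Duration.ofSeconds(1))) {
                System.out.printf("offset=%d key=%s value=%s%n", record.offset(), record.key(), record.value());
            }
        }
    }
}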

Example 33 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in project kafka by apache.

From the class KafkaConsumerTest, the method testMetricsReporterAutoGeneratedClientId verifies that the consumer's auto-generated client id is propagated to configured metrics reporters:

@Test
public void testMetricsReporterAutoGeneratedClientId() {
    Properties props = new Properties();
    props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
    props.setProperty(ConsumerConfig.METRIC_REPORTER_CLASSES_CONFIG, MockMetricsReporter.class.getName());
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props, new StringDeserializer(), new StringDeserializer());
    MockMetricsReporter mockMetricsReporter = (MockMetricsReporter) consumer.metrics.reporters().get(0);
    assertEquals(consumer.getClientId(), mockMetricsReporter.clientId);
    consumer.close();
}
Also used: MockMetricsReporter (org.apache.kafka.test.MockMetricsReporter), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), Properties (java.util.Properties), Test (org.junit.jupiter.api.Test)
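
Passing deserializer instances into the constructor, as above, bypasses the key.deserializer and value.deserializer properties. The configuration-driven equivalent, where the consumer instantiates and configures the deserializers itself, looks roughly like this (the port matches the example above but is hypothetical):

import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class ConfigDrivenDeserializers {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
        // The consumer reflectively instantiates these classes and calls
        // configure(...) on them, which constructor injection skips.
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            System.out.println("created consumer with config-driven deserializers");
        }
    }
}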

Example 34 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in project kafka by apache.

From the class KStreamRepartitionTest, the method shouldInvokePartitionerWhenSet verifies that a custom StreamPartitioner supplied through Repartitioned is invoked for each repartitioned record:

@Test
public void shouldInvokePartitionerWhenSet() {
    final int[] expectedKeys = new int[] { 0, 1 };
    final StreamPartitioner<Integer, String> streamPartitionerMock = EasyMock.mock(StreamPartitioner.class);
    expect(streamPartitionerMock.partition(anyString(), eq(0), eq("X0"), anyInt())).andReturn(1).times(1);
    expect(streamPartitionerMock.partition(anyString(), eq(1), eq("X1"), anyInt())).andReturn(1).times(1);
    replay(streamPartitionerMock);
    final String repartitionOperationName = "test";
    final Repartitioned<Integer, String> repartitioned = Repartitioned.streamPartitioner(streamPartitionerMock).withName(repartitionOperationName);
    builder.<Integer, String>stream(inputTopic).repartition(repartitioned);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> testInputTopic = driver.createInputTopic(inputTopic, new IntegerSerializer(), new StringSerializer());
        final String topicName = repartitionOutputTopic(props, repartitionOperationName);
        final TestOutputTopic<Integer, String> testOutputTopic = driver.createOutputTopic(topicName, new IntegerDeserializer(), new StringDeserializer());
        for (int i = 0; i < 2; i++) {
            testInputTopic.pipeInput(expectedKeys[i], "X" + expectedKeys[i], i + 10);
        }
        assertThat(testOutputTopic.readRecord(), equalTo(new TestRecord<>(0, "X0", Instant.ofEpochMilli(10))));
        assertThat(testOutputTopic.readRecord(), equalTo(new TestRecord<>(1, "X1", Instant.ofEpochMilli(11))));
        assertTrue(testOutputTopic.readRecordsToList().isEmpty());
    }
    verify(streamPartitionerMock);
}
Also used: IntegerDeserializer (org.apache.kafka.common.serialization.IntegerDeserializer), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), EasyMock.anyString (org.easymock.EasyMock.anyString), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), TestRecord (org.apache.kafka.streams.test.TestRecord), Test (org.junit.Test)
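
The mocked StreamPartitioner above implements a single method, partition(topic, key, value, numPartitions). A hand-written sketch with the same Integer/String types (the even/odd routing rule is illustrative, not from the test):

import org.apache.kafka.streams.kstream.Repartitioned;
import org.apache.kafka.streams.processor.StreamPartitioner;

public class PartitionerSketch {
    public static void main(String[] args) {
        // Send even keys to partition 0 and odd keys to partition 1,
        // ignoring the topic name, the value, and the partition count.
        StreamPartitioner<Integer, String> evenOdd = (topic, key, value, numPartitions) -> key % 2;
        // Pass `repartitioned` to KStream#repartition(...) as in the test above.
        Repartitioned<Integer, String> repartitioned =
                Repartitioned.<Integer, String>streamPartitioner(evenOdd).withName("even-odd");
        System.out.println("partition for key 7: " + evenOdd.partition("any-topic", 7, "X7", 2));
    }
}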

Example 35 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in project kafka by apache.

From the class CogroupedKStreamImplTest, the method shouldCogroupAndAggregateTwoKStreamsWithSharedKeys verifies that two cogrouped streams with overlapping keys are aggregated into a single value per key:

@Test
public void shouldCogroupAndAggregateTwoKStreamsWithSharedKeys() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> stream1 = builder.stream("one", stringConsumed);
    final KStream<String, String> stream2 = builder.stream("two", stringConsumed);
    final KGroupedStream<String, String> grouped1 = stream1.groupByKey();
    final KGroupedStream<String, String> grouped2 = stream2.groupByKey();
    final KTable<String, String> customers = grouped1.cogroup(STRING_AGGREGATOR).cogroup(grouped2, STRING_AGGREGATOR).aggregate(STRING_INITIALIZER);
    customers.toStream().to(OUTPUT);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> testInputTopic = driver.createInputTopic("one", new StringSerializer(), new StringSerializer());
        final TestInputTopic<String, String> testInputTopic2 = driver.createInputTopic("two", new StringSerializer(), new StringSerializer());
        final TestOutputTopic<String, String> testOutputTopic = driver.createOutputTopic(OUTPUT, new StringDeserializer(), new StringDeserializer());
        testInputTopic.pipeInput("k1", "A", 0L);
        testInputTopic.pipeInput("k2", "A", 1L);
        testInputTopic.pipeInput("k1", "A", 10L);
        testInputTopic.pipeInput("k2", "A", 100L);
        testInputTopic2.pipeInput("k2", "B", 100L);
        testInputTopic2.pipeInput("k2", "B", 200L);
        testInputTopic2.pipeInput("k1", "B", 1L);
        testInputTopic2.pipeInput("k2", "B", 500L);
        testInputTopic2.pipeInput("k1", "B", 500L);
        testInputTopic2.pipeInput("k2", "B", 500L);
        testInputTopic2.pipeInput("k3", "B", 500L);
        testInputTopic2.pipeInput("k2", "B", 100L);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "A", 0);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "A", 1);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "AA", 10);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "AA", 100);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "AAB", 100);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "AABB", 200);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "AAB", 10);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "AABBB", 500);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "AABB", 500);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "AABBBB", 500);
    }
}
Also used: StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), Test (org.junit.Test)
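
STRING_INITIALIZER and STRING_AGGREGATOR are not shown in this excerpt, but the expected outputs ("A", "AA", "AAB", ...) are consistent with an initializer that returns the empty string and an aggregator that appends each incoming value. A plausible sketch (the names mirror the test; the bodies are inferred, not quoted from it):

import org.apache.kafka.streams.kstream.Aggregator;
import org.apache.kafka.streams.kstream.Initializer;

public class CogroupHelpersSketch {
    // Start every key's aggregate as the empty string.
    static final Initializer<String> STRING_INITIALIZER = () -> "";

    // Append the incoming value to the running aggregate, so the per-key
    // history "A", "A", "B" produces "A", "AA", "AAB" in the output topic.
    static final Aggregator<String, String, String> STRING_AGGREGATOR =
            (key, value, aggregate) -> aggregate + value;

    public static void main(String[] args) {
        String agg = STRING_INITIALIZER.apply();
        agg = STRING_AGGREGATOR.apply("k1", "A", agg);
        agg = STRING_AGGREGATOR.apply("k1", "B", agg);
        System.out.println(agg); // prints "AB"
    }
}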

Aggregations

StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 152
Test (org.junit.Test): 91
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 59
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 46
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 35
HashMap (java.util.HashMap): 33
Properties (java.util.Properties): 32
IntegerDeserializer (org.apache.kafka.common.serialization.IntegerDeserializer): 31
Windowed (org.apache.kafka.streams.kstream.Windowed): 31
List (java.util.List): 29
KeyValue (org.apache.kafka.streams.KeyValue): 29
IntegrationTest (org.apache.kafka.test.IntegrationTest): 27
ArrayList (java.util.ArrayList): 26
LongDeserializer (org.apache.kafka.common.serialization.LongDeserializer): 25
Map (java.util.Map): 20
KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer): 20
IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 17
Serdes (org.apache.kafka.common.serialization.Serdes): 17
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 17
KStream (org.apache.kafka.streams.kstream.KStream): 17