Usage of org.apache.kafka.common.serialization.ByteArrayDeserializer in project apache-kafka-on-k8s by banzaicloud.
Example from class KafkaConsumerTest, method testOsDefaultSocketBufferSizes.
@Test
public void testOsDefaultSocketBufferSizes() throws Exception {
    // Both socket buffer sizes are set to the sentinel that tells the client
    // to keep the operating-system defaults; the consumer must still be
    // constructible (and closable) with that configuration.
    final Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
    props.put(ConsumerConfig.SEND_BUFFER_CONFIG, Selectable.USE_DEFAULT_BUFFER_SIZE);
    props.put(ConsumerConfig.RECEIVE_BUFFER_CONFIG, Selectable.USE_DEFAULT_BUFFER_SIZE);
    try (KafkaConsumer<byte[], byte[]> consumer =
             new KafkaConsumer<>(props, new ByteArrayDeserializer(), new ByteArrayDeserializer())) {
        // no-op: construction succeeding is the assertion; close() happens automatically
    }
}
Usage of org.apache.kafka.common.serialization.ByteArrayDeserializer in project apache-kafka-on-k8s by banzaicloud.
Example from class RegexSourceIntegrationTest, method testRegexMatchesTopicsAWhenDeleted.
@Test
public void testRegexMatchesTopicsAWhenDeleted() throws Exception {
    // Both matching topics should be assigned initially; after TEST-TOPIC-A is
    // deleted the assignment should shrink to TEST-TOPIC-B alone.
    final Serde<String> stringSerde = Serdes.String();
    final List<String> firstAssignment = Arrays.asList("TEST-TOPIC-A", "TEST-TOPIC-B");
    final List<String> secondAssignment = Arrays.asList("TEST-TOPIC-B");

    CLUSTER.createTopics("TEST-TOPIC-A", "TEST-TOPIC-B");

    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> patternStream = builder.stream(Pattern.compile("TEST-TOPIC-[A-Z]"));
    patternStream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);

    // Collects the topics handed to the consumer on every rebalance; populated by
    // the listener wrapper installed below.
    final List<String> assignedTopics = new ArrayList<>();

    // Supply a consumer whose subscribe() wraps the rebalance listener so the
    // test can observe each assignment as it happens.
    streams = new KafkaStreams(builder.build(), streamsConfiguration, new DefaultKafkaClientSupplier() {
        @Override
        public Consumer<byte[], byte[]> getConsumer(final Map<String, Object> config) {
            return new KafkaConsumer<byte[], byte[]>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()) {
                @Override
                public void subscribe(final Pattern topics, final ConsumerRebalanceListener listener) {
                    super.subscribe(topics, new TheConsumerRebalanceListener(assignedTopics, listener));
                }
            };
        }
    });
    streams.start();

    // Wait until the initial two-topic assignment is observed.
    TestUtils.waitForCondition(new TestCondition() {
        @Override
        public boolean conditionMet() {
            return assignedTopics.equals(firstAssignment);
        }
    }, STREAM_TASKS_NOT_UPDATED);

    CLUSTER.deleteTopic("TEST-TOPIC-A");

    // After deletion, the assignment must converge to the remaining topic.
    TestUtils.waitForCondition(new TestCondition() {
        @Override
        public boolean conditionMet() {
            return assignedTopics.equals(secondAssignment);
        }
    }, STREAM_TASKS_NOT_UPDATED);
}
Usage of org.apache.kafka.common.serialization.ByteArrayDeserializer in project apache-kafka-on-k8s by banzaicloud.
Example from class RegexSourceIntegrationTest, method testRegexMatchesTopicsAWhenCreated.
@Test
public void testRegexMatchesTopicsAWhenCreated() throws Exception {
    // Only TEST-TOPIC-1 should be assigned initially; after TEST-TOPIC-2 is
    // created the pattern subscription should pick it up as well.
    final Serde<String> stringSerde = Serdes.String();
    final List<String> expectedFirstAssignment = Arrays.asList("TEST-TOPIC-1");
    final List<String> expectedSecondAssignment = Arrays.asList("TEST-TOPIC-1", "TEST-TOPIC-2");
    // NOTE(review): the original built an unused `new StreamsConfig(streamsConfiguration)`
    // local here; it was dead code and has been removed.

    CLUSTER.createTopic("TEST-TOPIC-1");

    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("TEST-TOPIC-\\d"));
    pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);

    // Collects the topics handed to the consumer on every rebalance; populated by
    // the listener wrapper installed below.
    final List<String> assignedTopics = new ArrayList<>();

    // Supply a consumer whose subscribe() wraps the rebalance listener so the
    // test can observe each assignment as it happens.
    streams = new KafkaStreams(builder.build(), streamsConfiguration, new DefaultKafkaClientSupplier() {
        @Override
        public Consumer<byte[], byte[]> getConsumer(final Map<String, Object> config) {
            return new KafkaConsumer<byte[], byte[]>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()) {
                @Override
                public void subscribe(final Pattern topics, final ConsumerRebalanceListener listener) {
                    super.subscribe(topics, new TheConsumerRebalanceListener(assignedTopics, listener));
                }
            };
        }
    });
    streams.start();

    // Wait until the initial single-topic assignment is observed.
    TestUtils.waitForCondition(new TestCondition() {
        @Override
        public boolean conditionMet() {
            return assignedTopics.equals(expectedFirstAssignment);
        }
    }, STREAM_TASKS_NOT_UPDATED);

    CLUSTER.createTopic("TEST-TOPIC-2");

    // After creation, the assignment must grow to include the new topic.
    TestUtils.waitForCondition(new TestCondition() {
        @Override
        public boolean conditionMet() {
            return assignedTopics.equals(expectedSecondAssignment);
        }
    }, STREAM_TASKS_NOT_UPDATED);
}
Usage of org.apache.kafka.common.serialization.ByteArrayDeserializer in project apache-kafka-on-k8s by banzaicloud.
Example from class RegexSourceIntegrationTest, method testMultipleConsumersCanReadFromPartitionedTopic.
@Test
public void testMultipleConsumersCanReadFromPartitionedTopic() throws Exception {
    // Two independent Streams instances subscribe to the same topic pattern;
    // both should eventually see the full set of partitioned topics assigned.
    KafkaStreams leaderStreams = null;
    KafkaStreams followerStreams = null;
    try {
        final Serde<String> stringSerde = Serdes.String();
        final StreamsBuilder leaderBuilder = new StreamsBuilder();
        final StreamsBuilder followerBuilder = new StreamsBuilder();
        final List<String> expectedAssignment = Arrays.asList(PARTITIONED_TOPIC_1, PARTITIONED_TOPIC_2);

        final KStream<String, String> leaderStream = leaderBuilder.stream(Pattern.compile("partitioned-\\d"));
        final KStream<String, String> followerStream = followerBuilder.stream(Pattern.compile("partitioned-\\d"));
        leaderStream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
        followerStream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);

        // Assignment observed by each instance, filled in by the listener wrappers below.
        final List<String> leaderAssignment = new ArrayList<>();
        final List<String> followerAssignment = new ArrayList<>();

        // Each instance gets a consumer whose subscribe() wraps the rebalance
        // listener so this test can record the topics assigned to it.
        leaderStreams = new KafkaStreams(leaderBuilder.build(), streamsConfiguration, new DefaultKafkaClientSupplier() {
            @Override
            public Consumer<byte[], byte[]> getConsumer(final Map<String, Object> config) {
                return new KafkaConsumer<byte[], byte[]>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()) {
                    @Override
                    public void subscribe(final Pattern topics, final ConsumerRebalanceListener listener) {
                        super.subscribe(topics, new TheConsumerRebalanceListener(leaderAssignment, listener));
                    }
                };
            }
        });
        followerStreams = new KafkaStreams(followerBuilder.build(), streamsConfiguration, new DefaultKafkaClientSupplier() {
            @Override
            public Consumer<byte[], byte[]> getConsumer(final Map<String, Object> config) {
                return new KafkaConsumer<byte[], byte[]>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()) {
                    @Override
                    public void subscribe(final Pattern topics, final ConsumerRebalanceListener listener) {
                        super.subscribe(topics, new TheConsumerRebalanceListener(followerAssignment, listener));
                    }
                };
            }
        });

        leaderStreams.start();
        followerStreams.start();

        // Both instances must converge on the same full assignment.
        TestUtils.waitForCondition(new TestCondition() {
            @Override
            public boolean conditionMet() {
                return followerAssignment.equals(expectedAssignment) && leaderAssignment.equals(expectedAssignment);
            }
        }, "topic assignment not completed");
    } finally {
        // Always shut down whichever instances were started.
        if (leaderStreams != null) {
            leaderStreams.close();
        }
        if (followerStreams != null) {
            followerStreams.close();
        }
    }
}
Usage of org.apache.kafka.common.serialization.ByteArrayDeserializer in project apache-kafka-on-k8s by banzaicloud.
Example from class StreamsResetter, method maybeResetInputAndSeekToEndIntermediateTopicOffsets.
/**
 * Resets offsets for the requested input topics and seeks to the end of the
 * requested intermediate topics, for the consumer group identified by the
 * application id option.
 *
 * @param consumerConfig base consumer configuration used to build the reset client
 *                       (raw {@code Map} kept for interface compatibility)
 * @param dryRun         when {@code true}, new positions are computed but never committed
 * @return {@code EXIT_CODE_SUCCESS} when every requested topic exists,
 *         {@code EXIT_CODE_ERROR} when at least one input or intermediate topic is missing
 *         (missing topics are skipped, the rest are still processed)
 * @throws Exception if the offset reset fails
 */
private int maybeResetInputAndSeekToEndIntermediateTopicOffsets(final Map consumerConfig, final boolean dryRun) throws Exception {
    final List<String> inputTopics = options.valuesOf(inputTopicsOption);
    final List<String> intermediateTopics = options.valuesOf(intermediateTopicsOption);
    int topicNotFound = EXIT_CODE_SUCCESS;
    final List<String> notFoundInputTopics = new ArrayList<>();
    final List<String> notFoundIntermediateTopics = new ArrayList<>();
    final String groupId = options.valueOf(applicationIdOption);

    // Nothing requested at all: there is nothing to seek.
    if (inputTopics.isEmpty() && intermediateTopics.isEmpty()) {
        System.out.println("No input or intermediate topics specified. Skipping seek.");
        return EXIT_CODE_SUCCESS;
    }
    if (!inputTopics.isEmpty()) {
        System.out.println("Reset-offsets for input topics " + inputTopics);
    }
    if (!intermediateTopics.isEmpty()) {
        System.out.println("Seek-to-end for intermediate topics " + intermediateTopics);
    }

    // Partition the requested topics into "exists" (subscribe to it) and
    // "missing" (report it and flag the error exit code).
    final Set<String> topicsToSubscribe = new HashSet<>(inputTopics.size() + intermediateTopics.size());
    for (final String topic : inputTopics) {
        if (!allTopics.contains(topic)) {
            notFoundInputTopics.add(topic);
        } else {
            topicsToSubscribe.add(topic);
        }
    }
    for (final String topic : intermediateTopics) {
        if (!allTopics.contains(topic)) {
            notFoundIntermediateTopics.add(topic);
        } else {
            topicsToSubscribe.add(topic);
        }
    }
    if (!notFoundInputTopics.isEmpty()) {
        System.out.println("Following input topics are not found, skipping them");
        for (final String topic : notFoundInputTopics) {
            System.out.println("Topic: " + topic);
        }
        topicNotFound = EXIT_CODE_ERROR;
    }
    if (!notFoundIntermediateTopics.isEmpty()) {
        System.out.println("Following intermediate topics are not found, skipping them");
        for (final String topic : notFoundIntermediateTopics) {
            // Output format made consistent with the input-topic branch above
            // (was "Topic:" without the trailing space).
            System.out.println("Topic: " + topic);
        }
        topicNotFound = EXIT_CODE_ERROR;
    }

    // Nothing left to subscribe to: bail out rather than poll with an empty subscription.
    if (topicsToSubscribe.isEmpty()) {
        return topicNotFound;
    }

    final Properties config = new Properties();
    config.putAll(consumerConfig);
    config.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    // Commits are performed explicitly below (and skipped entirely on dry runs).
    config.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");

    try (final KafkaConsumer<byte[], byte[]> client = new KafkaConsumer<>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer())) {
        client.subscribe(topicsToSubscribe);
        // One short poll so the consumer joins the group and obtains its assignment.
        client.poll(1);

        final Set<TopicPartition> partitions = client.assignment();
        final Set<TopicPartition> inputTopicPartitions = new HashSet<>();
        final Set<TopicPartition> intermediateTopicPartitions = new HashSet<>();
        for (final TopicPartition p : partitions) {
            final String topic = p.topic();
            if (isInputTopic(topic)) {
                inputTopicPartitions.add(p);
            } else if (isIntermediateTopic(topic)) {
                intermediateTopicPartitions.add(p);
            } else {
                System.err.println("Skipping invalid partition: " + p);
            }
        }

        maybeReset(groupId, client, inputTopicPartitions);
        maybeSeekToEnd(groupId, client, intermediateTopicPartitions);

        if (!dryRun) {
            // Touch each partition's position (presumably to materialize the new
            // offsets before committing — TODO confirm against maybeReset/maybeSeekToEnd),
            // then commit them for the group.
            for (final TopicPartition p : partitions) {
                client.position(p);
            }
            client.commitSync();
        }
    } catch (final Exception e) {
        System.err.println("ERROR: Resetting offsets failed.");
        throw e;
    }
    System.out.println("Done.");
    return topicNotFound;
}
Aggregations