Example use of org.apache.kafka.common.internals.KafkaFutureImpl in the project apache-kafka-on-k8s by banzaicloud: method listTopics of class MockAdminClient.
@Override
public ListTopicsResult listTopics(ListTopicsOptions options) {
    // Simulate a broker-side timeout when the test has requested one.
    if (timeoutNextRequests > 0) {
        --timeoutNextRequests;
        KafkaFutureImpl<Map<String, TopicListing>> timedOut = new KafkaFutureImpl<>();
        timedOut.completeExceptionally(new TimeoutException());
        return new ListTopicsResult(timedOut);
    }
    // Build a listing for every known topic, preserving its internal-topic flag.
    Map<String, TopicListing> listings = new HashMap<>();
    for (Map.Entry<String, TopicMetadata> entry : allTopics.entrySet()) {
        String name = entry.getKey();
        listings.put(name, new TopicListing(name, entry.getValue().isInternalTopic));
    }
    KafkaFutureImpl<Map<String, TopicListing>> result = new KafkaFutureImpl<>();
    result.complete(listings);
    return new ListTopicsResult(result);
}
Example use of org.apache.kafka.common.internals.KafkaFutureImpl in the project apache-kafka-on-k8s by banzaicloud: method describeConfigs of class MockAdminClient.
@Override
public DescribeConfigsResult describeConfigs(Collection<ConfigResource> resources, DescribeConfigsOptions options) {
    // One future per requested resource; each completes with the topic's
    // config entries or exceptionally if the topic is unknown.
    Map<ConfigResource, KafkaFuture<Config>> configDescriptions = new HashMap<>();
    for (ConfigResource resource : resources) {
        if (resource.type() == ConfigResource.Type.TOPIC) {
            KafkaFutureImpl<Config> future = new KafkaFutureImpl<>();
            TopicMetadata topicMetadata = allTopics.get(resource.name());
            if (topicMetadata == null) {
                // Was: allTopics.get(resource.name()).configs — NPE for an unknown
                // topic. Fail the future instead, consistent with deleteTopics().
                future.completeExceptionally(new UnknownTopicOrPartitionException(
                    String.format("Topic %s does not exist.", resource.name())));
            } else {
                List<ConfigEntry> configEntries = new ArrayList<>();
                for (Map.Entry<String, String> entry : topicMetadata.configs.entrySet()) {
                    configEntries.add(new ConfigEntry(entry.getKey(), entry.getValue()));
                }
                future.complete(new Config(configEntries));
            }
            configDescriptions.put(resource, future);
        } else {
            // Only TOPIC resources are supported by this mock.
            throw new UnsupportedOperationException("Not implemented yet");
        }
    }
    return new DescribeConfigsResult(configDescriptions);
}
Example use of org.apache.kafka.common.internals.KafkaFutureImpl in the project apache-kafka-on-k8s by banzaicloud: method deleteTopics of class MockAdminClient.
@Override
public DeleteTopicsResult deleteTopics(Collection<String> topicsToDelete, DeleteTopicsOptions options) {
    Map<String, KafkaFuture<Void>> futuresByTopic = new HashMap<>();
    // Simulate a broker-side timeout when the test has requested one:
    // every requested topic fails with TimeoutException.
    if (timeoutNextRequests > 0) {
        --timeoutNextRequests;
        for (final String topic : topicsToDelete) {
            KafkaFutureImpl<Void> timedOut = new KafkaFutureImpl<>();
            timedOut.completeExceptionally(new TimeoutException());
            futuresByTopic.put(topic, timedOut);
        }
        return new DeleteTopicsResult(futuresByTopic);
    }
    // Remove each topic from the registry; unknown topics fail their future.
    for (final String topic : topicsToDelete) {
        KafkaFutureImpl<Void> outcome = new KafkaFutureImpl<>();
        if (allTopics.remove(topic) != null) {
            outcome.complete(null);
        } else {
            outcome.completeExceptionally(new UnknownTopicOrPartitionException(String.format("Topic %s does not exist.", topic)));
        }
        futuresByTopic.put(topic, outcome);
    }
    return new DeleteTopicsResult(futuresByTopic);
}
Example use of org.apache.kafka.common.internals.KafkaFutureImpl in the project apache-kafka-on-k8s by banzaicloud: test method shouldSendPurgeData of class TaskManagerTest.
@Test
public void shouldSendPurgeData() {
    // A successfully completed delete-records future: both purge attempts
    // should go through to the admin client.
    final KafkaFutureImpl<DeletedRecords> deleteFuture = new KafkaFutureImpl<>();
    deleteFuture.complete(null);
    final Map<TopicPartition, RecordsToDelete> expectedPurge =
        Collections.singletonMap(t1p1, RecordsToDelete.beforeOffset(5L));
    final DeleteRecordsResult deleteResult = new DeleteRecordsResult(
        Collections.singletonMap(t1p1, (KafkaFuture<DeletedRecords>) deleteFuture));
    // Expect two purge rounds, each asking for the same offsets.
    EasyMock.expect(active.recordsToDelete()).andReturn(Collections.singletonMap(t1p1, 5L)).times(2);
    EasyMock.expect(adminClient.deleteRecords(expectedPurge)).andReturn(deleteResult).times(2);
    replay();
    taskManager.maybePurgeCommitedRecords();
    taskManager.maybePurgeCommitedRecords();
    verify(active, adminClient);
}
Example use of org.apache.kafka.common.internals.KafkaFutureImpl in the project apache-kafka-on-k8s by banzaicloud: test method shouldIgnorePurgeDataErrors of class TaskManagerTest.
@Test
public void shouldIgnorePurgeDataErrors() {
    // A delete-records future that failed: the task manager must swallow the
    // error and still issue the second purge request.
    final KafkaFutureImpl<DeletedRecords> deleteFuture = new KafkaFutureImpl<>();
    deleteFuture.completeExceptionally(new Exception("KABOOM!"));
    final Map<TopicPartition, RecordsToDelete> expectedPurge =
        Collections.singletonMap(t1p1, RecordsToDelete.beforeOffset(5L));
    final DeleteRecordsResult deleteResult = new DeleteRecordsResult(
        Collections.singletonMap(t1p1, (KafkaFuture<DeletedRecords>) deleteFuture));
    // Expect two purge rounds despite the failure of the first one.
    EasyMock.expect(active.recordsToDelete()).andReturn(Collections.singletonMap(t1p1, 5L)).times(2);
    EasyMock.expect(adminClient.deleteRecords(expectedPurge)).andReturn(deleteResult).times(2);
    replay();
    taskManager.maybePurgeCommitedRecords();
    taskManager.maybePurgeCommitedRecords();
    verify(active, adminClient);
}
Aggregations