Example usage of org.apache.kafka.common.requests.ApiError in the project apache-kafka-on-k8s (banzaicloud): class KafkaAdminClientTest, method testTimeoutWithoutMetadata.
/**
 * Test that the client properly times out when we don't receive any metadata.
 */
@Test
public void testTimeoutWithoutMetadata() throws Exception {
try (AdminClientUnitTestEnv env = mockClientEnv(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, "10")) {
env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
env.kafkaClient().setNode(new Node(0, "localhost", 8121));
// Queue a successful createTopics response; it will never be delivered because
// the client cannot obtain metadata before the 10 ms request timeout fires.
env.kafkaClient().prepareResponse(new CreateTopicsResponse(Collections.singletonMap("myTopic", new ApiError(Errors.NONE, ""))));
// One topic with a single partition assigned replicas 0, 1 and 2.
NewTopic newTopic = new NewTopic("myTopic", Collections.singletonMap(0, asList(0, 1, 2)));
KafkaFuture<Void> future = env.adminClient()
    .createTopics(Collections.singleton(newTopic), new CreateTopicsOptions().timeoutMs(1000))
    .all();
// Expect the aggregate future to fail with a timeout.
assertFutureError(future, TimeoutException.class);
}
}
Example usage of org.apache.kafka.common.requests.ApiError in the project apache-kafka-on-k8s (banzaicloud): class KafkaAdminClientTest, method testDescribeAcls.
@Test
public void testDescribeAcls() throws Exception {
try (AdminClientUnitTestEnv env = mockClientEnv()) {
env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
env.kafkaClient().prepareMetadataUpdate(env.cluster(), Collections.<String>emptySet());
env.kafkaClient().setNode(env.cluster().controller());
// Case 1: successful lookup that returns both ACL1 and ACL2.
env.kafkaClient().prepareResponse(new DescribeAclsResponse(0, ApiError.NONE, asList(ACL1, ACL2)));
assertCollectionIs(env.adminClient().describeAcls(FILTER1).values().get(), ACL1, ACL2);
// Case 2: successful lookup that matches nothing.
env.kafkaClient().prepareResponse(new DescribeAclsResponse(0, ApiError.NONE, Collections.<AclBinding>emptySet()));
assertTrue(env.adminClient().describeAcls(FILTER2).values().get().isEmpty());
// Case 3: the broker reports an error; it should surface as a SecurityDisabledException.
ApiError securityDisabled = new ApiError(Errors.SECURITY_DISABLED, "Security is disabled");
env.kafkaClient().prepareResponse(new DescribeAclsResponse(0, securityDisabled, Collections.<AclBinding>emptySet()));
assertFutureError(env.adminClient().describeAcls(FILTER2).values(), SecurityDisabledException.class);
}
}
Example usage of org.apache.kafka.common.requests.ApiError in the project apache-kafka-on-k8s (banzaicloud): class KafkaAdminClientTest, method testCreateAcls.
@Test
public void testCreateAcls() throws Exception {
try (AdminClientUnitTestEnv env = mockClientEnv()) {
env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
env.kafkaClient().prepareMetadataUpdate(env.cluster(), Collections.<String>emptySet());
env.kafkaClient().setNode(env.cluster().controller());
// Case 1: both ACL creations succeed; every per-ACL future and the aggregate complete normally.
env.kafkaClient().prepareResponse(new CreateAclsResponse(0, asList(new AclCreationResponse(ApiError.NONE), new AclCreationResponse(ApiError.NONE))));
CreateAclsResult results = env.adminClient().createAcls(asList(ACL1, ACL2));
assertCollectionIs(results.values().keySet(), ACL1, ACL2);
for (KafkaFuture<Void> future : results.values().values()) {
    future.get();
}
results.all().get();
// Case 2: the first creation fails while the second succeeds;
// the failing future and the aggregate both report SecurityDisabledException.
ApiError securityDisabled = new ApiError(Errors.SECURITY_DISABLED, "Security is disabled");
env.kafkaClient().prepareResponse(new CreateAclsResponse(0, asList(new AclCreationResponse(securityDisabled), new AclCreationResponse(ApiError.NONE))));
results = env.adminClient().createAcls(asList(ACL1, ACL2));
assertCollectionIs(results.values().keySet(), ACL1, ACL2);
assertFutureError(results.values().get(ACL1), SecurityDisabledException.class);
results.values().get(ACL2).get();
assertFutureError(results.all(), SecurityDisabledException.class);
}
}
Example usage of org.apache.kafka.common.requests.ApiError in the project apache-kafka-on-k8s (banzaicloud): class KafkaAdminClient, method createTopics.
/**
 * Creates the given topics on the controller, returning a per-topic future for each
 * requested name plus an aggregate result.
 *
 * <p>Topics whose names cannot be represented in a request fail immediately with
 * {@link InvalidTopicException}; duplicate names are collapsed to a single request entry.
 *
 * @param newTopics the topics to create
 * @param options   request options (timeout, validate-only flag)
 * @return a {@link CreateTopicsResult} mapping each topic name to its completion future
 */
@Override
public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options) {
final Map<String, KafkaFutureImpl<Void>> topicFutures = new HashMap<>(newTopics.size());
final Map<String, CreateTopicsRequest.TopicDetails> topicsMap = new HashMap<>(newTopics.size());
for (NewTopic newTopic : newTopics) {
if (topicNameIsUnrepresentable(newTopic.name())) {
// Fail fast locally; this name is never sent to the broker.
KafkaFutureImpl<Void> future = new KafkaFutureImpl<>();
future.completeExceptionally(new InvalidTopicException("The given topic name '" + newTopic.name() + "' cannot be represented in a request."));
topicFutures.put(newTopic.name(), future);
} else if (!topicFutures.containsKey(newTopic.name())) {
// First occurrence of a representable name: register a pending future and a request entry.
topicFutures.put(newTopic.name(), new KafkaFutureImpl<Void>());
topicsMap.put(newTopic.name(), newTopic.convertToTopicDetails());
}
}
final long now = time.milliseconds();
Call call = new Call("createTopics", calcDeadlineMs(now, options.timeoutMs()), new ControllerNodeProvider()) {
@Override
public AbstractRequest.Builder createRequest(int timeoutMs) {
return new CreateTopicsRequest.Builder(topicsMap, timeoutMs, options.shouldValidateOnly());
}
@Override
public void handleResponse(AbstractResponse abstractResponse) {
CreateTopicsResponse response = (CreateTopicsResponse) abstractResponse;
// Handle server responses for particular topics.
for (Map.Entry<String, ApiError> entry : response.errors().entrySet()) {
KafkaFutureImpl<Void> future = topicFutures.get(entry.getKey());
if (future == null) {
log.warn("Server response mentioned unknown topic {}", entry.getKey());
} else {
ApiException exception = entry.getValue().exception();
if (exception != null) {
future.completeExceptionally(exception);
} else {
future.complete(null);
}
}
}
// The server should send back a response for every topic. But do a sanity check anyway.
for (Map.Entry<String, KafkaFutureImpl<Void>> entry : topicFutures.entrySet()) {
KafkaFutureImpl<Void> future = entry.getValue();
if (!future.isDone()) {
// Fixed message: the map key is a topic name, not a node.
future.completeExceptionally(new ApiException("The server response did not " + "contain a reference to topic " + entry.getKey()));
}
}
}
@Override
void handleFailure(Throwable throwable) {
// A transport-level failure fails every outstanding per-topic future.
completeAllExceptionally(topicFutures.values(), throwable);
}
};
// Skip the network round-trip entirely when every requested name was unrepresentable.
if (!topicsMap.isEmpty()) {
runnable.call(call, now);
}
return new CreateTopicsResult(new HashMap<String, KafkaFuture<Void>>(topicFutures));
}
Example usage of org.apache.kafka.common.requests.ApiError in the project apache-kafka-on-k8s (banzaicloud): class KafkaAdminClientTest, method testDeleteAcls.
@Test
public void testDeleteAcls() throws Exception {
try (AdminClientUnitTestEnv env = mockClientEnv()) {
env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
env.kafkaClient().prepareMetadataUpdate(env.cluster(), Collections.<String>emptySet());
env.kafkaClient().setNode(env.cluster().controller());
// Shared error payload used by the failing cases below.
ApiError noSecurity = new ApiError(Errors.SECURITY_DISABLED, "No security");
// Case 1: the first filter deletes ACL1 and ACL2; the second filter fails outright.
AclFilterResponse bothDeleted = new AclFilterResponse(asList(new AclDeletionResult(ACL1), new AclDeletionResult(ACL2)));
AclFilterResponse filterFailed = new AclFilterResponse(noSecurity, Collections.<AclDeletionResult>emptySet());
env.kafkaClient().prepareResponse(new DeleteAclsResponse(0, asList(bothDeleted, filterFailed)));
DeleteAclsResult results = env.adminClient().deleteAcls(asList(FILTER1, FILTER2));
Map<AclBindingFilter, KafkaFuture<FilterResults>> filterResults = results.values();
FilterResults filter1Results = filterResults.get(FILTER1).get();
assertEquals(null, filter1Results.values().get(0).exception());
assertEquals(ACL1, filter1Results.values().get(0).binding());
assertEquals(null, filter1Results.values().get(1).exception());
assertEquals(ACL2, filter1Results.values().get(1).binding());
assertFutureError(filterResults.get(FILTER2), SecurityDisabledException.class);
assertFutureError(results.all(), SecurityDisabledException.class);
// Case 2: one deletion inside the first filter fails; the second filter matches nothing.
AclFilterResponse mixedResults = new AclFilterResponse(asList(new AclDeletionResult(ACL1), new AclDeletionResult(noSecurity, ACL2)));
env.kafkaClient().prepareResponse(new DeleteAclsResponse(0, asList(mixedResults, new AclFilterResponse(Collections.<AclDeletionResult>emptySet()))));
results = env.adminClient().deleteAcls(asList(FILTER1, FILTER2));
assertTrue(results.values().get(FILTER2).get().values().isEmpty());
assertFutureError(results.all(), SecurityDisabledException.class);
// Case 3: every deletion succeeds and all() aggregates both bindings.
env.kafkaClient().prepareResponse(new DeleteAclsResponse(0, asList(new AclFilterResponse(asList(new AclDeletionResult(ACL1))), new AclFilterResponse(asList(new AclDeletionResult(ACL2))))));
results = env.adminClient().deleteAcls(asList(FILTER1, FILTER2));
Collection<AclBinding> deleted = results.all().get();
assertCollectionIs(deleted, ACL1, ACL2);
}
}
Aggregations