Use of org.apache.kafka.connect.util.Callback in project Kafka by Apache.
The class KafkaStatusBackingStore, method configure.
@Override
public void configure(WorkerConfig config) {
    this.topic = config.getString(DistributedConfig.STATUS_STORAGE_TOPIC_CONFIG);
    if (topic.equals(""))
        throw new ConfigException("Must specify topic for connector status.");
    Map<String, Object> producerProps = new HashMap<>();
    producerProps.putAll(config.originals());
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
    // we handle retries in this class
    producerProps.put(ProducerConfig.RETRIES_CONFIG, 0);
    Map<String, Object> consumerProps = new HashMap<>();
    consumerProps.putAll(config.originals());
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    Callback<ConsumerRecord<String, byte[]>> readCallback = new Callback<ConsumerRecord<String, byte[]>>() {
        @Override
        public void onCompletion(Throwable error, ConsumerRecord<String, byte[]> record) {
            read(record);
        }
    };
    this.kafkaLog = new KafkaBasedLog<>(topic, producerProps, consumerProps, readCallback, time);
}
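For context, org.apache.kafka.connect.util.Callback is a small single-method interface, so every usage on this page boils down to implementing one completion hook that receives either an error or a result. A minimal sketch of the shape being implemented above (the signature matches the Connect utility interface; the javadoc wording is illustrative):

public interface Callback<V> {
    /**
     * Invoked when the asynchronous operation finishes.
     * @param error  the error that caused the operation to fail, or null if it succeeded
     * @param result the result of the operation, or null if it failed
     */
    void onCompletion(Throwable error, V result);
}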
Use of org.apache.kafka.connect.util.Callback in project Kafka by Apache.
The class KafkaOffsetBackingStoreTest, method testGetSet.
@Test
public void testGetSet() throws Exception {
    expectConfigure();
    expectStart(Collections.EMPTY_LIST);
    expectStop();
    // First get() against an empty store
    final Capture<Callback<Void>> firstGetReadToEndCallback = EasyMock.newCapture();
    storeLog.readToEnd(EasyMock.capture(firstGetReadToEndCallback));
    PowerMock.expectLastCall().andAnswer(new IAnswer<Object>() {
        @Override
        public Object answer() throws Throwable {
            firstGetReadToEndCallback.getValue().onCompletion(null, null);
            return null;
        }
    });
    // Set offsets
    Capture<org.apache.kafka.clients.producer.Callback> callback0 = EasyMock.newCapture();
    storeLog.send(EasyMock.aryEq(TP0_KEY.array()), EasyMock.aryEq(TP0_VALUE.array()), EasyMock.capture(callback0));
    PowerMock.expectLastCall();
    Capture<org.apache.kafka.clients.producer.Callback> callback1 = EasyMock.newCapture();
    storeLog.send(EasyMock.aryEq(TP1_KEY.array()), EasyMock.aryEq(TP1_VALUE.array()), EasyMock.capture(callback1));
    PowerMock.expectLastCall();
    // Second get() should get the produced data and return the new values
    final Capture<Callback<Void>> secondGetReadToEndCallback = EasyMock.newCapture();
    storeLog.readToEnd(EasyMock.capture(secondGetReadToEndCallback));
    PowerMock.expectLastCall().andAnswer(new IAnswer<Object>() {
        @Override
        public Object answer() throws Throwable {
            capturedConsumedCallback.getValue().onCompletion(null, new ConsumerRecord<>(TOPIC, 0, 0, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, TP0_KEY.array(), TP0_VALUE.array()));
            capturedConsumedCallback.getValue().onCompletion(null, new ConsumerRecord<>(TOPIC, 1, 0, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, TP1_KEY.array(), TP1_VALUE.array()));
            secondGetReadToEndCallback.getValue().onCompletion(null, null);
            return null;
        }
    });
    // Third get() should pick up data produced by someone else and return those values
    final Capture<Callback<Void>> thirdGetReadToEndCallback = EasyMock.newCapture();
    storeLog.readToEnd(EasyMock.capture(thirdGetReadToEndCallback));
    PowerMock.expectLastCall().andAnswer(new IAnswer<Object>() {
        @Override
        public Object answer() throws Throwable {
            capturedConsumedCallback.getValue().onCompletion(null, new ConsumerRecord<>(TOPIC, 0, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, TP0_KEY.array(), TP0_VALUE_NEW.array()));
            capturedConsumedCallback.getValue().onCompletion(null, new ConsumerRecord<>(TOPIC, 1, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, TP1_KEY.array(), TP1_VALUE_NEW.array()));
            thirdGetReadToEndCallback.getValue().onCompletion(null, null);
            return null;
        }
    });
    PowerMock.replayAll();
    store.configure(DEFAULT_DISTRIBUTED_CONFIG);
    store.start();
    // Getting from an empty store should return nulls
    final AtomicBoolean getInvokedAndPassed = new AtomicBoolean(false);
    store.get(Arrays.asList(TP0_KEY, TP1_KEY), new Callback<Map<ByteBuffer, ByteBuffer>>() {
        @Override
        public void onCompletion(Throwable error, Map<ByteBuffer, ByteBuffer> result) {
            // Since we didn't read them yet, these will be null
            assertEquals(null, result.get(TP0_KEY));
            assertEquals(null, result.get(TP1_KEY));
            getInvokedAndPassed.set(true);
        }
    }).get(10000, TimeUnit.MILLISECONDS);
    assertTrue(getInvokedAndPassed.get());
    // Set some offsets
    Map<ByteBuffer, ByteBuffer> toSet = new HashMap<>();
    toSet.put(TP0_KEY, TP0_VALUE);
    toSet.put(TP1_KEY, TP1_VALUE);
    final AtomicBoolean invoked = new AtomicBoolean(false);
    Future<Void> setFuture = store.set(toSet, new Callback<Void>() {
        @Override
        public void onCompletion(Throwable error, Void result) {
            invoked.set(true);
        }
    });
    assertFalse(setFuture.isDone());
    // Out-of-order completion shouldn't matter: every producer callback must still be invoked
    // before the store's set callback fires
    callback1.getValue().onCompletion(null, null);
    assertFalse(invoked.get());
    callback0.getValue().onCompletion(null, null);
    setFuture.get(10000, TimeUnit.MILLISECONDS);
    assertTrue(invoked.get());
    // Getting data should read to the end of our published data and return it
    final AtomicBoolean secondGetInvokedAndPassed = new AtomicBoolean(false);
    store.get(Arrays.asList(TP0_KEY, TP1_KEY), new Callback<Map<ByteBuffer, ByteBuffer>>() {
        @Override
        public void onCompletion(Throwable error, Map<ByteBuffer, ByteBuffer> result) {
            assertEquals(TP0_VALUE, result.get(TP0_KEY));
            assertEquals(TP1_VALUE, result.get(TP1_KEY));
            secondGetInvokedAndPassed.set(true);
        }
    }).get(10000, TimeUnit.MILLISECONDS);
    assertTrue(secondGetInvokedAndPassed.get());
    // A third get() should read to the end again and pick up the newer values produced by someone else
    final AtomicBoolean thirdGetInvokedAndPassed = new AtomicBoolean(false);
    store.get(Arrays.asList(TP0_KEY, TP1_KEY), new Callback<Map<ByteBuffer, ByteBuffer>>() {
        @Override
        public void onCompletion(Throwable error, Map<ByteBuffer, ByteBuffer> result) {
            assertEquals(TP0_VALUE_NEW, result.get(TP0_KEY));
            assertEquals(TP1_VALUE_NEW, result.get(TP1_KEY));
            thirdGetInvokedAndPassed.set(true);
        }
    }).get(10000, TimeUnit.MILLISECONDS);
    assertTrue(thirdGetInvokedAndPassed.get());
    store.stop();
    PowerMock.verifyAll();
}
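The out-of-order section of this test relies on the store only completing the caller's Callback<Void> once every outstanding producer send has been acknowledged, in whatever order those acknowledgements arrive. A minimal sketch of that counting pattern, with hypothetical names and not the actual KafkaOffsetBackingStore implementation:

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.kafka.connect.util.Callback;

// Hypothetical aggregator: fires the final callback only after 'remaining' sends complete,
// regardless of the order in which the individual producer callbacks are invoked.
class SetCallbackAggregator {
    private final AtomicInteger remaining;
    private final AtomicReference<Throwable> firstError = new AtomicReference<>();
    private final Callback<Void> onAllComplete;

    SetCallbackAggregator(int sends, Callback<Void> onAllComplete) {
        this.remaining = new AtomicInteger(sends);
        this.onAllComplete = onAllComplete;
    }

    // Called from each producer callback as its send is acknowledged
    void onSendComplete(Throwable error) {
        if (error != null)
            firstError.compareAndSet(null, error);
        if (remaining.decrementAndGet() == 0)
            onAllComplete.onCompletion(firstError.get(), null);
    }
}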
Use of org.apache.kafka.connect.util.Callback in project Kafka by Apache.
The class RestServerTest, method checkCORSRequest.
public void checkCORSRequest(String corsDomain, String origin, String expectedHeader, String method) {
    // To be able to set the Origin, we need to toggle this flag
    System.setProperty("sun.net.http.allowRestrictedHeaders", "true");
    final Capture<Callback<Collection<String>>> connectorsCallback = EasyMock.newCapture();
    herder.connectors(EasyMock.capture(connectorsCallback));
    PowerMock.expectLastCall().andAnswer(new IAnswer<Object>() {
        @Override
        public Object answer() throws Throwable {
            connectorsCallback.getValue().onCompletion(null, Arrays.asList("a", "b"));
            return null;
        }
    });
    PowerMock.replayAll();
    Map<String, String> workerProps = baseWorkerProps();
    workerProps.put(WorkerConfig.ACCESS_CONTROL_ALLOW_ORIGIN_CONFIG, corsDomain);
    workerProps.put(WorkerConfig.ACCESS_CONTROL_ALLOW_METHODS_CONFIG, method);
    WorkerConfig workerConfig = new StandaloneConfig(workerProps);
    server = new RestServer(workerConfig);
    server.start(herder);
    Response response = request("/connectors").header("Referer", origin + "/page").header("Origin", origin).get();
    assertEquals(200, response.getStatus());
    assertEquals(expectedHeader, response.getHeaderString("Access-Control-Allow-Origin"));
    response = request("/connector-plugins/FileStreamSource/validate").header("Referer", origin + "/page").header("Origin", origin).header("Access-Control-Request-Method", method).options();
    assertEquals(404, response.getStatus());
    assertEquals(expectedHeader, response.getHeaderString("Access-Control-Allow-Origin"));
    assertEquals(method, response.getHeaderString("Access-Control-Allow-Methods"));
    PowerMock.verifyAll();
}
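checkCORSRequest is a shared helper rather than a test method itself; the actual @Test methods in RestServerTest drive it with different origin and allow-list combinations. A hedged sketch of how such callers could look (method names and argument values here are illustrative, not copied from the Kafka test suite):

@Test
public void testCORSEnabled() {
    // An allowed origin should be echoed back in Access-Control-Allow-Origin
    checkCORSRequest("*", "http://bar.com", "http://bar.com", "PUT");
}

@Test
public void testCORSDisabled() {
    // With no allowed origins configured, no matching CORS header should come back
    checkCORSRequest("", "http://bar.com", null, "GET");
}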
Use of org.apache.kafka.connect.util.Callback in project Kafka by Apache.
The class ConnectorsResourceTest, method testRestartTaskNotFound.
@Test(expected = NotFoundException.class)
public void testRestartTaskNotFound() throws Throwable {
    ConnectorTaskId taskId = new ConnectorTaskId(CONNECTOR_NAME, 0);
    final Capture<Callback<Void>> cb = Capture.newInstance();
    herder.restartTask(EasyMock.eq(taskId), EasyMock.capture(cb));
    expectAndCallbackException(cb, new NotFoundException("not found"));
    PowerMock.replayAll();
    connectorsResource.restartTask(CONNECTOR_NAME, 0, FORWARD);
    PowerMock.verifyAll();
}
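The expectAndCallbackException helper used in this test and the next one is not shown on this page. In this EasyMock/PowerMock style it would arrange for the captured herder callback to be completed with the given exception when the mocked call runs; a sketch under that assumption, not necessarily the exact helper from ConnectorsResourceTest:

private void expectAndCallbackException(final Capture<Callback<Void>> cb, final Throwable error) {
    PowerMock.expectLastCall().andAnswer(new IAnswer<Object>() {
        @Override
        public Object answer() throws Throwable {
            // Complete the captured callback exceptionally; the resource surfaces the error,
            // which is what @Test(expected = NotFoundException.class) verifies.
            cb.getValue().onCompletion(error, null);
            return null;
        }
    });
}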
Use of org.apache.kafka.connect.util.Callback in project Kafka by Apache.
The class ConnectorsResourceTest, method testPutConnectorTaskConfigsConnectorNotFound.
@Test(expected = NotFoundException.class)
public void testPutConnectorTaskConfigsConnectorNotFound() throws Throwable {
    final Capture<Callback<Void>> cb = Capture.newInstance();
    herder.putTaskConfigs(EasyMock.eq(CONNECTOR_NAME), EasyMock.eq(TASK_CONFIGS), EasyMock.capture(cb));
    expectAndCallbackException(cb, new NotFoundException("not found"));
    PowerMock.replayAll();
    connectorsResource.putTaskConfigs(CONNECTOR_NAME, FORWARD, TASK_CONFIGS);
    PowerMock.verifyAll();
}