
Example 1 with Callback

use of org.apache.kafka.connect.util.Callback in project kafka by apache.

The class KafkaStatusBackingStore, method configure:

@Override
public void configure(WorkerConfig config) {
    this.topic = config.getString(DistributedConfig.STATUS_STORAGE_TOPIC_CONFIG);
    if (topic.equals(""))
        throw new ConfigException("Must specify topic for connector status.");
    Map<String, Object> producerProps = new HashMap<>();
    producerProps.putAll(config.originals());
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
    // we handle retries in this class
    producerProps.put(ProducerConfig.RETRIES_CONFIG, 0);
    Map<String, Object> consumerProps = new HashMap<>();
    consumerProps.putAll(config.originals());
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    Callback<ConsumerRecord<String, byte[]>> readCallback = new Callback<ConsumerRecord<String, byte[]>>() {

        @Override
        public void onCompletion(Throwable error, ConsumerRecord<String, byte[]> record) {
            read(record);
        }
    };
    this.kafkaLog = new KafkaBasedLog<>(topic, producerProps, consumerProps, readCallback, time);
}
Also used: HashMap (java.util.HashMap), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), ConfigException (org.apache.kafka.common.config.ConfigException), ByteArraySerializer (org.apache.kafka.common.serialization.ByteArraySerializer), ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord), Callback (org.apache.kafka.connect.util.Callback), ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer), StringSerializer (org.apache.kafka.common.serialization.StringSerializer)
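
For context, org.apache.kafka.connect.util.Callback is a single-method interface: onCompletion(Throwable error, V result) is invoked once the operation finishes, with either an error or a result. Because it has one abstract method, the anonymous readCallback above can also be written as a Java 8 lambda. The sketch below reproduces the interface shape for reference and shows the lambda form; it is illustrative, not copied from the Kafka source.

// Shape of org.apache.kafka.connect.util.Callback, reproduced here for reference:
public interface Callback<V> {
    // Invoked when the operation completes; error is non-null on failure, result on success.
    void onCompletion(Throwable error, V result);
}

// Equivalent lambda form of the readCallback built in configure() above
// (like the anonymous class, it ignores the error and forwards the record):
Callback<ConsumerRecord<String, byte[]>> readCallback = (error, record) -> read(record);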

Example 2 with Callback

use of org.apache.kafka.connect.util.Callback in project kafka by apache.

The class KafkaOffsetBackingStoreTest, method testGetSet:

@Test
public void testGetSet() throws Exception {
    expectConfigure();
    expectStart(Collections.EMPTY_LIST);
    expectStop();
    // First get() against an empty store
    final Capture<Callback<Void>> firstGetReadToEndCallback = EasyMock.newCapture();
    storeLog.readToEnd(EasyMock.capture(firstGetReadToEndCallback));
    PowerMock.expectLastCall().andAnswer(new IAnswer<Object>() {

        @Override
        public Object answer() throws Throwable {
            firstGetReadToEndCallback.getValue().onCompletion(null, null);
            return null;
        }
    });
    // Set offsets
    Capture<org.apache.kafka.clients.producer.Callback> callback0 = EasyMock.newCapture();
    storeLog.send(EasyMock.aryEq(TP0_KEY.array()), EasyMock.aryEq(TP0_VALUE.array()), EasyMock.capture(callback0));
    PowerMock.expectLastCall();
    Capture<org.apache.kafka.clients.producer.Callback> callback1 = EasyMock.newCapture();
    storeLog.send(EasyMock.aryEq(TP1_KEY.array()), EasyMock.aryEq(TP1_VALUE.array()), EasyMock.capture(callback1));
    PowerMock.expectLastCall();
    // Second get() should get the produced data and return the new values
    final Capture<Callback<Void>> secondGetReadToEndCallback = EasyMock.newCapture();
    storeLog.readToEnd(EasyMock.capture(secondGetReadToEndCallback));
    PowerMock.expectLastCall().andAnswer(new IAnswer<Object>() {

        @Override
        public Object answer() throws Throwable {
            capturedConsumedCallback.getValue().onCompletion(null, new ConsumerRecord<>(TOPIC, 0, 0, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, TP0_KEY.array(), TP0_VALUE.array()));
            capturedConsumedCallback.getValue().onCompletion(null, new ConsumerRecord<>(TOPIC, 1, 0, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, TP1_KEY.array(), TP1_VALUE.array()));
            secondGetReadToEndCallback.getValue().onCompletion(null, null);
            return null;
        }
    });
    // Third get() should pick up data produced by someone else and return those values
    final Capture<Callback<Void>> thirdGetReadToEndCallback = EasyMock.newCapture();
    storeLog.readToEnd(EasyMock.capture(thirdGetReadToEndCallback));
    PowerMock.expectLastCall().andAnswer(new IAnswer<Object>() {

        @Override
        public Object answer() throws Throwable {
            capturedConsumedCallback.getValue().onCompletion(null, new ConsumerRecord<>(TOPIC, 0, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, TP0_KEY.array(), TP0_VALUE_NEW.array()));
            capturedConsumedCallback.getValue().onCompletion(null, new ConsumerRecord<>(TOPIC, 1, 1, 0L, TimestampType.CREATE_TIME, 0L, 0, 0, TP1_KEY.array(), TP1_VALUE_NEW.array()));
            thirdGetReadToEndCallback.getValue().onCompletion(null, null);
            return null;
        }
    });
    PowerMock.replayAll();
    store.configure(DEFAULT_DISTRIBUTED_CONFIG);
    store.start();
    // Getting from empty store should return nulls
    final AtomicBoolean getInvokedAndPassed = new AtomicBoolean(false);
    store.get(Arrays.asList(TP0_KEY, TP1_KEY), new Callback<Map<ByteBuffer, ByteBuffer>>() {

        @Override
        public void onCompletion(Throwable error, Map<ByteBuffer, ByteBuffer> result) {
            // Since we didn't read them yet, these will be null
            assertEquals(null, result.get(TP0_KEY));
            assertEquals(null, result.get(TP1_KEY));
            getInvokedAndPassed.set(true);
        }
    }).get(10000, TimeUnit.MILLISECONDS);
    assertTrue(getInvokedAndPassed.get());
    // Set some offsets
    Map<ByteBuffer, ByteBuffer> toSet = new HashMap<>();
    toSet.put(TP0_KEY, TP0_VALUE);
    toSet.put(TP1_KEY, TP1_VALUE);
    final AtomicBoolean invoked = new AtomicBoolean(false);
    Future<Void> setFuture = store.set(toSet, new Callback<Void>() {

        @Override
        public void onCompletion(Throwable error, Void result) {
            invoked.set(true);
        }
    });
    assertFalse(setFuture.isDone());
    // Out of order callbacks shouldn't matter, should still require all to be invoked before invoking the callback
    // for the store's set callback
    callback1.getValue().onCompletion(null, null);
    assertFalse(invoked.get());
    callback0.getValue().onCompletion(null, null);
    setFuture.get(10000, TimeUnit.MILLISECONDS);
    assertTrue(invoked.get());
    // Getting data should read to end of our published data and return it
    final AtomicBoolean secondGetInvokedAndPassed = new AtomicBoolean(false);
    store.get(Arrays.asList(TP0_KEY, TP1_KEY), new Callback<Map<ByteBuffer, ByteBuffer>>() {

        @Override
        public void onCompletion(Throwable error, Map<ByteBuffer, ByteBuffer> result) {
            assertEquals(TP0_VALUE, result.get(TP0_KEY));
            assertEquals(TP1_VALUE, result.get(TP1_KEY));
            secondGetInvokedAndPassed.set(true);
        }
    }).get(10000, TimeUnit.MILLISECONDS);
    assertTrue(secondGetInvokedAndPassed.get());
    // Third get should read to the end again and pick up the values produced externally
    final AtomicBoolean thirdGetInvokedAndPassed = new AtomicBoolean(false);
    store.get(Arrays.asList(TP0_KEY, TP1_KEY), new Callback<Map<ByteBuffer, ByteBuffer>>() {

        @Override
        public void onCompletion(Throwable error, Map<ByteBuffer, ByteBuffer> result) {
            assertEquals(TP0_VALUE_NEW, result.get(TP0_KEY));
            assertEquals(TP1_VALUE_NEW, result.get(TP1_KEY));
            thirdGetInvokedAndPassed.set(true);
        }
    }).get(10000, TimeUnit.MILLISECONDS);
    assertTrue(thirdGetInvokedAndPassed.get());
    store.stop();
    PowerMock.verifyAll();
}
Also used: HashMap (java.util.HashMap), ByteBuffer (java.nio.ByteBuffer), ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord), AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean), Callback (org.apache.kafka.connect.util.Callback), Map (java.util.Map), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
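
The test blocks on the Future returned by store.get() and store.set(). Outside of tests, the same Callback can be adapted to a java.util.concurrent.CompletableFuture; Kafka Connect itself also ships org.apache.kafka.connect.util.FutureCallback, which plays a similar role. The adapter below is a minimal illustrative sketch, not code from the Kafka repository.

import java.util.concurrent.CompletableFuture;
import org.apache.kafka.connect.util.Callback;

// Minimal adapter: completes the given CompletableFuture when the Connect Callback fires,
// exceptionally if error != null, otherwise with the result.
public final class CallbackFutures {
    private CallbackFutures() {
    }

    public static <V> Callback<V> completing(final CompletableFuture<V> future) {
        return (error, result) -> {
            if (error != null)
                future.completeExceptionally(error);
            else
                future.complete(result);
        };
    }
}

A caller could then pass CallbackFutures.completing(future) to store.get(...) and wait on the future with a timeout, mirroring the get(10000, TimeUnit.MILLISECONDS) calls in the test above.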

Example 3 with Callback

use of org.apache.kafka.connect.util.Callback in project kafka by apache.

The class RestServerTest, method checkCORSRequest:

public void checkCORSRequest(String corsDomain, String origin, String expectedHeader, String method) {
    // To be able to set the Origin, we need to toggle this flag
    System.setProperty("sun.net.http.allowRestrictedHeaders", "true");
    final Capture<Callback<Collection<String>>> connectorsCallback = EasyMock.newCapture();
    herder.connectors(EasyMock.capture(connectorsCallback));
    PowerMock.expectLastCall().andAnswer(new IAnswer<Object>() {

        @Override
        public Object answer() throws Throwable {
            connectorsCallback.getValue().onCompletion(null, Arrays.asList("a", "b"));
            return null;
        }
    });
    PowerMock.replayAll();
    Map<String, String> workerProps = baseWorkerProps();
    workerProps.put(WorkerConfig.ACCESS_CONTROL_ALLOW_ORIGIN_CONFIG, corsDomain);
    workerProps.put(WorkerConfig.ACCESS_CONTROL_ALLOW_METHODS_CONFIG, method);
    WorkerConfig workerConfig = new StandaloneConfig(workerProps);
    server = new RestServer(workerConfig);
    server.start(herder);
    Response response = request("/connectors").header("Referer", origin + "/page").header("Origin", origin).get();
    assertEquals(200, response.getStatus());
    assertEquals(expectedHeader, response.getHeaderString("Access-Control-Allow-Origin"));
    response = request("/connector-plugins/FileStreamSource/validate").header("Referer", origin + "/page").header("Origin", origin).header("Access-Control-Request-Method", method).options();
    assertEquals(404, response.getStatus());
    assertEquals(expectedHeader, response.getHeaderString("Access-Control-Allow-Origin"));
    assertEquals(method, response.getHeaderString("Access-Control-Allow-Methods"));
    PowerMock.verifyAll();
}
Also used: Response (javax.ws.rs.core.Response), Callback (org.apache.kafka.connect.util.Callback), WorkerConfig (org.apache.kafka.connect.runtime.WorkerConfig), StandaloneConfig (org.apache.kafka.connect.runtime.standalone.StandaloneConfig)
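
checkCORSRequest is a parameterized helper rather than a test itself. The sketch below shows how individual test cases might drive it with an allowed and a disallowed origin; the method names and argument values are illustrative assumptions, not copied from RestServerTest.

// Illustrative callers of the helper above (names and values are assumptions):
@Test
public void testCORSEnabled() {
    // All origins allowed: the response is expected to echo "*" back.
    checkCORSRequest("*", "http://bar.com", "*", "PUT");
}

@Test
public void testCORSDisabled() {
    // No allowed origin configured: no Access-Control-Allow-Origin header is expected.
    checkCORSRequest("", "http://bar.com", null, "PUT");
}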

Example 4 with Callback

use of org.apache.kafka.connect.util.Callback in project kafka by apache.

The class ConnectorsResourceTest, method testRestartTaskNotFound:

@Test(expected = NotFoundException.class)
public void testRestartTaskNotFound() throws Throwable {
    ConnectorTaskId taskId = new ConnectorTaskId(CONNECTOR_NAME, 0);
    final Capture<Callback<Void>> cb = Capture.newInstance();
    herder.restartTask(EasyMock.eq(taskId), EasyMock.capture(cb));
    expectAndCallbackException(cb, new NotFoundException("not found"));
    PowerMock.replayAll();
    connectorsResource.restartTask(CONNECTOR_NAME, 0, FORWARD);
    PowerMock.verifyAll();
}
Also used: Callback (org.apache.kafka.connect.util.Callback), ConnectorTaskId (org.apache.kafka.connect.util.ConnectorTaskId), NotFoundException (org.apache.kafka.connect.errors.NotFoundException), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
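
The expectAndCallbackException helper is not shown in this snippet. A plausible sketch of how such a helper completes the captured callback with an error is given below; it is an assumption about its body, not the actual Kafka test code. Completing the callback exceptionally is what lets the NotFoundException propagate out of connectorsResource.restartTask and satisfy @Test(expected = NotFoundException.class).

// Sketch only: one way to complete a captured Callback with an error under EasyMock/PowerMock.
private static <T> void expectAndCallbackException(final Capture<Callback<T>> cb, final Throwable error) {
    PowerMock.expectLastCall().andAnswer(new IAnswer<Object>() {
        @Override
        public Object answer() throws Throwable {
            // Deliver the error to whoever registered the callback (here, the REST resource).
            cb.getValue().onCompletion(error, null);
            return null;
        }
    });
}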

Example 5 with Callback

use of org.apache.kafka.connect.util.Callback in project kafka by apache.

The class ConnectorsResourceTest, method testPutConnectorTaskConfigsConnectorNotFound:

@Test(expected = NotFoundException.class)
public void testPutConnectorTaskConfigsConnectorNotFound() throws Throwable {
    final Capture<Callback<Void>> cb = Capture.newInstance();
    herder.putTaskConfigs(EasyMock.eq(CONNECTOR_NAME), EasyMock.eq(TASK_CONFIGS), EasyMock.capture(cb));
    expectAndCallbackException(cb, new NotFoundException("not found"));
    PowerMock.replayAll();
    connectorsResource.putTaskConfigs(CONNECTOR_NAME, FORWARD, TASK_CONFIGS);
    PowerMock.verifyAll();
}
Also used: Callback (org.apache.kafka.connect.util.Callback), NotFoundException (org.apache.kafka.connect.errors.NotFoundException), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
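
On the production side, the pattern these resource tests exercise is the REST layer handing the herder a future-backed Callback and blocking until it completes. The sketch below illustrates that pattern using org.apache.kafka.connect.util.FutureCallback, which implements both Callback and Future; the method shape and timeout value are assumptions for illustration, not the actual ConnectorsResource code.

// Illustrative blocking wrapper around an asynchronous herder call (not the real ConnectorsResource code):
void putTaskConfigsBlocking(Herder herder, String connector, List<Map<String, String>> taskConfigs) throws Throwable {
    FutureCallback<Void> cb = new FutureCallback<>();
    herder.putTaskConfigs(connector, taskConfigs, cb);
    try {
        cb.get(90, TimeUnit.SECONDS); // illustrative timeout
    } catch (ExecutionException e) {
        // Herder errors such as NotFoundException arrive here as the cause.
        throw e.getCause();
    }
}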

Aggregations

Callback (org.apache.kafka.connect.util.Callback): 20
Test (org.junit.Test): 18
PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest): 18
HashMap (java.util.HashMap): 8
NotFoundException (org.apache.kafka.connect.errors.NotFoundException): 6
ConnectorInfo (org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo): 6
Herder (org.apache.kafka.connect.runtime.Herder): 5
RestServer (org.apache.kafka.connect.runtime.rest.RestServer): 4
CreateConnectorRequest (org.apache.kafka.connect.runtime.rest.entities.CreateConnectorRequest): 4
ByteBuffer (java.nio.ByteBuffer): 3
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 3
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord): 3
ConnectorTaskId (org.apache.kafka.connect.util.ConnectorTaskId): 3
Map (java.util.Map): 2
NotAssignedException (org.apache.kafka.connect.runtime.distributed.NotAssignedException): 2
ExecutionException (java.util.concurrent.ExecutionException): 1
Response (javax.ws.rs.core.Response): 1
KafkaException (org.apache.kafka.common.KafkaException): 1
ConfigException (org.apache.kafka.common.config.ConfigException): 1
ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer): 1