
Example 21 with SchemaAndValue

Use of org.apache.kafka.connect.data.SchemaAndValue in project kafka by apache.

From the class StringConverterTest, method testBytesToString:

@Test
public void testBytesToString() {
    SchemaAndValue data = converter.toConnectData(TOPIC, SAMPLE_STRING.getBytes());
    assertEquals(Schema.OPTIONAL_STRING_SCHEMA, data.schema());
    assertEquals(SAMPLE_STRING, data.value());
}
Also used: SchemaAndValue (org.apache.kafka.connect.data.SchemaAndValue), Test (org.junit.Test)
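
This test relies on fixture fields the snippet does not show. A minimal sketch of that setup, with illustrative constant values (the real StringConverterTest defines its own), could be:

import java.util.Collections;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.storage.StringConverter;

public class StringConverterFixture {
    // Illustrative values; not the constants used by the actual test class.
    private static final String TOPIC = "topic";
    private static final String SAMPLE_STRING = "a sample string";

    private final StringConverter converter = new StringConverter();

    public SchemaAndValue roundTrip() {
        // configure(configs, isKey); an empty config map keeps the default UTF-8 encoding
        converter.configure(Collections.emptyMap(), true);
        return converter.toConnectData(TOPIC, SAMPLE_STRING.getBytes());
    }
}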

Example 22 with SchemaAndValue

Use of org.apache.kafka.connect.data.SchemaAndValue in project kafka by apache.

From the class StringConverterTest, method testBytesToStringNonUtf8Encoding:

@Test
public void testBytesToStringNonUtf8Encoding() throws UnsupportedEncodingException {
    converter.configure(Collections.singletonMap("converter.encoding", "UTF-16"), true);
    SchemaAndValue data = converter.toConnectData(TOPIC, SAMPLE_STRING.getBytes("UTF-16"));
    assertEquals(Schema.OPTIONAL_STRING_SCHEMA, data.schema());
    assertEquals(SAMPLE_STRING, data.value());
}
Also used: SchemaAndValue (org.apache.kafka.connect.data.SchemaAndValue), Test (org.junit.Test)
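
One note on the byte production: String.getBytes(String) declares the checked UnsupportedEncodingException, while the StandardCharsets overload (Java 7+) does not. An equivalent, assuming the same constants as above:

import java.nio.charset.StandardCharsets;

// Same UTF-16 bytes, no checked exception to declare:
byte[] utf16Bytes = SAMPLE_STRING.getBytes(StandardCharsets.UTF_16);
SchemaAndValue data = converter.toConnectData(TOPIC, utf16Bytes);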

Example 23 with SchemaAndValue

Use of org.apache.kafka.connect.data.SchemaAndValue in project kafka by apache.

From the class WorkerSinkTaskThreadedTest, method expectOnePoll:

@SuppressWarnings("unchecked")
private IExpectationSetters<Object> expectOnePoll() {
    // Currently the SinkTask's put() method will not be invoked unless we provide some data, so instead of
    // returning empty data, we return one record. The expectation is that the data will be ignored by the
    // response behavior specified using the return value of this method.
    EasyMock.expect(consumer.poll(EasyMock.anyLong())).andAnswer(new IAnswer<ConsumerRecords<byte[], byte[]>>() {

        @Override
        public ConsumerRecords<byte[], byte[]> answer() throws Throwable {
            // "Sleep" so time will progress
            time.sleep(1L);
            ConsumerRecords<byte[], byte[]> records = new ConsumerRecords<>(
                    Collections.singletonMap(new TopicPartition(TOPIC, PARTITION),
                            Arrays.asList(new ConsumerRecord<>(TOPIC, PARTITION,
                                    FIRST_OFFSET + recordsReturned, TIMESTAMP, TIMESTAMP_TYPE,
                                    0L, 0, 0, RAW_KEY, RAW_VALUE))));
            recordsReturned++;
            return records;
        }
    });
    EasyMock.expect(keyConverter.toConnectData(TOPIC, RAW_KEY)).andReturn(new SchemaAndValue(KEY_SCHEMA, KEY));
    EasyMock.expect(valueConverter.toConnectData(TOPIC, RAW_VALUE)).andReturn(new SchemaAndValue(VALUE_SCHEMA, VALUE));
    sinkTask.put(EasyMock.anyObject(Collection.class));
    return EasyMock.expectLastCall();
}
Also used: TopicPartition (org.apache.kafka.common.TopicPartition), Collection (java.util.Collection), ConsumerRecords (org.apache.kafka.clients.consumer.ConsumerRecords), ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord), SchemaAndValue (org.apache.kafka.connect.data.SchemaAndValue)
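
The helper returns the IExpectationSetters for the sinkTask.put() expectation so each test can decide how that single put() behaves. A hypothetical call site (the injected exception is illustrative, not from the test above):

// Let the put() for this poll succeed silently:
expectOnePoll();
// ...or make it fail, to exercise the worker's error handling:
expectOnePoll().andThrow(new RuntimeException("injected put() failure"));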

Example 24 with SchemaAndValue

Use of org.apache.kafka.connect.data.SchemaAndValue in project kafka by apache.

From the class WorkerSinkTaskThreadedTest, method expectRebalanceDuringPoll:

@SuppressWarnings("unchecked")
private IExpectationSetters<Object> expectRebalanceDuringPoll() throws Exception {
    final List<TopicPartition> partitions = Arrays.asList(TOPIC_PARTITION, TOPIC_PARTITION2, TOPIC_PARTITION3);
    final long startOffset = 40L;
    final Map<TopicPartition, Long> offsets = new HashMap<>();
    offsets.put(TOPIC_PARTITION, startOffset);
    EasyMock.expect(consumer.poll(EasyMock.anyLong())).andAnswer(new IAnswer<ConsumerRecords<byte[], byte[]>>() {

        @Override
        public ConsumerRecords<byte[], byte[]> answer() throws Throwable {
            // "Sleep" so time will progress
            time.sleep(1L);
            sinkTaskContext.getValue().offset(offsets);
            rebalanceListener.getValue().onPartitionsAssigned(partitions);
            ConsumerRecords<byte[], byte[]> records = new ConsumerRecords<>(
                    Collections.singletonMap(new TopicPartition(TOPIC, PARTITION),
                            Arrays.asList(new ConsumerRecord<>(TOPIC, PARTITION,
                                    FIRST_OFFSET + recordsReturned, TIMESTAMP, TIMESTAMP_TYPE,
                                    0L, 0, 0, RAW_KEY, RAW_VALUE))));
            recordsReturned++;
            return records;
        }
    });
    EasyMock.expect(consumer.position(TOPIC_PARTITION)).andReturn(FIRST_OFFSET);
    EasyMock.expect(consumer.position(TOPIC_PARTITION2)).andReturn(FIRST_OFFSET);
    EasyMock.expect(consumer.position(TOPIC_PARTITION3)).andReturn(FIRST_OFFSET);
    sinkTask.open(partitions);
    EasyMock.expectLastCall();
    consumer.seek(TOPIC_PARTITION, startOffset);
    EasyMock.expectLastCall();
    EasyMock.expect(keyConverter.toConnectData(TOPIC, RAW_KEY)).andReturn(new SchemaAndValue(KEY_SCHEMA, KEY));
    EasyMock.expect(valueConverter.toConnectData(TOPIC, RAW_VALUE)).andReturn(new SchemaAndValue(VALUE_SCHEMA, VALUE));
    sinkTask.put(EasyMock.anyObject(Collection.class));
    return EasyMock.expectLastCall();
}
Also used: HashMap (java.util.HashMap), ConsumerRecords (org.apache.kafka.clients.consumer.ConsumerRecords), ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord), SchemaAndValue (org.apache.kafka.connect.data.SchemaAndValue), TopicPartition (org.apache.kafka.common.TopicPartition), Collection (java.util.Collection)
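
Since IAnswer declares a single method, the anonymous classes in these helpers can be rewritten as lambdas on Java 8+. A sketch of the poll expectation from expectRebalanceDuringPoll, behavior unchanged:

// Same expectation as above, expressed as a lambda:
EasyMock.expect(consumer.poll(EasyMock.anyLong())).andAnswer(() -> {
    time.sleep(1L);
    sinkTaskContext.getValue().offset(offsets);
    rebalanceListener.getValue().onPartitionsAssigned(partitions);
    ConsumerRecords<byte[], byte[]> records = new ConsumerRecords<>(
            Collections.singletonMap(new TopicPartition(TOPIC, PARTITION),
                    Arrays.asList(new ConsumerRecord<>(TOPIC, PARTITION,
                            FIRST_OFFSET + recordsReturned, TIMESTAMP, TIMESTAMP_TYPE,
                            0L, 0, 0, RAW_KEY, RAW_VALUE))));
    recordsReturned++;
    return records;
});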

Example 25 with SchemaAndValue

Use of org.apache.kafka.connect.data.SchemaAndValue in project kafka by apache.

From the class KafkaStatusBackingStoreTest, method readTaskState:

@Test
public void readTaskState() {
    byte[] value = new byte[0];
    KafkaBasedLog<String, byte[]> kafkaBasedLog = mock(KafkaBasedLog.class);
    Converter converter = mock(Converter.class);
    KafkaStatusBackingStore store = new KafkaStatusBackingStore(new MockTime(), converter, STATUS_TOPIC, kafkaBasedLog);
    Map<String, Object> statusMap = new HashMap<>();
    statusMap.put("worker_id", WORKER_ID);
    statusMap.put("state", "RUNNING");
    statusMap.put("generation", 0L);
    expect(converter.toConnectData(STATUS_TOPIC, value)).andReturn(new SchemaAndValue(null, statusMap));
    replayAll();
    store.read(consumerRecord(0, "status-task-conn-0", value));
    TaskStatus status = new TaskStatus(TASK, TaskStatus.State.RUNNING, WORKER_ID, 0);
    assertEquals(status, store.get(TASK));
    verifyAll();
}
Also used: HashMap (java.util.HashMap), EasyMock.anyObject (org.easymock.EasyMock.anyObject), TaskStatus (org.apache.kafka.connect.runtime.TaskStatus), MockTime (org.apache.kafka.common.utils.MockTime), SchemaAndValue (org.apache.kafka.connect.data.SchemaAndValue), Test (org.junit.Test)
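
The record key "status-task-conn-0" follows the "status-task-<connector>-<task>" naming visible in the test. An illustrative way to pick it apart (not the store's actual parsing code):

String key = "status-task-conn-0";
String id = key.substring("status-task-".length());    // "conn-0"
int dash = id.lastIndexOf('-');
String connector = id.substring(0, dash);               // "conn"
int task = Integer.parseInt(id.substring(dash + 1));    // 0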

Aggregations

SchemaAndValue (org.apache.kafka.connect.data.SchemaAndValue): 46 uses
Test (org.junit.Test): 36 uses
Schema (org.apache.kafka.connect.data.Schema): 19 uses
HashMap (java.util.HashMap): 14 uses
Date (org.apache.kafka.connect.data.Date): 9 uses
Struct (org.apache.kafka.connect.data.Struct): 6 uses
BigInteger (java.math.BigInteger): 5 uses
Map (java.util.Map): 5 uses
MockTime (org.apache.kafka.common.utils.MockTime): 5 uses
ConnectorStatus (org.apache.kafka.connect.runtime.ConnectorStatus): 5 uses
EasyMock.anyObject (org.easymock.EasyMock.anyObject): 5 uses
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord): 4 uses
BigDecimal (java.math.BigDecimal): 3 uses
Collection (java.util.Collection): 3 uses
GregorianCalendar (java.util.GregorianCalendar): 3 uses
ConsumerRecords (org.apache.kafka.clients.consumer.ConsumerRecords): 3 uses
TopicPartition (org.apache.kafka.common.TopicPartition): 3 uses
TaskStatus (org.apache.kafka.connect.runtime.TaskStatus): 3 uses
ByteBuffer (java.nio.ByteBuffer): 2 uses
LinkedHashMap (java.util.LinkedHashMap): 2 uses