Use of org.apache.kafka.connect.source.SourceRecord in project apache-kafka-on-k8s by banzaicloud.
From the class TimestampConverterTest, the method testWithSchemaUnixToTimestamp:
@Test
public void testWithSchemaUnixToTimestamp() {
    xformValue.configure(Collections.singletonMap(TimestampConverter.TARGET_TYPE_CONFIG, "Timestamp"));
    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, Schema.INT64_SCHEMA, DATE_PLUS_TIME_UNIX));
    assertEquals(Timestamp.SCHEMA, transformed.valueSchema());
    assertEquals(DATE_PLUS_TIME.getTime(), transformed.value());
}
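For context, here is a minimal self-contained sketch (not from the project) of the same transform applied outside a test. The epoch value 1518120000000L, the topic name, and the class name are illustrative:

import java.util.Collections;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.TimestampConverter;

public class UnixToTimestampSketch {
    public static void main(String[] args) {
        // Configure the transform to emit Connect's logical Timestamp type.
        TimestampConverter.Value<SourceRecord> xform = new TimestampConverter.Value<>();
        xform.configure(Collections.singletonMap(TimestampConverter.TARGET_TYPE_CONFIG, "Timestamp"));

        // An INT64-schema value is interpreted as unix epoch milliseconds.
        SourceRecord in = new SourceRecord(null, null, "topic", 0,
                Schema.INT64_SCHEMA, 1518120000000L);
        SourceRecord out = xform.apply(in);

        // out.valueSchema() is org.apache.kafka.connect.data.Timestamp.SCHEMA
        // and out.value() is a java.util.Date for the same instant.
        System.out.println(out.valueSchema() + " -> " + out.value());
        xform.close();
    }
}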
Use of org.apache.kafka.connect.source.SourceRecord in project apache-kafka-on-k8s by banzaicloud.
From the class Worker, the method buildWorkerTask:
private WorkerTask buildWorkerTask(ConnectorConfig connConfig,
                                   ConnectorTaskId id,
                                   Task task,
                                   TaskStatus.Listener statusListener,
                                   TargetState initialState,
                                   Converter keyConverter,
                                   Converter valueConverter,
                                   HeaderConverter headerConverter,
                                   ClassLoader loader) {
    // Decide which type of worker task we need based on the type of task.
    if (task instanceof SourceTask) {
        TransformationChain<SourceRecord> transformationChain =
                new TransformationChain<>(connConfig.<SourceRecord>transformations());
        OffsetStorageReader offsetReader = new OffsetStorageReaderImpl(offsetBackingStore, id.connector(), internalKeyConverter, internalValueConverter);
        OffsetStorageWriter offsetWriter = new OffsetStorageWriter(offsetBackingStore, id.connector(), internalKeyConverter, internalValueConverter);
        KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(producerProps);
        return new WorkerSourceTask(id, (SourceTask) task, statusListener, initialState, keyConverter, valueConverter,
                headerConverter, transformationChain, producer, offsetReader, offsetWriter, config, metrics, loader, time);
    } else if (task instanceof SinkTask) {
        TransformationChain<SinkRecord> transformationChain =
                new TransformationChain<>(connConfig.<SinkRecord>transformations());
        return new WorkerSinkTask(id, (SinkTask) task, statusListener, initialState, config, metrics,
                keyConverter, valueConverter, headerConverter, transformationChain, loader, time);
    } else {
        log.error("Tasks must be a subclass of either SourceTask or SinkTask: {}", task);
        throw new ConnectException("Tasks must be a subclass of either SourceTask or SinkTask");
    }
}
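The transformation chain built above runs each configured transform in sequence before the record reaches the converters and the producer. A simplified sketch of that apply loop, reflecting the documented chain semantics rather than the actual TransformationChain source:

import java.util.List;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.Transformation;

// Simplified sketch: apply each transformation in order; a null result
// means the record was filtered out and nothing is produced for it.
static SourceRecord applyChain(List<Transformation<SourceRecord>> transformations, SourceRecord record) {
    for (Transformation<SourceRecord> transformation : transformations) {
        record = transformation.apply(record);
        if (record == null) {
            return null; // dropped by this transformation
        }
    }
    return record;
}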
Use of org.apache.kafka.connect.source.SourceRecord in project kafka-connect-mqtt by jcustenborder.
From the class MqttSourceTask, the method messageArrived:
@Override
public void messageArrived(String topic, MqttMessage message) throws Exception {
    log.trace("messageArrived() - topic = '{}' message = '{}'", topic, message);
    final SourceRecord record = this.sourceConverter.convert(topic, message);
    this.records.add(record);
}
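messageArrived() only enqueues; the Connect framework drains the queue later via poll(). A hypothetical sketch of that complementary poll(), assuming this.records is a java.util.concurrent.BlockingQueue<SourceRecord> (the field name and the 100 ms timeout are assumptions, not the connector's actual code):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.connect.source.SourceRecord;

// Inside the source task class.
@Override
public List<SourceRecord> poll() throws InterruptedException {
    List<SourceRecord> batch = new ArrayList<>();
    // Wait briefly for the first record so the worker thread doesn't spin.
    SourceRecord first = this.records.poll(100, TimeUnit.MILLISECONDS);
    if (first == null) {
        return batch; // empty batch; the framework will call poll() again
    }
    batch.add(first);
    // Drain anything else that arrived in the meantime.
    this.records.drainTo(batch);
    return batch;
}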
Use of org.apache.kafka.connect.source.SourceRecord in project kafka-connect-kinesis by jcustenborder.
From the class RecordConverter, the method sourceRecord:
public SourceRecord sourceRecord(final String streamName, final String shardId, Record record) {
    // Copy the record payload out of the ByteBuffer into a plain byte array.
    byte[] data = new byte[record.getData().remaining()];
    record.getData().get(data);
    Struct key = new Struct(RecordConverter.SCHEMA_KINESIS_KEY)
            .put(RecordConverter.FIELD_PARTITION_KEY, record.getPartitionKey());
    Struct value = new Struct(RecordConverter.SCHEMA_KINESIS_VALUE)
            .put(RecordConverter.FIELD_SEQUENCE_NUMBER, record.getSequenceNumber())
            .put(RecordConverter.FIELD_APPROXIMATE_ARRIVAL_TIMESTAMP, record.getApproximateArrivalTimestamp())
            .put(RecordConverter.FIELD_PARTITION_KEY, record.getPartitionKey())
            .put(RecordConverter.FIELD_DATA, data)
            .put(RecordConverter.FIELD_STREAM_NAME, streamName)
            .put(RecordConverter.FIELD_SHARD_ID, shardId);
    // The partition/offset pair is what Connect persists for resuming.
    final Map<String, Object> sourcePartition = ImmutableMap.of(RecordConverter.FIELD_SHARD_ID, this.config.kinesisShardId);
    final Map<String, Object> sourceOffset = ImmutableMap.of(RecordConverter.FIELD_SEQUENCE_NUMBER, record.getSequenceNumber());
    final SourceRecord sourceRecord = new SourceRecord(sourcePartition, sourceOffset, this.config.kafkaTopic, null,
            RecordConverter.SCHEMA_KINESIS_KEY, key, RecordConverter.SCHEMA_KINESIS_VALUE, value,
            record.getApproximateArrivalTimestamp().getTime());
    return sourceRecord;
}
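The key and value schemas referenced here are declared elsewhere in RecordConverter. As a hedged sketch, a struct schema like SCHEMA_KINESIS_KEY would typically be built with SchemaBuilder; the schema name and field name below are assumptions, not the connector's actual definitions:

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

// Hypothetical shape of SCHEMA_KINESIS_KEY; the real definition lives in
// the connector's RecordConverter.
static final Schema SCHEMA_KINESIS_KEY = SchemaBuilder.struct()
        .name("com.github.jcustenborder.kafka.connect.kinesis.KinesisKey")
        .field("partitionKey", Schema.STRING_SCHEMA)
        .build();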
Use of org.apache.kafka.connect.source.SourceRecord in project kafka-connect-kinesis by jcustenborder.
From the class KinesisSourceTaskTest, the method sourceOffsets:
@Test
public void sourceOffsets() throws InterruptedException {
    final String SEQUENCE_NUMBER = "asdfasdfddsa";
    Map<String, Object> sourceOffset = ImmutableMap.of(RecordConverter.FIELD_SEQUENCE_NUMBER, SEQUENCE_NUMBER);
    when(this.offsetStorageReader.offset(anyMap())).thenReturn(sourceOffset);
    when(this.kinesisClient.getShardIterator(any())).thenReturn(new GetShardIteratorResult().withShardIterator("dfasdfsadfasdf"));
    this.task.start(settings);
    GetRecordsResult recordsResult = new GetRecordsResult()
            .withNextShardIterator("dsfargadsfasdfasda")
            .withRecords(TestData.record())
            .withMillisBehindLatest(0L);
    when(this.kinesisClient.getRecords(any())).thenReturn(recordsResult);
    List<SourceRecord> records = this.task.poll();
    assertNotNull(records, "records should not be null.");
    assertFalse(records.isEmpty(), "records should not be empty.");
    verify(this.offsetStorageReader, atLeastOnce()).offset(anyMap());
    GetShardIteratorRequest expectedIteratorRequest = new GetShardIteratorRequest()
            .withShardIteratorType(ShardIteratorType.AFTER_SEQUENCE_NUMBER)
            .withShardId(this.config.kinesisShardId)
            .withStreamName(this.config.kinesisStreamName)
            .withStartingSequenceNumber(SEQUENCE_NUMBER);
    verify(this.kinesisClient, atLeastOnce()).getShardIterator(expectedIteratorRequest);
}
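What this test verifies is that the task reads its stored offset at startup and requests a shard iterator positioned AFTER_SEQUENCE_NUMBER. A hedged sketch of how a start() implementation typically recovers that offset (variable names and the fallback comment are illustrative, not the connector's actual code):

import java.util.Collections;
import java.util.Map;

// Inside a SourceTask subclass; context is the inherited SourceTaskContext.
@Override
public void start(Map<String, String> settings) {
    Map<String, Object> sourcePartition =
            Collections.singletonMap(RecordConverter.FIELD_SHARD_ID, config.kinesisShardId);
    Map<String, Object> offset = context.offsetStorageReader().offset(sourcePartition);
    if (offset != null) {
        // Resume after the last committed sequence number, e.g. by building a
        // GetShardIteratorRequest with ShardIteratorType.AFTER_SEQUENCE_NUMBER
        // and withStartingSequenceNumber(sequenceNumber).
        String sequenceNumber = (String) offset.get(RecordConverter.FIELD_SEQUENCE_NUMBER);
    } else {
        // No stored offset: fall back to TRIM_HORIZON or LATEST.
    }
}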