Example usage of org.apache.kafka.connect.data.SchemaAndValue in the Apache Kafka project, from the testRecordToRestartRequest method of the KafkaConfigBackingStoreTest class.
@Test
public void testRecordToRestartRequest() {
    // Consumer record carrying a serialized restart request for connector 0.
    ConsumerRecord<String, byte[]> restartRecord = new ConsumerRecord<>(TOPIC, 0, 0, 0L,
            TimestampType.CREATE_TIME, 0, 0, RESTART_CONNECTOR_KEYS.get(0),
            CONFIGS_SERIALIZED.get(0), new RecordHeaders(), Optional.empty());
    Struct requestStruct = RESTART_REQUEST_STRUCTS.get(0);
    // The store receives the already-deserialized payload as a schema/value pair.
    SchemaAndValue deserialized = new SchemaAndValue(requestStruct.schema(), structToMap(requestStruct));
    RestartRequest parsed = configStorage.recordToRestartRequest(restartRecord, deserialized);
    // The parsed request must mirror the connector name and both boolean flags.
    assertEquals(CONNECTOR_1_NAME, parsed.connectorName());
    assertEquals(requestStruct.getBoolean(INCLUDE_TASKS_FIELD_NAME), parsed.includeTasks());
    assertEquals(requestStruct.getBoolean(ONLY_FAILED_FIELD_NAME), parsed.onlyFailed());
}
Example usage of org.apache.kafka.connect.data.SchemaAndValue in the Apache Kafka project, from the testHeaders method of the WorkerSourceTaskTest class.
@Test
public void testHeaders() throws Exception {
// Verifies that Connect headers attached to a SourceRecord are converted to
// Kafka record headers on the produced record.
// Expected Kafka-side headers: one raw byte[] header.
Headers headers = new RecordHeaders();
headers.add("header_key", "header_value".getBytes());
// Connect-side equivalent of the same header, attached to the source record.
org.apache.kafka.connect.header.Headers connectHeaders = new ConnectHeaders();
connectHeaders.add("header_key", new SchemaAndValue(Schema.STRING_SCHEMA, "header_value"));
createWorkerTask();
List<SourceRecord> records = new ArrayList<>();
records.add(new SourceRecord(PARTITION, OFFSET, TOPIC, null, KEY_SCHEMA, KEY, RECORD_SCHEMA, RECORD, null, connectHeaders));
expectTopicCreation(TOPIC);
// Capture the ProducerRecord the task hands to the producer so its
// key/value/headers can be asserted after the send.
Capture<ProducerRecord<byte[], byte[]>> sent = expectSendRecord(TOPIC, true, true, true, true, headers);
// All EasyMock expectations are set; switch mocks to replay mode before use.
PowerMock.replayAll();
// Inject the pending records and drive the private send path directly.
Whitebox.setInternalState(workerTask, "toSend", records);
Whitebox.invokeMethod(workerTask, "sendRecords");
assertEquals(SERIALIZED_KEY, sent.getValue().key());
assertEquals(SERIALIZED_RECORD, sent.getValue().value());
// The Connect header must have been converted into the expected Kafka header.
assertEquals(headers, sent.getValue().headers());
PowerMock.verifyAll();
}
Example usage of org.apache.kafka.connect.data.SchemaAndValue in the Apache Kafka project, from the expectConversionAndTransformation method of the WorkerSinkTaskTest class.
/**
 * Primes the mock key/value/header converters for {@code numMessages} consumed
 * messages, then sets up the transformation-chain expectations.
 *
 * @param numMessages number of messages each converter expectation should match
 * @param topicPrefix prefix passed through to {@code expectTransformation}
 * @param headers     raw Kafka headers; each one gets a single converter expectation
 */
private void expectConversionAndTransformation(final int numMessages, final String topicPrefix, final Headers headers) {
    EasyMock.expect(keyConverter.toConnectData(TOPIC, headers, RAW_KEY)).andReturn(new SchemaAndValue(KEY_SCHEMA, KEY)).times(numMessages);
    EasyMock.expect(valueConverter.toConnectData(TOPIC, headers, RAW_VALUE)).andReturn(new SchemaAndValue(VALUE_SCHEMA, VALUE)).times(numMessages);
    for (Header header : headers) {
        // Decode the header bytes with an explicit charset: before Java 18,
        // new String(byte[]) uses the platform default charset, which would make
        // this expectation (and therefore the test) machine-dependent.
        String headerValue = new String(header.value(), java.nio.charset.StandardCharsets.UTF_8);
        EasyMock.expect(headerConverter.toConnectHeader(TOPIC, header.key(), header.value())).andReturn(new SchemaAndValue(VALUE_SCHEMA, headerValue)).times(1);
    }
    expectTransformation(numMessages, topicPrefix);
}
Example usage of org.apache.kafka.connect.data.SchemaAndValue in the connect-utils project by jcustenborder, from the apply method of the BaseKeyValueTransformation class.
@Override
public R apply(R record) {
    // Wrap both sides of the record; only one of them will be transformed.
    final SchemaAndValue key = new SchemaAndValue(record.keySchema(), record.key());
    final SchemaAndValue value = new SchemaAndValue(record.valueSchema(), record.value());
    // Delegate to the subclass hook, feeding it the side this instance targets.
    final SchemaAndValue result = process(record, this.isKey ? key : value);
    // Substitute the transformed side; the other side passes through untouched.
    final SchemaAndValue newKey = this.isKey ? result : key;
    final SchemaAndValue newValue = this.isKey ? value : result;
    return record.newRecord(record.topic(), record.kafkaPartition(),
            newKey.schema(), newKey.value(), newValue.schema(), newValue.value(),
            record.timestamp(), record.headers());
}
Example usage of org.apache.kafka.connect.data.SchemaAndValue in the connect-utils project by jcustenborder, from the generate method of the StructHelperTest class.
/**
 * Builds a struct schema with {@code count} INT32 fields named f1..f{count},
 * fills each field with its own index, and returns both as a SchemaAndValue.
 */
SchemaAndValue generate(int count) {
    final SchemaBuilder schemaBuilder = SchemaBuilder.struct();
    for (int field = 1; field <= count; field++) {
        schemaBuilder.field("f" + field, Schema.INT32_SCHEMA);
    }
    final Schema schema = schemaBuilder.build();
    // The schema must be finalized before the Struct can be populated.
    final Struct struct = new Struct(schema);
    for (int field = 1; field <= count; field++) {
        struct.put("f" + field, field);
    }
    return new SchemaAndValue(schema, struct);
}
Aggregations