Use of org.apache.kafka.connect.sink.SinkRecord in project kafka by apache.
The class FileStreamSinkTask, method put.
@Override
public void put(Collection<SinkRecord> sinkRecords) {
    for (SinkRecord record : sinkRecords) {
        log.trace("Writing line to {}: {}", logFilename(), record.value());
        outputStream.println(record.value());
    }
}
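For context, here is a minimal sketch of driving this put method by hand, outside the Connect runtime. It uses the seven-argument SinkRecord constructor (topic, partition, key schema, key, value schema, value, offset) that also appears in the transformation tests below; the topic name, the empty configuration map, and the assumption that an unconfigured file sink falls back to standard output are illustrative, not taken from the Kafka sources.

import java.util.Collections;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.file.FileStreamSinkTask;
import org.apache.kafka.connect.sink.SinkRecord;

public class FileStreamSinkTaskSketch {
    public static void main(String[] args) {
        FileStreamSinkTask task = new FileStreamSinkTask();
        // Assumption: with no "file" property the task writes lines to standard output.
        task.start(Collections.<String, String>emptyMap());

        // 7-arg constructor: topic, partition, key schema, key, value schema, value, offset.
        SinkRecord record = new SinkRecord("example-topic", 0,
                Schema.STRING_SCHEMA, "key-1",
                Schema.STRING_SCHEMA, "hello, sink", 0L);

        task.put(Collections.singletonList(record));
        task.stop();
    }
}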
Use of org.apache.kafka.connect.sink.SinkRecord in project kafka by apache.
The class WorkerSinkTaskTest, method testRequestCommit.
@Test
public void testRequestCommit() throws Exception {
    expectInitializeTask();
    expectPollInitialAssignment();
    expectConsumerPoll(1);
    expectConversionAndTransformation(1);
    sinkTask.put(EasyMock.<Collection<SinkRecord>>anyObject());
    EasyMock.expectLastCall();
    final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
    offsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
    offsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
    sinkTask.preCommit(offsets);
    EasyMock.expectLastCall().andReturn(offsets);
    final Capture<OffsetCommitCallback> callback = EasyMock.newCapture();
    consumer.commitAsync(EasyMock.eq(offsets), EasyMock.capture(callback));
    EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {
        @Override
        public Void answer() throws Throwable {
            callback.getValue().onComplete(offsets, null);
            return null;
        }
    });
    expectConsumerPoll(0);
    sinkTask.put(Collections.<SinkRecord>emptyList());
    EasyMock.expectLastCall();
    PowerMock.replayAll();
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    // initial assignment
    workerTask.iteration();
    // first record delivered
    workerTask.iteration();
    sinkTaskContext.getValue().requestCommit();
    assertTrue(sinkTaskContext.getValue().isCommitRequested());
    assertNotEquals(offsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "lastCommittedOffsets"));
    // triggers the commit
    workerTask.iteration();
    // should have been cleared
    assertFalse(sinkTaskContext.getValue().isCommitRequested());
    assertEquals(offsets, Whitebox.<Map<TopicPartition, OffsetAndMetadata>>getInternalState(workerTask, "lastCommittedOffsets"));
    assertEquals(0, workerTask.commitFailures());
    PowerMock.verifyAll();
}
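The test above expects preCommit to be invoked with the worker's current consumed offsets and to return the offsets that should actually be committed via consumer.commitAsync. As a point of reference, here is a minimal sketch of a custom SinkTask that overrides preCommit; the class name and the commit-everything policy are illustrative assumptions, not code from the Kafka repository.

import java.util.Collection;
import java.util.Map;

import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.connect.sink.SinkRecord;
import org.apache.kafka.connect.sink.SinkTask;

public class LoggingSinkTask extends SinkTask {

    @Override
    public String version() {
        return "0.0.1";
    }

    @Override
    public void start(Map<String, String> props) {
        // No configuration needed for this sketch.
    }

    @Override
    public void put(Collection<SinkRecord> records) {
        for (SinkRecord record : records) {
            System.out.println(record.topic() + "-" + record.kafkaPartition() + ": " + record.value());
        }
    }

    @Override
    public void flush(Map<TopicPartition, OffsetAndMetadata> currentOffsets) {
        // Nothing buffered in this sketch, so there is nothing to flush.
    }

    @Override
    public Map<TopicPartition, OffsetAndMetadata> preCommit(Map<TopicPartition, OffsetAndMetadata> currentOffsets) {
        // Returning the offsets handed in asks the worker to commit exactly these;
        // returning an empty map would tell it to commit nothing this time.
        return currentOffsets;
    }

    @Override
    public void stop() {
        // Nothing to clean up.
    }
}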
Use of org.apache.kafka.connect.sink.SinkRecord in project kafka by apache.
The class WorkerSinkTaskThreadedTest, method testPollsInBackground.
@Test
public void testPollsInBackground() throws Exception {
    expectInitializeTask();
    expectPollInitialAssignment();
    Capture<Collection<SinkRecord>> capturedRecords = expectPolls(1L);
    expectStopTask();
    PowerMock.replayAll();
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    // First iteration initializes partition assignment
    workerTask.iteration();
    // Then we iterate to fetch data
    for (int i = 0; i < 10; i++) {
        workerTask.iteration();
    }
    workerTask.stop();
    workerTask.close();
    // Verify contents match expected values, i.e. that they were translated properly. With max
    // batch size 1 and each poll returning 1 message, we should have a matching number of batches.
    assertEquals(10, capturedRecords.getValues().size());
    int offset = 0;
    for (Collection<SinkRecord> recs : capturedRecords.getValues()) {
        assertEquals(1, recs.size());
        for (SinkRecord rec : recs) {
            SinkRecord referenceSinkRecord = new SinkRecord(TOPIC, PARTITION, KEY_SCHEMA, KEY, VALUE_SCHEMA, VALUE, FIRST_OFFSET + offset, TIMESTAMP, TIMESTAMP_TYPE);
            assertEquals(referenceSinkRecord, rec);
            offset++;
        }
    }
    PowerMock.verifyAll();
}
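The reference record above is built with the nine-argument SinkRecord constructor, which extends the seven base fields with the record timestamp and its TimestampType. A standalone sketch with concrete stand-ins for the test constants (TOPIC, KEY_SCHEMA, TIMESTAMP, and so on), which are assumptions here:

import org.apache.kafka.common.record.TimestampType;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.sink.SinkRecord;

public class SinkRecordConstructionSketch {
    public static void main(String[] args) {
        // topic, partition, key schema, key, value schema, value, offset, timestamp, timestamp type
        SinkRecord record = new SinkRecord("example-topic", 0,
                Schema.STRING_SCHEMA, "key-1",
                Schema.STRING_SCHEMA, "value-1",
                42L, 1500000000000L, TimestampType.CREATE_TIME);

        System.out.println(record.kafkaOffset());    // 42
        System.out.println(record.timestamp());      // 1500000000000
        System.out.println(record.timestampType());  // CreateTime
    }
}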
Use of org.apache.kafka.connect.sink.SinkRecord in project kafka by apache.
The class ExtractFieldTest, method schemaless.
@Test
public void schemaless() {
    final ExtractField<SinkRecord> xform = new ExtractField.Key<>();
    xform.configure(Collections.singletonMap("field", "magic"));
    final SinkRecord record = new SinkRecord("test", 0, null, Collections.singletonMap("magic", 42), null, null, 0);
    final SinkRecord transformedRecord = xform.apply(record);
    assertNull(transformedRecord.keySchema());
    assertEquals(42, transformedRecord.key());
}
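For comparison, the same extraction can be applied to the record value instead of the key by switching to the ExtractField.Value variant. This sketch is an illustration rather than a test from the Kafka suite:

import java.util.Collections;

import org.apache.kafka.connect.sink.SinkRecord;
import org.apache.kafka.connect.transforms.ExtractField;

public class ExtractFieldValueSketch {
    public static void main(String[] args) {
        ExtractField<SinkRecord> xform = new ExtractField.Value<>();
        xform.configure(Collections.singletonMap("field", "magic"));

        // Schemaless value: a map with a single "magic" entry.
        SinkRecord record = new SinkRecord("test", 0,
                null, null,
                null, Collections.singletonMap("magic", 42), 0);

        SinkRecord transformed = xform.apply(record);
        System.out.println(transformed.value());  // 42
    }
}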
Use of org.apache.kafka.connect.sink.SinkRecord in project kafka by apache.
The class HoistFieldTest, method withSchema.
@Test
public void withSchema() {
    final HoistField<SinkRecord> xform = new HoistField.Key<>();
    xform.configure(Collections.singletonMap("field", "magic"));
    final SinkRecord record = new SinkRecord("test", 0, Schema.INT32_SCHEMA, 42, null, null, 0);
    final SinkRecord transformedRecord = xform.apply(record);
    assertEquals(Schema.Type.STRUCT, transformedRecord.keySchema().type());
    assertEquals(record.keySchema(), transformedRecord.keySchema().field("magic").schema());
    assertEquals(42, ((Struct) transformedRecord.key()).get("magic"));
}
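A schemaless counterpart, assumed for illustration rather than copied from HoistFieldTest: without a key schema, HoistField wraps the raw key in a single-entry map keyed by the configured field name instead of building a Struct.

import java.util.Collections;
import java.util.Map;

import org.apache.kafka.connect.sink.SinkRecord;
import org.apache.kafka.connect.transforms.HoistField;

public class HoistFieldSchemalessSketch {
    public static void main(String[] args) {
        HoistField<SinkRecord> xform = new HoistField.Key<>();
        xform.configure(Collections.singletonMap("field", "magic"));

        // No key schema: the raw integer key gets hoisted into a map.
        SinkRecord record = new SinkRecord("test", 0, null, 42, null, null, 0);
        SinkRecord transformed = xform.apply(record);

        Map<?, ?> hoistedKey = (Map<?, ?>) transformed.key();
        System.out.println(hoistedKey.get("magic"));  // 42
    }
}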