Use of org.apache.kafka.common.header.internals.RecordHeaders in project beam by apache.
Class NestedPayloadKafkaTableTest, method readRecord:
private static KafkaRecord<byte[], byte[]> readRecord(byte[] key, byte[] value, long timestamp, ListMultimap<String, byte[]> attributes) {
    Headers headers = new RecordHeaders();
    attributes.forEach(headers::add);
    return new KafkaRecord<>(TOPIC, 0, 0, timestamp, KafkaTimestampType.LOG_APPEND_TIME, headers, key, value);
}
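The pattern above, collecting attributes in a Guava ListMultimap and pouring them into a RecordHeaders via attributes.forEach(headers::add), works because Kafka headers are multi-valued. Below is a minimal, self-contained sketch of that pattern, assuming only the Kafka clients and Guava libraries; the attribute names are made up for illustration.

import java.nio.charset.StandardCharsets;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ListMultimap;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeaders;

public class RecordHeadersFromMultimapSketch {
    public static void main(String[] args) {
        // A ListMultimap allows repeated keys, which maps directly onto Kafka headers:
        // the same header key may appear on a record more than once.
        ListMultimap<String, byte[]> attributes = ImmutableListMultimap.of(
            "trace-id", "abc123".getBytes(StandardCharsets.UTF_8),
            "trace-id", "def456".getBytes(StandardCharsets.UTF_8),
            "source", "unit-test".getBytes(StandardCharsets.UTF_8));

        // Same population step as in readRecord above.
        Headers headers = new RecordHeaders();
        attributes.forEach(headers::add);

        // headers(key) returns every value stored under that key, in insertion order.
        for (Header h : headers.headers("trace-id")) {
            System.out.println(h.key() + " -> " + new String(h.value(), StandardCharsets.UTF_8));
        }
    }
}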
Use of org.apache.kafka.common.header.internals.RecordHeaders in project druid by druid-io.
Class KafkaInputFormatTest, method testTimestampFromHeader:
@Test
public void testTimestampFromHeader() throws IOException {
    Iterable<Header> sample_header_with_ts = Iterables.unmodifiableIterable(Iterables.concat(SAMPLE_HEADERS, ImmutableList.of(new Header() {
        @Override
        public String key() {
            return "headerTs";
        }

        @Override
        public byte[] value() {
            return "2021-06-24".getBytes(StandardCharsets.UTF_8);
        }
    })));
    final byte[] key = StringUtils.toUtf8("{\n" + " \"key\": \"sampleKey\"\n" + "}");
    final byte[] payload = StringUtils.toUtf8("{\n" + " \"timestamp\": \"2021-06-24\",\n" + " \"bar\": null,\n" + " \"foo\": \"x\",\n" + " \"baz\": 4,\n" + " \"o\": {\n" + " \"mg\": 1\n" + " }\n" + "}");
    Headers headers = new RecordHeaders(sample_header_with_ts);
    inputEntity = new KafkaRecordEntity(new ConsumerRecord<byte[], byte[]>("sample", 0, 0, timestamp, null, null, 0, 0, key, payload, headers));
    final InputEntityReader reader = format.createReader(
        new InputRowSchema(
            new TimestampSpec("kafka.newheader.headerTs", "iso", null),
            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("bar", "foo", "kafka.newheader.encoding", "kafka.newheader.kafkapkc"))),
            ColumnsFilter.all()),
        inputEntity,
        null);
    final int numExpectedIterations = 1;
    try (CloseableIterator<InputRow> iterator = reader.read()) {
        int numActualIterations = 0;
        while (iterator.hasNext()) {
            final InputRow row = iterator.next();
            final MapBasedInputRow mrow = (MapBasedInputRow) row;
            // Payload verifications
            Assert.assertEquals(DateTimes.of("2021-06-24"), row.getTimestamp());
            Assert.assertEquals("x", Iterables.getOnlyElement(row.getDimension("foo")));
            Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("baz")));
            Assert.assertEquals("4", Iterables.getOnlyElement(row.getDimension("root_baz")));
            Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("path_omg")));
            Assert.assertEquals("1", Iterables.getOnlyElement(row.getDimension("jq_omg")));
            // Header verification
            Assert.assertEquals("application/json", Iterables.getOnlyElement(row.getDimension("kafka.newheader.encoding")));
            Assert.assertEquals("pkc-bar", Iterables.getOnlyElement(row.getDimension("kafka.newheader.kafkapkc")));
            Assert.assertEquals(String.valueOf(DateTimes.of("2021-06-24").getMillis()), Iterables.getOnlyElement(row.getDimension("kafka.newts.timestamp")));
            Assert.assertEquals("2021-06-24", Iterables.getOnlyElement(row.getDimension("kafka.newheader.headerTs")));
            Assert.assertEquals("2021-06-24", Iterables.getOnlyElement(row.getDimension("timestamp")));
            // Key verification
            Assert.assertEquals("sampleKey", Iterables.getOnlyElement(row.getDimension("kafka.newkey.key")));
            Assert.assertTrue(row.getDimension("root_baz2").isEmpty());
            Assert.assertTrue(row.getDimension("path_omg2").isEmpty());
            Assert.assertTrue(row.getDimension("jq_omg2").isEmpty());
            numActualIterations++;
        }
        Assert.assertEquals(numExpectedIterations, numActualIterations);
    }
}
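The header carrying the timestamp does not have to be an anonymous Header implementation: the Kafka clients ship a concrete RecordHeader class that can be dropped into a RecordHeaders directly. A minimal sketch, independent of the Druid test harness; the "headerTs" key simply mirrors the test above, which later surfaces it as the "kafka.newheader.headerTs" dimension.

import java.nio.charset.StandardCharsets;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.header.internals.RecordHeaders;

public class TimestampHeaderSketch {
    public static void main(String[] args) {
        // RecordHeader is the stock Header implementation, so no anonymous class is needed.
        Header headerTs = new RecordHeader("headerTs", "2021-06-24".getBytes(StandardCharsets.UTF_8));
        Headers headers = new RecordHeaders(new Header[] { headerTs });

        // lastHeader(key) returns the most recently added header for that key, or null if absent.
        System.out.println(new String(headers.lastHeader("headerTs").value(), StandardCharsets.UTF_8));
    }
}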
Use of org.apache.kafka.common.header.internals.RecordHeaders in project druid by druid-io.
Class KafkaStringHeaderFormatTest, method testIllegalHeaderCharacter:
@Test
public void testIllegalHeaderCharacter() {
    Iterable<Header> header = ImmutableList.of(new Header() {
        @Override
        public String key() {
            return "encoding";
        }

        @Override
        public byte[] value() {
            return "€pplic€tion/json".getBytes(StandardCharsets.US_ASCII);
        }
    }, new Header() {
        @Override
        public String key() {
            return "kafkapkc";
        }

        @Override
        public byte[] value() {
            return "pkc-bar".getBytes(StandardCharsets.US_ASCII);
        }
    });
    String headerLabelPrefix = "test.kafka.header.";
    Headers headers = new RecordHeaders(header);
    inputEntity = new KafkaRecordEntity(new ConsumerRecord<byte[], byte[]>("sample", 0, 0, timestamp, null, null, 0, 0, null, "sampleValue".getBytes(StandardCharsets.UTF_8), headers));
    List<Pair<String, Object>> expectedResults = Arrays.asList(
        Pair.of("test.kafka.header.encoding", "?pplic?tion/json"),
        Pair.of("test.kafka.header.kafkapkc", "pkc-bar"));
    KafkaHeaderFormat headerInput = new KafkaStringHeaderFormat("US-ASCII");
    KafkaHeaderReader headerParser = headerInput.createReader(inputEntity.getRecord().headers(), headerLabelPrefix);
    List<Pair<String, Object>> rows = headerParser.read();
    Assert.assertEquals(expectedResults, rows);
}
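The expected value "?pplic?tion/json" comes from Java's charset handling rather than anything Kafka-specific: '€' has no US-ASCII mapping, and String.getBytes() substitutes the charset's replacement byte ('?') for each unmappable character. A standalone sketch of just that step, using only java.nio.charset:

import java.nio.charset.StandardCharsets;

public class LossyHeaderEncodingSketch {
    public static void main(String[] args) {
        // Encoding '€' with US-ASCII silently replaces it with '?', which is exactly
        // the transformation the expectedResults in the test account for.
        byte[] raw = "€pplic€tion/json".getBytes(StandardCharsets.US_ASCII);
        System.out.println(new String(raw, StandardCharsets.US_ASCII)); // ?pplic?tion/json
    }
}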
Use of org.apache.kafka.common.header.internals.RecordHeaders in project apache-kafka-on-k8s by banzaicloud.
Class ConsumerRecordTest, method testOldConstructor:
@Test
@SuppressWarnings("deprecation")
public void testOldConstructor() {
    String topic = "topic";
    int partition = 0;
    long offset = 23;
    String key = "key";
    String value = "value";
    ConsumerRecord<String, String> record = new ConsumerRecord<>(topic, partition, offset, key, value);
    assertEquals(topic, record.topic());
    assertEquals(partition, record.partition());
    assertEquals(offset, record.offset());
    assertEquals(key, record.key());
    assertEquals(value, record.value());
    assertEquals(TimestampType.NO_TIMESTAMP_TYPE, record.timestampType());
    assertEquals(ConsumerRecord.NO_TIMESTAMP, record.timestamp());
    assertEquals(ConsumerRecord.NULL_CHECKSUM, record.checksum());
    assertEquals(ConsumerRecord.NULL_SIZE, record.serializedKeySize());
    assertEquals(ConsumerRecord.NULL_SIZE, record.serializedValueSize());
    assertEquals(new RecordHeaders(), record.headers());
}
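The final assertion works because RecordHeaders compares by content: a record built through the deprecated constructor carries an empty Headers instance that is equal to a freshly created new RecordHeaders(). A minimal sketch of that equality behavior, intended as illustration of the Kafka clients classes rather than a normative spec:

import java.nio.charset.StandardCharsets;
import org.apache.kafka.common.header.internals.RecordHeaders;

public class RecordHeadersEqualitySketch {
    public static void main(String[] args) {
        RecordHeaders a = new RecordHeaders();
        RecordHeaders b = new RecordHeaders();
        // Two empty instances compare equal, which is what the headers() assertion depends on.
        System.out.println(a.equals(b)); // true

        // Equality is content-based, so adding a header to one side breaks it.
        a.add("content-type", "application/json".getBytes(StandardCharsets.UTF_8));
        System.out.println(a.equals(b)); // false
    }
}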
Use of org.apache.kafka.common.header.internals.RecordHeaders in project apache-kafka-on-k8s by banzaicloud.
Class MemoryRecordsTest, method testHasRoomForMethodWithHeaders:
@Test
public void testHasRoomForMethodWithHeaders() {
    if (magic >= RecordBatch.MAGIC_VALUE_V2) {
        MemoryRecordsBuilder builder = MemoryRecords.builder(ByteBuffer.allocate(100), magic, compression, TimestampType.CREATE_TIME, 0L);
        RecordHeaders headers = new RecordHeaders();
        headers.add("hello", "world.world".getBytes());
        headers.add("hello", "world.world".getBytes());
        headers.add("hello", "world.world".getBytes());
        headers.add("hello", "world.world".getBytes());
        headers.add("hello", "world.world".getBytes());
        builder.append(logAppendTime, "key".getBytes(), "value".getBytes());
        // Make sure that hasRoomFor accounts for header sizes by letting a record without headers pass,
        // but stopping a record with a large number of headers.
        assertTrue(builder.hasRoomFor(logAppendTime, "key".getBytes(), "value".getBytes(), Record.EMPTY_HEADERS));
        assertFalse(builder.hasRoomFor(logAppendTime, "key".getBytes(), "value".getBytes(), headers.toArray()));
    }
}
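The test adds the same key five times because Kafka headers are an ordered list, not a map; every entry contributes to the serialized size that hasRoomFor has to account for. A small sketch of that list behavior, assuming only the Kafka clients library:

import java.nio.charset.StandardCharsets;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeaders;

public class RepeatedHeaderKeySketch {
    public static void main(String[] args) {
        RecordHeaders headers = new RecordHeaders();
        // Adding the same key repeatedly keeps every entry, just like in the test above.
        for (int i = 0; i < 5; i++) {
            headers.add("hello", "world.world".getBytes(StandardCharsets.UTF_8));
        }

        // toArray() is the form the hasRoomFor(...) call in the test consumes.
        Header[] asArray = headers.toArray();
        System.out.println(asArray.length); // 5

        // headers(key) iterates all values stored under the key, in insertion order.
        int count = 0;
        for (Header ignored : headers.headers("hello")) {
            count++;
        }
        System.out.println(count); // 5
    }
}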