Search in sources:

Example 56 with RecordHeaders

Use of org.apache.kafka.common.header.internals.RecordHeaders in the Apache Kafka project.

From the class BufferValueTest, method shouldSerializeOld.

@Test
public void shouldSerializeOld() {
    // The serialized record context forms a prefix of the full buffer value encoding.
    final ProcessorRecordContext recordContext = new ProcessorRecordContext(0L, 0L, 0, "topic", new RecordHeaders());
    final byte[] contextBytes = recordContext.serialize();
    final byte[] oldValue = { (byte) 5 };

    final byte[] serialized = new BufferValue(null, oldValue, null, recordContext).serialize(0).array();

    // Drop the context prefix; only the value section is under test.
    final byte[] valueSection = Arrays.copyOfRange(serialized, contextBytes.length, serialized.length);

    // Expected layout: prior absent (-1), old = length 1 + payload, new absent (-1).
    final ByteBuffer expected = ByteBuffer.allocate(Integer.BYTES * 3 + 1)
        .putInt(-1)
        .putInt(1)
        .put(oldValue)
        .putInt(-1);
    assertThat(valueSection, is(expected.array()));
}
Also used : RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) ProcessorRecordContext(org.apache.kafka.streams.processor.internals.ProcessorRecordContext) Test(org.junit.Test)

Example 57 with RecordHeaders

Use of org.apache.kafka.common.header.internals.RecordHeaders in the Apache Kafka project.

From the class BufferValueTest, method shouldDeserializeNew.

@Test
public void shouldDeserializeNew() {
    // Build the expected wire form by hand and verify round-trip deserialization.
    final ProcessorRecordContext recordContext = new ProcessorRecordContext(0L, 0L, 0, "topic", new RecordHeaders());
    final byte[] contextBytes = recordContext.serialize();
    final byte[] newValue = { (byte) 5 };

    // Wire layout: context, prior absent (-1), old absent (-1), new = length 1 + payload.
    final int capacity = contextBytes.length + Integer.BYTES * 3 + newValue.length;
    final ByteBuffer wireForm = ByteBuffer.allocate(capacity)
        .put(contextBytes)
        .putInt(-1)
        .putInt(-1)
        .putInt(1)
        .put(newValue);
    // Reset to the start so deserialize reads from the beginning.
    wireForm.position(0);

    assertThat(BufferValue.deserialize(wireForm), is(new BufferValue(null, null, newValue, recordContext)));
}
Also used : RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) ProcessorRecordContext(org.apache.kafka.streams.processor.internals.ProcessorRecordContext) ByteBuffer(java.nio.ByteBuffer) Test(org.junit.Test)

Example 58 with RecordHeaders

Use of org.apache.kafka.common.header.internals.RecordHeaders in the Apache Kafka project.

From the class BufferValueTest, method shouldSerializeNew.

@Test
public void shouldSerializeNew() {
    // The serialized record context forms a prefix of the full buffer value encoding.
    final ProcessorRecordContext recordContext = new ProcessorRecordContext(0L, 0L, 0, "topic", new RecordHeaders());
    final byte[] contextBytes = recordContext.serialize();
    final byte[] newValue = { (byte) 5 };

    final byte[] serialized = new BufferValue(null, null, newValue, recordContext).serialize(0).array();

    // Drop the context prefix; only the value section is under test.
    final byte[] valueSection = Arrays.copyOfRange(serialized, contextBytes.length, serialized.length);

    // Expected layout: prior absent (-1), old absent (-1), new = length 1 + payload.
    final ByteBuffer expected = ByteBuffer.allocate(Integer.BYTES * 3 + 1)
        .putInt(-1)
        .putInt(-1)
        .putInt(1)
        .put(newValue);
    assertThat(valueSection, is(expected.array()));
}
Also used : RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) ProcessorRecordContext(org.apache.kafka.streams.processor.internals.ProcessorRecordContext) Test(org.junit.Test)

Example 59 with RecordHeaders

Use of org.apache.kafka.common.header.internals.RecordHeaders in the Apache Kafka project.

From the class BufferValueTest, method shouldSerializeNulls.

@Test
public void shouldSerializeNulls() {
    // All three value slots absent: each is encoded as a -1 length sentinel.
    final ProcessorRecordContext recordContext = new ProcessorRecordContext(0L, 0L, 0, "topic", new RecordHeaders());
    final byte[] contextBytes = recordContext.serialize();

    final byte[] serialized = new BufferValue(null, null, null, recordContext).serialize(0).array();

    // Drop the context prefix; only the value section is under test.
    final byte[] valueSection = Arrays.copyOfRange(serialized, contextBytes.length, serialized.length);

    final ByteBuffer expected = ByteBuffer.allocate(Integer.BYTES * 3)
        .putInt(-1)
        .putInt(-1)
        .putInt(-1);
    assertThat(valueSection, is(expected.array()));
}
Also used : RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) ProcessorRecordContext(org.apache.kafka.streams.processor.internals.ProcessorRecordContext) Test(org.junit.Test)

Example 60 with RecordHeaders

Use of org.apache.kafka.common.header.internals.RecordHeaders in the Apache Kafka project.

From the class BufferValueTest, method shouldSerializePrior.

@Test
public void shouldSerializePrior() {
    // The serialized record context forms a prefix of the full buffer value encoding.
    final ProcessorRecordContext recordContext = new ProcessorRecordContext(0L, 0L, 0, "topic", new RecordHeaders());
    final byte[] contextBytes = recordContext.serialize();
    final byte[] priorValue = { (byte) 5 };

    final byte[] serialized = new BufferValue(priorValue, null, null, recordContext).serialize(0).array();

    // Drop the context prefix; only the value section is under test.
    final byte[] valueSection = Arrays.copyOfRange(serialized, contextBytes.length, serialized.length);

    // Expected layout: prior = length 1 + payload, old absent (-1), new absent (-1).
    final ByteBuffer expected = ByteBuffer.allocate(Integer.BYTES * 3 + 1)
        .putInt(1)
        .put(priorValue)
        .putInt(-1)
        .putInt(-1);
    assertThat(valueSection, is(expected.array()));
}
Also used : RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) ProcessorRecordContext(org.apache.kafka.streams.processor.internals.ProcessorRecordContext) Test(org.junit.Test)

Aggregations

RecordHeaders (org.apache.kafka.common.header.internals.RecordHeaders)149 Test (org.junit.Test)107 ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord)49 ProcessorRecordContext (org.apache.kafka.streams.processor.internals.ProcessorRecordContext)41 Headers (org.apache.kafka.common.header.Headers)33 RecordHeader (org.apache.kafka.common.header.internals.RecordHeader)24 PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest)24 TopicPartition (org.apache.kafka.common.TopicPartition)22 Position (org.apache.kafka.streams.query.Position)17 ArrayList (java.util.ArrayList)12 HashMap (java.util.HashMap)12 ByteBuffer (java.nio.ByteBuffer)11 Struct (org.apache.kafka.connect.data.Struct)11 Test (org.junit.jupiter.api.Test)11 Header (org.apache.kafka.common.header.Header)10 LinkedHashMap (java.util.LinkedHashMap)9 Bytes (org.apache.kafka.common.utils.Bytes)9 StreamsException (org.apache.kafka.streams.errors.StreamsException)9 ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord)8 Metrics (org.apache.kafka.common.metrics.Metrics)8