
Example 1 with ArrayOf

Use of org.apache.kafka.common.protocol.types.ArrayOf in project kafka by apache.

From the class Protocol, method schemaToBnfHtml:

private static void schemaToBnfHtml(Schema schema, StringBuilder b, int indentSize) {
    final String indentStr = indentString(indentSize);
    final Map<String, Type> subTypes = new LinkedHashMap<>();
    // Top level fields
    for (Field field : schema.fields()) {
        if (field.type instanceof ArrayOf) {
            b.append("[");
            b.append(field.name);
            b.append("] ");
            Type innerType = ((ArrayOf) field.type).type();
            if (!subTypes.containsKey(field.name))
                subTypes.put(field.name, innerType);
        } else {
            b.append(field.name);
            b.append(" ");
            if (!subTypes.containsKey(field.name))
                subTypes.put(field.name, field.type);
        }
    }
    b.append("\n");
    // Sub Types/Schemas
    for (Map.Entry<String, Type> entry : subTypes.entrySet()) {
        if (entry.getValue() instanceof Schema) {
            // Complex Schema Type
            b.append(indentStr);
            b.append(entry.getKey());
            b.append(" => ");
            schemaToBnfHtml((Schema) entry.getValue(), b, indentSize + 2);
        } else {
            // Standard Field Type
            b.append(indentStr);
            b.append(entry.getKey());
            b.append(" => ");
            b.append(entry.getValue());
            b.append("\n");
        }
    }
}
Also used : Field(org.apache.kafka.common.protocol.types.Field) ArrayOf(org.apache.kafka.common.protocol.types.ArrayOf) Type(org.apache.kafka.common.protocol.types.Type) Schema(org.apache.kafka.common.protocol.types.Schema) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map)
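
For reference, a minimal sketch of the kind of schema this method walks and the BNF-style text it appends. The schema and field names below are illustrative, not taken from Kafka's protocol definitions, and the call itself is left commented out because schemaToBnfHtml is private to Protocol:

// Hypothetical nested schema: an array field whose element type is itself a Schema,
// plus a plain scalar field.
Schema partitionSchema = new Schema(
        new Field("partition", Type.INT32),
        new Field("error_code", Type.INT16));
Schema responseSchema = new Schema(
        new Field("responses", new ArrayOf(partitionSchema)),
        new Field("throttle_time_ms", Type.INT32));

StringBuilder b = new StringBuilder();
// schemaToBnfHtml(responseSchema, b, 2);
//
// Expected shape of the appended text:
// [responses] throttle_time_ms
//   responses => partition error_code
//     partition => INT32
//     error_code => INT16
//   throttle_time_ms => INT32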

Example 2 with ArrayOf

Use of org.apache.kafka.common.protocol.types.ArrayOf in project kafka by apache.

From the class ConsumerProtocolTest, method deserializeNewSubscriptionVersion:

@Test
public void deserializeNewSubscriptionVersion() {
    // verify that a new version which adds a field is still parseable
    short version = 100;
    Schema subscriptionSchemaV100 = new Schema(
            new Field(ConsumerProtocol.TOPICS_KEY_NAME, new ArrayOf(Type.STRING)),
            new Field(ConsumerProtocol.USER_DATA_KEY_NAME, Type.BYTES),
            new Field("foo", Type.STRING));
    Struct subscriptionV100 = new Struct(subscriptionSchemaV100);
    subscriptionV100.set(ConsumerProtocol.TOPICS_KEY_NAME, new Object[] { "topic" });
    subscriptionV100.set(ConsumerProtocol.USER_DATA_KEY_NAME, ByteBuffer.wrap(new byte[0]));
    subscriptionV100.set("foo", "bar");
    Struct headerV100 = new Struct(ConsumerProtocol.CONSUMER_PROTOCOL_HEADER_SCHEMA);
    headerV100.set(ConsumerProtocol.VERSION_KEY_NAME, version);
    ByteBuffer buffer = ByteBuffer.allocate(subscriptionV100.sizeOf() + headerV100.sizeOf());
    headerV100.writeTo(buffer);
    subscriptionV100.writeTo(buffer);
    buffer.flip();
    Subscription subscription = ConsumerProtocol.deserializeSubscription(buffer);
    assertEquals(Arrays.asList("topic"), subscription.topics());
}
Also used : Field(org.apache.kafka.common.protocol.types.Field) ArrayOf(org.apache.kafka.common.protocol.types.ArrayOf) Schema(org.apache.kafka.common.protocol.types.Schema) Subscription(org.apache.kafka.clients.consumer.internals.PartitionAssignor.Subscription) ByteBuffer(java.nio.ByteBuffer) Struct(org.apache.kafka.common.protocol.types.Struct) Test(org.junit.Test)
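
The forward-compatibility behaviour this test exercises can be sketched directly with ArrayOf: a buffer written with a newer schema that carries an extra trailing field can still be read with the older schema, which stops after the fields it knows about and leaves the remaining bytes untouched. A minimal sketch, using illustrative schema and field names rather than the real ConsumerProtocol constants:

// "Old" schema: just an array of strings. The "new" schema adds a trailing field.
Schema v0 = new Schema(new Field("topics", new ArrayOf(Type.STRING)));
Schema v1 = new Schema(
        new Field("topics", new ArrayOf(Type.STRING)),
        new Field("extra", Type.STRING));

Struct newStruct = new Struct(v1);
newStruct.set("topics", new Object[] { "topic" });
newStruct.set("extra", "bar");

ByteBuffer buffer = ByteBuffer.allocate(newStruct.sizeOf());
newStruct.writeTo(buffer);
buffer.flip();

// Reading with the old schema succeeds; the bytes for "extra" simply remain unread.
Struct oldStruct = v0.read(buffer);
Object[] topics = oldStruct.getArray("topics");  // { "topic" }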

Example 3 with ArrayOf

Use of org.apache.kafka.common.protocol.types.ArrayOf in project kafka by apache.

From the class ConsumerProtocolTest, method deserializeNewAssignmentVersion:

@Test
public void deserializeNewAssignmentVersion() {
    // verify that a new version which adds a field is still parseable
    short version = 100;
    Schema assignmentSchemaV100 = new Schema(
            new Field(ConsumerProtocol.TOPIC_PARTITIONS_KEY_NAME, new ArrayOf(ConsumerProtocol.TOPIC_ASSIGNMENT_V0)),
            new Field(ConsumerProtocol.USER_DATA_KEY_NAME, Type.BYTES),
            new Field("foo", Type.STRING));
    Struct assignmentV100 = new Struct(assignmentSchemaV100);
    assignmentV100.set(ConsumerProtocol.TOPIC_PARTITIONS_KEY_NAME, new Object[] {
            new Struct(ConsumerProtocol.TOPIC_ASSIGNMENT_V0)
                    .set(ConsumerProtocol.TOPIC_KEY_NAME, "foo")
                    .set(ConsumerProtocol.PARTITIONS_KEY_NAME, new Object[] { 1 }) });
    assignmentV100.set(ConsumerProtocol.USER_DATA_KEY_NAME, ByteBuffer.wrap(new byte[0]));
    assignmentV100.set("foo", "bar");
    Struct headerV100 = new Struct(ConsumerProtocol.CONSUMER_PROTOCOL_HEADER_SCHEMA);
    headerV100.set(ConsumerProtocol.VERSION_KEY_NAME, version);
    ByteBuffer buffer = ByteBuffer.allocate(assignmentV100.sizeOf() + headerV100.sizeOf());
    headerV100.writeTo(buffer);
    assignmentV100.writeTo(buffer);
    buffer.flip();
    PartitionAssignor.Assignment assignment = ConsumerProtocol.deserializeAssignment(buffer);
    assertEquals(toSet(Arrays.asList(new TopicPartition("foo", 1))), toSet(assignment.partitions()));
}
Also used : Field(org.apache.kafka.common.protocol.types.Field) ArrayOf(org.apache.kafka.common.protocol.types.ArrayOf) TopicPartition(org.apache.kafka.common.TopicPartition) Schema(org.apache.kafka.common.protocol.types.Schema) ByteBuffer(java.nio.ByteBuffer) Struct(org.apache.kafka.common.protocol.types.Struct) Test(org.junit.Test)
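
The same idea applies when the array element type is itself a Schema, as with TOPIC_ASSIGNMENT_V0 above: each element of the ArrayOf is a Struct that follows the nested schema, and reading the array back yields an Object[] of Structs. A minimal round-trip sketch, again with illustrative names rather than the real ConsumerProtocol constants:

// Array of structs: each element follows its own nested schema.
Schema topicAssignment = new Schema(
        new Field("topic", Type.STRING),
        new Field("partitions", new ArrayOf(Type.INT32)));
Schema assignmentSchema = new Schema(
        new Field("topic_partitions", new ArrayOf(topicAssignment)));

Struct written = new Struct(assignmentSchema);
written.set("topic_partitions", new Object[] {
        new Struct(topicAssignment).set("topic", "foo").set("partitions", new Object[] { 1 }) });

ByteBuffer buffer = ByteBuffer.allocate(written.sizeOf());
written.writeTo(buffer);
buffer.flip();

// Round-trip: the array comes back as an Object[] whose elements are Structs.
Struct read = assignmentSchema.read(buffer);
for (Object item : read.getArray("topic_partitions")) {
    Struct entry = (Struct) item;
    String topic = entry.getString("topic");             // "foo"
    Object[] partitions = entry.getArray("partitions");  // { 1 }
}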

Aggregations

ArrayOf (org.apache.kafka.common.protocol.types.ArrayOf): 3 usages
Field (org.apache.kafka.common.protocol.types.Field): 3 usages
Schema (org.apache.kafka.common.protocol.types.Schema): 3 usages
ByteBuffer (java.nio.ByteBuffer): 2 usages
Struct (org.apache.kafka.common.protocol.types.Struct): 2 usages
Test (org.junit.Test): 2 usages
LinkedHashMap (java.util.LinkedHashMap): 1 usage
Map (java.util.Map): 1 usage
Subscription (org.apache.kafka.clients.consumer.internals.PartitionAssignor.Subscription): 1 usage
TopicPartition (org.apache.kafka.common.TopicPartition): 1 usage
Type (org.apache.kafka.common.protocol.types.Type): 1 usage