Example usage of io.confluent.kafkarest.entities.RegisteredSchema in the kafka-rest project by confluentinc.
Source: class SchemaManagerImplTest, method getSchema_avro_rawSchema.
@Test
public void getSchema_avro_rawSchema() throws Exception {
  // Ask the manager to register/resolve the key schema from a raw Avro schema string;
  // no subject, strategy, id, or version is supplied, so defaults apply.
  RegisteredSchema actual =
      schemaManager.getSchema(
          TOPIC_NAME,
          /* format= */ Optional.of(EmbeddedFormat.AVRO),
          /* subject= */ Optional.empty(),
          /* subjectNameStrategy= */ Optional.empty(),
          /* schemaId= */ Optional.empty(),
          /* schemaVersion= */ Optional.empty(),
          /* rawSchema= */ Optional.of("{\"type\": \"int\"}"),
          /* isKey= */ true);

  // Fetch what was actually registered under the key subject and verify the
  // returned RegisteredSchema carries the matching id, version, and schema.
  ParsedSchema registered = schemaRegistryClient.getSchemaById(actual.getSchemaId());
  int expectedId = schemaRegistryClient.getId(KEY_SUBJECT, registered);
  int expectedVersion = schemaRegistryClient.getVersion(KEY_SUBJECT, registered);
  assertEquals(
      RegisteredSchema.create(KEY_SUBJECT, expectedId, expectedVersion, registered), actual);
}
Example usage of io.confluent.kafkarest.entities.RegisteredSchema in the kafka-rest project by confluentinc.
Source: class SchemaManagerImplTest, method getSchema_avro_schemaId.
@Test
public void getSchema_avro_schemaId() throws Exception {
  // Pre-register an Avro schema so we can look it up by its id.
  ParsedSchema registered = new AvroSchema("{\"type\": \"int\"}");
  int id = schemaRegistryClient.register(KEY_SUBJECT, registered);
  int version = schemaRegistryClient.getVersion(KEY_SUBJECT, registered);

  // Resolve the schema purely by id — format, subject, strategy, version,
  // and raw schema are all left empty.
  RegisteredSchema actual =
      schemaManager.getSchema(
          TOPIC_NAME,
          /* format= */ Optional.empty(),
          /* subject= */ Optional.empty(),
          /* subjectNameStrategy= */ Optional.empty(),
          /* schemaId= */ Optional.of(id),
          /* schemaVersion= */ Optional.empty(),
          /* rawSchema= */ Optional.empty(),
          /* isKey= */ true);

  assertEquals(RegisteredSchema.create(KEY_SUBJECT, id, version, registered), actual);
}
Example usage of io.confluent.kafkarest.entities.RegisteredSchema in the kafka-rest project by confluentinc.
Source: class TopicsResourceAvroProduceTest, method produceToTopic.
/**
 * Stubs the schema manager, record serializer, and produce controller for an Avro produce
 * request, then POSTs {@code request} to the topic endpoint and returns the raw response.
 * One produce result from {@code results} is wired up per record in the request.
 */
private Response produceToTopic(ProduceRequest request, List<RecordMetadata> results) {
  RegisteredSchema keySchema =
      RegisteredSchema.create(
          TOPIC_NAME + "key", /* schemaId= */ 1, /* schemaVersion= */ 1, KEY_SCHEMA);
  RegisteredSchema valueSchema =
      RegisteredSchema.create(
          TOPIC_NAME + "value", /* schemaId= */ 2, /* schemaVersion= */ 1, VALUE_SCHEMA);

  // Schema lookups by raw schema string (key and value).
  expect(
          schemaManager.getSchema(
              /* topicName= */ TOPIC_NAME,
              /* format= */ Optional.of(EmbeddedFormat.AVRO),
              /* subject= */ Optional.empty(),
              /* subjectNameStrategy= */ Optional.empty(),
              /* schemaId= */ Optional.empty(),
              /* schemaVersion= */ Optional.empty(),
              /* rawSchema= */ Optional.of(RAW_KEY_SCHEMA),
              /* isKey= */ true))
      .andStubReturn(keySchema);
  expect(
          schemaManager.getSchema(
              /* topicName= */ TOPIC_NAME,
              /* format= */ Optional.of(EmbeddedFormat.AVRO),
              /* subject= */ Optional.empty(),
              /* subjectNameStrategy= */ Optional.empty(),
              /* schemaId= */ Optional.empty(),
              /* schemaVersion= */ Optional.empty(),
              /* rawSchema= */ Optional.of(RAW_VALUE_SCHEMA),
              /* isKey= */ false))
      .andStubReturn(valueSchema);

  // Schema lookups by id (key id 1, value id 2).
  expect(
          schemaManager.getSchema(
              /* topicName= */ TOPIC_NAME,
              /* format= */ Optional.empty(),
              /* subject= */ Optional.empty(),
              /* subjectNameStrategy= */ Optional.empty(),
              /* schemaId= */ Optional.of(1),
              /* schemaVersion= */ Optional.empty(),
              /* rawSchema= */ Optional.empty(),
              /* isKey= */ true))
      .andStubReturn(keySchema);
  expect(
          schemaManager.getSchema(
              /* topicName= */ TOPIC_NAME,
              /* format= */ Optional.empty(),
              /* subject= */ Optional.empty(),
              /* subjectNameStrategy= */ Optional.empty(),
              /* schemaId= */ Optional.of(2),
              /* schemaVersion= */ Optional.empty(),
              /* rawSchema= */ Optional.empty(),
              /* isKey= */ false))
      .andStubReturn(valueSchema);

  // Per record: serialization of key/value plus the produce call returning results.get(i).
  for (int i = 0; i < request.getRecords().size(); i++) {
    ProduceRecord record = request.getRecords().get(i);
    ByteString keyBytes = ByteString.copyFromUtf8(String.valueOf(record.getKey()));
    ByteString valueBytes = ByteString.copyFromUtf8(String.valueOf(record.getValue()));
    expect(
            recordSerializer.serialize(
                EmbeddedFormat.AVRO,
                TOPIC_NAME,
                Optional.of(keySchema),
                record.getKey().orElse(NullNode.getInstance()),
                /* isKey= */ true))
        .andReturn(Optional.of(keyBytes));
    expect(
            recordSerializer.serialize(
                EmbeddedFormat.AVRO,
                TOPIC_NAME,
                Optional.of(valueSchema),
                record.getValue().orElse(NullNode.getInstance()),
                /* isKey= */ false))
        .andReturn(Optional.of(valueBytes));
    expect(
            produceController.produce(
                /* clusterId= */ eq(""),
                eq(TOPIC_NAME),
                eq(record.getPartition()),
                /* headers= */ eq(ImmutableMultimap.of()),
                eq(Optional.of(keyBytes)),
                eq(Optional.of(valueBytes)),
                /* timestamp= */ isA(Instant.class)))
        .andReturn(
            CompletableFuture.completedFuture(ProduceResult.fromRecordMetadata(results.get(i))));
  }

  replay(schemaManager, recordSerializer, produceController);
  Response response =
      request("/topics/" + TOPIC_NAME, Versions.KAFKA_V2_JSON)
          .post(Entity.entity(request, Versions.KAFKA_V2_JSON_AVRO));
  verify(schemaManager, recordSerializer, produceController);
  return response;
}
Aggregations