Use of example.avro.User in project spring-cloud-stream by spring-cloud: class AvroMessageConverterSerializationTests, method sourceWriteSameVersion.
@Test
public void sourceWriteSameVersion() throws Exception {
    User specificRecord = new User();
    specificRecord.setName("joe");
    Schema v1 = new Schema.Parser().parse(
            AvroMessageConverterSerializationTests.class.getClassLoader()
                    .getResourceAsStream("schemas/user.avsc"));
    GenericRecord genericRecord = new GenericData.Record(v1);
    genericRecord.put("name", "joe");
    SchemaRegistryClient client = new DefaultSchemaRegistryClient();
    AvroSchemaRegistryClientMessageConverter converter =
            new AvroSchemaRegistryClientMessageConverter(client, new NoOpCacheManager());
    converter.setSubjectNamingStrategy(new DefaultSubjectNamingStrategy());
    converter.setDynamicSchemaGenerationEnabled(false);
    converter.afterPropertiesSet();
    Message<?> specificMessage = converter.toMessage(specificRecord,
            new MutableMessageHeaders(Collections.<String, Object>emptyMap()),
            MimeTypeUtils.parseMimeType("application/*+avro"));
    SchemaReference specificRef = extractSchemaReference(MimeTypeUtils.parseMimeType(
            specificMessage.getHeaders().get("contentType").toString()));
    Message<?> genericMessage = converter.toMessage(genericRecord,
            new MutableMessageHeaders(Collections.<String, Object>emptyMap()),
            MimeTypeUtils.parseMimeType("application/*+avro"));
    SchemaReference genericRef = extractSchemaReference(MimeTypeUtils.parseMimeType(
            genericMessage.getHeaders().get("contentType").toString()));
    Assert.assertEquals(genericRef, specificRef);
    Assert.assertEquals(1, genericRef.getVersion());
}
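
Both assertions depend on the test class's private extractSchemaReference helper, which this excerpt does not show. A minimal sketch of what such a helper might look like, assuming the converter emits content types of the form application/vnd.<subject>.v<version>+avro (the form the second test's header uses); the actual implementation in AvroMessageConverterSerializationTests may differ:

// Hypothetical reconstruction; assumes java.util.regex.Pattern/Matcher
// and org.springframework.util.MimeType are imported.
private static final Pattern VERSIONED_SCHEMA =
        Pattern.compile("application/vnd\\.(.+)\\.v(\\d+)\\+avro");

private SchemaReference extractSchemaReference(MimeType mimeType) {
    Matcher matcher = VERSIONED_SCHEMA.matcher(mimeType.toString());
    if (matcher.find()) {
        // Subject and version are recovered from the content-type header.
        return new SchemaReference(matcher.group(1), Integer.parseInt(matcher.group(2)), "avro");
    }
    return null;
}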
Use of example.avro.User in project spring-cloud-stream by spring-cloud: class AvroMessageConverterSerializationTests, method testOriginalContentTypeHeaderOnly.
@Test
public void testOriginalContentTypeHeaderOnly() throws Exception {
    User specificRecord = new User();
    specificRecord.setName("joe");
    Schema v1 = new Schema.Parser().parse(
            AvroMessageConverterSerializationTests.class.getClassLoader()
                    .getResourceAsStream("schemas/user.avsc"));
    GenericRecord genericRecord = new GenericData.Record(v1);
    genericRecord.put("name", "joe");
    SchemaRegistryClient client = new DefaultSchemaRegistryClient();
    client.register("user", "avro", v1.toString());
    AvroSchemaRegistryClientMessageConverter converter =
            new AvroSchemaRegistryClientMessageConverter(client, new NoOpCacheManager());
    converter.setDynamicSchemaGenerationEnabled(false);
    converter.afterPropertiesSet();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DatumWriter<User> writer = new SpecificDatumWriter<>(User.class);
    Encoder encoder = EncoderFactory.get().binaryEncoder(baos, null);
    writer.write(specificRecord, encoder);
    encoder.flush();
    Message<byte[]> source = MessageBuilder.withPayload(baos.toByteArray())
            .setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.APPLICATION_OCTET_STREAM)
            .setHeader(BinderHeaders.BINDER_ORIGINAL_CONTENT_TYPE, "application/vnd.user.v1+avro")
            .build();
    Object converted = converter.fromMessage(source, User.class);
    Assert.assertNotNull(converted);
    Assert.assertEquals(specificRecord.getName().toString(), ((User) converted).getName().toString());
}
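
Both tests parse schemas/user.avsc from the test classpath; the file itself is not reproduced on this page. For orientation, an equivalent schema restricted to the single name field the tests exercise (an assumption: the real user.avsc may declare additional fields) could be built with Avro's SchemaBuilder:

// Assumption: only "name" is shown because it is the only field these tests touch.
Schema v1 = SchemaBuilder.record("User")
        .namespace("example.avro") // matches the generated example.avro.User class
        .fields()
        .requiredString("name")
        .endRecord();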
Use of example.avro.User in project flink by apache: class TestAvroConsumerConfluent, method main.
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    // The job requires seven parameters; the original usage message omitted
    // --output-subject even though getRequired("output-subject") is called below.
    if (parameterTool.getNumberOfParameters() < 7) {
        System.out.println("Missing parameters!\n"
                + "Usage: Kafka --input-topic <topic> --output-string-topic <topic> --output-avro-topic <topic> "
                + "--output-subject <subject> --bootstrap.servers <kafka brokers> "
                + "--schema-registry-url <confluent schema registry> --group.id <some id>");
        return;
    }
    Properties config = new Properties();
    config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
    config.setProperty("group.id", parameterTool.getRequired("group.id"));
    String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStreamSource<User> input = env.addSource(new FlinkKafkaConsumer<>(
            parameterTool.getRequired("input-topic"),
            ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
            config).setStartFromEarliest());
    SingleOutputStreamOperator<String> mapToString = input.map((MapFunction<User, String>) SpecificRecordBase::toString);
    KafkaSink<String> stringSink = KafkaSink.<String>builder()
            .setBootstrapServers(config.getProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))
            .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                    .setValueSerializationSchema(new SimpleStringSchema())
                    .setTopic(parameterTool.getRequired("output-string-topic"))
                    .build())
            .setKafkaProducerConfig(config)
            .build();
    mapToString.sinkTo(stringSink);
    KafkaSink<User> avroSink = KafkaSink.<User>builder()
            .setBootstrapServers(config.getProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))
            .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                    .setValueSerializationSchema(ConfluentRegistryAvroSerializationSchema.forSpecific(
                            User.class, parameterTool.getRequired("output-subject"), schemaRegistryUrl))
                    .setTopic(parameterTool.getRequired("output-avro-topic"))
                    .build())
            .build();
    input.sinkTo(avroSink);
    env.execute("Kafka Confluent Schema Registry AVRO Example");
}
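
FlinkKafkaConsumer is Flink's legacy SourceFunction-based consumer and is deprecated in recent releases, while the KafkaSink above already uses the unified connector API. A sketch of the symmetric KafkaSource replacement for the input side, assuming a Flink version whose flink-connector-kafka provides KafkaSource; the example's own code uses FlinkKafkaConsumer as shown above:

// Sketch only: would replace env.addSource(new FlinkKafkaConsumer<>(...)) above.
KafkaSource<User> source = KafkaSource.<User>builder()
        .setBootstrapServers(config.getProperty("bootstrap.servers"))
        .setTopics(parameterTool.getRequired("input-topic"))
        .setGroupId(config.getProperty("group.id"))
        .setStartingOffsets(OffsetsInitializer.earliest())
        .setValueOnlyDeserializer(
                ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl))
        .build();
DataStream<User> input = env.fromSource(source, WatermarkStrategy.noWatermarks(), "Confluent Avro Source");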