Example usage of org.apache.avro.generic.GenericRecordBuilder from the hazelcast project (by hazelcast): the test_topLevelFieldExtraction method of the SqlAvroTest class.
@Test
public void test_topLevelFieldExtraction() {
    String topicName = createRandomTopic();

    // Mapping with Avro key/value formats; the key exposes a single extracted field.
    String ddl = "CREATE MAPPING " + topicName
            + " (id INT EXTERNAL NAME \"__key.id\", name VARCHAR)"
            + " TYPE " + KafkaSqlConnector.TYPE_NAME
            + " OPTIONS ( "
            + "'" + OPTION_KEY_FORMAT + "'='" + AVRO_FORMAT + "'"
            + ", '" + OPTION_VALUE_FORMAT + "'='" + AVRO_FORMAT + "'"
            + ", 'bootstrap.servers'='" + kafkaTestSupport.getBrokerConnectionString() + "'"
            + ", 'schema.registry.url'='" + schemaRegistry.getURI() + "'"
            + ", 'auto.offset.reset'='earliest'"
            + ")";
    sqlService.execute(ddl);

    sqlService.execute("INSERT INTO " + topicName + " VALUES (1, 'Alice')");

    // Selecting __key/this should yield the full Avro records, not the extracted columns.
    Object expectedKey = new GenericRecordBuilder(intSchema("id")).set("id", 1).build();
    Object expectedValue = new GenericRecordBuilder(stringSchema("name")).set("name", "Alice").build();
    assertRowsEventuallyInAnyOrder(
            "SELECT __key, this FROM " + topicName,
            singletonList(new Row(expectedKey, expectedValue)));
}
Example usage of org.apache.avro.generic.GenericRecordBuilder from the hazelcast project (by hazelcast): the when_typeIsObject_then_allValuesAreAllowed method of the AvroUpsertTargetTest class.
@Test
@Parameters(method = "values")
public void when_typeIsObject_then_allValuesAreAllowed(Object value, Object expected) {
    // Single nullable union field that accepts every Avro primitive type.
    Schema unionSchema = SchemaBuilder.record("name").fields()
            .name("object").type().unionOf()
                .nullType()
                .and().booleanType()
                .and().intType()
                .and().longType()
                .and().floatType()
                .and().doubleType()
                .and().stringType()
            .endUnion().nullDefault()
            .endRecord();

    UpsertTarget upsertTarget = new AvroUpsertTarget(unionSchema.toString());
    UpsertInjector objectInjector = upsertTarget.createInjector("object", QueryDataType.OBJECT);

    // Inject the parameterized value and materialize the record.
    upsertTarget.init();
    objectInjector.set(value);
    Object actualRecord = upsertTarget.conclude();

    // The produced record must equal one built directly with the expected value.
    Object expectedRecord = new GenericRecordBuilder(unionSchema).set("object", expected).build();
    assertThat(actualRecord).isEqualTo(expectedRecord);
}
Example usage of org.apache.avro.generic.GenericRecordBuilder from the cdap project (by caskdata): the createAvroEvent method of the HiveExploreServiceStreamTest class.
/**
 * Serializes the given field values into a single Avro record using the Avro
 * binary encoding.
 *
 * @param schema the Avro record schema; values are assigned to fields in schema order
 * @param values one value per schema field, in the same order as {@code schema.getFields()}
 * @return the binary-encoded Avro record bytes
 * @throws IOException if encoding the record fails
 * @throws IllegalArgumentException if the number of values does not match the
 *         number of fields in the schema
 */
private byte[] createAvroEvent(org.apache.avro.Schema schema, Object... values) throws IOException {
    // Fail fast on a count mismatch: too few values would otherwise throw a raw
    // ArrayIndexOutOfBoundsException below, and extra values would be silently ignored.
    if (values.length != schema.getFields().size()) {
        throw new IllegalArgumentException("Expected " + schema.getFields().size()
            + " values, one per schema field, but got " + values.length);
    }
    GenericRecordBuilder builder = new GenericRecordBuilder(schema);
    int i = 0;
    for (org.apache.avro.Schema.Field field : schema.getFields()) {
        builder.set(field.name(), values[i]);
        i++;
    }
    GenericRecord record = builder.build();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
    writer.write(record, encoder);
    encoder.flush();
    out.close(); // no-op for ByteArrayOutputStream; kept for symmetry with other stream usage
    return out.toByteArray();
}
Example usage of org.apache.avro.generic.GenericRecordBuilder from the incubator-gobblin project (by apache): the testConfluentAvroDeserializerForSchemaEvolution method of the KafkaDeserializerExtractorTest class.
@Test
public void testConfluentAvroDeserializerForSchemaEvolution() throws IOException, RestClientException, SchemaRegistryException {
// Verifies that a record written with schema V1 is read back under evolved schema V2,
// with the new optional field filled in with its null default.
WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L);
mockWorkUnitState.setProp("schema.registry.url", TEST_URL);
// V1: a single required string field.
Schema schemaV1 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().endRecord();
// V2: same field plus an additional optional (nullable, null-default) string field.
Schema schemaV2 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().optionalString(TEST_FIELD_NAME2).endRecord();
GenericRecord testGenericRecord = new GenericRecordBuilder(schemaV1).set(TEST_FIELD_NAME, "testValue").build();
// The serializer and deserializer share one mocked registry; lookups by ID resolve to the writer schema V1.
SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schemaV1);
Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);
// Encode the V1 record the way a Kafka producer would (magic byte + schema ID + Avro payload).
ByteBuffer testGenericRecordByteBuffer = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));
// The extractor's reader schema comes from this registry and is the evolved V2.
KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
when(mockKafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME)).thenReturn(schemaV2);
KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState, Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);
// NOTE(review): Mockito's when() normally requires a mock/spy; kafkaDecoderExtractor is
// constructed with `new` above. Presumably this works only because getSchema() delegates to
// the mocked registry — confirm against KafkaDeserializerExtractor, or wrap in spy().
when(kafkaDecoderExtractor.getSchema()).thenReturn(schemaV2);
ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);
GenericRecord received = (GenericRecord) kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset);
// Schema evolution: the V1-encoded record surfaces the V2-only field with its null default.
Assert.assertEquals(received.toString(), "{\"testField\": \"testValue\", \"testField2\": null}");
}
Example usage of org.apache.avro.generic.GenericRecordBuilder from the parquet-mr project (by apache): the testMapWithUtf8Key method of the TestReadWrite class.
@Test
public void testMapWithUtf8Key() throws Exception {
    Schema mapSchema = new Schema.Parser().parse(Resources.getResource("map.avsc").openStream());
    Path outputPath = new Path(createTempFile().getPath());

    // Build a record whose map uses Avro Utf8 keys instead of java.lang.String.
    GenericData.Record written = new GenericRecordBuilder(mapSchema)
            .set("mymap", ImmutableMap.of(new Utf8("a"), 1, new Utf8("b"), 2))
            .build();
    try (ParquetWriter<GenericRecord> writer = AvroParquetWriter.<GenericRecord>builder(outputPath)
            .withSchema(mapSchema)
            .withConf(testConf)
            .build()) {
        writer.write(written);
    }

    // On read-back, the map keys must compare equal to their String form.
    try (AvroParquetReader<GenericRecord> reader = new AvroParquetReader<>(testConf, outputPath)) {
        GenericRecord readBack = reader.read();
        assertNotNull(readBack);
        assertEquals(ImmutableMap.of(str("a"), 1, str("b"), 2), readBack.get("mymap"));
    }
}
Aggregations