use of org.apache.avro.specific.SpecificDatumWriter in project avro-util by linkedin.
the class Generate19TestResources method main.
public static void main(String[] args) {
    if (args == null || args.length != 1) {
        System.err.println("exactly single argument required - output path. instead got " + Arrays.toString(args));
        System.exit(1);
    }
    Path outputRoot = Paths.get(args[0].trim()).toAbsolutePath();
    Path by19Root = outputRoot.resolve("by19");
    by19.RecordWithUnion outer = new by19.RecordWithUnion();
    outer.setF(new by19.InnerUnionRecord());
    outer.getF().setF(19);
    try {
        SpecificDatumWriter<by19.RecordWithUnion> writer = new SpecificDatumWriter<>(outer.getSchema());
        Path binaryRecordWithUnion = TestUtil.getNewFile(by19Root, "RecordWithUnion.binary");
        BinaryEncoder binaryEnc = EncoderFactory.get().binaryEncoder(Files.newOutputStream(binaryRecordWithUnion), null);
        Path jsonRecordWithUnion = TestUtil.getNewFile(by19Root, "RecordWithUnion.json");
        JsonEncoder jsonEnc = EncoderFactory.get().jsonEncoder(outer.getSchema(), Files.newOutputStream(jsonRecordWithUnion));
        writer.write(outer, binaryEnc);
        binaryEnc.flush();
        writer.write(outer, jsonEnc);
        jsonEnc.flush();
    } catch (Exception e) {
        System.err.println("failed to generate payloads");
        e.printStackTrace(System.err);
        System.exit(1);
    }
}
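For context, a minimal sketch of the matching read path with SpecificDatumReader. The readBack helper below is hypothetical (it is not part of Generate19TestResources), and it assumes the generated by19.RecordWithUnion class exposes the usual getClassSchema() accessor.

// Hypothetical helper (not in Generate19TestResources): read the payloads written above.
private static void readBack(Path binaryFile, Path jsonFile) throws IOException {
    SpecificDatumReader<by19.RecordWithUnion> reader = new SpecificDatumReader<>(by19.RecordWithUnion.class);
    try (InputStream in = Files.newInputStream(binaryFile)) {
        // The raw binary encoding carries no schema, so the reader uses the generated class's schema.
        by19.RecordWithUnion fromBinary = reader.read(null, DecoderFactory.get().binaryDecoder(in, null));
        System.out.println("binary payload: " + fromBinary);
    }
    try (InputStream in = Files.newInputStream(jsonFile)) {
        // The JSON encoding needs the writer schema to drive parsing.
        by19.RecordWithUnion fromJson = reader.read(null, DecoderFactory.get().jsonDecoder(by19.RecordWithUnion.getClassSchema(), in));
        System.out.println("json payload: " + fromJson);
    }
}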
use of org.apache.avro.specific.SpecificDatumWriter in project flink by splunk.
the class GlueSchemaRegistryInputStreamDeserializerTest method testGetSchemaAndDeserializedStream_withoutCompression_succeeds.
/**
 * Tests that getSchemaAndDeserializedStream works when compression is not enabled.
 */
@Test
public void testGetSchemaAndDeserializedStream_withoutCompression_succeeds() throws IOException {
    compressionByte = COMPRESSION_DEFAULT_BYTE;
    compressionHandler = new GlueSchemaRegistryDefaultCompression();
    ByteArrayOutputStream byteArrayOutputStream = buildByteArrayOutputStream(AWSSchemaRegistryConstants.HEADER_VERSION_BYTE, compressionByte);
    byte[] bytes = writeToExistingStream(byteArrayOutputStream, encodeData(userDefinedPojo, new SpecificDatumWriter<>(userSchema)));
    MutableByteArrayInputStream mutableByteArrayInputStream = new MutableByteArrayInputStream();
    mutableByteArrayInputStream.setBuffer(bytes);
    glueSchemaRegistryDeserializationFacade = new MockGlueSchemaRegistryDeserializationFacade(bytes, glueSchema, NONE);
    GlueSchemaRegistryInputStreamDeserializer glueSchemaRegistryInputStreamDeserializer = new GlueSchemaRegistryInputStreamDeserializer(glueSchemaRegistryDeserializationFacade);
    Schema resultSchema = glueSchemaRegistryInputStreamDeserializer.getSchemaAndDeserializedStream(mutableByteArrayInputStream);
    assertThat(resultSchema.toString()).isEqualTo(glueSchema.getSchemaDefinition());
}
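The encodeData helper used above is not shown in this excerpt. A minimal sketch of the usual shape of such a helper, assuming it simply binary-encodes the POJO with the supplied writer:

// Assumed shape of a helper like encodeData: binary-encode a record with the given writer.
private static byte[] encodeData(Object record, SpecificDatumWriter<Object> writer) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(record, encoder);
    encoder.flush();
    return out.toByteArray();
}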
use of org.apache.avro.specific.SpecificDatumWriter in project flink by splunk.
the class AvroRecordInputFormatTest method writeTestFile.
public static void writeTestFile(File testFile) throws IOException {
    ArrayList<CharSequence> stringArray = new ArrayList<>();
    stringArray.add(TEST_ARRAY_STRING_1);
    stringArray.add(TEST_ARRAY_STRING_2);
    ArrayList<Boolean> booleanArray = new ArrayList<>();
    booleanArray.add(TEST_ARRAY_BOOLEAN_1);
    booleanArray.add(TEST_ARRAY_BOOLEAN_2);
    HashMap<CharSequence, Long> longMap = new HashMap<>();
    longMap.put(TEST_MAP_KEY1, TEST_MAP_VALUE1);
    longMap.put(TEST_MAP_KEY2, TEST_MAP_VALUE2);
    Address addr = new Address();
    addr.setNum(TEST_NUM);
    addr.setStreet(TEST_STREET);
    addr.setCity(TEST_CITY);
    addr.setState(TEST_STATE);
    addr.setZip(TEST_ZIP);
    User user1 = new User();
    user1.setName(TEST_NAME);
    user1.setFavoriteNumber(256);
    user1.setTypeDoubleTest(123.45d);
    user1.setTypeBoolTest(true);
    user1.setTypeArrayString(stringArray);
    user1.setTypeArrayBoolean(booleanArray);
    user1.setTypeEnum(TEST_ENUM_COLOR);
    user1.setTypeMap(longMap);
    user1.setTypeNested(addr);
    user1.setTypeBytes(ByteBuffer.allocate(10));
    user1.setTypeDate(LocalDate.parse("2014-03-01"));
    user1.setTypeTimeMillis(LocalTime.parse("12:12:12"));
    user1.setTypeTimeMicros(LocalTime.ofSecondOfDay(0).plus(123456L, ChronoUnit.MICROS));
    user1.setTypeTimestampMillis(Instant.parse("2014-03-01T12:12:12.321Z"));
    user1.setTypeTimestampMicros(Instant.ofEpochSecond(0).plus(123456L, ChronoUnit.MICROS));
    // 20.00
    user1.setTypeDecimalBytes(ByteBuffer.wrap(BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray()));
    // 20.00
    user1.setTypeDecimalFixed(new Fixed2(BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray()));
    // Construct via builder
    User user2 = User.newBuilder()
            .setName("Charlie")
            .setFavoriteColor("blue")
            .setFavoriteNumber(null)
            .setTypeBoolTest(false)
            .setTypeDoubleTest(1.337d)
            .setTypeNullTest(null)
            .setTypeLongTest(1337L)
            .setTypeArrayString(new ArrayList<>())
            .setTypeArrayBoolean(new ArrayList<>())
            .setTypeNullableArray(null)
            .setTypeEnum(Colors.RED)
            .setTypeMap(new HashMap<>())
            .setTypeFixed(null)
            .setTypeUnion(null)
            .setTypeNested(Address.newBuilder()
                    .setNum(TEST_NUM)
                    .setStreet(TEST_STREET)
                    .setCity(TEST_CITY)
                    .setState(TEST_STATE)
                    .setZip(TEST_ZIP)
                    .build())
            .setTypeBytes(ByteBuffer.allocate(10))
            .setTypeDate(LocalDate.parse("2014-03-01"))
            .setTypeTimeMillis(LocalTime.parse("12:12:12"))
            .setTypeTimeMicros(LocalTime.ofSecondOfDay(0).plus(123456L, ChronoUnit.MICROS))
            .setTypeTimestampMillis(Instant.parse("2014-03-01T12:12:12.321Z"))
            .setTypeTimestampMicros(Instant.ofEpochSecond(0).plus(123456L, ChronoUnit.MICROS))
            .setTypeDecimalBytes(ByteBuffer.wrap(BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray()))
            .setTypeDecimalFixed(new Fixed2(BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray()))
            .build();
    DatumWriter<User> userDatumWriter = new SpecificDatumWriter<>(User.class);
    DataFileWriter<User> dataFileWriter = new DataFileWriter<>(userDatumWriter);
    dataFileWriter.create(user1.getSchema(), testFile);
    dataFileWriter.append(user1);
    dataFileWriter.append(user2);
    dataFileWriter.close();
}
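The matching read path, as a sketch: the readTestFile helper below is hypothetical (not part of the test class), but DataFileReader is the standard counterpart to DataFileWriter, and it recovers the writer schema embedded in the container file, so no schema needs to be supplied.

// Hypothetical counterpart to writeTestFile: read all User records back from the container file.
public static List<User> readTestFile(File testFile) throws IOException {
    List<User> users = new ArrayList<>();
    DatumReader<User> userDatumReader = new SpecificDatumReader<>(User.class);
    try (DataFileReader<User> dataFileReader = new DataFileReader<>(testFile, userDatumReader)) {
        // The container file embeds the writer schema in its header.
        for (User user : dataFileReader) {
            users.add(user);
        }
    }
    return users;
}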
use of org.apache.avro.specific.SpecificDatumWriter in project flink by splunk.
the class AvroRowDataDeSerializationSchemaTest method testSpecificType.
@Test
void testSpecificType() throws Exception {
    LogicalTimeRecord record = new LogicalTimeRecord();
    Instant timestamp = Instant.parse("2010-06-30T01:20:20Z");
    record.setTypeTimestampMillis(timestamp);
    record.setTypeDate(LocalDate.parse("2014-03-01"));
    record.setTypeTimeMillis(LocalTime.parse("12:12:12"));
    SpecificDatumWriter<LogicalTimeRecord> datumWriter = new SpecificDatumWriter<>(LogicalTimeRecord.class);
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    Encoder encoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
    datumWriter.write(record, encoder);
    encoder.flush();
    byte[] input = byteArrayOutputStream.toByteArray();
    DataType dataType = ROW(
            FIELD("type_timestamp_millis", TIMESTAMP(3).notNull()),
            FIELD("type_date", DATE().notNull()),
            FIELD("type_time_millis", TIME(3).notNull())).notNull();
    AvroRowDataSerializationSchema serializationSchema = createSerializationSchema(dataType);
    AvroRowDataDeserializationSchema deserializationSchema = createDeserializationSchema(dataType);
    RowData rowData = deserializationSchema.deserialize(input);
    byte[] output = serializationSchema.serialize(rowData);
    RowData rowData2 = deserializationSchema.deserialize(output);
    assertThat(rowData2).isEqualTo(rowData);
    assertThat(rowData.getTimestamp(0, 3).toInstant()).isEqualTo(timestamp);
    assertThat(DataFormatConverters.LocalDateConverter.INSTANCE.toExternal(rowData.getInt(1)).toString()).isEqualTo("2014-03-01");
    assertThat(DataFormatConverters.LocalTimeConverter.INSTANCE.toExternal(rowData.getInt(2)).toString()).isEqualTo("12:12:12");
}
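As a complementary step not in the test, the same bytes can be decoded straight back into the specific record. A minimal sketch, assuming the generated LogicalTimeRecord class and its registered logical-type conversions:

// Sketch (assumed, not part of the test): decode the raw bytes back into the specific record.
SpecificDatumReader<LogicalTimeRecord> datumReader = new SpecificDatumReader<>(LogicalTimeRecord.class);
BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(input, null);
LogicalTimeRecord decoded = datumReader.read(null, decoder);
// The conversions on the generated class restore the Instant, LocalDate and LocalTime fields.
assertThat(decoded).isEqualTo(record);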
use of org.apache.avro.specific.SpecificDatumWriter in project zebedee by ONSdigital.
the class AvroSerializer method serialize.
/**
 * Serializes an object to Avro binary format according to the object's schema.
 *
 * @param s the Kafka topic (unused)
 * @param t the value to serialize
 * @return the Avro-encoded bytes
 */
@Override
public byte[] serialize(String s, T t) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try {
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        DatumWriter<T> writer = new SpecificDatumWriter<>(targetType.newInstance().getSchema());
        writer.write(t, encoder);
        encoder.flush();
    } catch (Exception ex) {
        throw new SerializationException("Can't serialize data", ex);
    }
    return out.toByteArray();
}
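A matching consumer-side sketch, assuming Kafka's standard Deserializer interface; the AvroDeserializer class below is a hypothetical name, as no deserializer counterpart is shown here:

// Sketch of a matching Kafka deserializer; AvroDeserializer is a hypothetical name.
public class AvroDeserializer<T extends SpecificRecordBase> implements Deserializer<T> {

    private final Class<T> targetType;

    public AvroDeserializer(Class<T> targetType) {
        this.targetType = targetType;
    }

    @Override
    public T deserialize(String topic, byte[] data) {
        try {
            // SpecificDatumReader resolves the bytes against the generated class's schema.
            DatumReader<T> reader = new SpecificDatumReader<>(targetType);
            BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(data, null);
            return reader.read(null, decoder);
        } catch (Exception ex) {
            throw new SerializationException("Can't deserialize data", ex);
        }
    }
}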