Use of org.apache.flink.formats.avro.generated.User in project flink by apache.
The class AvroOutputFormatTest, method output.
/**
 * Writes 100 fully-populated {@link User} records through the given output format,
 * covering every Avro logical type the generated schema declares (date, time,
 * timestamp, decimal-as-bytes and decimal-as-fixed).
 */
private void output(final AvroOutputFormat<User> outputFormat) throws IOException {
    outputFormat.configure(new Configuration());
    outputFormat.open(1, 1);
    for (int recordIndex = 0; recordIndex < 100; recordIndex++) {
        outputFormat.writeRecord(buildTestUser());
    }
    outputFormat.close();
}

/** Creates one fixture {@link User} with every field set to a deterministic value. */
private User buildTestUser() {
    final User user = new User();
    user.setName("testUser");
    user.setFavoriteNumber(1);
    user.setFavoriteColor("blue");
    user.setTypeBoolTest(true);
    user.setTypeEnum(Colors.BLUE);
    user.setTypeArrayString(Collections.emptyList());
    user.setTypeArrayBoolean(Collections.emptyList());
    user.setTypeMap(Collections.emptyMap());
    user.setTypeBytes(ByteBuffer.allocate(10));
    // temporal logical types
    user.setTypeDate(LocalDate.parse("2014-03-01"));
    user.setTypeTimeMillis(LocalTime.parse("12:12:12"));
    user.setTypeTimeMicros(LocalTime.ofSecondOfDay(0).plus(123456L, ChronoUnit.MICROS));
    user.setTypeTimestampMillis(Instant.parse("2014-03-01T12:12:12.321Z"));
    user.setTypeTimestampMicros(Instant.ofEpochSecond(0).plus(123456L, ChronoUnit.MICROS));
    // decimal logical types, both encodings represent 20.00 (unscaled 2000, scale 2)
    user.setTypeDecimalBytes(
            ByteBuffer.wrap(BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray()));
    user.setTypeDecimalFixed(
            new Fixed2(BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray()));
    return user;
}
Use of org.apache.flink.formats.avro.generated.User in project flink by apache.
The class AvroRecordInputFormatTest, method testDeserializeToSpecificType.
/**
* This test validates proper serialization with specific (generated POJO) types.
*/
@Test
public void testDeserializeToSpecificType() throws IOException {
DatumReader<User> datumReader = new SpecificDatumReader<>(userSchema);
try (FileReader<User> dataFileReader = DataFileReader.openReader(testFile, datumReader)) {
User rec = dataFileReader.next();
// check if record has been read correctly
assertNotNull(rec);
assertEquals("name not equal", TEST_NAME, rec.get("name").toString());
assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), rec.get("type_enum").toString());
// now serialize it with our framework:
ExecutionConfig ec = new ExecutionConfig();
TypeInformation<User> te = TypeExtractor.createTypeInfo(User.class);
assertEquals(AvroTypeInfo.class, te.getClass());
TypeSerializer<User> tser = te.createSerializer(ec);
ByteArrayOutputStream out = new ByteArrayOutputStream();
try (DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(out)) {
tser.serialize(rec, outView);
}
User newRec;
try (DataInputViewStreamWrapper inView = new DataInputViewStreamWrapper(new ByteArrayInputStream(out.toByteArray()))) {
newRec = tser.deserialize(inView);
}
// check if it is still the same
assertNotNull(newRec);
assertEquals("name not equal", TEST_NAME, newRec.getName().toString());
assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), newRec.getTypeEnum().toString());
}
}
Use of org.apache.flink.formats.avro.generated.User in project flink by apache.
The class AvroSerializerSnapshotTest, method recordSerializedShouldBeDeserializeWithTheResortedSerializer.
@Test
public void recordSerializedShouldBeDeserializeWithTheResortedSerializer() throws IOException {
// user is an avro generated test object.
final User user = TestDataGenerator.generateRandomUser(new Random());
final AvroSerializer<User> originalSerializer = new AvroSerializer<>(User.class);
//
// first serialize the record
//
ByteBuffer serializedUser = serialize(originalSerializer, user);
//
// then restore a serializer from the snapshot
//
TypeSerializer<User> restoredSerializer = originalSerializer.snapshotConfiguration().restoreSerializer();
//
// now deserialize the user with the resorted serializer.
//
User restoredUser = deserialize(restoredSerializer, serializedUser);
assertThat(user, is(restoredUser));
}
Use of org.apache.flink.formats.avro.generated.User in project flink by apache.
The class AvroTypesITCase, method testAvroToAvro.
@Test
public void testAvroToAvro() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
DataStream<User> ds = testData(env);
Table t = tEnv.fromDataStream(ds, selectFields(ds));
Table result = t.select($("*"));
List<User> results = CollectionUtil.iteratorToList(DataStreamUtils.collect(tEnv.toAppendStream(result, User.class)));
List<User> expected = Arrays.asList(USER_1, USER_2, USER_3);
assertEquals(expected, results);
}
Use of org.apache.flink.formats.avro.generated.User in project flink by apache.
The class AvroTypesITCase, method testAvroStringAccess.
@Test
public void testAvroStringAccess() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
DataStream<User> ds = testData(env);
Table t = tEnv.fromDataStream(ds, selectFields(ds));
Table result = t.select($("name"));
List<Utf8> results = CollectionUtil.iteratorToList(result.execute().collect()).stream().map(row -> (Utf8) row.getField(0)).collect(Collectors.toList());
String expected = "Charlie\n" + "Terminator\n" + "Whatever";
TestBaseUtils.compareResultAsText(results, expected);
}
Aggregations