Use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.
The class OperatorStateOutputCheckpointStreamTest, method writeAllTestKeyGroups.
private OperatorStateHandle writeAllTestKeyGroups(OperatorStateCheckpointOutputStream stream, int numPartitions) throws Exception {
    DataOutputView dov = new DataOutputViewStreamWrapper(stream);
    for (int i = 0; i < numPartitions; ++i) {
        // before starting partition i, the stream should report exactly i partitions
        Assert.assertEquals(i, stream.getNumberOfPartitions());
        stream.startNewPartition();
        dov.writeInt(i);
    }
    return stream.closeAndGetHandle();
}
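For context, a minimal, self-contained sketch (not taken from the Flink sources) of the round trip the wrapper enables: DataOutputViewStreamWrapper adapts any OutputStream to Flink's DataOutputView, and DataInputViewStreamWrapper does the reverse for reading the bytes back.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class ViewWrapperRoundTrip {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(out);
        for (int i = 0; i < 4; i++) {
            outView.writeInt(i); // the same primitive write the test issues per partition
        }
        DataInputViewStreamWrapper inView =
            new DataInputViewStreamWrapper(new ByteArrayInputStream(out.toByteArray()));
        for (int i = 0; i < 4; i++) {
            System.out.println(inView.readInt()); // prints 0, 1, 2, 3
        }
    }
}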
Use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.
The class NFA, method writeComputationState.
private void writeComputationState(final ComputationState<T> computationState, final ObjectOutputStream oos) throws IOException {
    oos.writeObject(computationState.getState());
    oos.writeLong(computationState.getTimestamp());
    oos.writeObject(computationState.getVersion());
    oos.writeLong(computationState.getStartTimestamp());
    if (computationState.getEvent() == null) {
        // write that we don't have an event associated
        oos.writeBoolean(false);
    } else {
        // write that we have an event associated
        oos.writeBoolean(true);
        DataOutputViewStreamWrapper output = new DataOutputViewStreamWrapper(oos);
        nonDuplicatingTypeSerializer.serialize(computationState.getEvent(), output);
    }
}
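The null check before the boolean marker is a common framing pattern: the reader first consumes the flag and only then decides whether to invoke the serializer at all. A hedged sketch of both sides of that pattern (writeNullable/readNullable are illustrative helper names, not Flink API):

import java.io.IOException;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;

public final class NullableIo {
    static <T> void writeNullable(T value, TypeSerializer<T> serializer, DataOutputView out)
            throws IOException {
        if (value == null) {
            out.writeBoolean(false); // marker: no payload follows
        } else {
            out.writeBoolean(true);  // marker: payload follows
            serializer.serialize(value, out);
        }
    }

    static <T> T readNullable(TypeSerializer<T> serializer, DataInputView in) throws IOException {
        return in.readBoolean() ? serializer.deserialize(in) : null;
    }
}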
Use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.
The class AvroRecordInputFormatTest, method testDeserializeToSpecificType.
/**
 * This test validates proper serialization with specific (generated POJO) types.
 */
@Test
public void testDeserializeToSpecificType() throws IOException {
    DatumReader<User> datumReader = new SpecificDatumReader<User>(userSchema);
    try (FileReader<User> dataFileReader = DataFileReader.openReader(testFile, datumReader)) {
        User rec = dataFileReader.next();

        // check if the record has been read correctly
        assertNotNull(rec);
        assertEquals("name not equal", TEST_NAME, rec.get("name").toString());
        assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), rec.get("type_enum").toString());

        // now serialize it with our framework:
        ExecutionConfig ec = new ExecutionConfig();
        TypeInformation<User> te = TypeExtractor.createTypeInfo(User.class);
        Assert.assertEquals(AvroTypeInfo.class, te.getClass());
        TypeSerializer<User> tser = te.createSerializer(ec);

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(out)) {
            tser.serialize(rec, outView);
        }

        User newRec;
        try (DataInputViewStreamWrapper inView = new DataInputViewStreamWrapper(new ByteArrayInputStream(out.toByteArray()))) {
            newRec = tser.deserialize(inView);
        }

        // check if it is still the same
        assertNotNull(newRec);
        assertEquals("name not equal", TEST_NAME, newRec.getName().toString());
        assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), newRec.getTypeEnum().toString());
    }
}
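The serialize-then-deserialize steps above generalize to any TypeSerializer. A small hypothetical helper (the class and method names are ours, not part of the test) capturing the round trip the test performs by hand:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public final class SerializerRoundTrip {
    static <T> T roundTrip(TypeSerializer<T> serializer, T value) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(out)) {
            serializer.serialize(value, outView);
        }
        try (DataInputViewStreamWrapper inView =
                new DataInputViewStreamWrapper(new ByteArrayInputStream(out.toByteArray()))) {
            return serializer.deserialize(inView);
        }
    }
}

With such a helper, the middle of the test would reduce to a single line: User newRec = SerializerRoundTrip.roundTrip(tser, rec);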
Use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.
The class AvroRecordInputFormatTest, method testDeserializeToGenericType.
/**
 * Tests whether Flink's serialization can properly process GenericData.Record types.
 * Usually, users of Avro generate classes (POJOs) from Avro schemas.
 * However, if the generated classes are not available, one can also use GenericData.Record,
 * an untyped key-value record that uses a schema to validate the correctness of the data.
 *
 * It is not recommended to use GenericData.Record with Flink. Use generated POJOs instead.
 */
@Test
public void testDeserializeToGenericType() throws IOException {
    DatumReader<GenericData.Record> datumReader = new GenericDatumReader<>(userSchema);
    try (FileReader<GenericData.Record> dataFileReader = DataFileReader.openReader(testFile, datumReader)) {
        // initialize the Record by reading it from disk (that's easier than creating it by hand)
        GenericData.Record rec = new GenericData.Record(userSchema);
        dataFileReader.next(rec);

        // check if the record has been read correctly
        assertNotNull(rec);
        assertEquals("name not equal", TEST_NAME, rec.get("name").toString());
        assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), rec.get("type_enum").toString());
        // it is null for the first record
        assertEquals(null, rec.get("type_long_test"));

        // now serialize it with our framework:
        TypeInformation<GenericData.Record> te = TypeExtractor.createTypeInfo(GenericData.Record.class);
        ExecutionConfig ec = new ExecutionConfig();
        Assert.assertEquals(GenericTypeInfo.class, te.getClass());
        Serializers.recursivelyRegisterType(te.getTypeClass(), ec, new HashSet<Class<?>>());
        TypeSerializer<GenericData.Record> tser = te.createSerializer(ec);
        Assert.assertEquals(1, ec.getDefaultKryoSerializerClasses().size());
        Assert.assertTrue(
            ec.getDefaultKryoSerializerClasses().containsKey(Schema.class)
                && ec.getDefaultKryoSerializerClasses().get(Schema.class).equals(Serializers.AvroSchemaSerializer.class));

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(out)) {
            tser.serialize(rec, outView);
        }

        GenericData.Record newRec;
        try (DataInputViewStreamWrapper inView = new DataInputViewStreamWrapper(new ByteArrayInputStream(out.toByteArray()))) {
            newRec = tser.deserialize(inView);
        }

        // check if it is still the same
        assertNotNull(newRec);
        assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), newRec.get("type_enum").toString());
        assertEquals("name not equal", TEST_NAME, newRec.get("name").toString());
        assertEquals(null, newRec.get("type_long_test"));
    }
}
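What recursivelyRegisterType accomplishes here, and what the two assertions verify, can also be done explicitly. A minimal sketch, assuming the same Flink version as the test, of registering Avro's Schema with a Kryo serializer so that the generic (Kryo-handled) GenericData.Record type becomes serializable:

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.api.java.typeutils.runtime.kryo.Serializers;

public class GenericRecordSerializerSetup {
    public static TypeSerializer<GenericData.Record> create() {
        ExecutionConfig ec = new ExecutionConfig();
        // GenericData.Record carries a Schema field; without this registration,
        // Kryo has no serializer capable of handling it
        ec.addDefaultKryoSerializer(Schema.class, Serializers.AvroSchemaSerializer.class);
        // GenericData.Record is not a POJO, so the extractor yields a GenericTypeInfo
        TypeInformation<GenericData.Record> info =
            TypeExtractor.createTypeInfo(GenericData.Record.class);
        return info.createSerializer(ec);
    }
}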
Use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.
The class AbstractRocksDBState, method getSerializedValue.
@Override
@SuppressWarnings("unchecked")
public byte[] getSerializedValue(byte[] serializedKeyAndNamespace) throws Exception {
    Preconditions.checkNotNull(serializedKeyAndNamespace, "Serialized key and namespace");

    //TODO make KvStateRequestSerializer key-group aware to save this round trip and key-group computation
    Tuple2<K, N> des = KvStateRequestSerializer.<K, N>deserializeKeyAndNamespace(
        serializedKeyAndNamespace, backend.getKeySerializer(), namespaceSerializer);

    int keyGroup = KeyGroupRangeAssignment.assignToKeyGroup(des.f0, backend.getNumberOfKeyGroups());

    // we cannot reuse the keySerializationStream member since this method
    // is called concurrently with the other methods and the member may thus contain garbage
    ByteArrayOutputStreamWithPos tmpKeySerializationStream = new ByteArrayOutputStreamWithPos(128);
    DataOutputViewStreamWrapper tmpKeySerializationDataOutputView = new DataOutputViewStreamWrapper(tmpKeySerializationStream);

    writeKeyWithGroupAndNamespace(keyGroup, des.f0, des.f1, tmpKeySerializationStream, tmpKeySerializationDataOutputView);

    return backend.db.get(columnFamily, tmpKeySerializationStream.toByteArray());
}
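The key construction above can be summarized in a simplified, hypothetical form (this is not Flink's actual key layout, which varies the key-group prefix width with the maximum parallelism): serialize key group, key, and namespace into a fresh in-memory stream, then use the resulting bytes as the RocksDB lookup key. Allocating the stream per call is what makes the method safe under concurrent access.

import java.io.IOException;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public final class CompositeKeys {
    static <K, N> byte[] serializeKeyAndNamespace(
            int keyGroup, K key, N namespace,
            TypeSerializer<K> keySerializer, TypeSerializer<N> namespaceSerializer)
            throws IOException {
        // a fresh stream per call: safe under concurrent access, unlike a shared member
        ByteArrayOutputStreamWithPos stream = new ByteArrayOutputStreamWithPos(128);
        DataOutputViewStreamWrapper view = new DataOutputViewStreamWrapper(stream);
        view.writeInt(keyGroup); // simplified: Flink writes a narrower key-group prefix
        keySerializer.serialize(key, view);
        namespaceSerializer.serialize(namespace, view);
        return stream.toByteArray();
    }
}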