Use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.
The class PassThroughPythonStreamGroupWindowAggregateOperator, method open.
@Override
public void open() throws Exception {
    super.open();
    windowBaos = new ByteArrayOutputStreamWithPos();
    windowBaosWrapper = new DataOutputViewStreamWrapper(windowBaos);
    reusePythonRowData = new UpdatableRowData(GenericRowData.of(NORMAL_RECORD, null, null), 3);
    reusePythonTimerRowData = new UpdatableRowData(GenericRowData.of(TRIGGER_TIMER, null, null), 3);
    reusePythonTimerData = new UpdatableRowData(GenericRowData.of(0, null, null, null), 4);
    reuseJoinedRow = new JoinedRowData();
    windowAggResult = new JoinedRowData();
    reusePythonTimerRowData.setField(2, reusePythonTimerData);
    windowAccumulateData = new HashMap<>();
    windowRetractData = new HashMap<>();
    mockPythonInternalService = (InternalTimerServiceImpl<K, TimeWindow>) getInternalTimerService(
            "python-window-timers", windowSerializer, this.mockPythonWindowOperator);
    this.groupKeyProjection = createProjection("GroupKey", grouping);
    int inputFieldIndex = (int) aggregateFunction.getInputs()[0];
    this.aggExtracter = input -> {
        GenericRowData aggResult = new GenericRowData(1);
        aggResult.setField(0, input.getLong(inputFieldIndex));
        return aggResult;
    };
    this.windowExtractor = window -> {
        GenericRowData windowProperty = new GenericRowData(namedProperties.length);
        for (int i = 0; i < namedProperties.length; i++) {
            switch (namedProperties[i]) {
                case WINDOW_START:
                    windowProperty.setField(i, getShiftEpochMills(window.getStart()));
                    break;
                case WINDOW_END:
                    windowProperty.setField(i, getShiftEpochMills(window.getEnd()));
                    break;
                case ROW_TIME_ATTRIBUTE:
                    windowProperty.setField(i, getShiftEpochMills(window.getEnd() - 1));
                    break;
                case PROC_TIME_ATTRIBUTE:
                    windowProperty.setField(i, -1L);
            }
        }
        return windowProperty;
    };
}
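The wrapper above is created once in open() around a reusable ByteArrayOutputStreamWithPos, so each window can later be serialized into the same buffer without reallocation. A minimal sketch of that reuse pattern, assuming the streaming TimeWindow and its nested Serializer (the local names are illustrative, not from the operator):

ByteArrayOutputStreamWithPos baos = new ByteArrayOutputStreamWithPos();
DataOutputViewStreamWrapper view = new DataOutputViewStreamWrapper(baos);
TimeWindow.Serializer windowSerializer = new TimeWindow.Serializer();

// per window: rewind the shared buffer, serialize, then copy out the bytes
baos.reset();
windowSerializer.serialize(new TimeWindow(0L, 1000L), view);
byte[] encodedWindow = baos.toByteArray();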
Use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.
The class KryoSerializerCompatibilityTest, method testMigrationStrategyWithDifferentKryoType.
/**
* Verifies that reconfiguration result is INCOMPATIBLE if data type has changed.
*/
@Test
public void testMigrationStrategyWithDifferentKryoType() throws Exception {
    KryoSerializer<TestClassA> kryoSerializerForA =
            new KryoSerializer<>(TestClassA.class, new ExecutionConfig());

    // snapshot configuration and serialize to bytes
    TypeSerializerSnapshot kryoSerializerConfigSnapshot = kryoSerializerForA.snapshotConfiguration();
    byte[] serializedConfig;
    try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        TypeSerializerSnapshotSerializationUtil.writeSerializerSnapshot(
                new DataOutputViewStreamWrapper(out), kryoSerializerConfigSnapshot, kryoSerializerForA);
        serializedConfig = out.toByteArray();
    }

    KryoSerializer<TestClassB> kryoSerializerForB =
            new KryoSerializer<>(TestClassB.class, new ExecutionConfig());

    // read configuration again from bytes
    try (ByteArrayInputStream in = new ByteArrayInputStream(serializedConfig)) {
        kryoSerializerConfigSnapshot = TypeSerializerSnapshotSerializationUtil.readSerializerSnapshot(
                new DataInputViewStreamWrapper(in),
                Thread.currentThread().getContextClassLoader(),
                kryoSerializerForB);
    }

    @SuppressWarnings("unchecked")
    TypeSerializerSchemaCompatibility<TestClassB> compatResult =
            kryoSerializerConfigSnapshot.resolveSchemaCompatibility(kryoSerializerForB);
    assertTrue(compatResult.isIncompatible());
}
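For contrast, resolving a snapshot against a serializer of the same type and configuration is expected to be compatible as-is. A sketch of that direction (no byte round trip is needed just to show the call; the expected result assumes unchanged Kryo registrations):

KryoSerializer<TestClassA> serializerForA = new KryoSerializer<>(TestClassA.class, new ExecutionConfig());
TypeSerializerSnapshot<TestClassA> snapshot = serializerForA.snapshotConfiguration();
TypeSerializerSchemaCompatibility<TestClassA> compat = snapshot.resolveSchemaCompatibility(serializerForA);
assertTrue(compat.isCompatibleAsIs());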
Use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.
The class VersionedIOWriteableTest, method testReadCompatibleVersion.
@Test
public void testReadCompatibleVersion() throws Exception {
    String payload = "test";

    TestWriteable testWriteable = new TestWriteable(1, payload);
    byte[] serialized;
    try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
        testWriteable.write(new DataOutputViewStreamWrapper(out));
        serialized = out.toByteArray();
    }

    testWriteable = new TestWriteable(2) {
        @Override
        public int[] getCompatibleVersions() {
            return new int[] { 1, 2 };
        }
    };
    try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
        testWriteable.read(new DataInputViewStreamWrapper(in));
    }

    Assert.assertEquals(payload, testWriteable.getData());
}
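TestWriteable here extends Flink's VersionedIOReadableWritable, whose super.write/super.read calls handle the version header on the wrapped streams. A hypothetical subclass sketching that contract (MyState and its data field are made-up names):

public class MyState extends VersionedIOReadableWritable {

    private String data = "";

    @Override
    public int getVersion() {
        return 2;
    }

    @Override
    public void write(DataOutputView out) throws IOException {
        super.write(out); // writes this object's version first
        out.writeUTF(data);
    }

    @Override
    public void read(DataInputView in) throws IOException {
        super.read(in); // reads the version and rejects incompatible ones
        this.data = in.readUTF();
    }
}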
Use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.
The class AvroRecordInputFormatTest, method testDeserializeToGenericType.
/**
 * Tests that Flink's serialization stack can properly process GenericData.Record types. Usually,
 * users of Avro generate classes (POJOs) from Avro schemas. However, if generated classes are
 * not available, one can also use GenericData.Record. It is an untyped key-value record that
 * uses a schema to validate the correctness of the data.
 *
 * <p>It is not recommended to use GenericData.Record with Flink. Use generated POJOs instead.
 */
@Test
public void testDeserializeToGenericType() throws IOException {
    DatumReader<GenericData.Record> datumReader = new GenericDatumReader<>(userSchema);
    try (FileReader<GenericData.Record> dataFileReader = DataFileReader.openReader(testFile, datumReader)) {
        // initialize Record by reading it from disk (that's easier than creating it by hand)
        GenericData.Record rec = new GenericData.Record(userSchema);
        dataFileReader.next(rec);

        // check if record has been read correctly
        assertNotNull(rec);
        assertEquals("name not equal", TEST_NAME, rec.get("name").toString());
        assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), rec.get("type_enum").toString());
        // it is null for the first record.
        assertEquals(null, rec.get("type_long_test"));

        // now serialize it with our framework:
        TypeInformation<GenericData.Record> te = TypeExtractor.createTypeInfo(GenericData.Record.class);
        ExecutionConfig ec = new ExecutionConfig();
        assertEquals(GenericTypeInfo.class, te.getClass());
        Serializers.recursivelyRegisterType(te.getTypeClass(), ec, new HashSet<>());
        TypeSerializer<GenericData.Record> tser = te.createSerializer(ec);
        assertEquals(1, ec.getDefaultKryoSerializerClasses().size());
        assertTrue(
                ec.getDefaultKryoSerializerClasses().containsKey(Schema.class)
                        && ec.getDefaultKryoSerializerClasses()
                                .get(Schema.class)
                                .equals(AvroKryoSerializerUtils.AvroSchemaSerializer.class));

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(out)) {
            tser.serialize(rec, outView);
        }

        GenericData.Record newRec;
        try (DataInputViewStreamWrapper inView =
                new DataInputViewStreamWrapper(new ByteArrayInputStream(out.toByteArray()))) {
            newRec = tser.deserialize(inView);
        }

        // check if it is still the same
        assertNotNull(newRec);
        assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), newRec.get("type_enum").toString());
        assertEquals("name not equal", TEST_NAME, newRec.get("name").toString());
        assertEquals(null, newRec.get("type_long_test"));
    }
}
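The decisive step above is Serializers.recursivelyRegisterType, which (as the assertions verify) registers AvroKryoSerializerUtils.AvroSchemaSerializer for Schema.class so Kryo can cope with the non-serializable Avro Schema. A condensed sketch of the same round trip with a hand-built record (the inline schema and its single field are illustrative):

Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"User\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"}]}");
GenericData.Record rec = new GenericData.Record(schema);
rec.put("name", "Alice");

ExecutionConfig ec = new ExecutionConfig();
Serializers.recursivelyRegisterType(GenericData.Record.class, ec, new HashSet<>());
TypeSerializer<GenericData.Record> serializer =
        TypeExtractor.createTypeInfo(GenericData.Record.class).createSerializer(ec);

ByteArrayOutputStream out = new ByteArrayOutputStream();
try (DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(out)) {
    serializer.serialize(rec, outView);
}
try (DataInputViewStreamWrapper inView =
        new DataInputViewStreamWrapper(new ByteArrayInputStream(out.toByteArray()))) {
    GenericData.Record copy = serializer.deserialize(inView);
}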
Use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.
The class CollectionInputFormat, method writeObject.
// --------------------------------------------------------------------------------------------
private void writeObject(ObjectOutputStream out) throws IOException {
    out.defaultWriteObject();

    final int size = dataSet.size();
    out.writeInt(size);

    if (size > 0) {
        DataOutputViewStreamWrapper wrapper = new DataOutputViewStreamWrapper(out);
        for (T element : dataSet) {
            serializer.serialize(element, wrapper);
        }
    }
}
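A deserializing counterpart mirrors this with DataInputViewStreamWrapper. A sketch of how such a readObject could look (Flink's actual implementation differs in details such as error handling):

private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();

    int size = in.readInt();
    List<T> list = new ArrayList<>(size);
    if (size > 0) {
        DataInputViewStreamWrapper wrapper = new DataInputViewStreamWrapper(in);
        for (int i = 0; i < size; i++) {
            list.add(serializer.deserialize(wrapper));
        }
    }
    dataSet = list;
}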