Use of org.apache.flink.core.memory.ByteArrayInputStreamWithPos in project flink by apache.
The class RocksDBFoldingState, method get().
@Override
public ACC get() {
    try {
        writeCurrentKeyWithGroupAndNamespace();
        byte[] key = keySerializationStream.toByteArray();
        byte[] valueBytes = backend.db.get(columnFamily, key);

        if (valueBytes == null) {
            return null;
        }

        return valueSerializer.deserialize(new DataInputViewStreamWrapper(new ByteArrayInputStreamWithPos(valueBytes)));
    } catch (IOException | RocksDBException e) {
        throw new RuntimeException("Error while retrieving data from RocksDB", e);
    }
}
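The read path above is a plain deserialization of a raw RocksDB value: the byte[] returned from db.get is wrapped in a ByteArrayInputStreamWithPos, adapted to a DataInputView, and handed to the value serializer. A minimal self-contained sketch of that idiom, assuming Flink's StringSerializer as a stand-in for the state's value serializer (the class name DeserializeSketch and the payload are made up for illustration):

import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.core.memory.ByteArrayInputStreamWithPos;
import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class DeserializeSketch {
    public static void main(String[] args) throws Exception {
        // Produce a value blob the way the state backend would store it.
        ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos();
        StringSerializer.INSTANCE.serialize("hello", new DataOutputViewStreamWrapper(out));
        byte[] valueBytes = out.toByteArray();

        // Read path: wrap the raw bytes and hand them to the serializer.
        String restored = StringSerializer.INSTANCE.deserialize(
                new DataInputViewStreamWrapper(new ByteArrayInputStreamWithPos(valueBytes)));
        System.out.println(restored); // hello
    }
}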
Use of org.apache.flink.core.memory.ByteArrayInputStreamWithPos in project flink by apache.
The class RocksDBFoldingState, method add().
@Override
public void add(T value) throws IOException {
    try {
        writeCurrentKeyWithGroupAndNamespace();
        byte[] key = keySerializationStream.toByteArray();
        byte[] valueBytes = backend.db.get(columnFamily, key);

        DataOutputViewStreamWrapper out = new DataOutputViewStreamWrapper(keySerializationStream);

        if (valueBytes == null) {
            keySerializationStream.reset();
            valueSerializer.serialize(foldFunction.fold(stateDesc.getDefaultValue(), value), out);
            backend.db.put(columnFamily, writeOptions, key, keySerializationStream.toByteArray());
        } else {
            ACC oldValue = valueSerializer.deserialize(new DataInputViewStreamWrapper(new ByteArrayInputStreamWithPos(valueBytes)));
            ACC newValue = foldFunction.fold(oldValue, value);
            keySerializationStream.reset();
            valueSerializer.serialize(newValue, out);
            backend.db.put(columnFamily, writeOptions, key, keySerializationStream.toByteArray());
        }
    } catch (Exception e) {
        throw new RuntimeException("Error while adding data to RocksDB", e);
    }
}
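Note how the write path reuses the single keySerializationStream for both the key bytes and the value bytes, resetting it in between. A small sketch of that reuse pattern, assuming StringSerializer and LongSerializer as stand-ins for the key and value serializers (StreamReuseSketch is a hypothetical name):

import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class StreamReuseSketch {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStreamWithPos stream = new ByteArrayOutputStreamWithPos();
        DataOutputViewStreamWrapper out = new DataOutputViewStreamWrapper(stream);

        // First use: serialize the key and snapshot the bytes.
        StringSerializer.INSTANCE.serialize("user-42", out);
        byte[] keyBytes = stream.toByteArray();

        // Reset and reuse the same buffer for the value, as RocksDBFoldingState#add does.
        stream.reset();
        LongSerializer.INSTANCE.serialize(100L, out);
        byte[] valueBytes = stream.toByteArray();

        System.out.println(keyBytes.length + " key bytes, " + valueBytes.length + " value bytes");
    }
}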
Use of org.apache.flink.core.memory.ByteArrayInputStreamWithPos in project flink by apache.
The class RocksDBMapState, method deserializeUserKey().
private UK deserializeUserKey(byte[] rawKeyBytes) throws IOException {
    ByteArrayInputStreamWithPos bais = new ByteArrayInputStreamWithPos(rawKeyBytes);
    DataInputViewStreamWrapper in = new DataInputViewStreamWrapper(bais);

    readKeyWithGroupAndNamespace(bais, in);

    return userKeySerializer.deserialize(in);
}
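Here the composite RocksDB key is consumed front to back: readKeyWithGroupAndNamespace advances the stream past the key-group, key, and namespace prefix, and the user key is then deserialized from whatever position the stream is left at. A rough sketch of that prefix-then-payload read, with IntSerializer standing in for the prefix fields (PrefixedKeySketch is hypothetical):

import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.core.memory.ByteArrayInputStreamWithPos;
import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class PrefixedKeySketch {
    public static void main(String[] args) throws Exception {
        // Compose a key the way the map state does: prefix first, user key last.
        ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos();
        DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(out);
        IntSerializer.INSTANCE.serialize(7, outView);           // stand-in for the key-group/key/namespace prefix
        StringSerializer.INSTANCE.serialize("user-key", outView);
        byte[] rawKeyBytes = out.toByteArray();

        // Read path: consume the prefix first; the user key follows at the stream's current position.
        ByteArrayInputStreamWithPos bais = new ByteArrayInputStreamWithPos(rawKeyBytes);
        DataInputViewStreamWrapper in = new DataInputViewStreamWrapper(bais);
        IntSerializer.INSTANCE.deserialize(in);                  // skip the prefix
        String userKey = StringSerializer.INSTANCE.deserialize(in);
        System.out.println(userKey + " read up to position " + bais.getPosition());
    }
}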
Use of org.apache.flink.core.memory.ByteArrayInputStreamWithPos in project flink by apache.
The class RocksDBReducingState, method mergeNamespaces().
@Override
public void mergeNamespaces(N target, Collection<N> sources) throws Exception {
    if (sources == null || sources.isEmpty()) {
        return;
    }

    // cache key and namespace
    final K key = backend.getCurrentKey();
    final int keyGroup = backend.getCurrentKeyGroupIndex();

    try {
        V current = null;

        // merge the sources to the target
        for (N source : sources) {
            if (source != null) {
                writeKeyWithGroupAndNamespace(keyGroup, key, source, keySerializationStream, keySerializationDataOutputView);

                final byte[] sourceKey = keySerializationStream.toByteArray();
                final byte[] valueBytes = backend.db.get(columnFamily, sourceKey);

                if (valueBytes != null) {
                    V value = valueSerializer.deserialize(new DataInputViewStreamWrapper(new ByteArrayInputStreamWithPos(valueBytes)));

                    if (current != null) {
                        current = reduceFunction.reduce(current, value);
                    } else {
                        current = value;
                    }
                }
            }
        }

        // if something came out of merging the sources, merge it or write it to the target
        if (current != null) {
            // create the target full-binary-key
            writeKeyWithGroupAndNamespace(keyGroup, key, target, keySerializationStream, keySerializationDataOutputView);

            final byte[] targetKey = keySerializationStream.toByteArray();
            final byte[] targetValueBytes = backend.db.get(columnFamily, targetKey);

            if (targetValueBytes != null) {
                // target also had a value, merge
                V value = valueSerializer.deserialize(new DataInputViewStreamWrapper(new ByteArrayInputStreamWithPos(targetValueBytes)));
                current = reduceFunction.reduce(current, value);
            }

            // serialize the resulting value
            keySerializationStream.reset();
            valueSerializer.serialize(current, keySerializationDataOutputView);

            // write the resulting value
            backend.db.put(columnFamily, writeOptions, targetKey, keySerializationStream.toByteArray());
        }
    } catch (Exception e) {
        throw new Exception("Error while merging state in RocksDB", e);
    }
}
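The merge walks all source namespaces, deserializes each stored value from its byte[] through a ByteArrayInputStreamWithPos, and folds the values together with the user's ReduceFunction before writing the result under the target key. A simplified sketch of that deserialize-and-reduce loop, assuming LongSerializer and a summing ReduceFunction (MergeSketch and its helper methods are illustrative only):

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.core.memory.ByteArrayInputStreamWithPos;
import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class MergeSketch {
    private static byte[] serialize(long v) throws Exception {
        ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos();
        LongSerializer.INSTANCE.serialize(v, new DataOutputViewStreamWrapper(out));
        return out.toByteArray();
    }

    private static Long deserialize(byte[] bytes) throws Exception {
        return LongSerializer.INSTANCE.deserialize(
                new DataInputViewStreamWrapper(new ByteArrayInputStreamWithPos(bytes)));
    }

    public static void main(String[] args) throws Exception {
        ReduceFunction<Long> sum = (a, b) -> a + b;

        // Stand-ins for the per-namespace blobs fetched from RocksDB.
        byte[][] sourceValues = { serialize(3L), serialize(4L) };

        Long current = null;
        for (byte[] valueBytes : sourceValues) {
            Long value = deserialize(valueBytes);
            current = (current == null) ? value : sum.reduce(current, value);
        }
        System.out.println(current); // 7
    }
}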
Use of org.apache.flink.core.memory.ByteArrayInputStreamWithPos in project flink by apache.
The class VersionedIOWriteableTest, method testReadCompatibleVersion().
@Test
public void testReadCompatibleVersion() throws Exception {
    String payload = "test";

    TestWriteable testWriteable = new TestWriteable(1, payload);
    byte[] serialized;
    try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
        testWriteable.write(new DataOutputViewStreamWrapper(out));
        serialized = out.toByteArray();
    }

    testWriteable = new TestWriteable(2) {
        @Override
        public boolean isCompatibleVersion(int version) {
            return getVersion() >= version;
        }
    };
    try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
        testWriteable.read(new DataInputViewStreamWrapper(in));
    }

    Assert.assertEquals(payload, testWriteable.getData());
}
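The test serializes a versioned writeable into a ByteArrayOutputStreamWithPos and reads it back through a ByteArrayInputStreamWithPos, both in try-with-resources since the positioned streams are ordinary Closeable streams. A standalone sketch of the same write-then-read round trip with an explicit version header, outside the VersionedIOWriteable machinery (VersionedRoundTripSketch and its version check are illustrative assumptions, not the Flink API):

import org.apache.flink.core.memory.ByteArrayInputStreamWithPos;
import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class VersionedRoundTripSketch {
    public static void main(String[] args) throws Exception {
        byte[] serialized;

        // Write side: version header first, then the payload.
        try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
            DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(out);
            outView.writeInt(1);            // writer version
            outView.writeUTF("test");       // payload
            serialized = out.toByteArray();
        }

        // Read side: a newer reader accepts any version up to its own.
        int readerVersion = 2;
        try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
            DataInputViewStreamWrapper inView = new DataInputViewStreamWrapper(in);
            int writtenVersion = inView.readInt();
            if (writtenVersion > readerVersion) {
                throw new IllegalStateException("Incompatible version: " + writtenVersion);
            }
            System.out.println(inView.readUTF()); // test
        }
    }
}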