Use of org.apache.flink.core.memory.DataInputViewStreamWrapper in project flink by apache.
The class RocksDBListState, method get().
@Override
public Iterable<V> get() {
    try {
        writeCurrentKeyWithGroupAndNamespace();
        byte[] key = keySerializationStream.toByteArray();
        byte[] valueBytes = backend.db.get(columnFamily, key);
        if (valueBytes == null) {
            return null;
        }

        ByteArrayInputStream bais = new ByteArrayInputStream(valueBytes);
        DataInputViewStreamWrapper in = new DataInputViewStreamWrapper(bais);

        List<V> result = new ArrayList<>();
        while (in.available() > 0) {
            result.add(valueSerializer.deserialize(in));
            if (in.available() > 0) {
                // skip the single delimiter byte between appended list entries
                in.readByte();
            }
        }
        return result;
    } catch (IOException | RocksDBException e) {
        throw new RuntimeException("Error while retrieving data from RocksDB", e);
    }
}
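The get() loop skips one byte between deserialized elements because list entries appended to RocksDB are joined by a single delimiter byte. The following standalone sketch (not Flink code; StringSerializer and the ',' delimiter are illustrative assumptions) reproduces that encoding round trip:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class ListStateEncodingSketch {
    public static void main(String[] args) throws Exception {
        StringSerializer serializer = StringSerializer.INSTANCE;

        // Write side: serialize each element, with a single separator byte
        // between entries (',' is an assumption for this sketch).
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputViewStreamWrapper out = new DataOutputViewStreamWrapper(baos);
        List<String> elements = Arrays.asList("a", "b", "c");
        for (int i = 0; i < elements.size(); i++) {
            if (i > 0) {
                out.writeByte(',');  // separator between entries
            }
            serializer.serialize(elements.get(i), out);
        }

        // Read side: the same loop shape as RocksDBListState#get().
        DataInputViewStreamWrapper in =
                new DataInputViewStreamWrapper(new ByteArrayInputStream(baos.toByteArray()));
        List<String> result = new ArrayList<>();
        while (in.available() > 0) {
            result.add(serializer.deserialize(in));
            if (in.available() > 0) {
                in.readByte();  // skip the separator
            }
        }
        System.out.println(result);  // prints [a, b, c]
    }
}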
Use of org.apache.flink.core.memory.DataInputViewStreamWrapper in project flink by apache.
The class RocksDBMapState, method deserializeUserValue().
private UV deserializeUserValue(byte[] rawValueBytes) throws IOException {
    ByteArrayInputStreamWithPos bais = new ByteArrayInputStreamWithPos(rawValueBytes);
    DataInputViewStreamWrapper in = new DataInputViewStreamWrapper(bais);

    // a leading boolean flag marks null user values, which cannot be
    // represented by the serialized payload alone
    boolean isNull = in.readBoolean();
    return isNull ? null : userValueSerializer.deserialize(in);
}
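A sketch of the matching write side (not the Flink method itself; the generic helper shape and the ByteArrayOutputStream, TypeSerializer, and DataOutputViewStreamWrapper imports are assumptions): the boolean null flag is written first, so deserializeUserValue() above can read it back.

// Hypothetical helper mirroring the null-flag encoding that
// deserializeUserValue() consumes.
static <UV> byte[] serializeUserValue(UV userValue, TypeSerializer<UV> serializer) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputViewStreamWrapper out = new DataOutputViewStreamWrapper(baos);
    if (userValue == null) {
        out.writeBoolean(true);   // null marker, no payload follows
    } else {
        out.writeBoolean(false);  // non-null marker, payload follows
        serializer.serialize(userValue, out);
    }
    return baos.toByteArray();
}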
Use of org.apache.flink.core.memory.DataInputViewStreamWrapper in project flink by apache.
The class RocksDBReducingState, method add().
@Override
public void add(V value) throws IOException {
    try {
        writeCurrentKeyWithGroupAndNamespace();
        byte[] key = keySerializationStream.toByteArray();
        byte[] valueBytes = backend.db.get(columnFamily, key);

        // keySerializationStream is reset and reused below as the scratch
        // buffer for serializing the value
        DataOutputViewStreamWrapper out = new DataOutputViewStreamWrapper(keySerializationStream);
        if (valueBytes == null) {
            keySerializationStream.reset();
            valueSerializer.serialize(value, out);
            backend.db.put(columnFamily, writeOptions, key, keySerializationStream.toByteArray());
        } else {
            V oldValue = valueSerializer.deserialize(new DataInputViewStreamWrapper(new ByteArrayInputStream(valueBytes)));
            V newValue = reduceFunction.reduce(oldValue, value);
            keySerializationStream.reset();
            valueSerializer.serialize(newValue, out);
            backend.db.put(columnFamily, writeOptions, key, keySerializationStream.toByteArray());
        }
    } catch (Exception e) {
        throw new RuntimeException("Error while adding data to RocksDB", e);
    }
}
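Stripped of the RocksDB calls, add() is a read-reduce-write cycle over serialized bytes. A minimal standalone sketch of that cycle, with LongSerializer and a sum ReduceFunction as illustrative assumptions:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class ReducingStateSketch {
    public static void main(String[] args) throws Exception {
        LongSerializer serializer = LongSerializer.INSTANCE;
        ReduceFunction<Long> sum = (a, b) -> a + b;

        // "Stored" bytes, standing in for what backend.db.get() returns.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        serializer.serialize(40L, new DataOutputViewStreamWrapper(buffer));
        byte[] valueBytes = buffer.toByteArray();

        // Read-reduce-write, as in RocksDBReducingState#add().
        Long oldValue = serializer.deserialize(
                new DataInputViewStreamWrapper(new ByteArrayInputStream(valueBytes)));
        Long newValue = sum.reduce(oldValue, 2L);

        buffer.reset();
        serializer.serialize(newValue, new DataOutputViewStreamWrapper(buffer));
        System.out.println(newValue);  // prints 42
    }
}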
Use of org.apache.flink.core.memory.DataInputViewStreamWrapper in project flink by apache.
The class BinaryInputFormat, method open().
@Override
public void open(FileInputSplit split) throws IOException {
    super.open(split);

    this.blockInfo = this.createAndReadBlockInfo();

    // We set the size of the BlockBasedInput to splitLength as each split contains one block.
    // After reading the block info, we seek in the file to the correct position.
    this.readRecords = 0;
    this.stream.seek(this.splitStart + this.blockInfo.getFirstRecordStart());
    this.blockBasedInput = new BlockBasedInput(this.stream, (int) blockInfo.getFirstRecordStart(), this.splitLength);
    this.dataInputStream = new DataInputViewStreamWrapper(blockBasedInput);
}
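Once open() has positioned the stream and wrapped it, records are consumed through the wrapper one at a time. A sketch of that consumption path, following the class's abstract deserialize(T reuse, DataInputView dataInput) contract:

@Override
public T nextRecord(T record) throws IOException {
    if (this.reachedEnd()) {
        return null;
    }
    // each record is decoded straight from the DataInputViewStreamWrapper
    // created in open()
    record = this.deserialize(record, this.dataInputStream);
    this.readRecords++;
    return record;
}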
Use of org.apache.flink.core.memory.DataInputViewStreamWrapper in project flink by apache.
The class BinaryInputFormat, method createStatistics().
/**
 * Fills in the statistics. The last modification time and the total input size are prefilled.
 *
 * @param files
 *        The files that are associated with this block input format.
 * @param stats
 *        The pre-filled statistics.
 */
protected SequentialStatistics createStatistics(List<FileStatus> files, FileBaseStatistics stats) throws IOException {
    if (files.isEmpty()) {
        return null;
    }

    BlockInfo blockInfo = new BlockInfo();
    long totalCount = 0;
    for (FileStatus file : files) {
        // skip files too short to even hold a BlockInfo footer
        if (file.getLen() < blockInfo.getInfoSize()) {
            continue;
        }

        FileSystem fs = file.getPath().getFileSystem();
        try (FSDataInputStream fdis = fs.open(file.getPath(), blockInfo.getInfoSize())) {
            // the footer of the last block sits at the very end of the file
            fdis.seek(file.getLen() - blockInfo.getInfoSize());

            blockInfo.read(new DataInputViewStreamWrapper(fdis));
            totalCount += blockInfo.getAccumulatedRecordCount();
        }
    }

    final float avgWidth = totalCount == 0 ? 0 : ((float) stats.getTotalInputSize() / totalCount);
    return new SequentialStatistics(stats.getLastModificationTime(), stats.getTotalInputSize(), avgWidth, totalCount);
}
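The seek-and-read in the loop recovers a fixed-size BlockInfo footer that the write path appends at the end of each block. A sketch of that counterpart using DataOutputViewStreamWrapper (recordsInBlock, totalRecords, firstRecordOffset, and outputStream are placeholders for illustration, not Flink fields):

// Hypothetical write-side fragment: populate a BlockInfo and append it as
// the block footer, which createStatistics() later seeks back to and reads.
BlockInfo blockInfo = new BlockInfo();
blockInfo.setRecordCount(recordsInBlock);           // records in this block
blockInfo.setAccumulatedRecordCount(totalRecords);  // records so far in the file
blockInfo.setFirstRecordStart(firstRecordOffset);   // offset of the first record in the block
blockInfo.write(new DataOutputViewStreamWrapper(outputStream));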