Use of org.apache.flink.core.memory.DataInputViewStreamWrapper in project flink by apache.
The class SerializedListAccumulator, method deserializeList.
@SuppressWarnings("unchecked")
public static <T> List<T> deserializeList(ArrayList<byte[]> data, TypeSerializer<T> serializer)
        throws IOException, ClassNotFoundException {
    List<T> result = new ArrayList<T>(data.size());
    for (byte[] bytes : data) {
        ByteArrayInputStream inStream = new ByteArrayInputStream(bytes);
        DataInputViewStreamWrapper in = new DataInputViewStreamWrapper(inStream);
        T val = serializer.deserialize(in);
        result.add(val);
    }
    return result;
}
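
The write side is symmetric: each element is serialized into its own byte[] through a DataOutputViewStreamWrapper over a ByteArrayOutputStream. The following is a minimal sketch of that counterpart, not the accumulator's actual code; the helper name serializeList is illustrative.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class SerializeListSketch {

    // Illustrative sketch: serialize each value into its own byte[], producing the
    // ArrayList<byte[]> shape that deserializeList above expects.
    public static <T> ArrayList<byte[]> serializeList(List<T> values, TypeSerializer<T> serializer)
            throws IOException {
        ArrayList<byte[]> result = new ArrayList<>(values.size());
        for (T value : values) {
            ByteArrayOutputStream outStream = new ByteArrayOutputStream();
            DataOutputViewStreamWrapper out = new DataOutputViewStreamWrapper(outStream);
            serializer.serialize(value, out);
            result.add(outStream.toByteArray());
        }
        return result;
    }
}

A list produced this way round-trips through deserializeList above when the same TypeSerializer is used on both sides.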
Use of org.apache.flink.core.memory.DataInputViewStreamWrapper in project flink by apache.
The class SocketStreamIterator, method readNextFromStream.
private T readNextFromStream() throws Exception {
    try {
        if (inStream == null) {
            connectedSocket = socket.accept();
            inStream = new DataInputViewStreamWrapper(connectedSocket.getInputStream());
        }
        return serializer.deserialize(inStream);
    } catch (EOFException e) {
        try {
            connectedSocket.close();
        } catch (Throwable ignored) {
        }
        try {
            socket.close();
        } catch (Throwable ignored) {
        }
        return null;
    } catch (Exception e) {
        if (error == null) {
            throw e;
        } else {
            // throw the root cause error
            throw new Exception("Receiving stream failed: " + error.getMessage(), error);
        }
    }
}
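
The iterator accepts the connection lazily on the first read and then reuses the same DataInputViewStreamWrapper for every subsequent record; an EOFException from the serializer marks the end of the stream. Below is a simplified, self-contained sketch of that read loop, assuming a ServerSocket supplied by the caller (readAll is an illustrative name, not Flink API).

import java.io.EOFException;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;

public class SocketReadSketch {

    // Illustrative sketch: wrap the socket's InputStream once and keep deserializing
    // records until the sender closes the connection.
    public static <T> List<T> readAll(ServerSocket serverSocket, TypeSerializer<T> serializer)
            throws IOException {
        List<T> records = new ArrayList<>();
        try (Socket socket = serverSocket.accept();
             DataInputViewStreamWrapper in = new DataInputViewStreamWrapper(socket.getInputStream())) {
            while (true) {
                try {
                    records.add(serializer.deserialize(in));
                } catch (EOFException eof) {
                    break; // sender closed the connection, no more records
                }
            }
        }
        return records;
    }
}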
Use of org.apache.flink.core.memory.DataInputViewStreamWrapper in project flink by apache.
The class InstantiationUtil, method createCopyWritable.
/**
 * Clones the given writable using the {@link IOReadableWritable serialization}.
 *
 * @param original Object to clone
 * @param <T> Type of the object to clone
 * @return Cloned object
 * @throws IOException Thrown if the serialization fails.
 */
public static <T extends IOReadableWritable> T createCopyWritable(T original) throws IOException {
    if (original == null) {
        return null;
    }
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (DataOutputViewStreamWrapper out = new DataOutputViewStreamWrapper(baos)) {
        original.write(out);
    }
    final ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    try (DataInputViewStreamWrapper in = new DataInputViewStreamWrapper(bais)) {
        @SuppressWarnings("unchecked")
        T copy = (T) instantiate(original.getClass());
        copy.read(in);
        return copy;
    }
}
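
A minimal usage sketch, assuming Flink's org.apache.flink.configuration.Configuration (which implements IOReadableWritable and has a public no-argument constructor) is on the classpath; the config key used here is only an example.

import org.apache.flink.configuration.Configuration;
import org.apache.flink.util.InstantiationUtil;

public class CopyWritableExample {

    public static void main(String[] args) throws Exception {
        Configuration original = new Configuration();
        original.setString("taskmanager.host", "localhost");

        // Deep copy via the serialization round trip shown above.
        Configuration copy = InstantiationUtil.createCopyWritable(original);

        // The copy is a distinct instance with the same serialized contents.
        System.out.println(copy.getString("taskmanager.host", null)); // prints "localhost"
        System.out.println(copy != original);                          // prints "true"
    }
}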
Use of org.apache.flink.core.memory.DataInputViewStreamWrapper in project flink by apache.
The class SavepointStore, method loadSavepointWithHandle.
/**
 * Loads the savepoint at the specified path. This method returns the savepoint, as well as the
 * handle to the metadata.
 *
 * @param savepointFileOrDirectory Path to the parent savepoint directory or the meta data file.
 * @param classLoader The class loader used to resolve serialized classes from legacy savepoint formats.
 * @return The loaded savepoint
 *
 * @throws IOException Failures during load are forwarded
 */
public static Tuple2<Savepoint, StreamStateHandle> loadSavepointWithHandle(String savepointFileOrDirectory, ClassLoader classLoader) throws IOException {
    checkNotNull(savepointFileOrDirectory, "savepointFileOrDirectory");
    checkNotNull(classLoader, "classLoader");
    Path path = new Path(savepointFileOrDirectory);
    LOG.info("Loading savepoint from {}", path);
    FileSystem fs = FileSystem.get(path.toUri());
    FileStatus status = fs.getFileStatus(path);
    // If this is a directory, we need to find the meta data file
    if (status.isDir()) {
        Path candidatePath = new Path(path, SAVEPOINT_METADATA_FILE);
        if (fs.exists(candidatePath)) {
            path = candidatePath;
            LOG.info("Using savepoint file in {}", path);
        } else {
            throw new IOException("Cannot find meta data file in directory " + path
                    + ". Please try to load the savepoint directly from the meta data file "
                    + "instead of the directory.");
        }
    }
    // load the savepoint
    final Savepoint savepoint;
    try (DataInputStream dis = new DataInputViewStreamWrapper(fs.open(path))) {
        int magicNumber = dis.readInt();
        if (magicNumber == MAGIC_NUMBER) {
            int version = dis.readInt();
            SavepointSerializer<?> serializer = SavepointSerializers.getSerializer(version);
            savepoint = serializer.deserialize(dis, classLoader);
        } else {
            throw new RuntimeException("Unexpected magic number. This can have multiple reasons: "
                    + "(1) You are trying to load a Flink 1.0 savepoint, which is not supported by this "
                    + "version of Flink. (2) The file you were pointing to is not a savepoint at all. "
                    + "(3) The savepoint file has been corrupted.");
        }
    }
    // construct the stream handle to the metadata file
    // we get the size best-effort
    long size = 0;
    try {
        size = fs.getFileStatus(path).getLen();
    } catch (Exception ignored) {
        // we don't know the size, but we don't want to fail the savepoint loading for that
    }
    StreamStateHandle metadataHandle = new FileStateHandle(path, size);
    return new Tuple2<>(savepoint, metadataHandle);
}
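
The loader expects a fixed on-disk layout: an int magic number, an int format version, and then the version-specific payload. The following is a hedged, purely illustrative write-side sketch of that header; MAGIC_NUMBER_SKETCH is a placeholder value rather than Flink's real constant, and SavepointLayoutSketch is not part of SavepointStore.

import java.io.IOException;
import java.io.OutputStream;

import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class SavepointLayoutSketch {

    // Placeholder value for illustration only; Flink defines its own constant.
    private static final int MAGIC_NUMBER_SKETCH = 0xCAFEBABE;

    public static void writeHeader(OutputStream rawOut, int formatVersion) throws IOException {
        DataOutputViewStreamWrapper out = new DataOutputViewStreamWrapper(rawOut);
        out.writeInt(MAGIC_NUMBER_SKETCH); // the loader checks this first
        out.writeInt(formatVersion);       // selects the SavepointSerializer on read
        // ... the version-specific serializer would write the savepoint payload here ...
        out.flush();
    }
}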
Use of org.apache.flink.core.memory.DataInputViewStreamWrapper in project flink by apache.
The class SerializationProxiesTest, method testKeyedStateMetaInfoSerialization.
@Test
public void testKeyedStateMetaInfoSerialization() throws Exception {
    String name = "test";
    TypeSerializer<?> namespaceSerializer = LongSerializer.INSTANCE;
    TypeSerializer<?> stateSerializer = DoubleSerializer.INSTANCE;
    KeyedBackendSerializationProxy.StateMetaInfo<?, ?> metaInfo =
            new KeyedBackendSerializationProxy.StateMetaInfo<>(
                    StateDescriptor.Type.VALUE, name, namespaceSerializer, stateSerializer);
    byte[] serialized;
    try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
        metaInfo.write(new DataOutputViewStreamWrapper(out));
        serialized = out.toByteArray();
    }
    metaInfo = new KeyedBackendSerializationProxy.StateMetaInfo<>(Thread.currentThread().getContextClassLoader());
    try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
        metaInfo.read(new DataInputViewStreamWrapper(in));
    }
    Assert.assertEquals(name, metaInfo.getStateName());
}
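
The test follows the common write/read round-trip pattern used throughout these examples: serialize into an in-memory buffer through DataOutputViewStreamWrapper, then read back through DataInputViewStreamWrapper. A generic sketch of that pattern for a single value (roundTrip is an illustrative helper, not Flink API):

import java.io.IOException;

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.ByteArrayInputStreamWithPos;
import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class RoundTripSketch {

    // Illustrative sketch: serialize a value to bytes and read it back with the same serializer.
    public static <T> T roundTrip(T value, TypeSerializer<T> serializer) throws IOException {
        byte[] serialized;
        try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
            serializer.serialize(value, new DataOutputViewStreamWrapper(out));
            serialized = out.toByteArray();
        }
        try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
            return serializer.deserialize(new DataInputViewStreamWrapper(in));
        }
    }
}

For example, roundTrip(42.0, DoubleSerializer.INSTANCE) should return 42.0.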