Example 1 with ByteArrayInputStreamWithPos

Use of org.apache.flink.core.memory.ByteArrayInputStreamWithPos in project flink by apache, from the class SerializationProxiesTest, method testKeyedStateMetaInfoSerialization.

@Test
public void testKeyedStateMetaInfoSerialization() throws Exception {
    String name = "test";
    TypeSerializer<?> namespaceSerializer = LongSerializer.INSTANCE;
    TypeSerializer<?> stateSerializer = DoubleSerializer.INSTANCE;
    KeyedBackendSerializationProxy.StateMetaInfo<?, ?> metaInfo = new KeyedBackendSerializationProxy.StateMetaInfo<>(StateDescriptor.Type.VALUE, name, namespaceSerializer, stateSerializer);
    byte[] serialized;
    try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
        metaInfo.write(new DataOutputViewStreamWrapper(out));
        serialized = out.toByteArray();
    }
    metaInfo = new KeyedBackendSerializationProxy.StateMetaInfo<>(Thread.currentThread().getContextClassLoader());
    try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
        metaInfo.read(new DataInputViewStreamWrapper(in));
    }
    Assert.assertEquals(name, metaInfo.getStateName());
}
Also used: DataOutputViewStreamWrapper (org.apache.flink.core.memory.DataOutputViewStreamWrapper), ByteArrayInputStreamWithPos (org.apache.flink.core.memory.ByteArrayInputStreamWithPos), ByteArrayOutputStreamWithPos (org.apache.flink.core.memory.ByteArrayOutputStreamWithPos), DataInputViewStreamWrapper (org.apache.flink.core.memory.DataInputViewStreamWrapper), Test (org.junit.Test)
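
The write/read round trip shared by Examples 1 and 5 can be reduced to the following standalone sketch. It is not taken from the Flink sources; the class name RoundTripSketch and the choice of LongSerializer are illustrative assumptions.

import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.core.memory.ByteArrayInputStreamWithPos;
import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class RoundTripSketch {

    public static void main(String[] args) throws Exception {
        byte[] serialized;
        // write: serialize a value through a DataOutputView backed by the positioned stream
        try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
            LongSerializer.INSTANCE.serialize(42L, new DataOutputViewStreamWrapper(out));
            serialized = out.toByteArray();
        }
        // read: deserialize through a DataInputView backed by the positioned stream
        try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
            long restored = LongSerializer.INSTANCE.deserialize(new DataInputViewStreamWrapper(in));
            System.out.println(restored);
        }
    }
}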

Example 2 with ByteArrayInputStreamWithPos

Use of org.apache.flink.core.memory.ByteArrayInputStreamWithPos in project flink by apache, from the class RocksDBAggregatingState, method mergeNamespaces.

@Override
public void mergeNamespaces(N target, Collection<N> sources) throws Exception {
    if (sources == null || sources.isEmpty()) {
        return;
    }
    // cache key and namespace
    final K key = backend.getCurrentKey();
    final int keyGroup = backend.getCurrentKeyGroupIndex();
    try {
        ACC current = null;
        // merge the sources to the target
        for (N source : sources) {
            if (source != null) {
                writeKeyWithGroupAndNamespace(keyGroup, key, source, keySerializationStream, keySerializationDataOutputView);
                final byte[] sourceKey = keySerializationStream.toByteArray();
                final byte[] valueBytes = backend.db.get(columnFamily, sourceKey);
                if (valueBytes != null) {
                    ACC value = valueSerializer.deserialize(new DataInputViewStreamWrapper(new ByteArrayInputStreamWithPos(valueBytes)));
                    if (current != null) {
                        current = aggFunction.merge(current, value);
                    } else {
                        current = value;
                    }
                }
            }
        }
        // if something came out of merging the sources, merge it or write it to the target
        if (current != null) {
            // create the target full-binary-key 
            writeKeyWithGroupAndNamespace(keyGroup, key, target, keySerializationStream, keySerializationDataOutputView);
            final byte[] targetKey = keySerializationStream.toByteArray();
            final byte[] targetValueBytes = backend.db.get(columnFamily, targetKey);
            if (targetValueBytes != null) {
                // target also had a value, merge
                ACC value = valueSerializer.deserialize(new DataInputViewStreamWrapper(new ByteArrayInputStreamWithPos(targetValueBytes)));
                current = aggFunction.merge(current, value);
            }
            // serialize the resulting value
            keySerializationStream.reset();
            valueSerializer.serialize(current, keySerializationDataOutputView);
            // write the resulting value
            backend.db.put(columnFamily, writeOptions, targetKey, keySerializationStream.toByteArray());
        }
    } catch (Exception e) {
        throw new Exception("Error while merging state in RocksDB", e);
    }
}
Also used: ByteArrayInputStreamWithPos (org.apache.flink.core.memory.ByteArrayInputStreamWithPos), DataInputViewStreamWrapper (org.apache.flink.core.memory.DataInputViewStreamWrapper), IOException (java.io.IOException), RocksDBException (org.rocksdb.RocksDBException)
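
The per-entry work inside mergeNamespaces (deserialize two accumulators, merge them through the AggregateFunction, serialize the result) can be isolated into a helper like the one below. This is a hedged sketch, not Flink code; the method name mergeSerializedAccumulators and its parameters are assumptions, and it presumes imports of TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer) and AggregateFunction (org.apache.flink.api.common.functions.AggregateFunction) alongside the memory classes shown above.

private static <ACC> byte[] mergeSerializedAccumulators(
        byte[] currentBytes,
        byte[] otherBytes,
        TypeSerializer<ACC> serializer,
        AggregateFunction<?, ACC, ?> aggFunction) throws IOException {
    // deserialize both accumulators from positioned byte streams
    ACC current = serializer.deserialize(
            new DataInputViewStreamWrapper(new ByteArrayInputStreamWithPos(currentBytes)));
    ACC other = serializer.deserialize(
            new DataInputViewStreamWrapper(new ByteArrayInputStreamWithPos(otherBytes)));
    // merge, then serialize the result back to bytes for a RocksDB put
    ACC merged = aggFunction.merge(current, other);
    try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
        serializer.serialize(merged, new DataOutputViewStreamWrapper(out));
        return out.toByteArray();
    }
}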

Example 3 with ByteArrayInputStreamWithPos

Use of org.apache.flink.core.memory.ByteArrayInputStreamWithPos in project flink by apache, from the class RocksDBKeyedStateBackend, method restoreOldSavepointKeyedState.

/**
 * For backwards compatibility, remove again later!
 */
@Deprecated
private void restoreOldSavepointKeyedState(Collection<KeyGroupsStateHandle> restoreState) throws Exception {
    if (restoreState.isEmpty()) {
        return;
    }
    Preconditions.checkState(1 == restoreState.size(), "Only one element expected here.");
    HashMap<String, RocksDBStateBackend.FinalFullyAsyncSnapshot> namedStates;
    try (FSDataInputStream inputStream = restoreState.iterator().next().openInputStream()) {
        namedStates = InstantiationUtil.deserializeObject(inputStream, userCodeClassLoader);
    }
    Preconditions.checkState(1 == namedStates.size(), "Only one element expected here.");
    DataInputView inputView = namedStates.values().iterator().next().stateHandle.getState(userCodeClassLoader);
    // clear k/v state information before filling it
    kvStateInformation.clear();
    // first get the column family mapping
    int numColumns = inputView.readInt();
    Map<Byte, StateDescriptor<?, ?>> columnFamilyMapping = new HashMap<>(numColumns);
    for (int i = 0; i < numColumns; i++) {
        byte mappingByte = inputView.readByte();
        ObjectInputStream ooIn = new InstantiationUtil.ClassLoaderObjectInputStream(new DataInputViewStream(inputView), userCodeClassLoader);
        StateDescriptor stateDescriptor = (StateDescriptor) ooIn.readObject();
        columnFamilyMapping.put(mappingByte, stateDescriptor);
        // this will fill in the k/v state information
        getColumnFamily(stateDescriptor, MigrationNamespaceSerializerProxy.INSTANCE);
    }
    // try and read until EOF
    try {
        // the EOFException will get us out of this...
        while (true) {
            byte mappingByte = inputView.readByte();
            ColumnFamilyHandle handle = getColumnFamily(columnFamilyMapping.get(mappingByte), MigrationNamespaceSerializerProxy.INSTANCE);
            byte[] keyAndNamespace = BytePrimitiveArraySerializer.INSTANCE.deserialize(inputView);
            ByteArrayInputStreamWithPos bis = new ByteArrayInputStreamWithPos(keyAndNamespace);
            K reconstructedKey = keySerializer.deserialize(new DataInputViewStreamWrapper(bis));
            int len = bis.getPosition();
            int keyGroup = (byte) KeyGroupRangeAssignment.assignToKeyGroup(reconstructedKey, numberOfKeyGroups);
            if (keyGroupPrefixBytes == 1) {
                // copy and override one byte (42) between key and namespace
                System.arraycopy(keyAndNamespace, 0, keyAndNamespace, 1, len);
                keyAndNamespace[0] = (byte) keyGroup;
            } else {
                byte[] largerKey = new byte[1 + keyAndNamespace.length];
                // write key-group
                largerKey[0] = (byte) ((keyGroup >> 8) & 0xFF);
                largerKey[1] = (byte) (keyGroup & 0xFF);
                // write key
                System.arraycopy(keyAndNamespace, 0, largerKey, 2, len);
                //skip one byte (42), write namespace
                System.arraycopy(keyAndNamespace, 1 + len, largerKey, 2 + len, keyAndNamespace.length - len - 1);
                keyAndNamespace = largerKey;
            }
            byte[] value = BytePrimitiveArraySerializer.INSTANCE.deserialize(inputView);
            db.put(handle, keyAndNamespace, value);
        }
    } catch (EOFException e) {
    // expected
    }
}
Also used: HashMap (java.util.HashMap), DataInputView (org.apache.flink.core.memory.DataInputView), DataInputViewStreamWrapper (org.apache.flink.core.memory.DataInputViewStreamWrapper), ColumnFamilyHandle (org.rocksdb.ColumnFamilyHandle), ByteArrayInputStreamWithPos (org.apache.flink.core.memory.ByteArrayInputStreamWithPos), ReducingStateDescriptor (org.apache.flink.api.common.state.ReducingStateDescriptor), ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor), MapStateDescriptor (org.apache.flink.api.common.state.MapStateDescriptor), AggregatingStateDescriptor (org.apache.flink.api.common.state.AggregatingStateDescriptor), StateDescriptor (org.apache.flink.api.common.state.StateDescriptor), ValueStateDescriptor (org.apache.flink.api.common.state.ValueStateDescriptor), FoldingStateDescriptor (org.apache.flink.api.common.state.FoldingStateDescriptor), EOFException (java.io.EOFException), FSDataInputStream (org.apache.flink.core.fs.FSDataInputStream), DataInputViewStream (org.apache.flink.api.java.typeutils.runtime.DataInputViewStream), ObjectInputStream (java.io.ObjectInputStream)
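
The two branches in the loop above rebuild the key-group prefix that the newer RocksDB key layout carries in front of the serialized key and namespace. As a simplified illustration only (a sketch that omits the byte-42 separator handling, not the actual Flink code), the prefix layout can be written like this:

private static byte[] prefixWithKeyGroup(int keyGroup, int keyGroupPrefixBytes, byte[] keyAndNamespace) {
    byte[] result = new byte[keyGroupPrefixBytes + keyAndNamespace.length];
    if (keyGroupPrefixBytes == 1) {
        // all key groups fit into a single leading byte
        result[0] = (byte) keyGroup;
    } else {
        // two-byte, big-endian key-group prefix
        result[0] = (byte) ((keyGroup >> 8) & 0xFF);
        result[1] = (byte) (keyGroup & 0xFF);
    }
    // the serialized key and namespace follow the prefix unchanged
    System.arraycopy(keyAndNamespace, 0, result, keyGroupPrefixBytes, keyAndNamespace.length);
    return result;
}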

Example 4 with ByteArrayInputStreamWithPos

Use of org.apache.flink.core.memory.ByteArrayInputStreamWithPos in project flink by apache, from the class RocksDBMapState, method deserializeUserValue.

private UV deserializeUserValue(byte[] rawValueBytes) throws IOException {
    ByteArrayInputStreamWithPos bais = new ByteArrayInputStreamWithPos(rawValueBytes);
    DataInputViewStreamWrapper in = new DataInputViewStreamWrapper(bais);
    boolean isNull = in.readBoolean();
    return isNull ? null : userValueSerializer.deserialize(in);
}
Also used: ByteArrayInputStreamWithPos (org.apache.flink.core.memory.ByteArrayInputStreamWithPos), DataInputViewStreamWrapper (org.apache.flink.core.memory.DataInputViewStreamWrapper)
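
A plausible write-side counterpart would emit the null flag first and then the value, so that deserializeUserValue can read them back in the same order. This is only a sketch under that assumption; serializeUserValueSketch is not the actual RocksDBMapState method, and it relies on the same enclosing class context (the UV type parameter and the userValueSerializer field) plus ByteArrayOutputStreamWithPos and DataOutputViewStreamWrapper.

private byte[] serializeUserValueSketch(UV userValue) throws IOException {
    ByteArrayOutputStreamWithPos baos = new ByteArrayOutputStreamWithPos();
    DataOutputViewStreamWrapper out = new DataOutputViewStreamWrapper(baos);
    // write the null flag first, mirroring the read order in deserializeUserValue
    out.writeBoolean(userValue == null);
    if (userValue != null) {
        userValueSerializer.serialize(userValue, out);
    }
    return baos.toByteArray();
}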

Example 5 with ByteArrayInputStreamWithPos

Use of org.apache.flink.core.memory.ByteArrayInputStreamWithPos in project flink by apache, from the class TypeSerializerSerializationProxyTest, method testStateSerializerSerializationProxyClassNotFound.

@Test
public void testStateSerializerSerializationProxyClassNotFound() throws Exception {
    TypeSerializer<?> serializer = IntSerializer.INSTANCE;
    TypeSerializerSerializationProxy<?> proxy = new TypeSerializerSerializationProxy<>(serializer);
    byte[] serialized;
    try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
        proxy.write(new DataOutputViewStreamWrapper(out));
        serialized = out.toByteArray();
    }
    proxy = new TypeSerializerSerializationProxy<>(new URLClassLoader(new URL[0], null));
    try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
        proxy.read(new DataInputViewStreamWrapper(in));
        fail("ClassNotFoundException expected, leading to IOException");
    } catch (IOException expected) {
    }
    proxy = new TypeSerializerSerializationProxy<>(new URLClassLoader(new URL[0], null), true);
    try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
        proxy.read(new DataInputViewStreamWrapper(in));
    }
    Assert.assertTrue(proxy.getTypeSerializer() instanceof TypeSerializerSerializationProxy.ClassNotFoundDummyTypeSerializer);
    Assert.assertArrayEquals(InstantiationUtil.serializeObject(serializer), ((TypeSerializerSerializationProxy.ClassNotFoundDummyTypeSerializer<?>) proxy.getTypeSerializer()).getActualBytes());
}
Also used: DataOutputViewStreamWrapper (org.apache.flink.core.memory.DataOutputViewStreamWrapper), URLClassLoader (java.net.URLClassLoader), ByteArrayInputStreamWithPos (org.apache.flink.core.memory.ByteArrayInputStreamWithPos), IOException (java.io.IOException), ByteArrayOutputStreamWithPos (org.apache.flink.core.memory.ByteArrayOutputStreamWithPos), DataInputViewStreamWrapper (org.apache.flink.core.memory.DataInputViewStreamWrapper), Test (org.junit.Test)

Aggregations

ByteArrayInputStreamWithPos (org.apache.flink.core.memory.ByteArrayInputStreamWithPos): 19
DataInputViewStreamWrapper (org.apache.flink.core.memory.DataInputViewStreamWrapper): 19
DataOutputViewStreamWrapper (org.apache.flink.core.memory.DataOutputViewStreamWrapper): 12
ByteArrayOutputStreamWithPos (org.apache.flink.core.memory.ByteArrayOutputStreamWithPos): 10
Test (org.junit.Test): 9
IOException (java.io.IOException): 7
RocksDBException (org.rocksdb.RocksDBException): 6
ArrayList (java.util.ArrayList): 2
EOFException (java.io.EOFException): 1
ObjectInputStream (java.io.ObjectInputStream): 1
URLClassLoader (java.net.URLClassLoader): 1
HashMap (java.util.HashMap): 1
AggregatingStateDescriptor (org.apache.flink.api.common.state.AggregatingStateDescriptor): 1
FoldingStateDescriptor (org.apache.flink.api.common.state.FoldingStateDescriptor): 1
ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor): 1
MapStateDescriptor (org.apache.flink.api.common.state.MapStateDescriptor): 1
ReducingStateDescriptor (org.apache.flink.api.common.state.ReducingStateDescriptor): 1
StateDescriptor (org.apache.flink.api.common.state.StateDescriptor): 1
ValueStateDescriptor (org.apache.flink.api.common.state.ValueStateDescriptor): 1
DataInputViewStream (org.apache.flink.api.java.typeutils.runtime.DataInputViewStream): 1