Use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.
The class CompositeKeySerializationUtilsTest, method testKeySerializationAndDeserialization.
@Test
public void testKeySerializationAndDeserialization() throws Exception {
    final DataOutputSerializer outputView = new DataOutputSerializer(8);
    final DataInputDeserializer inputView = new DataInputDeserializer();
    // test for key
    for (int orgKey = 0; orgKey < 100; ++orgKey) {
        outputView.clear();
        CompositeKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, false);
        inputView.setBuffer(outputView.getCopyOfBuffer());
        int deserializedKey = CompositeKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, false);
        Assert.assertEquals(orgKey, deserializedKey);

        CompositeKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, true);
        inputView.setBuffer(outputView.getCopyOfBuffer());
        deserializedKey = CompositeKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, true);
        Assert.assertEquals(orgKey, deserializedKey);
    }
}
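For context, the round trip in this test is the general DataOutputSerializer / DataInputDeserializer pattern: write with a TypeSerializer, hand getCopyOfBuffer() to the input view, and read back. A minimal stand-alone sketch, independent of CompositeKeySerializationUtils (the class name is illustrative):

import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

public class RoundTripSketch {
    public static void main(String[] args) throws Exception {
        // The initial size is only a hint; the backing buffer grows on demand.
        DataOutputSerializer out = new DataOutputSerializer(8);
        IntSerializer.INSTANCE.serialize(42, out);

        // getCopyOfBuffer() returns exactly length() bytes, so the reader
        // never sees the unused tail of the internal array.
        DataInputDeserializer in = new DataInputDeserializer();
        in.setBuffer(out.getCopyOfBuffer());
        int restored = IntSerializer.INSTANCE.deserialize(in);
        System.out.println(restored); // 42
    }
}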
Use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.
The class CompositeKeySerializationUtilsTest, method testNamespaceSerializationAndDeserialization.
@Test
public void testNamespaceSerializationAndDeserialization() throws Exception {
    final DataOutputSerializer outputView = new DataOutputSerializer(8);
    final DataInputDeserializer inputView = new DataInputDeserializer();
    for (int orgNamespace = 0; orgNamespace < 100; ++orgNamespace) {
        outputView.clear();
        CompositeKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, false);
        inputView.setBuffer(outputView.getCopyOfBuffer());
        int deserializedNamespace = CompositeKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, false);
        Assert.assertEquals(orgNamespace, deserializedNamespace);

        CompositeKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, true);
        inputView.setBuffer(outputView.getCopyOfBuffer());
        deserializedNamespace = CompositeKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, true);
        Assert.assertEquals(orgNamespace, deserializedNamespace);
    }
}
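The final boolean argument in both tests is the ambiguousKeyPossible flag: when set, the utilities additionally record the serialized length, so that the key and namespace portions of a composite key can still be told apart for variable-length types. A hedged sketch of building one composite key with these utilities (helper names are Flink's; the concrete values are illustrative):

import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.core.memory.DataOutputSerializer;
import org.apache.flink.runtime.state.CompositeKeySerializationUtils;

public class CompositeKeySketch {
    public static void main(String[] args) throws Exception {
        // Ambiguity can only arise when both key and namespace are variable-length.
        boolean ambiguous = CompositeKeySerializationUtils.isAmbiguousKeyPossible(
                StringSerializer.INSTANCE, StringSerializer.INSTANCE);
        DataOutputSerializer out = new DataOutputSerializer(32);
        CompositeKeySerializationUtils.writeKeyGroup(13, 2, out); // 2-byte key-group prefix
        CompositeKeySerializationUtils.writeKey("user-7", StringSerializer.INSTANCE, out, ambiguous);
        CompositeKeySerializationUtils.writeNameSpace("window-1", StringSerializer.INSTANCE, out, ambiguous);
        byte[] compositeKey = out.getCopyOfBuffer(); // key group | key | namespace
    }
}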
Use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.
The class RocksDBIncrementalCheckpointUtilsTest, method testClipDBWithKeyGroupRangeHelper.
private void testClipDBWithKeyGroupRangeHelper(
        KeyGroupRange targetGroupRange,
        KeyGroupRange currentGroupRange,
        int keyGroupPrefixBytes) throws RocksDBException, IOException {
    try (RocksDB rocksDB = RocksDB.open(tmp.newFolder().getAbsolutePath());
            ColumnFamilyHandle columnFamilyHandle =
                    rocksDB.createColumnFamily(new ColumnFamilyDescriptor("test".getBytes()))) {
        int currentGroupRangeStart = currentGroupRange.getStartKeyGroup();
        int currentGroupRangeEnd = currentGroupRange.getEndKeyGroup();
        DataOutputSerializer outputView = new DataOutputSerializer(32);
        // Populate the column family: 100 entries per key group, each key
        // prefixed with its key group, as the RocksDB backend would write it.
        for (int i = currentGroupRangeStart; i <= currentGroupRangeEnd; ++i) {
            for (int j = 0; j < 100; ++j) {
                outputView.clear();
                CompositeKeySerializationUtils.writeKeyGroup(i, keyGroupPrefixBytes, outputView);
                CompositeKeySerializationUtils.writeKey(j, IntSerializer.INSTANCE, outputView, false);
                rocksDB.put(columnFamilyHandle, outputView.getCopyOfBuffer(), String.valueOf(j).getBytes());
            }
        }
        // Sanity check: every entry is readable before clipping.
        for (int i = currentGroupRangeStart; i <= currentGroupRangeEnd; ++i) {
            for (int j = 0; j < 100; ++j) {
                outputView.clear();
                CompositeKeySerializationUtils.writeKeyGroup(i, keyGroupPrefixBytes, outputView);
                CompositeKeySerializationUtils.writeKey(j, IntSerializer.INSTANCE, outputView, false);
                byte[] value = rocksDB.get(columnFamilyHandle, outputView.getCopyOfBuffer());
                Assert.assertEquals(String.valueOf(j), new String(value));
            }
        }
        // Clip the DB down to the target key-group range.
        RocksDBIncrementalCheckpointUtils.clipDBWithKeyGroupRange(
                rocksDB,
                Collections.singletonList(columnFamilyHandle),
                targetGroupRange,
                currentGroupRange,
                keyGroupPrefixBytes,
                RocksDBConfigurableOptions.WRITE_BATCH_SIZE.defaultValue().getBytes());
        // Entries inside the target range must survive; everything else must be gone.
        for (int i = currentGroupRangeStart; i <= currentGroupRangeEnd; ++i) {
            for (int j = 0; j < 100; ++j) {
                outputView.clear();
                CompositeKeySerializationUtils.writeKeyGroup(i, keyGroupPrefixBytes, outputView);
                CompositeKeySerializationUtils.writeKey(j, IntSerializer.INSTANCE, outputView, false);
                byte[] value = rocksDB.get(columnFamilyHandle, outputView.getCopyOfBuffer());
                if (targetGroupRange.contains(i)) {
                    Assert.assertEquals(String.valueOf(j), new String(value));
                } else {
                    Assert.assertNull(value);
                }
            }
        }
    }
}
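Each database key here carries a fixed-width key-group prefix, which is what allows clipDBWithKeyGroupRange to drop whole key-group ranges by prefix. The prefix width is derived from the job's maximum parallelism; a small sketch of the helper the backend uses for that (the printed values assume the usual one-byte/two-byte cutoff):

import org.apache.flink.runtime.state.CompositeKeySerializationUtils;

public class PrefixWidthSketch {
    public static void main(String[] args) {
        // One byte suffices while the total number of key groups is small...
        System.out.println(CompositeKeySerializationUtils.computeRequiredBytesInKeyGroupPrefix(128));  // 1
        // ...two bytes once the key-group count no longer fits in one.
        System.out.println(CompositeKeySerializationUtils.computeRequiredBytesInKeyGroupPrefix(4096)); // 2
    }
}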
Use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.
The class EventSerializer, method toSerializedEvent.
// ------------------------------------------------------------------------
//  Serialization Logic
// ------------------------------------------------------------------------

public static ByteBuffer toSerializedEvent(AbstractEvent event) throws IOException {
    final Class<?> eventClass = event.getClass();
    if (eventClass == EndOfPartitionEvent.class) {
        return ByteBuffer.wrap(new byte[] {0, 0, 0, END_OF_PARTITION_EVENT});
    } else if (eventClass == CheckpointBarrier.class) {
        return serializeCheckpointBarrier((CheckpointBarrier) event);
    } else if (eventClass == EndOfSuperstepEvent.class) {
        return ByteBuffer.wrap(new byte[] {0, 0, 0, END_OF_SUPERSTEP_EVENT});
    } else if (eventClass == EndOfChannelStateEvent.class) {
        return ByteBuffer.wrap(new byte[] {0, 0, 0, END_OF_CHANNEL_STATE_EVENT});
    } else if (eventClass == EndOfData.class) {
        return ByteBuffer.wrap(
                new byte[] {
                    0, 0, 0, END_OF_USER_RECORDS_EVENT,
                    (byte) ((EndOfData) event).getStopMode().ordinal()
                });
    } else if (eventClass == CancelCheckpointMarker.class) {
        CancelCheckpointMarker marker = (CancelCheckpointMarker) event;
        ByteBuffer buf = ByteBuffer.allocate(12);
        buf.putInt(0, CANCEL_CHECKPOINT_MARKER_EVENT);
        buf.putLong(4, marker.getCheckpointId());
        return buf;
    } else if (eventClass == EventAnnouncement.class) {
        EventAnnouncement announcement = (EventAnnouncement) event;
        ByteBuffer serializedAnnouncedEvent = toSerializedEvent(announcement.getAnnouncedEvent());
        ByteBuffer serializedAnnouncement =
                ByteBuffer.allocate(2 * Integer.BYTES + serializedAnnouncedEvent.capacity());
        serializedAnnouncement.putInt(0, ANNOUNCEMENT_EVENT);
        serializedAnnouncement.putInt(4, announcement.getSequenceNumber());
        serializedAnnouncement.position(8);
        serializedAnnouncement.put(serializedAnnouncedEvent);
        serializedAnnouncement.flip();
        return serializedAnnouncement;
    } else if (eventClass == SubtaskConnectionDescriptor.class) {
        SubtaskConnectionDescriptor selector = (SubtaskConnectionDescriptor) event;
        ByteBuffer buf = ByteBuffer.allocate(12);
        buf.putInt(VIRTUAL_CHANNEL_SELECTOR_EVENT);
        buf.putInt(selector.getInputSubtaskIndex());
        buf.putInt(selector.getOutputSubtaskIndex());
        buf.flip();
        return buf;
    } else {
        try {
            final DataOutputSerializer serializer = new DataOutputSerializer(128);
            serializer.writeInt(OTHER_EVENT);
            serializer.writeUTF(event.getClass().getName());
            event.write(serializer);
            return serializer.wrapAsByteBuffer();
        } catch (IOException e) {
            throw new IOException("Error while serializing event.", e);
        }
    }
}
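The first four bytes of every serialized event are an integer type tag, which is why the fixed events can be written as literal byte arrays with three leading zeros. Deserialization mirrors this by reading the tag and mapping it back to an event. A short round trip through the public API (a sketch; the class loader argument is only consulted for the generic OTHER_EVENT path):

import java.nio.ByteBuffer;
import org.apache.flink.runtime.event.AbstractEvent;
import org.apache.flink.runtime.io.network.api.EndOfPartitionEvent;
import org.apache.flink.runtime.io.network.api.serialization.EventSerializer;

public class EventRoundTripSketch {
    public static void main(String[] args) throws Exception {
        ByteBuffer buf = EventSerializer.toSerializedEvent(EndOfPartitionEvent.INSTANCE);
        AbstractEvent restored = EventSerializer.fromSerializedEvent(
                buf, EventRoundTripSketch.class.getClassLoader());
        // Known tags resolve without reflection; expect the EndOfPartitionEvent back.
        System.out.println(restored.getClass().getSimpleName());
    }
}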
Use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.
The class MultiInputSortingDataInput, method wrapInputs.
public static <K> SelectableSortingInputs wrapInputs(
        TaskInvokable containingTask,
        StreamTaskInput<Object>[] sortingInputs,
        KeySelector<Object, K>[] keySelectors,
        TypeSerializer<Object>[] inputSerializers,
        TypeSerializer<K> keySerializer,
        StreamTaskInput<Object>[] passThroughInputs,
        MemoryManager memoryManager,
        IOManager ioManager,
        boolean objectReuse,
        double managedMemoryFraction,
        Configuration jobConfiguration,
        ExecutionConfig executionConfig) {
    // Fixed-length keys get an exactly sized buffer and the cheaper comparator.
    int keyLength = keySerializer.getLength();
    final TypeComparator<Tuple2<byte[], StreamRecord<Object>>> comparator;
    DataOutputSerializer dataOutputSerializer;
    if (keyLength > 0) {
        dataOutputSerializer = new DataOutputSerializer(keyLength);
        comparator = new FixedLengthByteKeyComparator<>(keyLength);
    } else {
        dataOutputSerializer = new DataOutputSerializer(64);
        comparator = new VariableLengthByteKeyComparator<>();
    }
    List<Integer> passThroughInputIndices = Arrays.stream(passThroughInputs)
            .map(StreamTaskInput::getInputIndex)
            .collect(Collectors.toList());
    int numberOfInputs = sortingInputs.length + passThroughInputs.length;
    CommonContext commonContext = new CommonContext(sortingInputs);
    InputSelector inputSelector = new InputSelector(commonContext, numberOfInputs, passThroughInputIndices);
    StreamTaskInput<?>[] wrappedSortingInputs = IntStream.range(0, sortingInputs.length)
            .mapToObj(idx -> {
                try {
                    KeyAndValueSerializer<Object> keyAndValueSerializer =
                            new KeyAndValueSerializer<>(inputSerializers[idx], keyLength);
                    // Managed memory and file handles are split evenly across all inputs.
                    return new MultiInputSortingDataInput<>(
                            commonContext,
                            sortingInputs[idx],
                            sortingInputs[idx].getInputIndex(),
                            ExternalSorter.newBuilder(memoryManager, containingTask, keyAndValueSerializer, comparator, executionConfig)
                                    .memoryFraction(managedMemoryFraction / numberOfInputs)
                                    .enableSpilling(ioManager, jobConfiguration.get(AlgorithmOptions.SORT_SPILLING_THRESHOLD))
                                    .maxNumFileHandles(jobConfiguration.get(AlgorithmOptions.SPILLING_MAX_FAN) / numberOfInputs)
                                    .objectReuse(objectReuse)
                                    .largeRecords(true)
                                    .build(),
                            keySelectors[idx],
                            keySerializer,
                            dataOutputSerializer);
                } catch (MemoryAllocationException e) {
                    // Keep the cause instead of swallowing it.
                    throw new RuntimeException(e);
                }
            })
            .toArray(StreamTaskInput[]::new);
    StreamTaskInput<?>[] wrappedPassThroughInputs = Arrays.stream(passThroughInputs)
            .map(input -> new ObservableStreamTaskInput<>(input, inputSelector))
            .toArray(StreamTaskInput[]::new);
    return new SelectableSortingInputs(wrappedSortingInputs, wrappedPassThroughInputs, inputSelector);
}
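The keyLength branch above leans on the TypeSerializer contract: getLength() returns the fixed byte size of a record, or a negative value for variable-length types. A two-line illustration:

import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.api.common.typeutils.base.StringSerializer;

public class KeyLengthSketch {
    public static void main(String[] args) {
        System.out.println(IntSerializer.INSTANCE.getLength());    // 4: fixed-size keys
        System.out.println(StringSerializer.INSTANCE.getLength()); // -1: variable-length keys
    }
}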