use of com.github.ambry.store.Message in project ambry by linkedin.
the class MessageSievingInputStream method validateAndTransform.
/**
 * Validates and potentially transforms the given input stream consisting of message data, using the list of
 * {@link Transformer}s associated with this instance to check for message corruption and acceptable formats.
 * @param inMsg the original {@link Message} that needs to be validated and possibly transformed.
 * @param msgStreamList the output list to which the sieved stream output is to be added.
 * @param msgOffset the offset of the message in the stream.
 * @throws IOException if an exception was encountered reading or writing bytes to/from streams.
 */
private void validateAndTransform(Message inMsg, List<InputStream> msgStreamList, int msgOffset) throws IOException {
  if (transformers == null || transformers.isEmpty()) {
    // Write the message without any transformations.
    sievedMessageInfoList.add(inMsg.getMessageInfo());
    msgStreamList.add(inMsg.getStream());
  } else {
    long sieveStartTime = SystemTime.getInstance().milliseconds();
    Message msg = inMsg;
    TransformationOutput output = null;
    for (Transformer transformer : transformers) {
      output = transformer.transform(msg);
      if (output.getException() != null || output.getMsg() == null) {
        break;
      } else {
        msg = output.getMsg();
      }
    }
    if (output.getException() != null) {
      if (output.getException() instanceof MessageFormatException) {
        logger.error("Error validating/transforming the message at {} with messageInfo {} and hence skipping the message",
            msgOffset, inMsg.getMessageInfo(), output.getException());
        hasInvalidMessages = true;
        messageSievingCorruptMessagesDiscardedCount.inc();
      } else {
        throw new IOException("Encountered exception during transformation", output.getException());
      }
    } else if (output.getMsg() == null) {
      logger.trace("Transformation is on, and the message with id {} does not have a replacement and was discarded.",
          inMsg.getMessageInfo().getStoreKey());
      hasDeprecatedMessages = true;
      messageSievingDeprecatedMessagesDiscardedCount.inc();
    } else {
      MessageInfo tfmMsgInfo = output.getMsg().getMessageInfo();
      sievedMessageInfoList.add(tfmMsgInfo);
      msgStreamList.add(output.getMsg().getStream());
      logger.trace("Original message length {}, transformed bytes read {}",
          inMsg.getMessageInfo().getSize(), tfmMsgInfo.getSize());
    }
    singleMessageSieveTime.update(SystemTime.getInstance().milliseconds() - sieveStartTime);
  }
}
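
The loop above defines the Transformer contract implicitly: an exception in the output stops the chain (a MessageFormatException means the message is skipped, anything else is rethrown as an IOException), a null message means the input was deprecated and is dropped, and a non-null message is passed on to the next transformer. A minimal sketch of a conforming transformer follows; this is not project code, and the real Transformer interface may declare more methods than the one shown here.

import com.github.ambry.store.Message;
import com.github.ambry.store.TransformationOutput;
import com.github.ambry.store.Transformer;

// Sketch of the contract exercised by validateAndTransform: pass the message
// through unchanged, drop it, or report an error via the TransformationOutput.
public class PassThroughTransformer implements Transformer {
  @Override
  public TransformationOutput transform(Message message) {
    if (message == null || message.getStream() == null) {
      // Surfaced by the sieve: rethrown as IOException unless it is a MessageFormatException.
      return new TransformationOutput(new IllegalArgumentException("message or its stream is null"));
    }
    // Returning the input unchanged keeps it in the sieved output; returning
    // new TransformationOutput((Message) null) would discard it as deprecated.
    return new TransformationOutput(message);
  }
}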
use of com.github.ambry.store.Message in project ambry by linkedin.
the class BlobIdTransformerTest method testNullComponentsTransformInput.
/**
 * Tests using the transformer with Message inputs that have null components.
 * @throws Exception
 */
@Test
public void testNullComponentsTransformInput() throws Exception {
  MessageInfo messageInfo = new MessageInfo(createBlobId(VERSION_1_UNCONVERTED), 123, (short) 123, (short) 123, 0L);
  // null msgBytes
  Message message = new Message(messageInfo, null);
  assertException(transformer.transform(message), NullPointerException.class);
  // null messageInfo
  message = new Message(null, new ByteArrayInputStream(new byte[30]));
  assertException(transformer.transform(message), NullPointerException.class);
}
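
The assertException helper referenced above is not shown on this page; a plausible shape, hypothetical and inferred only from how it is called, would be:

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import com.github.ambry.store.TransformationOutput;

// Hypothetical helper matching the call sites above: the transformer is
// expected to capture the failure inside the TransformationOutput rather than throw.
private static void assertException(TransformationOutput output, Class<? extends Exception> exceptionClass) {
  assertNotNull("Expected the transform output to carry an exception", output.getException());
  assertTrue("Expected an exception of type " + exceptionClass.getName(),
      exceptionClass.isInstance(output.getException()));
}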
use of com.github.ambry.store.Message in project ambry by linkedin.
the class BlobIdTransformer method newMessage.
/**
 * Creates a new Message from the old Message's input stream, replacing the old store key and
 * account/container IDs with the new store key and its account/container IDs.
 * @param inputStream the input stream of the Message
 * @param newKey the new StoreKey
 * @param oldMessageInfo the {@link MessageInfo} of the message being transformed
 * @return the new Message
 * @throws Exception
 */
private Message newMessage(InputStream inputStream, StoreKey newKey, MessageInfo oldMessageInfo) throws Exception {
  MessageHeader_Format headerFormat = getMessageHeader(inputStream);
  storeKeyFactory.getStoreKey(new DataInputStream(inputStream));
  BlobId newBlobId = (BlobId) newKey;
  if (headerFormat.isPutRecord()) {
    if (headerFormat.hasLifeVersion() && headerFormat.getLifeVersion() != oldMessageInfo.getLifeVersion()) {
      // The original Put buffer might have lifeVersion as 0, but the message info might have a higher lifeVersion.
      logger.trace("LifeVersion in stream: {} failed to match lifeVersion from Index: {} for key {}",
          headerFormat.getLifeVersion(), oldMessageInfo.getLifeVersion(), oldMessageInfo.getStoreKey());
    }
    ByteBuffer blobEncryptionKey = null;
    if (headerFormat.hasEncryptionKeyRecord()) {
      blobEncryptionKey = deserializeBlobEncryptionKey(inputStream);
    }
    BlobProperties oldProperties = deserializeBlobProperties(inputStream);
    ByteBuffer userMetaData = deserializeUserMetadata(inputStream);
    BlobData blobData = deserializeBlob(inputStream);
    ByteBuf blobDataBytes = blobData.content();
    long blobPropertiesSize = oldProperties.getBlobSize();
    // will be rewritten with transformed IDs
    if (blobData.getBlobType().equals(BlobType.MetadataBlob)) {
      ByteBuffer serializedMetadataContent = blobDataBytes.nioBuffer();
      CompositeBlobInfo compositeBlobInfo =
          MetadataContentSerDe.deserializeMetadataContentRecord(serializedMetadataContent, storeKeyFactory);
      Map<StoreKey, StoreKey> convertedKeys = storeKeyConverter.convert(compositeBlobInfo.getKeys());
      List<StoreKey> newKeys = new ArrayList<>();
      boolean isOldMetadataKeyDifferentFromNew = !oldMessageInfo.getStoreKey().getID().equals(newKey.getID());
      short metadataAccountId = newBlobId.getAccountId();
      short metadataContainerId = newBlobId.getContainerId();
      for (StoreKey oldDataChunkKey : compositeBlobInfo.getKeys()) {
        StoreKey newDataChunkKey = convertedKeys.get(oldDataChunkKey);
        if (newDataChunkKey == null) {
          throw new IllegalStateException("Found metadata chunk with a deprecated data chunk. "
              + " Old MetadataID: " + oldMessageInfo.getStoreKey().getID()
              + " New MetadataID: " + newKey.getID()
              + " Old Datachunk ID: " + oldDataChunkKey.getID());
        }
        if (isOldMetadataKeyDifferentFromNew && newDataChunkKey.getID().equals(oldDataChunkKey.getID())) {
          throw new IllegalStateException("Found changed metadata chunk with an unchanged data chunk"
              + " Old MetadataID: " + oldMessageInfo.getStoreKey().getID()
              + " New MetadataID: " + newKey.getID()
              + " Old Datachunk ID: " + oldDataChunkKey.getID());
        }
        if (!isOldMetadataKeyDifferentFromNew && !newDataChunkKey.getID().equals(oldDataChunkKey.getID())) {
          throw new IllegalStateException("Found unchanged metadata chunk with a changed data chunk"
              + " Old MetadataID: " + oldMessageInfo.getStoreKey().getID()
              + " New MetadataID: " + newKey.getID()
              + " Old Datachunk ID: " + oldDataChunkKey.getID()
              + " New Datachunk ID: " + newDataChunkKey.getID());
        }
        BlobId newDataChunkBlobId = (BlobId) newDataChunkKey;
        if (newDataChunkBlobId.getAccountId() != metadataAccountId
            || newDataChunkBlobId.getContainerId() != metadataContainerId) {
          throw new IllegalStateException("Found changed metadata chunk with a datachunk with a different account/container"
              + " Old MetadataID: " + oldMessageInfo.getStoreKey().getID()
              + " New MetadataID: " + newKey.getID()
              + " Old Datachunk ID: " + oldDataChunkKey.getID()
              + " New Datachunk ID: " + newDataChunkBlobId.getID()
              + " Metadata AccountId: " + metadataAccountId
              + " Metadata ContainerId: " + metadataContainerId
              + " Datachunk AccountId: " + newDataChunkBlobId.getAccountId()
              + " Datachunk ContainerId: " + newDataChunkBlobId.getContainerId());
        }
        newKeys.add(newDataChunkKey);
      }
      ByteBuffer metadataContent;
      if (compositeBlobInfo.getMetadataContentVersion() == Metadata_Content_Version_V2) {
        metadataContent = MetadataContentSerDe.serializeMetadataContentV2(compositeBlobInfo.getChunkSize(),
            compositeBlobInfo.getTotalSize(), newKeys);
      } else if (compositeBlobInfo.getMetadataContentVersion() == Metadata_Content_Version_V3) {
        List<Pair<StoreKey, Long>> keyAndSizeList = new ArrayList<>();
        List<CompositeBlobInfo.ChunkMetadata> chunkMetadataList = compositeBlobInfo.getChunkMetadataList();
        for (int i = 0; i < newKeys.size(); i++) {
          keyAndSizeList.add(new Pair<>(newKeys.get(i), chunkMetadataList.get(i).getSize()));
        }
        metadataContent = MetadataContentSerDe.serializeMetadataContentV3(compositeBlobInfo.getTotalSize(), keyAndSizeList);
      } else {
        throw new IllegalStateException("Unexpected metadata content version from composite blob: "
            + compositeBlobInfo.getMetadataContentVersion());
      }
      blobPropertiesSize = compositeBlobInfo.getTotalSize();
      metadataContent.flip();
      blobDataBytes.release();
      blobDataBytes = Unpooled.wrappedBuffer(metadataContent);
      blobData = new BlobData(blobData.getBlobType(), metadataContent.remaining(), blobDataBytes);
    }
    BlobProperties newProperties = new BlobProperties(blobPropertiesSize, oldProperties.getServiceId(),
        oldProperties.getOwnerId(), oldProperties.getContentType(), oldProperties.isPrivate(),
        oldProperties.getTimeToLiveInSeconds(), oldProperties.getCreationTimeInMs(), newBlobId.getAccountId(),
        newBlobId.getContainerId(), oldProperties.isEncrypted(), oldProperties.getExternalAssetTag(),
        oldProperties.getContentEncoding(), oldProperties.getFilename());
    // BlobIdTransformer only exists on ambry-server, and replication between servers relies on the blocking
    // channel, which still uses java ByteBuffer. So there is no need to consider releasing anything here.
    // @todo when netty ByteBuf is adopted for the blocking channel on ambry-server, remember to release this ByteBuf.
    PutMessageFormatInputStream putMessageFormatInputStream =
        new PutMessageFormatInputStream(newKey, blobEncryptionKey, newProperties, userMetaData,
            new ByteBufInputStream(blobDataBytes, true), blobData.getSize(), blobData.getBlobType(),
            oldMessageInfo.getLifeVersion());
    // Reuse the original CRC if present in the oldMessageInfo. This is important to ensure that messages that are
    // received via replication are sent to the store with proper CRCs (which the store needs to detect duplicate
    // messages). As an additional guard, here the original CRC is only reused if the key's ID in string form is the
    // same after conversion.
    Long originalCrc = oldMessageInfo.getStoreKey().getID().equals(newKey.getID()) ? oldMessageInfo.getCrc() : null;
    MessageInfo info = new MessageInfo.Builder(newKey, putMessageFormatInputStream.getSize(),
        newProperties.getAccountId(), newProperties.getContainerId(), oldMessageInfo.getOperationTimeMs())
        .isTtlUpdated(oldMessageInfo.isTtlUpdated())
        .expirationTimeInMs(oldMessageInfo.getExpirationTimeInMs())
        .crc(originalCrc)
        .lifeVersion(oldMessageInfo.getLifeVersion())
        .build();
    return new Message(info, putMessageFormatInputStream);
  } else {
    throw new IllegalArgumentException("Only 'put' records are valid");
  }
}
use of com.github.ambry.store.Message in project ambry by linkedin.
the class BlobIdTransformer method transform.
@Override
public TransformationOutput transform(Message message) {
  Message transformedMsg = null;
  try {
    Objects.requireNonNull(message, "message must not be null");
    Objects.requireNonNull(message.getMessageInfo(), "message's messageInfo must not be null");
    Objects.requireNonNull(message.getStream(), "message's inputStream must not be null");
    StoreKey oldStoreKey = message.getMessageInfo().getStoreKey();
    StoreKey newStoreKey = storeKeyConverter.getConverted(oldStoreKey);
    if (newStoreKey != null) {
      transformedMsg = newMessage(message.getStream(), newStoreKey, message.getMessageInfo());
    }
  } catch (Exception e) {
    return new TransformationOutput(e);
  }
  return new TransformationOutput(transformedMsg);
}
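
Taken together with the sieving code at the top of this page, a typical wiring is: build a BlobIdTransformer from a StoreKeyFactory and a StoreKeyConverter, then hand it to a MessageSievingInputStream so each replicated message is validated and has its blob ID rewritten. A minimal sketch follows; the constructor signatures and package locations are assumptions inferred from the snippets above, so verify them against the project source.

import com.codahale.metrics.MetricRegistry;
import com.github.ambry.messageformat.BlobIdTransformer;
import com.github.ambry.messageformat.MessageSievingInputStream;
import com.github.ambry.store.MessageInfo;
import com.github.ambry.store.StoreKeyConverter;
import com.github.ambry.store.StoreKeyFactory;
import com.github.ambry.store.Transformer;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;

// Sketch: sieve a replicated stream while converting blob IDs. Messages whose
// keys have no mapping are dropped (transform returns a null-message output).
static MessageSievingInputStream sieveWithIdConversion(InputStream inStream, List<MessageInfo> messageInfos,
    StoreKeyFactory keyFactory, StoreKeyConverter keyConverter, MetricRegistry metrics) throws IOException {
  Transformer idTransformer = new BlobIdTransformer(keyFactory, keyConverter);
  return new MessageSievingInputStream(inStream, messageInfos, Collections.singletonList(idTransformer), metrics);
}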
use of com.github.ambry.store.Message in project ambry by linkedin.
the class ValidatingKeyConvertingTransformer method transform.
@Override
public TransformationOutput transform(Message message) {
  ByteBuffer encryptionKey;
  BlobProperties props;
  ByteBuffer metadata;
  BlobData blobData;
  MessageInfo msgInfo = message.getMessageInfo();
  InputStream msgStream = message.getStream();
  TransformationOutput transformationOutput;
  try {
    // Read header
    ByteBuffer headerVersion = ByteBuffer.allocate(Version_Field_Size_In_Bytes);
    msgStream.read(headerVersion.array());
    short version = headerVersion.getShort();
    if (!isValidHeaderVersion(version)) {
      throw new MessageFormatException("Header version not supported " + version,
          MessageFormatErrorCodes.Data_Corrupt);
    }
    int headerSize = getHeaderSizeForVersion(version);
    ByteBuffer headerBuffer = ByteBuffer.allocate(headerSize);
    headerBuffer.put(headerVersion.array());
    msgStream.read(headerBuffer.array(), Version_Field_Size_In_Bytes, headerSize - Version_Field_Size_In_Bytes);
    headerBuffer.rewind();
    MessageHeader_Format header = getMessageHeader(version, headerBuffer);
    header.verifyHeader();
    StoreKey originalKey = storeKeyFactory.getStoreKey(new DataInputStream(msgStream));
    if (header.isPutRecord()) {
      encryptionKey = header.hasEncryptionKeyRecord() ? deserializeBlobEncryptionKey(msgStream) : null;
      props = deserializeBlobProperties(msgStream);
      metadata = deserializeUserMetadata(msgStream);
      blobData = deserializeBlob(msgStream);
    } else {
      throw new IllegalArgumentException("Message cannot be a deleted record ");
    }
    if (msgInfo.getStoreKey().equals(originalKey)) {
      StoreKey newKey = storeKeyConverter.convert(Collections.singletonList(originalKey)).get(originalKey);
      if (newKey == null) {
        System.out.println("No mapping for the given key, transformed message will be null");
        transformationOutput = new TransformationOutput((Message) null);
      } else {
        MessageInfo transformedMsgInfo;
        PutMessageFormatInputStream transformedStream =
            new PutMessageFormatInputStream(newKey, encryptionKey, props, metadata,
                new ByteBufInputStream(blobData.content(), true), blobData.getSize(), blobData.getBlobType(),
                msgInfo.getLifeVersion());
        transformedMsgInfo = new MessageInfo.Builder(msgInfo)
            .storeKey(newKey)
            .size(transformedStream.getSize())
            .isUndeleted(false)
            .build();
        transformationOutput = new TransformationOutput(new Message(transformedMsgInfo, transformedStream));
      }
    } else {
      throw new IllegalStateException("StoreKey in log " + originalKey + " failed to match store key from Index "
          + msgInfo.getStoreKey());
    }
  } catch (Exception e) {
    transformationOutput = new TransformationOutput(e);
  }
  return transformationOutput;
}
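
One caveat in the test transformer above: the two InputStream.read calls may return fewer bytes than requested, and their return values are ignored. A more defensive variant of the header read uses DataInputStream.readFully, which either fills the buffer or throws. This is a sketch reusing the constants and helpers from the snippet (getHeaderSizeForVersion is assumed to throw MessageFormatException, as in the message format record code).

import com.github.ambry.messageformat.MessageFormatException;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;

// Sketch: same header read as above, but short reads cannot go unnoticed
// because readFully throws EOFException if the stream ends early.
private static ByteBuffer readFullHeader(InputStream msgStream) throws IOException, MessageFormatException {
  DataInputStream dataStream = new DataInputStream(msgStream);
  byte[] versionBytes = new byte[Version_Field_Size_In_Bytes];
  dataStream.readFully(versionBytes);
  short version = ByteBuffer.wrap(versionBytes).getShort();
  int headerSize = getHeaderSizeForVersion(version); // helper from the message format code, as used above
  ByteBuffer headerBuffer = ByteBuffer.allocate(headerSize);
  headerBuffer.put(versionBytes);
  dataStream.readFully(headerBuffer.array(), Version_Field_Size_In_Bytes, headerSize - Version_Field_Size_In_Bytes);
  headerBuffer.rewind();
  return headerBuffer;
}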