Use of org.apache.hadoop.fs.cosn.buffer.CosNByteBuffer in project hadoop-cos by tencentyun.
From the class CosNFSDataOutputStream, the method uploadPart.
/**
 * Seals the current block and submits it to the executor as one part of a
 * COS multipart upload, then re-arms the stream with a fresh buffer for the
 * next block.
 *
 * <p>On the first block (blockId == 0) a multipart upload is lazily started
 * by fetching an uploadId from the store. The part upload itself runs
 * asynchronously; its {@code ListenableFuture<PartETag>} is recorded in
 * {@code partEtagList} for the eventual complete-multipart call.
 *
 * @throws IOException if closing the block stream fails, starting the
 *     multipart upload fails, or a replacement buffer cannot be obtained
 *     from the pool.
 */
private void uploadPart() throws IOException {
    // Seal the current block: flush and close so the buffer's content is
    // complete before it is handed off to the asynchronous upload task.
    this.currentBlockOutputStream.flush();
    this.currentBlockOutputStream.close();

    // Lazily initiate the multipart upload when sealing the first block.
    if (this.currentBlockId == 0) {
        uploadId = (store).getUploadId(key);
    }
    // COS part numbers are 1-based, so increment before submitting.
    this.currentBlockId++;
    LOG.debug("upload part blockId: {}, uploadId: {}.", this.currentBlockId, this.uploadId);

    // digest() also resets the digest, but we reset explicitly again below
    // when re-arming the stream, so the order here is safe.
    final byte[] md5Hash = this.digest == null ? null : this.digest.digest();

    ListenableFuture<PartETag> partETagListenableFuture = this.executorService.submit(new Callable<PartETag>() {
        // Snapshot everything the task needs NOW: the outer fields
        // (currentBlockBuffer, currentBlockId, ...) are replaced or mutated
        // by the caller before this task may run.
        private final CosNByteBuffer buffer = currentBlockBuffer;
        private final String localKey = key;
        private final String localUploadId = uploadId;
        private final int blockId = currentBlockId;
        private final byte[] blockMD5Hash = md5Hash;

        @Override
        public PartETag call() throws Exception {
            try {
                PartETag partETag = (store).uploadPart(new BufferInputStream(this.buffer), this.localKey, this.localUploadId, this.blockId, this.buffer.getByteBuffer().remaining(), this.blockMD5Hash);
                return partETag;
            } finally {
                // Always return the buffer to the pool, even if the upload
                // fails, to avoid leaking pooled buffers.
                BufferPool.getInstance().returnBuffer(this.buffer);
            }
        }
    });
    this.partEtagList.add(partETagListenableFuture);

    // Acquire a fresh buffer for the next block.
    try {
        this.currentBlockBuffer = BufferPool.getInstance().getBuffer((int) this.blockSize);
    } catch (InterruptedException e) {
        // Restore the interrupt status before translating to IOException so
        // callers up the stack can still observe the interruption.
        Thread.currentThread().interrupt();
        String exceptionMsg = String.format("getting a buffer size: [%d] " + "from the buffer pool occurs an exception.", this.blockSize);
        throw new IOException(exceptionMsg, e);
    }

    // Re-arm the block output stream (with a fresh digest if MD5
    // verification is enabled).
    if (null != this.digest) {
        this.digest.reset();
        this.currentBlockOutputStream = new DigestOutputStream(new BufferOutputStream(this.currentBlockBuffer), this.digest);
    } else {
        this.currentBlockOutputStream = new BufferOutputStream(this.currentBlockBuffer);
    }
}
Aggregations