Use of org.jets3t.service.ServiceException in project hadoop by apache.
Class Jets3tNativeFileSystemStore, method storeEmptyFile.
@Override
public void storeEmptyFile(String key) throws IOException {
  try {
    // Upload a zero-byte object: an empty stream plus an explicit
    // content length of 0 means JetS3t sends no body at all.
    S3Object object = new S3Object(key);
    object.setDataInputStream(new ByteArrayInputStream(new byte[0]));
    object.setContentType("binary/octet-stream");
    object.setContentLength(0);
    object.setServerSideEncryptionAlgorithm(serverSideEncryptionAlgorithm);
    s3Service.putObject(bucket, object);
  } catch (ServiceException e) {
    handleException(e, key);
  }
}
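In the surrounding NativeS3FileSystem, zero-byte objects like this typically act as directory markers, since S3 itself has no directories. A minimal sketch of such a caller, assuming X where X is the pathToKey helper and the FOLDER_SUFFIX constant (both are illustrative names, not confirmed by this snippet):

// Hypothetical caller: mark an S3 "directory" with a zero-byte object.
private void mkdir(Path f) throws IOException {
  // e.g. key "data/2017" plus an assumed suffix such as "_$folder$"
  String key = pathToKey(f) + FOLDER_SUFFIX;
  store.storeEmptyFile(key);
}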
Use of org.jets3t.service.ServiceException in project hadoop by apache.
Class Jets3tNativeFileSystemStore, method storeFile.
@Override
public void storeFile(String key, File file, byte[] md5Hash)
    throws IOException {
  // Files at or above the multipart threshold go through the multipart path.
  if (multipartEnabled && file.length() >= multipartBlockSize) {
    storeLargeFile(key, file, md5Hash);
    return;
  }
  BufferedInputStream in = null;
  try {
    in = new BufferedInputStream(new FileInputStream(file));
    S3Object object = new S3Object(key);
    object.setDataInputStream(in);
    object.setContentType("binary/octet-stream");
    object.setContentLength(file.length());
    object.setServerSideEncryptionAlgorithm(serverSideEncryptionAlgorithm);
    if (md5Hash != null) {
      // Lets S3 verify the received bytes against the client-side digest.
      object.setMd5Hash(md5Hash);
    }
    s3Service.putObject(bucket, object);
  } catch (ServiceException e) {
    handleException(e, key);
  } finally {
    IOUtils.closeStream(in);
  }
}
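The md5Hash parameter is the raw 16-byte MD5 digest of the file's contents. A minimal sketch of how a caller might compute it with the standard java.security.MessageDigest API before invoking storeFile (the helper name md5Of is an illustrative assumption):

// Requires java.io.*, java.security.MessageDigest,
// java.security.NoSuchAlgorithmException.
static byte[] md5Of(File file) throws IOException {
  try (InputStream in = new BufferedInputStream(new FileInputStream(file))) {
    MessageDigest digest = MessageDigest.getInstance("MD5");
    byte[] buf = new byte[8192];
    for (int n = in.read(buf); n != -1; n = in.read(buf)) {
      digest.update(buf, 0, n);
    }
    return digest.digest();
  } catch (NoSuchAlgorithmException e) {
    throw new IOException("MD5 unavailable", e); // never on a standard JRE
  }
}

A caller would then pass the digest straight through, e.g. store.storeFile(key, file, md5Of(file)).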
Use of org.jets3t.service.ServiceException in project hadoop by apache.
Class Jets3tNativeFileSystemStore, method list.
/**
 * List objects under a prefix.
 * @param prefix the key prefix to list under
 * @param delimiter the delimiter used to roll up common prefixes
 * @param maxListingLength maximum number of entries to return
 * @param priorLastKey the last key returned by a previous listing, or null
 * @return a partial listing of matches
 * @throws IOException on any reported failure
 */
private PartialListing list(String prefix, String delimiter,
    int maxListingLength, String priorLastKey) throws IOException {
  try {
    // Normalize the prefix so the listing is rooted at a "directory".
    if (!prefix.isEmpty() && !prefix.endsWith(PATH_DELIMITER)) {
      prefix += PATH_DELIMITER;
    }
    StorageObjectsChunk chunk = s3Service.listObjectsChunked(
        bucket.getName(), prefix, delimiter, maxListingLength, priorLastKey);
    // Convert each JetS3t StorageObject into Hadoop's FileMetadata.
    FileMetadata[] fileMetadata = new FileMetadata[chunk.getObjects().length];
    for (int i = 0; i < fileMetadata.length; i++) {
      StorageObject object = chunk.getObjects()[i];
      fileMetadata[i] = new FileMetadata(object.getKey(),
          object.getContentLength(), object.getLastModifiedDate().getTime());
    }
    return new PartialListing(chunk.getPriorLastKey(), fileMetadata,
        chunk.getCommonPrefixes());
  } catch (ServiceException e) {
    handleException(e, prefix);
    // never returned - keep compiler happy
    return null;
  }
}
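Callers drive the pagination themselves: the priorLastKey carried in each PartialListing is fed back into the next call until the listing is exhausted. A hedged sketch of that loop, assuming PartialListing exposes getFiles() and getPriorLastKey() accessors as the constructor above suggests, and that FileMetadata has a getKey() accessor:

// Page through all objects under "logs", 1000 keys per round trip.
String priorLastKey = null;
do {
  PartialListing listing = list("logs", PATH_DELIMITER, 1000, priorLastKey);
  for (FileMetadata file : listing.getFiles()) {
    System.out.println(file.getKey());
  }
  priorLastKey = listing.getPriorLastKey(); // null once the listing is done
} while (priorLastKey != null);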
Use of org.jets3t.service.ServiceException in project hadoop by apache.
Class Jets3tNativeFileSystemStore, method copyLargeFile.
public void copyLargeFile(S3Object srcObject, String dstKey)
    throws IOException {
  try {
    // One part per full block, plus one more for any remainder.
    long partCount = srcObject.getContentLength() / multipartCopyBlockSize
        + (srcObject.getContentLength() % multipartCopyBlockSize > 0 ? 1 : 0);
    MultipartUpload multipartUpload = s3Service.multipartStartUpload(
        bucket.getName(), dstKey, srcObject.getMetadataMap());
    List<MultipartPart> listedParts = new ArrayList<MultipartPart>();
    for (int i = 0; i < partCount; i++) {
      long byteRangeStart = i * multipartCopyBlockSize;
      long byteLength;
      if (i < partCount - 1) {
        byteLength = multipartCopyBlockSize;
      } else {
        // The final part carries the remainder, or a full block when the
        // object size is an exact multiple of the block size.
        byteLength = srcObject.getContentLength() % multipartCopyBlockSize;
        if (byteLength == 0) {
          byteLength = multipartCopyBlockSize;
        }
      }
      MultipartPart copiedPart = s3Service.multipartUploadPartCopy(
          multipartUpload, i + 1, bucket.getName(), srcObject.getKey(),
          null, null, null, null, byteRangeStart,
          byteRangeStart + byteLength - 1, null);
      listedParts.add(copiedPart);
    }
    Collections.reverse(listedParts);
    s3Service.multipartCompleteUpload(multipartUpload, listedParts);
  } catch (ServiceException e) {
    handleException(e, srcObject.getKey());
  }
}
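To make the part arithmetic concrete, here is a self-contained worked example of the same computation, assuming a 200 MiB source object and a 64 MiB multipartCopyBlockSize (both illustrative values), so the last part carries an 8 MiB remainder:

public class PartMathDemo {
  public static void main(String[] args) {
    long contentLength = 200L * 1024 * 1024; // 200 MiB source object
    long blockSize = 64L * 1024 * 1024;      // 64 MiB copy block size
    long partCount = contentLength / blockSize
        + (contentLength % blockSize > 0 ? 1 : 0); // 3 full + 1 partial = 4
    for (long i = 0; i < partCount; i++) {
      long start = i * blockSize;
      long length = (i < partCount - 1 || contentLength % blockSize == 0)
          ? blockSize : contentLength % blockSize;
      // JetS3t part numbers are 1-based: part i+1 copies the inclusive
      // byte range [start, start + length - 1] of the source object.
      System.out.printf("part %d: bytes %d-%d%n",
          i + 1, start, start + length - 1);
    }
  }
}

The printed ranges match what copyLargeFile passes to multipartUploadPartCopy: three 64 MiB parts followed by one 8 MiB part covering bytes 201326592-209715199.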