Example usage of alluxio.grpc.CreateFilePOptions in the Alluxio project — class LocalBlockInStreamIntegrationTest, method skip().
/**
 * Tests {@link alluxio.client.block.LocalBlockInStream#skip(long)}.
 */
@Test
public void skip() throws Exception {
  // Files were created in beforeClass() with increasing-byte content, so the byte at
  // position p has value p; this lets each read() verify where skip() landed.
  for (int k = MIN_LEN + DELTA; k <= MAX_LEN; k += DELTA) {
    for (CreateFilePOptions op : getOptionSet()) {
      AlluxioURI uri = new AlluxioURI(sTestPath + "/file_" + k + "_" + op.hashCode());
      FileInStream is = sFileSystem.openFile(uri, sReadNoCache);
      Assert.assertEquals(k / 2, is.skip(k / 2));
      Assert.assertEquals(k / 2, is.read());
      is.close();
      is = sFileSystem.openFile(uri, sReadCachePromote);
      int t = k / 3;
      Assert.assertEquals(t, is.skip(t));
      Assert.assertEquals(t, is.read());
      // The read() above consumed byte t, so this skip advances from t+1 to 2t+1.
      Assert.assertEquals(t, is.skip(t));
      Assert.assertEquals(2 * t + 1, is.read());
      is.close();
      // assertEquals reports expected vs. actual on failure, unlike assertTrue(x == 100).
      Assert.assertEquals(100, sFileSystem.getStatus(uri).getInAlluxioPercentage());
    }
  }
}
Example usage of alluxio.grpc.CreateFilePOptions in the Alluxio project — class LocalBlockInStreamIntegrationTest, method readTest2().
/**
 * Tests {@link alluxio.client.block.LocalBlockInStream#read(byte[])}.
 */
@Test
public void readTest2() throws Exception {
  for (int k = MIN_LEN; k <= MAX_LEN; k += DELTA) {
    for (CreateFilePOptions op : getOptionSet()) {
      AlluxioURI uri = new AlluxioURI(sTestPath + "/file_" + k + "_" + op.hashCode());
      // First pass: read the whole file without caching and verify its contents.
      FileInStream is = sFileSystem.openFile(uri, sReadNoCache);
      byte[] ret = new byte[k];
      Assert.assertEquals(k, is.read(ret));
      Assert.assertTrue(BufferUtils.equalIncreasingByteArray(k, ret));
      is.close();
      // Second pass: read with CACHE_PROMOTE so the block gets promoted into Alluxio.
      is = sFileSystem.openFile(uri, sReadCachePromote);
      ret = new byte[k];
      Assert.assertEquals(k, is.read(ret));
      Assert.assertTrue(BufferUtils.equalIncreasingByteArray(k, ret));
      is.close();
      // assertEquals reports expected vs. actual on failure, unlike assertTrue(x == 100).
      Assert.assertEquals(100, sFileSystem.getStatus(uri).getInAlluxioPercentage());
    }
  }
}
Example usage of alluxio.grpc.CreateFilePOptions in the Alluxio project — class LocalBlockInStreamIntegrationTest, method beforeClass().
@BeforeClass
public static final void beforeClass() throws Exception {
  sFileSystem = sLocalAlluxioClusterResource.get().getClient();
  // Write-type options used when creating the fixture files below.
  sWriteBoth = CreateFilePOptions.newBuilder()
      .setWriteType(WritePType.CACHE_THROUGH)
      .setRecursive(true)
      .build();
  sWriteAlluxio = CreateFilePOptions.newBuilder()
      .setWriteType(WritePType.MUST_CACHE)
      .setRecursive(true)
      .build();
  // Read-type options shared by the individual test cases.
  sReadCachePromote =
      OpenFilePOptions.newBuilder().setReadType(ReadPType.CACHE_PROMOTE).build();
  sReadNoCache = OpenFilePOptions.newBuilder().setReadType(ReadPType.NO_CACHE).build();
  sTestPath = PathUtils.uniqPath();
  // Populate the test directory with files of every (size, write type) combination
  // so the read tests can open them later.
  for (int len = MIN_LEN; len <= MAX_LEN; len += DELTA) {
    for (CreateFilePOptions writeOptions : getOptionSet()) {
      AlluxioURI path =
          new AlluxioURI(sTestPath + "/file_" + len + "_" + writeOptions.hashCode());
      FileSystemTestUtils.createByteFile(sFileSystem, path, writeOptions, len);
    }
  }
}
Example usage of alluxio.grpc.CreateFilePOptions in the Alluxio project — class S3RestServiceHandler, method createObjectOrUploadPart().
/**
 * Handles an S3 PUT on an object key: creates a folder (key ends with the path
 * separator), uploads one part of a multipart upload (partNumber/uploadId present),
 * copies an existing file (x-amz-copy-source present), or writes a plain object.
 *
 * @summary uploads an object or part of an object in multipart upload
 * @param authorization header parameter authorization
 * @param contentMD5 the optional Base64 encoded 128-bit MD5 digest of the object
 * @param copySource the source path to copy the new file from
 * @param decodedLength the length of the content when in aws-chunked encoding
 * @param contentLength the total length of the request body
 * @param bucket the bucket name
 * @param object the object name
 * @param partNumber the identification of the part of the object in multipart upload,
 *        otherwise null
 * @param uploadId the upload ID of the multipart upload, otherwise null
 * @param is the request body
 * @return the response object
 */
@PUT
@Path(OBJECT_PARAM)
@Consumes(MediaType.WILDCARD)
public Response createObjectOrUploadPart(@HeaderParam("Authorization") String authorization, @HeaderParam("Content-MD5") final String contentMD5, @HeaderParam("x-amz-copy-source") String copySource, @HeaderParam("x-amz-decoded-content-length") String decodedLength, @HeaderParam("Content-Length") String contentLength, @PathParam("bucket") final String bucket, @PathParam("object") final String object, @QueryParam("partNumber") final Integer partNumber, @QueryParam("uploadId") final Long uploadId, final InputStream is) {
  return S3RestUtils.call(bucket, () -> {
    Preconditions.checkNotNull(bucket, "required 'bucket' parameter is missing");
    Preconditions.checkNotNull(object, "required 'object' parameter is missing");
    // partNumber and uploadId only make sense as a pair: both identify one part of one
    // multipart upload.
    Preconditions.checkArgument((partNumber == null && uploadId == null) || (partNumber != null && uploadId != null), "'partNumber' and 'uploadId' parameter should appear together or be " + "missing together.");
    String bucketPath = S3RestUtils.parsePath(AlluxioURI.SEPARATOR + bucket);
    final FileSystem fs = getFileSystem(authorization);
    S3RestUtils.checkPathIsAlluxioDirectory(fs, bucketPath);
    String objectPath = bucketPath + AlluxioURI.SEPARATOR + object;
    CreateDirectoryPOptions dirOptions = CreateDirectoryPOptions.newBuilder().setRecursive(true).setAllowExists(true).build();
    // A key ending in the separator denotes a folder marker, not a data object.
    if (objectPath.endsWith(AlluxioURI.SEPARATOR)) {
      // Need to create a folder
      try {
        fs.createDirectory(new AlluxioURI(objectPath), dirOptions);
      } catch (FileAlreadyExistsException e) {
        // ok if directory already exists the user wanted to create it anyway
        LOG.warn("attempting to create dir which already exists");
      } catch (IOException | AlluxioException e) {
        throw S3RestUtils.toObjectS3Exception(e, objectPath);
      }
      return Response.ok().build();
    }
    if (partNumber != null) {
      // This object is part of a multipart upload, should be uploaded into the temporary
      // directory first.
      String tmpDir = S3RestUtils.getMultipartTemporaryDirForObject(bucketPath, object);
      S3RestUtils.checkUploadId(fs, new AlluxioURI(tmpDir), uploadId);
      objectPath = tmpDir + AlluxioURI.SEPARATOR + partNumber;
    }
    AlluxioURI objectURI = new AlluxioURI(objectPath);
    // remove exist object
    deleteExistObject(fs, objectURI);
    CreateFilePOptions filePOptions = CreateFilePOptions.newBuilder().setRecursive(true).setWriteType(S3RestUtils.getS3WriteType()).build();
    // not copying from an existing file
    if (copySource == null) {
      try {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        // The request body can be in the aws-chunked encoding format, or not encoded at all
        // determine if it's encoded, and then which parts of the stream to read depending on
        // the encoding type.
        boolean isChunkedEncoding = decodedLength != null;
        int toRead;
        InputStream readStream = is;
        if (isChunkedEncoding) {
          // aws-chunked: the payload length is the decoded length; strip chunk framing.
          toRead = Integer.parseInt(decodedLength);
          readStream = new ChunkedEncodingInputStream(is);
        } else {
          toRead = Integer.parseInt(contentLength);
        }
        FileOutStream os = fs.createFile(objectURI, filePOptions);
        // DigestOutputStream computes the MD5 while writing; closing it closes os too.
        try (DigestOutputStream digestOutputStream = new DigestOutputStream(os, md5)) {
          // Cap the copy at toRead bytes and fail if the body was shorter than declared.
          long read = ByteStreams.copy(ByteStreams.limit(readStream, toRead), digestOutputStream);
          if (read < toRead) {
            throw new IOException(String.format("Failed to read all required bytes from the stream. Read %d/%d", read, toRead));
          }
        }
        byte[] digest = md5.digest();
        String base64Digest = BaseEncoding.base64().encode(digest);
        // If the client supplied Content-MD5, verify it against what we actually wrote.
        if (contentMD5 != null && !contentMD5.equals(base64Digest)) {
          // The object may be corrupted, delete the written object and return an error.
          try {
            fs.delete(objectURI, DeletePOptions.newBuilder().setRecursive(true).build());
          } catch (Exception e2) {
            // intend to continue and return BAD_DIGEST S3Exception.
          }
          throw new S3Exception(objectURI.getPath(), S3ErrorCode.BAD_DIGEST);
        }
        // S3 convention: the ETag of a simple PUT is the hex MD5 of the body.
        String entityTag = Hex.encodeHexString(digest);
        return Response.ok().tag(entityTag).build();
      } catch (Exception e) {
        throw S3RestUtils.toObjectS3Exception(e, objectPath);
      }
    } else {
      // CopyObject path: read from copySource (normalized to an absolute path) and
      // write the digest-tracked copy to the destination.
      try (FileInStream in = fs.openFile(new AlluxioURI(!copySource.startsWith(AlluxioURI.SEPARATOR) ? AlluxioURI.SEPARATOR + copySource : copySource));
          FileOutStream out = fs.createFile(objectURI)) {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        try (DigestOutputStream digestOut = new DigestOutputStream(out, md5)) {
          IOUtils.copyLarge(in, digestOut, new byte[8 * Constants.MB]);
          byte[] digest = md5.digest();
          String entityTag = Hex.encodeHexString(digest);
          return new CopyObjectResult(entityTag, System.currentTimeMillis());
        } catch (IOException e) {
          // Abort the partially-written destination file; keep the original failure as
          // the primary exception and attach any cancel() failure as suppressed.
          try {
            out.cancel();
          } catch (Throwable t2) {
            e.addSuppressed(t2);
          }
          throw e;
        }
      } catch (Exception e) {
        throw S3RestUtils.toObjectS3Exception(e, objectPath);
      }
    }
  });
}
Example usage of alluxio.grpc.CreateFilePOptions in the Alluxio project — class S3RestServiceHandler, method completeMultipartUpload().
// TODO(cc): support the options in the XML request body defined in
// http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html, currently, the parts
// under the temporary multipart upload directory are combined into the final object.
/**
 * Completes a multipart upload: concatenates all uploaded parts (in part-number order)
 * into the final object, deletes the temporary part directory, and returns the result
 * containing the object's MD5-based entity tag.
 *
 * @param fs the file system to operate on
 * @param bucket the bucket name
 * @param object the object name
 * @param uploadId the ID of the multipart upload to complete
 * @return the completion result for the combined object
 */
private Response completeMultipartUpload(final FileSystem fs, final String bucket, final String object, final long uploadId) {
  return S3RestUtils.call(bucket, () -> {
    String bucketPath = S3RestUtils.parsePath(AlluxioURI.SEPARATOR + bucket);
    S3RestUtils.checkPathIsAlluxioDirectory(fs, bucketPath);
    String objectPath = bucketPath + AlluxioURI.SEPARATOR + object;
    AlluxioURI multipartTemporaryDir =
        new AlluxioURI(S3RestUtils.getMultipartTemporaryDirForObject(bucketPath, object));
    S3RestUtils.checkUploadId(fs, multipartTemporaryDir, uploadId);
    try {
      List<URIStatus> parts = fs.listStatus(multipartTemporaryDir);
      // Part files are named by part number; sort so they are concatenated in order.
      parts.sort(new S3RestUtils.URIStatusNameComparator());
      // Obtain the digest BEFORE creating the output file: the original created the
      // FileOutStream first, so an exception from getInstance would have leaked an
      // open, never-closed stream. Constructing the stream inside try-with-resources
      // guarantees it is closed (DigestOutputStream.close() closes the wrapped stream).
      MessageDigest md5 = MessageDigest.getInstance("MD5");
      CreateFilePOptions options = CreateFilePOptions.newBuilder().setRecursive(true)
          .setWriteType(S3RestUtils.getS3WriteType()).build();
      try (DigestOutputStream digestOutputStream = new DigestOutputStream(
          fs.createFile(new AlluxioURI(objectPath), options), md5)) {
        for (URIStatus part : parts) {
          try (FileInStream is = fs.openFile(new AlluxioURI(part.getPath()))) {
            ByteStreams.copy(is, digestOutputStream);
          }
        }
      }
      // The parts are no longer needed once the combined object is written.
      fs.delete(multipartTemporaryDir, DeletePOptions.newBuilder().setRecursive(true).build());
      MultipartUploadCleaner.cancelAbort(fs, bucket, object, uploadId);
      String entityTag = Hex.encodeHexString(md5.digest());
      return new CompleteMultipartUploadResult(objectPath, bucket, object, entityTag);
    } catch (Exception e) {
      throw S3RestUtils.toObjectS3Exception(e, objectPath);
    }
  });
}
Aggregations