Usage example of org.apache.commons.vfs2.provider.VfsComponentContext from the pentaho/big-data-plugin project: class S3AFileNameParserTest, method testParseUri.
@Test
public void testParseUri() throws Exception {
    VfsComponentContext ctx = mock(VfsComponentContext.class);
    FileName base = mock(FileName.class);

    // A plain URI parses to the same bucket whether or not a base file is supplied.
    String url = "s3a://bucket/file";
    FileName parsed = parser.parseUri(ctx, null, url);
    assertNotNull(parsed);
    assertEquals("bucket", ((S3AFileName) parsed).getBucketId());
    parsed = parser.parseUri(ctx, base, url);
    assertNotNull(parsed);
    assertEquals("bucket", ((S3AFileName) parsed).getBucketId());

    // assumption is that the whole URL is valid until it comes time to resolve to S3 objects
    parsed = parser.parseUri(ctx, base, "s3a://s3a/bucket/file");
    assertEquals("s3a", ((S3AFileName) parsed).getBucketId());

    // With embedded credentials, the credential prefix stays part of the bucket id.
    parsed = parser.parseUri(ctx, base, "s3a://ThiSiSA+PossibleAcce/ssK3y:PossiblES3cre+K3y@s3a/bucket/file");
    assertEquals("ThiSiSA+PossibleAcce/ssK3y:PossiblES3cre+K3y@s3a", ((S3AFileName) parsed).getBucketId());
}
Usage example of org.apache.commons.vfs2.provider.VfsComponentContext from the pentaho/big-data-plugin project: class S3NFileObjectTest, method setUp.
@Before
public void setUp() throws Exception {
    // Mocked S3 client plus a canned object that lives in the test bucket.
    s3ServiceMock = mock(AmazonS3.class);
    S3Object s3Object = new S3Object();
    s3Object.setKey(OBJECT_NAME);
    s3Object.setBucketName(BUCKET_NAME);

    // File names: one addressing the bucket folder, one addressing the file system root.
    filename = new S3NFileName(SCHEME, BUCKET_NAME, "/" + BUCKET_NAME, FileType.FOLDER);
    S3NFileName rootFileName = new S3NFileName(SCHEME, BUCKET_NAME, "", FileType.FOLDER);
    S3NFileSystem fileSystem = new S3NFileSystem(rootFileName, new FileSystemOptions());
    fileSystemSpy = spy(fileSystem);

    // Wire a real DefaultFileSystemManager (with a mocked cache) into the VFS context.
    VfsComponentContext context = mock(VfsComponentContext.class);
    final DefaultFileSystemManager fsm = new DefaultFileSystemManager();
    FilesCache cache = mock(FilesCache.class);
    fsm.setFilesCache(cache);
    fsm.setCacheStrategy(CacheStrategy.ON_RESOLVE);
    when(context.getFileSystemManager()).thenReturn(fsm);
    fileSystemSpy.setContext(context);

    // Spies under test: bucket-level, file-level, and root-level file objects.
    S3NFileObject s3FileObject = new S3NFileObject(filename, fileSystemSpy);
    s3FileObjectBucketSpy = spy(s3FileObject);
    s3FileObjectFileSpy = spy(new S3NFileObject(new S3NFileName(SCHEME, BUCKET_NAME, "/" + BUCKET_NAME + "/" + origKey, FileType.IMAGINARY), fileSystemSpy));
    S3NFileObject s3FileObjectRoot = new S3NFileObject(rootFileName, fileSystemSpy);
    s3FileObjectSpyRoot = spy(s3FileObjectRoot);

    // Specify the behaviour of the S3 service.
    // (The original registered this getObject stub twice back-to-back; once is enough.)
    when(s3ServiceMock.getObject(BUCKET_NAME, OBJECT_NAME)).thenReturn(s3Object);
    when(s3ServiceMock.listBuckets()).thenReturn(createBuckets());
    when(s3ServiceMock.doesBucketExistV2(BUCKET_NAME)).thenReturn(true);

    // Listing: first call yields object summaries and no prefixes, second call the
    // reverse; isTruncated() true-then-false drives one listNextBatchOfObjects() round.
    childObjectListing = mock(ObjectListing.class);
    when(childObjectListing.getObjectSummaries()).thenReturn(createObjectSummaries(0)).thenReturn(new ArrayList<>());
    when(childObjectListing.getCommonPrefixes()).thenReturn(new ArrayList<>()).thenReturn(createCommonPrefixes(3));
    when(childObjectListing.isTruncated()).thenReturn(true).thenReturn(false);
    when(s3ServiceMock.listObjects(any(ListObjectsRequest.class))).thenReturn(childObjectListing);
    when(s3ServiceMock.listObjects(anyString(), anyString())).thenReturn(childObjectListing);
    when(s3ServiceMock.listNextBatchOfObjects(any(ObjectListing.class))).thenReturn(childObjectListing);

    // Generic object/metadata stubs. NOTE(review): the anyString() getObject stub below
    // is registered after the specific BUCKET_NAME/OBJECT_NAME stub above and appears to
    // take precedence for calls matching both — confirm this ordering is intentional.
    s3ObjectMock = mock(S3Object.class);
    s3ObjectInputStream = mock(S3ObjectInputStream.class);
    s3ObjectMetadata = mock(ObjectMetadata.class);
    when(s3ObjectMock.getObjectContent()).thenReturn(s3ObjectInputStream);
    when(s3ServiceMock.getObjectMetadata(anyString(), anyString())).thenReturn(s3ObjectMetadata);
    when(s3ObjectMetadata.getContentLength()).thenReturn(contentLength);
    when(s3ObjectMetadata.getLastModified()).thenReturn(testDate);
    when(s3ServiceMock.getObject(anyString(), anyString())).thenReturn(s3ObjectMock);

    // Hand the mocked client to the spied file system.
    when(fileSystemSpy.getS3Client()).thenReturn(s3ServiceMock);
}
Usage example of org.apache.commons.vfs2.provider.VfsComponentContext from the pentaho/big-data-plugin project: class S3FileNameParser, method parseUri.
/**
 * Parses an S3 URI into an {@code S3FileName}. When the URI embeds
 * credentials, they are kept as a prefix of the bucket id.
 */
public FileName parseUri(VfsComponentContext context, FileName base, String uri) throws FileSystemException {
    final StringBuilder sb = new StringBuilder();
    final String scheme = UriParser.extractScheme(uri, sb);
    UriParser.canonicalizePath(sb, 0, sb.length(), this);
    UriParser.fixSeparators(sb);                  // normalize path separators
    final FileType fileType = UriParser.normalisePath(sb);

    // Strip any "accessKey:secretKey@" credential prefix out of the buffer.
    final String keys = S3Util.getFullKeysFromURI(sb.toString());
    if (keys != null) {
        final int at = sb.indexOf(keys);
        sb.delete(at, at + keys.length());
    }

    final String fullPath = sb.toString();
    // extractFirstElement consumes the bucket segment from the buffer.
    final String bucket = UriParser.extractFirstElement(sb);

    if (keys == null) {
        return new S3FileName(scheme, bucket, fullPath, fileType);
    }
    // With credentials: prepend them to the bucket id; if nothing remains in
    // the buffer after extraction, fall back to the pre-extraction path.
    final String remainder = sb.length() == 0 ? fullPath : sb.toString();
    return new S3FileName(scheme, keys + bucket, remainder, fileType, keys);
}
Usage example of org.apache.commons.vfs2.provider.VfsComponentContext from the pentaho/big-data-plugin project: class S3AFileNameParser, method parseUri.
/**
 * Parses an s3a URI into an {@code S3AFileName}. When the URI embeds
 * credentials, they are kept as a prefix of the bucket id.
 */
public FileName parseUri(VfsComponentContext context, FileName base, String uri) throws FileSystemException {
    final StringBuilder sb = new StringBuilder();
    final String scheme = UriParser.extractScheme(uri, sb);
    UriParser.canonicalizePath(sb, 0, sb.length(), this);
    UriParser.fixSeparators(sb);                  // normalize path separators
    final FileType fileType = UriParser.normalisePath(sb);

    // Strip any "accessKey:secretKey@" credential prefix out of the buffer.
    final String keys = S3Util.getFullKeysFromURI(sb.toString());
    if (keys != null) {
        final int at = sb.indexOf(keys);
        sb.delete(at, at + keys.length());
    }

    final String fullPath = sb.toString();
    // extractFirstElement consumes the bucket segment from the buffer.
    final String bucket = UriParser.extractFirstElement(sb);

    if (keys == null) {
        return new S3AFileName(scheme, bucket, fullPath, fileType);
    }
    // With credentials: prepend them to the bucket id; if nothing remains in
    // the buffer after extraction, fall back to the pre-extraction path.
    final String remainder = sb.length() == 0 ? fullPath : sb.toString();
    return new S3AFileName(scheme, keys + bucket, remainder, fileType, keys);
}
Usage example of org.apache.commons.vfs2.provider.VfsComponentContext from the pentaho/big-data-plugin project: class S3NFileNameParser, method parseUri.
/**
 * Parses an s3n URI into an {@code S3NFileName}. When the URI embeds
 * credentials, they are kept as a prefix of the bucket id.
 */
public FileName parseUri(VfsComponentContext context, FileName base, String uri) throws FileSystemException {
    final StringBuilder sb = new StringBuilder();
    final String scheme = UriParser.extractScheme(uri, sb);
    UriParser.canonicalizePath(sb, 0, sb.length(), this);
    UriParser.fixSeparators(sb);                  // normalize path separators
    final FileType fileType = UriParser.normalisePath(sb);

    // Strip any "accessKey:secretKey@" credential prefix out of the buffer.
    final String keys = S3Util.getFullKeysFromURI(sb.toString());
    if (keys != null) {
        final int at = sb.indexOf(keys);
        sb.delete(at, at + keys.length());
    }

    final String fullPath = sb.toString();
    // extractFirstElement consumes the bucket segment from the buffer.
    final String bucket = UriParser.extractFirstElement(sb);

    if (keys == null) {
        return new S3NFileName(scheme, bucket, fullPath, fileType);
    }
    // With credentials: prepend them to the bucket id; if nothing remains in
    // the buffer after extraction, fall back to the pre-extraction path.
    final String remainder = sb.length() == 0 ? fullPath : sb.toString();
    return new S3NFileName(scheme, keys + bucket, remainder, fileType, keys);
}
Aggregations