
Example 36 with HdfsFileStatus

Use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.

From the class TestRpcProgramNfs3, method testPathconf:

@Test(timeout = 60000)
public void testPathconf() throws Exception {
    HdfsFileStatus status = nn.getRpcServer().getFileInfo("/tmp/bar");
    long dirId = status.getFileId();
    FileHandle handle = new FileHandle(dirId);
    XDR xdr_req = new XDR();
    PATHCONF3Request req = new PATHCONF3Request(handle);
    req.serialize(xdr_req);
    // Attempt by an unprivileged user should fail.
    PATHCONF3Response response1 = nfsd.pathconf(xdr_req.asReadOnlyWrap(), securityHandlerUnpriviledged, new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES, response1.getStatus());
    // Attempt by a privileged user should pass.
    PATHCONF3Response response2 = nfsd.pathconf(xdr_req.asReadOnlyWrap(), securityHandler, new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK, response2.getStatus());
}
Also used: PATHCONF3Request (org.apache.hadoop.nfs.nfs3.request.PATHCONF3Request), FileHandle (org.apache.hadoop.nfs.nfs3.FileHandle), InetSocketAddress (java.net.InetSocketAddress), HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus), XDR (org.apache.hadoop.oncrpc.XDR), PATHCONF3Response (org.apache.hadoop.nfs.nfs3.response.PATHCONF3Response), Test (org.junit.Test)
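
The opening lines of this test show the pattern that every example on this page repeats: resolve a path to its inode id through the NameNode RPC server, wrap that id in an NFS FileHandle, and serialize the request into an XDR buffer before handing it to the gateway. A minimal sketch of that lookup as a helper, assuming the same nn fixture TestRpcProgramNfs3 already provides (the helper name fileHandleFor is hypothetical, not part of the original test class):

// Resolve an HDFS path to an NFS file handle via its inode id.
// Assumes the NameNode fixture (nn) set up by TestRpcProgramNfs3; returns a
// handle suitable for building PATHCONF/READ/READDIR requests as shown above.
private FileHandle fileHandleFor(String path) throws Exception {
    HdfsFileStatus status = nn.getRpcServer().getFileInfo(path);
    // getFileInfo returns null for a nonexistent path; a real helper would check.
    return new FileHandle(status.getFileId());
}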

Example 37 with HdfsFileStatus

Use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.

From the class TestRpcProgramNfs3, method testRead:

@Test(timeout = 60000)
public void testRead() throws Exception {
    HdfsFileStatus status = nn.getRpcServer().getFileInfo("/tmp/bar");
    long dirId = status.getFileId();
    FileHandle handle = new FileHandle(dirId);
    READ3Request readReq = new READ3Request(handle, 0, 5);
    XDR xdr_req = new XDR();
    readReq.serialize(xdr_req);
    // Attempt by an unprivileged user should fail.
    READ3Response response1 = nfsd.read(xdr_req.asReadOnlyWrap(), securityHandlerUnpriviledged, new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES, response1.getStatus());
    // Attempt by a privileged user should pass.
    READ3Response response2 = nfsd.read(xdr_req.asReadOnlyWrap(), securityHandler, new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK, response2.getStatus());
}
Also used: READ3Request (org.apache.hadoop.nfs.nfs3.request.READ3Request), FileHandle (org.apache.hadoop.nfs.nfs3.FileHandle), InetSocketAddress (java.net.InetSocketAddress), HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus), XDR (org.apache.hadoop.oncrpc.XDR), READ3Response (org.apache.hadoop.nfs.nfs3.response.READ3Response), Test (org.junit.Test)
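
Beyond the status code, a successful READ3Response also carries the data that was read. A hedged follow-up sketch using only the accessors that Example 40 below relies on (getData() and isEof()); note that array() exposes the buffer's backing array, which may be longer than the 5 bytes requested, and whether isEof() is true depends on the actual length of /tmp/bar, so neither is asserted here:

// Inspect the payload of the privileged read (response2 from the test above).
byte[] backing = response2.getData().array();
System.out.println("backing array length=" + backing.length + ", eof=" + response2.isEof());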

Example 38 with HdfsFileStatus

Use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.

From the class TestRpcProgramNfs3, method testReadlink:

@Test(timeout = 60000)
public void testReadlink() throws Exception {
    // Create a symlink first.
    HdfsFileStatus status = nn.getRpcServer().getFileInfo(testdir);
    long dirId = status.getFileId();
    XDR xdr_req = new XDR();
    FileHandle handle = new FileHandle(dirId);
    SYMLINK3Request req = new SYMLINK3Request(handle, "fubar", new SetAttr3(), "bar");
    req.serialize(xdr_req);
    SYMLINK3Response response = nfsd.symlink(xdr_req.asReadOnlyWrap(), securityHandler, new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK, response.getStatus());
    // Now perform readlink operations.
    FileHandle handle2 = response.getObjFileHandle();
    XDR xdr_req2 = new XDR();
    READLINK3Request req2 = new READLINK3Request(handle2);
    req2.serialize(xdr_req2);
    // Attempt by an unprivileged user should fail.
    READLINK3Response response1 = nfsd.readlink(xdr_req2.asReadOnlyWrap(), securityHandlerUnpriviledged, new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES, response1.getStatus());
    // Attempt by a privileged user should pass.
    READLINK3Response response2 = nfsd.readlink(xdr_req2.asReadOnlyWrap(), securityHandler, new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK, response2.getStatus());
}
Also used: SYMLINK3Request (org.apache.hadoop.nfs.nfs3.request.SYMLINK3Request), SetAttr3 (org.apache.hadoop.nfs.nfs3.request.SetAttr3), READLINK3Request (org.apache.hadoop.nfs.nfs3.request.READLINK3Request), FileHandle (org.apache.hadoop.nfs.nfs3.FileHandle), InetSocketAddress (java.net.InetSocketAddress), HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus), XDR (org.apache.hadoop.oncrpc.XDR), READLINK3Response (org.apache.hadoop.nfs.nfs3.response.READLINK3Response), SYMLINK3Response (org.apache.hadoop.nfs.nfs3.response.SYMLINK3Response), Test (org.junit.Test)

Example 39 with HdfsFileStatus

Use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.

From the class TestRpcProgramNfs3, method testReaddir:

@Test(timeout = 60000)
public void testReaddir() throws Exception {
    HdfsFileStatus status = nn.getRpcServer().getFileInfo(testdir);
    long dirId = status.getFileId();
    FileHandle handle = new FileHandle(dirId);
    XDR xdr_req = new XDR();
    READDIR3Request req = new READDIR3Request(handle, 0, 0, 100);
    req.serialize(xdr_req);
    // Attempt by an unprivileged user should fail.
    READDIR3Response response1 = nfsd.readdir(xdr_req.asReadOnlyWrap(), securityHandlerUnpriviledged, new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES, response1.getStatus());
    // Attempt by a privileged user should pass.
    READDIR3Response response2 = nfsd.readdir(xdr_req.asReadOnlyWrap(), securityHandler, new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK, response2.getStatus());
}
Also used: READDIR3Request (org.apache.hadoop.nfs.nfs3.request.READDIR3Request), FileHandle (org.apache.hadoop.nfs.nfs3.FileHandle), READDIR3Response (org.apache.hadoop.nfs.nfs3.response.READDIR3Response), InetSocketAddress (java.net.InetSocketAddress), HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus), XDR (org.apache.hadoop.oncrpc.XDR), Test (org.junit.Test)
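
The four arguments passed to READDIR3Request above are the directory handle, a cookie, a cookie verifier, and the maximum reply size; zero for the cookie and verifier requests the first batch of entries, as in the NFSv3 READDIR procedure. A hedged sketch of a follow-up request for the next batch; the cookie and verifier values below are placeholders that would normally be taken from the previous reply, not the literals shown:

// Hypothetical continuation: ask for the next batch of directory entries.
// In a real client these values come from the last entry and the verifier of
// the previous READDIR3Response; 0L is used here only to keep the sketch compilable.
long lastCookie = 0L;
long lastVerifier = 0L;
READDIR3Request nextReq = new READDIR3Request(handle, lastCookie, lastVerifier, 100);
XDR nextXdr = new XDR();
nextReq.serialize(nextXdr);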

Example 40 with HdfsFileStatus

Use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.

From the class TestRpcProgramNfs3, method getFileContentsUsingNfs:

private byte[] getFileContentsUsingNfs(String fileName, int len) throws Exception {
    final HdfsFileStatus status = nn.getRpcServer().getFileInfo(fileName);
    final long dirId = status.getFileId();
    final FileHandle handle = new FileHandle(dirId);
    final READ3Request readReq = new READ3Request(handle, 0, len);
    final XDR xdr_req = new XDR();
    readReq.serialize(xdr_req);
    final READ3Response response = nfsd.read(xdr_req.asReadOnlyWrap(), securityHandler, new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code: ", Nfs3Status.NFS3_OK, response.getStatus());
    assertTrue("expected full read", response.isEof());
    return response.getData().array();
}
Also used: READ3Request (org.apache.hadoop.nfs.nfs3.request.READ3Request), FileHandle (org.apache.hadoop.nfs.nfs3.FileHandle), InetSocketAddress (java.net.InetSocketAddress), HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus), XDR (org.apache.hadoop.oncrpc.XDR), READ3Response (org.apache.hadoop.nfs.nfs3.response.READ3Response)
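
A hypothetical caller of this helper might compare the bytes served through the NFS gateway against contents written to HDFS by other means. The expected string, the path, and the JUnit assertion below are illustrative assumptions rather than part of the original test:

// Sketch of a caller: the bytes read over NFS should match what was written.
byte[] expected = "hello".getBytes(java.nio.charset.StandardCharsets.UTF_8);
byte[] viaNfs = getFileContentsUsingNfs("/tmp/bar", expected.length);
// Compare only the requested range, since the helper returns the response
// buffer's backing array, which may be padded beyond len bytes.
org.junit.Assert.assertArrayEquals(expected,
    java.util.Arrays.copyOf(viaNfs, expected.length));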

Aggregations

HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus): 124 usages
Test (org.junit.Test): 51 usages
FileHandle (org.apache.hadoop.nfs.nfs3.FileHandle): 34 usages
IOException (java.io.IOException): 28 usages
InetSocketAddress (java.net.InetSocketAddress): 28 usages
XDR (org.apache.hadoop.oncrpc.XDR): 28 usages
AccessControlException (org.apache.hadoop.security.AccessControlException): 26 usages
Path (org.apache.hadoop.fs.Path): 23 usages
SnapshotAccessControlException (org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException): 23 usages
FileNotFoundException (java.io.FileNotFoundException): 16 usages
DFSClient (org.apache.hadoop.hdfs.DFSClient): 11 usages
DirectoryListing (org.apache.hadoop.hdfs.protocol.DirectoryListing): 11 usages
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 10 usages
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem): 9 usages
MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster): 9 usages
LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock): 8 usages
SetAttr3 (org.apache.hadoop.nfs.nfs3.request.SetAttr3): 8 usages
FileStatus (org.apache.hadoop.fs.FileStatus): 7 usages
Matchers.anyString (org.mockito.Matchers.anyString): 7 usages
Configuration (org.apache.hadoop.conf.Configuration): 6 usages