Use of org.apache.hadoop.nfs.nfs3.response.SYMLINK3Response in project Hadoop by Apache.
The class SYMLINK3Response, method deserialize.
public static SYMLINK3Response deserialize(XDR xdr) {
  int status = xdr.readInt();
  FileHandle objFileHandle = new FileHandle();
  Nfs3FileAttributes objPostOpAttr = null;
  WccData dirWcc;
  if (status == Nfs3Status.NFS3_OK) {
    // post_op_fh3: skip the "handle follows" flag, then read the new file handle.
    xdr.readBoolean();
    objFileHandle.deserialize(xdr);
    // post_op_attr: skip the "attributes follow" flag, then read the attributes.
    xdr.readBoolean();
    objPostOpAttr = Nfs3FileAttributes.deserialize(xdr);
  }
  // wcc_data for the parent directory is present on both success and failure.
  dirWcc = WccData.deserialize(xdr);
  return new SYMLINK3Response(status, objFileHandle, objPostOpAttr, dirWcc);
}
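For context, here is a minimal round-trip sketch of the reply body this deserializer expects. The hand-written XDR fields (status, the two "follows" flags, the handle, the attributes, the directory wcc_data) simply mirror the reads above; the literal file id and the null-tolerant WccData constructor are assumptions for illustration, not code from the Hadoop tree.
// Illustrative sketch only (not from the Hadoop tree): hand-build the XDR a
// successful SYMLINK3 reply body would carry, then read it back with the
// deserializer shown above.
static SYMLINK3Response roundTripExample() {
  XDR xdr = new XDR();
  xdr.writeInt(Nfs3Status.NFS3_OK);          // status
  xdr.writeBoolean(true);                    // post_op_fh3: handle follows
  new FileHandle(12345L).serialize(xdr);     // illustrative file id for the new symlink
  xdr.writeBoolean(true);                    // post_op_attr: attributes follow
  new Nfs3FileAttributes().serialize(xdr);   // default post-op attributes
  new WccData(null, null).serialize(xdr);    // dir_wcc (assumes the ctor tolerates nulls)
  return SYMLINK3Response.deserialize(xdr.asReadOnlyWrap());
}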
Use of org.apache.hadoop.nfs.nfs3.response.SYMLINK3Response in project Hadoop by Apache.
The class RpcProgramNfs3, method symlink.
@VisibleForTesting
SYMLINK3Response symlink(XDR xdr, SecurityHandler securityHandler, SocketAddress remoteAddress) {
  SYMLINK3Response response = new SYMLINK3Response(Nfs3Status.NFS3_OK);
  if (!checkAccessPrivilege(remoteAddress, AccessPrivilege.READ_WRITE)) {
    response.setStatus(Nfs3Status.NFS3ERR_ACCES);
    return response;
  }
  DFSClient dfsClient = clientCache.getDfsClient(securityHandler.getUser());
  if (dfsClient == null) {
    response.setStatus(Nfs3Status.NFS3ERR_SERVERFAULT);
    return response;
  }
  SYMLINK3Request request;
  try {
    request = SYMLINK3Request.deserialize(xdr);
  } catch (IOException e) {
    LOG.error("Invalid SYMLINK request");
    response.setStatus(Nfs3Status.NFS3ERR_INVAL);
    return response;
  }
  FileHandle dirHandle = request.getHandle();
  String name = request.getName();
  String symData = request.getSymData();
  String linkDirIdPath = Nfs3Utils.getFileIdPath(dirHandle);
  // Don't do any name check on the source path; leave that to HDFS.
  String linkIdPath = linkDirIdPath + "/" + name;
  if (LOG.isDebugEnabled()) {
    LOG.debug("NFS SYMLINK, target: " + symData + " link: " + linkIdPath + " client: " + remoteAddress);
  }
  try {
    WccData dirWcc = response.getDirWcc();
    WccAttr preOpAttr = Nfs3Utils.getWccAttr(dfsClient, linkDirIdPath);
    dirWcc.setPreOpAttr(preOpAttr);
    dfsClient.createSymlink(symData, linkIdPath, false);
    // Setting symlink attributes is treated as changing the attributes of the
    // target file, so there is no need to set attributes on the link here
    // after it is created.
    HdfsFileStatus linkstat = dfsClient.getFileLinkInfo(linkIdPath);
    Nfs3FileAttributes objAttr = Nfs3Utils.getNfs3FileAttrFromFileStatus(linkstat, iug);
    dirWcc.setPostOpAttr(Nfs3Utils.getFileAttr(dfsClient, linkDirIdPath, iug));
    return new SYMLINK3Response(Nfs3Status.NFS3_OK, new FileHandle(objAttr.getFileId()), objAttr, dirWcc);
  } catch (IOException e) {
    LOG.warn("Exception: " + e);
    int status = mapErrorStatus(e);
    response.setStatus(status);
    return response;
  }
}
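The tests below exercise this handler end to end; as a condensed illustration, a caller can build the request and inspect the richer fields of the response roughly as follows. The names nfsd, securityHandler and dirId are borrowed from the test fixtures further down; the link name and target strings are placeholders.
// Condensed caller sketch (placeholder link name/target; nfsd, securityHandler
// and dirId as set up in the tests below).
XDR xdr = new XDR();
new SYMLINK3Request(new FileHandle(dirId), "link-name", new SetAttr3(), "target-path").serialize(xdr);
SYMLINK3Response resp = nfsd.symlink(xdr.asReadOnlyWrap(), securityHandler, new InetSocketAddress("localhost", 1234));
if (resp.getStatus() == Nfs3Status.NFS3_OK) {
  FileHandle newLink = resp.getObjFileHandle(); // handle of the created symlink
  WccData dirWcc = resp.getDirWcc();            // pre/post attributes of the parent directory
}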
Use of org.apache.hadoop.nfs.nfs3.response.SYMLINK3Response in project Hadoop by Apache.
The class TestRpcProgramNfs3, method testReadlink.
@Test(timeout = 60000)
public void testReadlink() throws Exception {
  // Create a symlink first.
  HdfsFileStatus status = nn.getRpcServer().getFileInfo(testdir);
  long dirId = status.getFileId();
  XDR xdr_req = new XDR();
  FileHandle handle = new FileHandle(dirId);
  SYMLINK3Request req = new SYMLINK3Request(handle, "fubar", new SetAttr3(), "bar");
  req.serialize(xdr_req);
  SYMLINK3Response response = nfsd.symlink(xdr_req.asReadOnlyWrap(), securityHandler, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK, response.getStatus());
  // Now perform readlink operations.
  FileHandle handle2 = response.getObjFileHandle();
  XDR xdr_req2 = new XDR();
  READLINK3Request req2 = new READLINK3Request(handle2);
  req2.serialize(xdr_req2);
  // Attempt by an unprivileged user should fail.
  READLINK3Response response1 = nfsd.readlink(xdr_req2.asReadOnlyWrap(), securityHandlerUnpriviledged, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES, response1.getStatus());
  // Attempt by a privileged user should pass.
  READLINK3Response response2 = nfsd.readlink(xdr_req2.asReadOnlyWrap(), securityHandler, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK, response2.getStatus());
}
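To also assert on the resolved target rather than only the status, a follow-up check along these lines could be appended to the test; it assumes READLINK3Response exposes the target path bytes through a getData() accessor.
// Hedged follow-up: the readlink data should resolve to the original target "bar"
// (assumes READLINK3Response#getData() returns the raw path bytes).
assertEquals("Incorrect symlink target:", "bar",
    new String(response2.getData(), StandardCharsets.UTF_8));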
Use of org.apache.hadoop.nfs.nfs3.response.SYMLINK3Response in project Hadoop by Apache.
The class TestRpcProgramNfs3, method testSymlink.
@Test(timeout = 60000)
public void testSymlink() throws Exception {
  HdfsFileStatus status = nn.getRpcServer().getFileInfo(testdir);
  long dirId = status.getFileId();
  XDR xdr_req = new XDR();
  FileHandle handle = new FileHandle(dirId);
  SYMLINK3Request req = new SYMLINK3Request(handle, "fubar", new SetAttr3(), "bar");
  req.serialize(xdr_req);
  // Attempt by an unprivileged user should fail.
  SYMLINK3Response response1 = nfsd.symlink(xdr_req.asReadOnlyWrap(), securityHandlerUnpriviledged, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES, response1.getStatus());
  // Attempt by a privileged user should pass.
  SYMLINK3Response response2 = nfsd.symlink(xdr_req.asReadOnlyWrap(), securityHandler, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK, response2.getStatus());
}
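As an optional cross-check, the created link can be verified directly against HDFS. This sketch assumes the test fixture exposes hdfs (a DistributedFileSystem for the mini cluster) alongside testdir, as in the surrounding test class, and that the relative target "bar" comes back unchanged.
// Optional cross-check against HDFS (assumes 'hdfs' and 'testdir' from the fixture).
FileStatus linkStatus = hdfs.getFileLinkStatus(new Path(testdir + "/fubar"));
assertTrue("Expected a symlink", linkStatus.isSymlink());
assertEquals("Incorrect symlink target:", "bar", linkStatus.getSymlink().getName());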