Use of org.apache.hadoop.nfs.nfs3.request.REMOVE3Request in project hadoop by apache.
In the class RpcProgramNfs3, the method remove:
@VisibleForTesting
REMOVE3Response remove(XDR xdr, SecurityHandler securityHandler, SocketAddress remoteAddress) {
  REMOVE3Response response = new REMOVE3Response(Nfs3Status.NFS3_OK);
  DFSClient dfsClient = clientCache.getDfsClient(securityHandler.getUser());
  if (dfsClient == null) {
    response.setStatus(Nfs3Status.NFS3ERR_SERVERFAULT);
    return response;
  }
  REMOVE3Request request;
  try {
    request = REMOVE3Request.deserialize(xdr);
  } catch (IOException e) {
    LOG.error("Invalid REMOVE request");
    return new REMOVE3Response(Nfs3Status.NFS3ERR_INVAL);
  }
  FileHandle dirHandle = request.getHandle();
  String fileName = request.getName();
  if (LOG.isDebugEnabled()) {
    LOG.debug("NFS REMOVE dir fileId: " + dirHandle.getFileId() + " fileName: " + fileName
        + " client: " + remoteAddress);
  }
  String dirFileIdPath = Nfs3Utils.getFileIdPath(dirHandle);
  Nfs3FileAttributes preOpDirAttr = null;
  Nfs3FileAttributes postOpDirAttr = null;
  try {
    preOpDirAttr = Nfs3Utils.getFileAttr(dfsClient, dirFileIdPath, iug);
    if (preOpDirAttr == null) {
      LOG.info("Can't get path for dir fileId: " + dirHandle.getFileId());
      return new REMOVE3Response(Nfs3Status.NFS3ERR_STALE);
    }
    WccData errWcc = new WccData(Nfs3Utils.getWccAttr(preOpDirAttr), preOpDirAttr);
    if (!checkAccessPrivilege(remoteAddress, AccessPrivilege.READ_WRITE)) {
      return new REMOVE3Response(Nfs3Status.NFS3ERR_ACCES, errWcc);
    }
    String fileIdPath = dirFileIdPath + "/" + fileName;
    HdfsFileStatus fstat = Nfs3Utils.getFileStatus(dfsClient, fileIdPath);
    if (fstat == null) {
      return new REMOVE3Response(Nfs3Status.NFS3ERR_NOENT, errWcc);
    }
    if (fstat.isDir()) {
      return new REMOVE3Response(Nfs3Status.NFS3ERR_ISDIR, errWcc);
    }
    boolean result = dfsClient.delete(fileIdPath, false);
    WccData dirWcc = Nfs3Utils.createWccData(Nfs3Utils.getWccAttr(preOpDirAttr), dfsClient,
        dirFileIdPath, iug);
    if (!result) {
      return new REMOVE3Response(Nfs3Status.NFS3ERR_ACCES, dirWcc);
    }
    return new REMOVE3Response(Nfs3Status.NFS3_OK, dirWcc);
  } catch (IOException e) {
    LOG.warn("Exception ", e);
    // Try to return correct WccData
    if (postOpDirAttr == null) {
      try {
        postOpDirAttr = Nfs3Utils.getFileAttr(dfsClient, dirFileIdPath, iug);
      } catch (IOException e1) {
        LOG.info("Can't get postOpDirAttr for " + dirFileIdPath, e1);
      }
    }
    WccData dirWcc = new WccData(Nfs3Utils.getWccAttr(preOpDirAttr), postOpDirAttr);
    int status = mapErrorStatus(e);
    return new REMOVE3Response(status, dirWcc);
  }
}
Use of org.apache.hadoop.nfs.nfs3.request.REMOVE3Request in project hadoop by apache.
In the class TestRpcProgramNfs3, the method testRemove:
@Test(timeout = 60000)
public void testRemove() throws Exception {
  HdfsFileStatus status = nn.getRpcServer().getFileInfo(testdir);
  long dirId = status.getFileId();
  XDR xdr_req = new XDR();
  FileHandle handle = new FileHandle(dirId);
  REMOVE3Request req = new REMOVE3Request(handle, "bar");
  req.serialize(xdr_req);
  // Attempt by an unprivileged user should fail.
  REMOVE3Response response1 = nfsd.remove(xdr_req.asReadOnlyWrap(), securityHandlerUnpriviledged,
      new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES, response1.getStatus());
  // Attempt by a privileged user should pass.
  REMOVE3Response response2 = nfsd.remove(xdr_req.asReadOnlyWrap(), securityHandler,
      new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK, response2.getStatus());
}
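The handler shown above also distinguishes other failure modes, for example NFS3ERR_NOENT when the named entry does not exist under the directory handle. The following is a minimal sketch, not part of the Hadoop sources, of a test for that branch; it reuses the same fixture and imports as testRemove (nn, testdir, nfsd, securityHandler) and assumes a hypothetical name "does-not-exist" that is absent under testdir.

@Test(timeout = 60000)
public void testRemoveNonexistentEntry() throws Exception {
  HdfsFileStatus status = nn.getRpcServer().getFileInfo(testdir);
  long dirId = status.getFileId();
  XDR xdr_req = new XDR();
  FileHandle handle = new FileHandle(dirId);
  // "does-not-exist" is assumed to be absent under testdir.
  REMOVE3Request req = new REMOVE3Request(handle, "does-not-exist");
  req.serialize(xdr_req);
  // A privileged caller passes the access check, so the handler reaches the
  // file-status lookup and reports the missing entry as NFS3ERR_NOENT.
  REMOVE3Response response = nfsd.remove(xdr_req.asReadOnlyWrap(), securityHandler,
      new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_NOENT, response.getStatus());
}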
Use of org.apache.hadoop.nfs.nfs3.request.REMOVE3Request in project hadoop by apache.
In the class REMOVE3Request, the method deserialize:
public static REMOVE3Request deserialize(XDR xdr) throws IOException {
  FileHandle handle = readHandle(xdr);
  String name = xdr.readString();
  return new REMOVE3Request(handle, name);
}
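deserialize is the counterpart of the serialize call used in testRemove above: it reads the directory file handle, then the entry name, from the XDR stream. Below is a minimal round-trip sketch, not taken from the Hadoop sources; the file id 1L and the helper method name are illustrative, and the asserts are only example checks (enabled with -ea).

static void roundTripExample() throws IOException {
  // Build a request against an arbitrary directory file id and serialize it.
  FileHandle handle = new FileHandle(1L);
  REMOVE3Request original = new REMOVE3Request(handle, "bar");
  XDR xdr = new XDR();
  original.serialize(xdr);
  // Read it back from a read-only view of the same buffer, as the server does.
  REMOVE3Request copy = REMOVE3Request.deserialize(xdr.asReadOnlyWrap());
  assert copy.getHandle().getFileId() == original.getHandle().getFileId();
  assert "bar".equals(copy.getName());
}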