use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.
the class TestRpcProgramNfs3 method testLookup.
@Test(timeout = 60000)
public void testLookup() throws Exception {
  HdfsFileStatus status = nn.getRpcServer().getFileInfo(testdir);
  long dirId = status.getFileId();
  FileHandle handle = new FileHandle(dirId);
  LOOKUP3Request lookupReq = new LOOKUP3Request(handle, "bar");
  XDR xdr_req = new XDR();
  lookupReq.serialize(xdr_req);

  // Attempt by an unprivileged user should fail.
  LOOKUP3Response response1 = nfsd.lookup(xdr_req.asReadOnlyWrap(),
      securityHandlerUnpriviledged, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code", Nfs3Status.NFS3ERR_ACCES,
      response1.getStatus());

  // Attempt by a privileged user should pass.
  LOOKUP3Response response2 = nfsd.lookup(xdr_req.asReadOnlyWrap(),
      securityHandler, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code", Nfs3Status.NFS3_OK,
      response2.getStatus());
}
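The same handle can drive other NFS3 procedures. A minimal sketch, assuming GETATTR3Request and nfsd.getattr mirror the request/serialize/dispatch pattern of the lookup call above (not taken verbatim from the test class):

  // Query the attributes of the same directory handle as a privileged user.
  GETATTR3Request getattrReq = new GETATTR3Request(handle);
  XDR getattrXdr = new XDR();
  getattrReq.serialize(getattrXdr);
  GETATTR3Response getattrRes = nfsd.getattr(getattrXdr.asReadOnlyWrap(),
      securityHandler, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code", Nfs3Status.NFS3_OK,
      getattrRes.getStatus());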
use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.
the class TestRpcProgramNfs3 method createFileUsingNfs.
private void createFileUsingNfs(String fileName, byte[] buffer)
    throws Exception {
  DFSTestUtil.createFile(hdfs, new Path(fileName), 0, (short) 1, 0);
  final HdfsFileStatus status = nn.getRpcServer().getFileInfo(fileName);
  final long dirId = status.getFileId();
  final FileHandle handle = new FileHandle(dirId);
  final WRITE3Request writeReq = new WRITE3Request(handle, 0, buffer.length,
      WriteStableHow.DATA_SYNC, ByteBuffer.wrap(buffer));
  final XDR xdr_req = new XDR();
  writeReq.serialize(xdr_req);
  // The write reply is sent back asynchronously, so the direct return value
  // is expected to be null.
  final WRITE3Response response = nfsd.write(xdr_req.asReadOnlyWrap(), null, 1,
      securityHandler, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect response: ", null, response);
}
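A hedged usage sketch (the path and payload are illustrative only; the commit helper is the one shown further below in this section):

  // Illustrative only: push a small payload through the NFS gateway and
  // then commit it with the helper defined later in this section.
  byte[] data = new byte[] { 1, 2, 3, 4, 5 };
  createFileUsingNfs("/tmp/nfs-sample", data);
  commit("/tmp/nfs-sample", data.length);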
use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.
the class TestRpcProgramNfs3 method testSetattr.
@Test(timeout = 60000)
public void testSetattr() throws Exception {
  HdfsFileStatus status = nn.getRpcServer().getFileInfo(testdir);
  long dirId = status.getFileId();
  XDR xdr_req = new XDR();
  FileHandle handle = new FileHandle(dirId);
  SetAttr3 symAttr = new SetAttr3(0, 1, 0, 0, null, null,
      EnumSet.of(SetAttrField.UID));
  SETATTR3Request req = new SETATTR3Request(handle, symAttr, false, null);
  req.serialize(xdr_req);

  // Attempt by an unprivileged user should fail.
  SETATTR3Response response1 = nfsd.setattr(xdr_req.asReadOnlyWrap(),
      securityHandlerUnpriviledged, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code", Nfs3Status.NFS3ERR_ACCES,
      response1.getStatus());

  // Attempt by a privileged user should pass.
  SETATTR3Response response2 = nfsd.setattr(xdr_req.asReadOnlyWrap(),
      securityHandler, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code", Nfs3Status.NFS3_OK,
      response2.getStatus());
}
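Other attribute fields can be requested the same way. A minimal sketch that reuses the SetAttr3 constructor order shown above to ask for a mode change instead of a UID change (values are illustrative):

  // Set the permission bits to 0755 instead of changing the owner.
  SetAttr3 modeAttr = new SetAttr3(0755, 0, 0, 0, null, null,
      EnumSet.of(SetAttrField.MODE));
  SETATTR3Request modeReq = new SETATTR3Request(handle, modeAttr, false, null);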
use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.
the class TestRpcProgramNfs3 method testWrite.
@Test(timeout = 60000)
public void testWrite() throws Exception {
  HdfsFileStatus status = nn.getRpcServer().getFileInfo("/tmp/bar");
  long dirId = status.getFileId();
  FileHandle handle = new FileHandle(dirId);
  byte[] buffer = new byte[10];
  for (int i = 0; i < 10; i++) {
    buffer[i] = (byte) i;
  }
  WRITE3Request writeReq = new WRITE3Request(handle, 0, 10,
      WriteStableHow.DATA_SYNC, ByteBuffer.wrap(buffer));
  XDR xdr_req = new XDR();
  writeReq.serialize(xdr_req);

  // Attempt by an unprivileged user should fail.
  WRITE3Response response1 = nfsd.write(xdr_req.asReadOnlyWrap(), null, 1,
      securityHandlerUnpriviledged, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES,
      response1.getStatus());

  // Attempt by a privileged user should pass; the write reply is returned
  // asynchronously, so the direct return value is expected to be null.
  WRITE3Response response2 = nfsd.write(xdr_req.asReadOnlyWrap(), null, 1,
      securityHandler, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect response:", null, response2);
}
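A minimal read-back sketch, assuming READ3Request and nfsd.read follow the same request/serialize/dispatch pattern as the calls above (not taken verbatim from the test class):

  // Read the ten bytes back with the privileged handler.
  READ3Request readReq = new READ3Request(handle, 0, 10);
  XDR readXdr = new XDR();
  readReq.serialize(readXdr);
  READ3Response readRes = nfsd.read(readXdr.asReadOnlyWrap(), securityHandler,
      new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK,
      readRes.getStatus());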
use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.
the class TestRpcProgramNfs3 method commit.
private void commit(String fileName, int len) throws Exception {
  final HdfsFileStatus status = nn.getRpcServer().getFileInfo(fileName);
  final long dirId = status.getFileId();
  final FileHandle handle = new FileHandle(dirId);
  final XDR xdr_req = new XDR();
  final COMMIT3Request req = new COMMIT3Request(handle, 0, len);
  req.serialize(xdr_req);
  Channel ch = Mockito.mock(Channel.class);
  // The COMMIT reply comes back over the (mocked) channel rather than as a
  // direct return value, so the response here is expected to be null.
  COMMIT3Response response2 = nfsd.commit(xdr_req.asReadOnlyWrap(), ch, 1,
      securityHandler, new InetSocketAddress("localhost", 1234));
  assertEquals("Incorrect COMMIT3Response:", null, response2);
}
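A hedged follow-up sketch (assuming the standard HDFS client API; not taken from the test class): after the commit, the written bytes should also be readable through the test's DistributedFileSystem handle.

  // Read the committed bytes back through the regular HDFS client.
  FSDataInputStream in = hdfs.open(new Path(fileName));
  byte[] readBack = new byte[len];
  in.readFully(0, readBack);
  in.close();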