Use of org.apache.hadoop.ipc.RemoteException in project hadoop by apache.
From the class TestSnapshotNameWithInvalidCharacters, method TestSnapshotWithInvalidName.
@Test(timeout = 600000)
public void TestSnapshotWithInvalidName() throws Exception {
  Path file1 = new Path(dir1, file1Name);
  DFSTestUtil.createFile(hdfs, file1, BLOCKSIZE, REPLICATION, SEED);
  hdfs.allowSnapshot(dir1);
  try {
    hdfs.createSnapshot(dir1, snapshot1);
  } catch (RemoteException e) {
    // Expected: the NameNode rejects the invalid snapshot name, and the
    // failure arrives at the client wrapped in a RemoteException.
  }
}
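For contrast, a minimal hypothetical variant (not part of the Hadoop test suite) that makes the expected failure explicit instead of swallowing it; hdfs, dir1, and snapshot1 come from the surrounding test class:
import org.apache.hadoop.ipc.RemoteException;
import org.junit.Assert;
import org.junit.Test;

// Hypothetical variant of the test above: fail if the invalid name is
// accepted, and inspect the server-side exception carried in the wrapper.
@Test(timeout = 600000)
public void testSnapshotWithInvalidNameAsserted() throws Exception {
  hdfs.allowSnapshot(dir1);
  try {
    hdfs.createSnapshot(dir1, snapshot1);
    Assert.fail("createSnapshot should have rejected the name: " + snapshot1);
  } catch (RemoteException e) {
    // getClassName() reports the fully qualified name of the exception
    // thrown on the NameNode side.
    Assert.assertNotNull(e.getClassName());
  }
}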
Use of org.apache.hadoop.ipc.RemoteException in project hadoop by apache.
From the class TestSnapshotNameWithInvalidCharacters, method TestSnapshotWithInvalidName1.
@Test(timeout = 60000)
public void TestSnapshotWithInvalidName1() throws Exception {
  Path file1 = new Path(dir1, file1Name);
  DFSTestUtil.createFile(hdfs, file1, BLOCKSIZE, REPLICATION, SEED);
  hdfs.allowSnapshot(dir1);
  try {
    hdfs.createSnapshot(dir1, snapshot2);
  } catch (RemoteException e) {
    // Expected: the invalid snapshot name is rejected by the NameNode.
  }
}
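The tests above swallow the exception; the DFSClient methods below instead unwrap it. A hedged sketch of that unwrap pattern in the same test context, assuming SnapshotException as one plausible server-side type:
import java.io.IOException;
import org.apache.hadoop.hdfs.protocol.SnapshotException;
import org.apache.hadoop.ipc.RemoteException;

try {
  hdfs.createSnapshot(dir1, snapshot2);
} catch (RemoteException re) {
  // unwrapRemoteException re-creates the server-side exception locally when
  // its class name matches one of the lookup types (which must be
  // IOException subclasses); otherwise it returns the RemoteException itself.
  IOException unwrapped = re.unwrapRemoteException(SnapshotException.class);
  throw unwrapped;
}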
Use of org.apache.hadoop.ipc.RemoteException in project hadoop by apache.
From the class DFSClient, method createSymlink.
/**
 * Creates a symbolic link.
 *
 * @see ClientProtocol#createSymlink(String, String, FsPermission, boolean)
 */
public void createSymlink(String target, String link, boolean createParent) throws IOException {
  checkOpen();
  try (TraceScope ignored = newPathTraceScope("createSymlink", target)) {
    final FsPermission dirPerm = applyUMask(null);
    namenode.createSymlink(target, link, dirPerm, createParent);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        FileAlreadyExistsException.class,
        FileNotFoundException.class,
        ParentNotDirectoryException.class,
        NSQuotaExceededException.class,
        DSQuotaExceededException.class,
        QuotaByStorageTypeExceededException.class,
        UnresolvedPathException.class,
        SnapshotAccessControlException.class);
  }
}
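A hedged usage sketch of the public API that reaches this method: FileContext#createSymlink routes to DFSClient#createSymlink on HDFS. The paths are hypothetical, and note that symlink support is disabled in some Hadoop releases:
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;

public class SymlinkExample {
  public static void main(String[] args) throws IOException {
    FileContext fc = FileContext.getFileContext(new Configuration());
    Path target = new Path("/data/current");
    Path link = new Path("/data/latest");
    // createParent=true creates missing ancestors of the link path.
    // May throw UnsupportedOperationException where symlinks are disabled.
    fc.createSymlink(target, link, true);
  }
}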
Use of org.apache.hadoop.ipc.RemoteException in project hadoop by apache.
From the class DFSClient, method primitiveMkdir.
/**
 * Same as {@link #mkdirs(String, FsPermission, boolean)} except
 * that the permissions have already been masked against umask.
 */
public boolean primitiveMkdir(String src, FsPermission absPermission, boolean createParent) throws IOException {
  checkOpen();
  if (absPermission == null) {
    absPermission = applyUMaskDir(null);
  }
  LOG.debug("{}: masked={}", src, absPermission);
  try (TraceScope ignored = tracer.newScope("mkdir")) {
    return namenode.mkdirs(src, absPermission, createParent);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        InvalidPathException.class,
        FileAlreadyExistsException.class,
        FileNotFoundException.class,
        ParentNotDirectoryException.class,
        SafeModeException.class,
        NSQuotaExceededException.class,
        DSQuotaExceededException.class,
        QuotaByStorageTypeExceededException.class,
        UnresolvedPathException.class,
        SnapshotAccessControlException.class);
  }
}
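A hedged caller-side sketch: because primitiveMkdir unwraps the RemoteException into concrete types, application code can catch those types directly. The path and permission values here are hypothetical:
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.AccessControlException;

public class MkdirExample {
  public static void main(String[] args) throws IOException {
    FileSystem fs = FileSystem.get(new Configuration());
    try {
      fs.mkdirs(new Path("/projects/new"), new FsPermission((short) 0755));
    } catch (AccessControlException ace) {
      // Catchable as the concrete type thanks to unwrapRemoteException.
      System.err.println("Permission denied: " + ace.getMessage());
    }
  }
}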
Use of org.apache.hadoop.ipc.RemoteException in project hadoop by apache.
From the class DFSClient, method callAppend.
/** Method to get the stream returned by an append call. */
private DFSOutputStream callAppend(String src, EnumSet<CreateFlag> flag, Progressable progress, String[] favoredNodes) throws IOException {
  CreateFlag.validateForAppend(flag);
  try {
    final LastBlockWithStatus blkWithStatus = callAppend(src, new EnumSetWritable<>(flag, CreateFlag.class));
    HdfsFileStatus status = blkWithStatus.getFileStatus();
    if (status == null) {
      LOG.debug("NameNode is on an older version, request file info with additional RPC call for file: {}", src);
      status = getFileInfo(src);
    }
    return DFSOutputStream.newStreamForAppend(this, src, flag, progress, blkWithStatus.getLastBlock(), status, dfsClientConf.createChecksum(null), favoredNodes);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
        FileNotFoundException.class,
        SafeModeException.class,
        DSQuotaExceededException.class,
        QuotaByStorageTypeExceededException.class,
        UnsupportedOperationException.class,
        UnresolvedPathException.class,
        SnapshotAccessControlException.class);
  }
}
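A hedged usage sketch: FileSystem#append on HDFS eventually reaches callAppend above. The path is hypothetical, and the file must already exist for append to succeed:
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class AppendExample {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    // append() throws FileNotFoundException if the file does not exist.
    try (FSDataOutputStream out = fs.append(new Path("/logs/app.log"))) {
      out.writeBytes("one more line\n");
    }
  }
}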