Use of com.jcraft.jsch.SftpException in project hadoop by apache.
Class SFTPFileSystem, method rename.
/**
 * Convenience method, so that we don't open a new connection when using this
 * method from within another method. Otherwise every API invocation incurs
 * the overhead of opening/closing a TCP connection.
 *
 * @param channel the already-connected SFTP channel to reuse
 * @param src the path to rename from
 * @param dst the path to rename to
 * @return true if the rename succeeded, false otherwise
 * @throws IOException if the remote working directory cannot be read, the
 *         source does not exist, or the destination already exists
 */
private boolean rename(ChannelSftp channel, Path src, Path dst)
    throws IOException {
  Path workDir;
  try {
    workDir = new Path(channel.pwd());
  } catch (SftpException e) {
    throw new IOException(e);
  }
  Path absoluteSrc = makeAbsolute(workDir, src);
  Path absoluteDst = makeAbsolute(workDir, dst);
  if (!exists(channel, absoluteSrc)) {
    throw new IOException(String.format(E_SPATH_NOTEXIST, src));
  }
  if (exists(channel, absoluteDst)) {
    throw new IOException(String.format(E_DPATH_EXIST, dst));
  }
  boolean renamed = true;
  try {
    channel.cd("/");
    // Use the absolute paths computed above so that relative inputs still
    // resolve correctly after changing to the root directory.
    channel.rename(absoluteSrc.toUri().getPath(), absoluteDst.toUri().getPath());
  } catch (SftpException e) {
    renamed = false;
  }
  return renamed;
}
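The rename helper above relies on a private exists(channel, path) check that is not included in this excerpt. As a rough sketch only, such a check could be built directly on JSch's lstat, mapping the SSH_FX_NO_SUCH_FILE error id to false and wrapping any other SftpException; the method below is an assumed helper, not the actual Hadoop implementation.

// Assumed helper (not the Hadoop source): probe a path with lstat and treat
// JSch's "no such file" error as "does not exist"; any other SftpException is
// a real failure and is rethrown as an IOException.
private boolean exists(ChannelSftp channel, Path path) throws IOException {
  try {
    channel.lstat(path.toUri().getPath());
    return true;
  } catch (SftpException e) {
    if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE) {
      return false;
    }
    throw new IOException(e);
  }
}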
Use of com.jcraft.jsch.SftpException in project hadoop by apache.
Class SFTPFileSystem, method create.
/**
 * A stream obtained via this call must be closed before using other APIs of
 * this class or else the invocation will block.
 */
@Override
public FSDataOutputStream create(Path f, FsPermission permission,
    boolean overwrite, int bufferSize, short replication, long blockSize,
    Progressable progress) throws IOException {
  final ChannelSftp client = connect();
  Path workDir;
  try {
    workDir = new Path(client.pwd());
  } catch (SftpException e) {
    throw new IOException(e);
  }
  Path absolute = makeAbsolute(workDir, f);
  if (exists(client, f)) {
    if (overwrite) {
      delete(client, f, false);
    } else {
      disconnect(client);
      throw new IOException(String.format(E_FILE_EXIST, f));
    }
  }
  Path parent = absolute.getParent();
  if (parent == null || !mkdirs(client, parent, FsPermission.getDefault())) {
    parent = (parent == null) ? new Path("/") : parent;
    disconnect(client);
    throw new IOException(String.format(E_CREATE_DIR, parent));
  }
  OutputStream os;
  try {
    client.cd(parent.toUri().getPath());
    os = client.put(f.getName());
  } catch (SftpException e) {
    throw new IOException(e);
  }
  FSDataOutputStream fos = new FSDataOutputStream(os, statistics) {
    @Override
    public void close() throws IOException {
      super.close();
      disconnect(client);
    }
  };
  return fos;
}
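As the javadoc warns, the stream returned here pins the SFTP channel until it is closed. The snippet below is an illustrative caller-side sketch of that usage pattern; the host, target path, and written bytes are made up, and it assumes the standard Hadoop client classes (Configuration, FileSystem, Path).

// Illustrative caller code (assumed, not part of Hadoop): finish writing and
// close the stream before issuing any further calls on the same filesystem,
// since close() is what disconnects the underlying channel.
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create("sftp://example-host/"), conf);
Path target = new Path("/upload/example.txt");
try (FSDataOutputStream out = fs.create(target, true)) {
  out.writeBytes("hello");      // data flows through the stream from ChannelSftp.put()
}                               // close() runs here; the channel is disconnected
FileStatus status = fs.getFileStatus(target);  // safe only after the stream is closed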
Use of com.jcraft.jsch.SftpException in project hadoop by apache.
Class SFTPFileSystem, method mkdirs.
/**
 * Convenience method, so that we don't open a new connection when using this
 * method from within another method. Otherwise every API invocation incurs
 * the overhead of opening/closing a TCP connection.
 */
private boolean mkdirs(ChannelSftp client, Path file, FsPermission permission)
    throws IOException {
  boolean created = true;
  Path workDir;
  try {
    workDir = new Path(client.pwd());
  } catch (SftpException e) {
    throw new IOException(e);
  }
  Path absolute = makeAbsolute(workDir, file);
  String pathName = absolute.getName();
  if (!exists(client, absolute)) {
    // Recursively create the parent first, then create this directory in it.
    Path parent = absolute.getParent();
    created =
        (parent == null || mkdirs(client, parent, FsPermission.getDefault()));
    if (created) {
      String parentDir = parent.toUri().getPath();
      try {
        client.cd(parentDir);
        client.mkdir(pathName);
      } catch (SftpException e) {
        throw new IOException(
            String.format(E_MAKE_DIR_FORPATH, pathName, parentDir));
      }
    }
  } else if (isFile(client, absolute)) {
    throw new IOException(String.format(E_DIR_CREATE_FROMFILE, absolute));
  }
  return created;
}
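The wrapper that hands this helper its channel is not shown in the excerpt. Based on the connect()/disconnect() pattern visible in create() above, a plausible public entry point might look like the following; this is an assumed sketch, not the verbatim Hadoop method.

// Assumed public entry point, following the connect/disconnect pattern used by
// create() above: open one channel, delegate to the private helper, and always
// release the channel afterwards.
@Override
public boolean mkdirs(Path f, FsPermission permission) throws IOException {
  ChannelSftp client = connect();
  try {
    return mkdirs(client, f, permission);
  } finally {
    disconnect(client);
  }
}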
Use of com.jcraft.jsch.SftpException in project hadoop by apache.
Class SFTPFileSystem, method delete.
/**
 * Convenience method, so that we don't open a new connection when using this
 * method from within another method. Otherwise every API invocation incurs
 * the overhead of opening/closing a TCP connection.
 */
private boolean delete(ChannelSftp channel, Path file, boolean recursive)
    throws IOException {
  Path workDir;
  try {
    workDir = new Path(channel.pwd());
  } catch (SftpException e) {
    throw new IOException(e);
  }
  Path absolute = makeAbsolute(workDir, file);
  String pathName = absolute.toUri().getPath();
  FileStatus fileStat = null;
  try {
    fileStat = getFileStatus(channel, absolute);
  } catch (FileNotFoundException e) {
    // File not found, so there is nothing to delete; report false, matching
    // the FileSystem.delete contract for non-existent paths.
    return false;
  }
  if (!fileStat.isDirectory()) {
    boolean status = true;
    try {
      channel.rm(pathName);
    } catch (SftpException e) {
      status = false;
    }
    return status;
  } else {
    boolean status = true;
    FileStatus[] dirEntries = listStatus(channel, absolute);
    if (dirEntries != null && dirEntries.length > 0) {
      if (!recursive) {
        throw new IOException(String.format(E_DIR_NOTEMPTY, file));
      }
      // Delete the directory contents before removing the directory itself.
      for (FileStatus entry : dirEntries) {
        delete(channel, new Path(absolute, entry.getPath()), recursive);
      }
    }
    try {
      channel.rmdir(pathName);
    } catch (SftpException e) {
      status = false;
    }
    return status;
  }
}
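The delete helper folds every SftpException into a false return, which hides the reason for the failure. If a caller needs to tell "already gone" apart from a genuine server error, one hedged alternative for the file branch is to inspect the JSch error id, as sketched below; this is an alternative, not the Hadoop code.

// Hedged alternative (not Hadoop's behaviour): treat "no such file" as an
// already-satisfied delete and surface every other SftpException instead of
// silently returning false.
try {
  channel.rm(pathName);
  return true;
} catch (SftpException e) {
  if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE) {
    return true;  // nothing left to remove
  }
  throw new IOException("Failed to delete " + pathName, e);
}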
Use of com.jcraft.jsch.SftpException in project camel by apache.
Class SftpOperations, method retrieveFileToFileInLocalWorkDirectory.
@SuppressWarnings("unchecked")
private boolean retrieveFileToFileInLocalWorkDirectory(String name, Exchange exchange)
    throws GenericFileOperationFailedException {
  File temp;
  File local = new File(endpoint.getLocalWorkDirectory());
  OutputStream os;
  GenericFile<ChannelSftp.LsEntry> file =
      (GenericFile<ChannelSftp.LsEntry>) exchange.getProperty(FileComponent.FILE_EXCHANGE_FILE);
  ObjectHelper.notNull(file, "Exchange should have the " + FileComponent.FILE_EXCHANGE_FILE + " set");
  try {
    // use relative filename in local work directory
    String relativeName = file.getRelativeFilePath();
    temp = new File(local, relativeName + ".inprogress");
    local = new File(local, relativeName);
    // create directory to local work file
    local.mkdirs();
    // delete any existing files
    if (temp.exists()) {
      if (!FileUtil.deleteFile(temp)) {
        throw new GenericFileOperationFailedException("Cannot delete existing local work file: " + temp);
      }
    }
    if (local.exists()) {
      if (!FileUtil.deleteFile(local)) {
        throw new GenericFileOperationFailedException("Cannot delete existing local work file: " + local);
      }
    }
    // create new temp local work file
    if (!temp.createNewFile()) {
      throw new GenericFileOperationFailedException("Cannot create new local work file: " + temp);
    }
    // store content as a file in the local work directory in the temp handle
    os = new FileOutputStream(temp);
    // set header with the path to the local work file
    exchange.getIn().setHeader(Exchange.FILE_LOCAL_WORK_PATH, local.getPath());
  } catch (Exception e) {
    // propagate the underlying cause so the failure is diagnosable
    throw new GenericFileOperationFailedException("Cannot create new local work file: " + local, e);
  }
  String currentDir = null;
  try {
    // store the java.io.File handle as the body
    file.setBody(local);
    String remoteName = name;
    if (endpoint.getConfiguration().isStepwise()) {
      // remember current directory
      currentDir = getCurrentDirectory();
      // change directory to path where the file is to be retrieved
      // (must do this as some FTP servers cannot retrieve using absolute path)
      String path = FileUtil.onlyPath(name);
      if (path != null) {
        changeCurrentDirectory(path);
      }
      // remote name is now only the file name as we just changed directory
      remoteName = FileUtil.stripPath(name);
    }
    channel.get(remoteName, os);
  } catch (SftpException e) {
    LOG.trace("Error occurred during retrieving file: {} to local directory. Deleting local work file: {}", name, temp);
    // failed to retrieve the file so we need to close streams and delete in progress file
    // must close stream before deleting file
    IOHelper.close(os, "retrieve: " + name, LOG);
    boolean deleted = FileUtil.deleteFile(temp);
    if (!deleted) {
      LOG.warn("Error occurred during retrieving file: " + name + " to local directory. Cannot delete local work file: " + temp);
    }
    throw new GenericFileOperationFailedException("Cannot retrieve file: " + name, e);
  } finally {
    IOHelper.close(os, "retrieve: " + name, LOG);
    // change back to current directory if we changed directory
    if (currentDir != null) {
      changeCurrentDirectory(currentDir);
    }
  }
  LOG.debug("Retrieve file to local work file result: true");
  // operation went okay so rename temp to local after we have retrieved the data
  LOG.trace("Renaming local in progress file from: {} to: {}", temp, local);
  try {
    if (!FileUtil.renameFile(temp, local, false)) {
      throw new GenericFileOperationFailedException("Cannot rename local work file from: " + temp + " to: " + local);
    }
  } catch (IOException e) {
    throw new GenericFileOperationFailedException("Cannot rename local work file from: " + temp + " to: " + local, e);
  }
  return true;
}
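When isStepwise() is enabled, the method above leans on getCurrentDirectory() and changeCurrentDirectory() helpers that are not part of this excerpt. A minimal sketch of how such helpers can wrap the JSch channel and translate SftpException into Camel's GenericFileOperationFailedException is shown below; this is an assumed shape, not necessarily Camel's exact implementation.

// Hypothetical sketch of the stepwise-navigation helpers assumed above:
// pwd()/cd() on the JSch channel, with SftpException translated into the
// exception type the rest of SftpOperations throws.
private String getCurrentDirectory() throws GenericFileOperationFailedException {
  try {
    return channel.pwd();
  } catch (SftpException e) {
    throw new GenericFileOperationFailedException("Cannot get current directory", e);
  }
}

private void changeCurrentDirectory(String path) throws GenericFileOperationFailedException {
  try {
    channel.cd(path);
  } catch (SftpException e) {
    throw new GenericFileOperationFailedException("Cannot change directory to: " + path, e);
  }
}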