Use of java.io.FileNotFoundException in project groovy by apache.
The class AntlrParserPlugin, method outputASTInVariousFormsIfNeeded.
private void outputASTInVariousFormsIfNeeded(SourceUnit sourceUnit, SourceBuffer sourceBuffer) {
    // straight xstream output of AST
    // uppercase to hide from jarjar
    String formatProp = System.getProperty("ANTLR.AST".toLowerCase());
    if ("xml".equals(formatProp)) {
        saveAsXML(sourceUnit.getName(), ast);
    }
    // 'pretty printer' output of AST
    if ("groovy".equals(formatProp)) {
        try {
            PrintStream out = new PrintStream(new FileOutputStream(sourceUnit.getName() + ".pretty.groovy"));
            Visitor visitor = new SourcePrinter(out, tokenNames);
            AntlrASTProcessor treewalker = new SourceCodeTraversal(visitor);
            treewalker.process(ast);
        } catch (FileNotFoundException e) {
            System.out.println("Cannot create " + sourceUnit.getName() + ".pretty.groovy");
        }
    }
    // mindmap output of the AST, which is a really nice way of seeing the AST, folding nodes etc
if ("mindmap".equals(formatProp)) {
try {
PrintStream out = new PrintStream(new FileOutputStream(sourceUnit.getName() + ".mm"));
Visitor visitor = new MindMapPrinter(out, tokenNames);
AntlrASTProcessor treewalker = new PreOrderTraversal(visitor);
treewalker.process(ast);
} catch (FileNotFoundException e) {
System.out.println("Cannot create " + sourceUnit.getName() + ".mm");
}
}
// include original line/col info and source code on the mindmap output
if ("extendedMindmap".equals(formatProp)) {
try {
PrintStream out = new PrintStream(new FileOutputStream(sourceUnit.getName() + ".mm"));
Visitor visitor = new MindMapPrinter(out, tokenNames, sourceBuffer);
AntlrASTProcessor treewalker = new PreOrderTraversal(visitor);
treewalker.process(ast);
} catch (FileNotFoundException e) {
System.out.println("Cannot create " + sourceUnit.getName() + ".mm");
}
}
// html output of AST
if ("html".equals(formatProp)) {
try {
PrintStream out = new PrintStream(new FileOutputStream(sourceUnit.getName() + ".html"));
List<VisitorAdapter> v = new ArrayList<VisitorAdapter>();
v.add(new NodeAsHTMLPrinter(out, tokenNames));
v.add(new SourcePrinter(out, tokenNames));
Visitor visitors = new CompositeVisitor(v);
AntlrASTProcessor treewalker = new SourceCodeTraversal(visitors);
treewalker.process(ast);
} catch (FileNotFoundException e) {
System.out.println("Cannot create " + sourceUnit.getName() + ".html");
}
}
}
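Every branch above follows the same idiom: the FileOutputStream constructor is the only call that throws the checked FileNotFoundException, and the method degrades gracefully by printing a message instead of failing the compile. A minimal standalone sketch of that idiom, with a hypothetical output file name (not taken from the Groovy sources):

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.PrintStream;

public class AstDumpSketch {
    public static void main(String[] args) {
        String fileName = "Example.pretty.groovy"; // hypothetical output name
        // new FileOutputStream(String) declares FileNotFoundException, thrown when the
        // file cannot be created (missing parent directory, no write permission, etc.).
        try (PrintStream out = new PrintStream(new FileOutputStream(fileName))) {
            out.println("// pretty-printed source would be written here");
        } catch (FileNotFoundException e) {
            System.out.println("Cannot create " + fileName);
        }
    }
}

The sketch closes the stream with try-with-resources; the plugin code instead hands the open PrintStream to the tree walker to write into.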
Use of java.io.FileNotFoundException in project hadoop by apache.
The class SFTPFileSystem, method getFileStatus.
/**
* Convenience method, so that we don't open a new connection when using this
* method from within another method. Otherwise every API invocation incurs
* the overhead of opening/closing a TCP connection.
*/
@SuppressWarnings("unchecked")
private FileStatus getFileStatus(ChannelSftp client, Path file) throws IOException {
    FileStatus fileStat = null;
    Path workDir;
    try {
        workDir = new Path(client.pwd());
    } catch (SftpException e) {
        throw new IOException(e);
    }
    Path absolute = makeAbsolute(workDir, file);
    Path parentPath = absolute.getParent();
    if (parentPath == null) {
        // root directory
        // Length of root directory on server not known
        long length = -1;
        boolean isDir = true;
        int blockReplication = 1;
        // Block Size not known.
        long blockSize = DEFAULT_BLOCK_SIZE;
        // Modification time of root directory not known.
        long modTime = -1;
        Path root = new Path("/");
        return new FileStatus(length, isDir, blockReplication, blockSize, modTime,
            root.makeQualified(this.getUri(), this.getWorkingDirectory()));
    }
    String pathName = parentPath.toUri().getPath();
    Vector<LsEntry> sftpFiles;
    try {
        sftpFiles = (Vector<LsEntry>) client.ls(pathName);
    } catch (SftpException e) {
        throw new FileNotFoundException(String.format(E_FILE_NOTFOUND, file));
    }
    if (sftpFiles != null) {
        for (LsEntry sftpFile : sftpFiles) {
            if (sftpFile.getFilename().equals(file.getName())) {
                // file found in directory
                fileStat = getFileStatus(client, sftpFile, parentPath);
                break;
            }
        }
        if (fileStat == null) {
            throw new FileNotFoundException(String.format(E_FILE_NOTFOUND, file));
        }
    } else {
        throw new FileNotFoundException(String.format(E_FILE_NOTFOUND, file));
    }
    return fileStat;
}
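Both failure paths (the ls() call failing and the entry being absent from the listing) reach the caller as FileNotFoundException, so an existence check reduces to catching it. A hedged sketch of such a caller; this companion method is illustrative and not quoted from the Hadoop sources:

// Illustrative companion method (assumed, not part of the SFTPFileSystem code shown):
// an existence probe that reuses getFileStatus(ChannelSftp, Path) from above and maps
// FileNotFoundException to 'false' while letting other IOExceptions propagate.
private boolean exists(ChannelSftp client, Path file) throws IOException {
    try {
        getFileStatus(client, file);
        return true;
    } catch (FileNotFoundException e) {
        // Missing parent listing or missing entry: both mean the path is absent.
        return false;
    }
}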
Use of java.io.FileNotFoundException in project hadoop by apache.
The class RawLocalFileSystem, method truncate.
@Override
public boolean truncate(Path f, final long newLength) throws IOException {
    FileStatus status = getFileStatus(f);
    if (status == null) {
        throw new FileNotFoundException("File " + f + " not found");
    }
    if (status.isDirectory()) {
        throw new IOException("Cannot truncate a directory (=" + f + ")");
    }
    long oldLength = status.getLen();
    if (newLength > oldLength) {
        throw new IllegalArgumentException("Cannot truncate to a larger file size. Current size: " + oldLength + ", truncate size: " + newLength + ".");
    }
    try (FileOutputStream out = new FileOutputStream(pathToFile(f), true)) {
        try {
            out.getChannel().truncate(newLength);
        } catch (IOException e) {
            throw new FSError(e);
        }
    }
    return true;
}
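The try-with-resources block is the interesting part: opening the FileOutputStream with append=true prevents the open itself from clobbering the file, and the truncation is then performed on the underlying FileChannel. A self-contained sketch of that mechanism, with a hypothetical file name:

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;

public class TruncateSketch {
    public static void main(String[] args) throws IOException {
        File f = new File("data.bin"); // hypothetical local file
        if (!f.exists()) {
            // Mirrors the FileNotFoundException thrown above when the status lookup fails.
            throw new FileNotFoundException("File " + f + " not found");
        }
        long newLength = Math.min(1024, f.length());
        // append=true keeps FileOutputStream from truncating the file to zero on open;
        // the actual shrink happens through the channel.
        try (FileOutputStream out = new FileOutputStream(f, true)) {
            out.getChannel().truncate(newLength);
        }
    }
}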
Use of java.io.FileNotFoundException in project hadoop by apache.
The class RawLocalFileSystem, method listStatus.
/**
* {@inheritDoc}
*
* (<b>Note</b>: Returned list is not sorted in any given order,
* due to reliance on Java's {@link File#list()} API.)
*/
@Override
public FileStatus[] listStatus(Path f) throws IOException {
    File localf = pathToFile(f);
    FileStatus[] results;
    if (!localf.exists()) {
        throw new FileNotFoundException("File " + f + " does not exist");
    }
    if (localf.isDirectory()) {
        String[] names = FileUtil.list(localf);
        results = new FileStatus[names.length];
        int j = 0;
        for (int i = 0; i < names.length; i++) {
            try {
                // Assemble the path using the Path 3 arg constructor to make sure
                // paths with colon are properly resolved on Linux
                results[j] = getFileStatus(new Path(f, new Path(null, null, names[i])));
                j++;
            } catch (FileNotFoundException e) {
                // ignore the files not found since the dir list may have
                // changed since the names[] list was generated.
            }
        }
        if (j == names.length) {
            return results;
        }
        return Arrays.copyOf(results, j);
    }
    if (!useDeprecatedFileStatus) {
        return new FileStatus[] { getFileStatus(f) };
    }
    return new FileStatus[] { new DeprecatedRawLocalFileStatus(localf, getDefaultBlockSize(f), this) };
}
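The catch block is deliberately empty: a name returned by the directory listing may already be gone by the time it is stat-ed, and such entries are simply dropped from the result. A standalone sketch of that tolerate-concurrent-deletion pattern; the statLength helper is hypothetical:

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class ListSketch {
    // Hypothetical stand-in for a per-entry stat call that can fail with
    // FileNotFoundException when the entry disappears after the listing.
    static long statLength(File f) throws FileNotFoundException {
        if (!f.exists()) {
            throw new FileNotFoundException(f + " does not exist");
        }
        return f.length();
    }

    public static void main(String[] args) throws IOException {
        File dir = new File("."); // hypothetical directory
        String[] names = dir.list();
        if (names == null) {
            throw new FileNotFoundException(dir + " is not a listable directory");
        }
        List<Long> lengths = new ArrayList<>();
        for (String name : names) {
            try {
                lengths.add(statLength(new File(dir, name)));
            } catch (FileNotFoundException e) {
                // Entry vanished between list() and the stat; skip it, as listStatus() does.
            }
        }
        System.out.println("Found " + lengths.size() + " entries");
    }
}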
Use of java.io.FileNotFoundException in project hadoop by apache.
The class FTPFileSystem, method delete.
/**
* Convenience method, so that we don't open a new connection when using this
* method from within another method. Otherwise every API invocation incurs
* the overhead of opening/closing a TCP connection.
*/
private boolean delete(FTPClient client, Path file, boolean recursive) throws IOException {
    Path workDir = new Path(client.printWorkingDirectory());
    Path absolute = makeAbsolute(workDir, file);
    String pathName = absolute.toUri().getPath();
    try {
        FileStatus fileStat = getFileStatus(client, absolute);
        if (fileStat.isFile()) {
            return client.deleteFile(pathName);
        }
    } catch (FileNotFoundException e) {
        // the file is not there
        return false;
    }
    FileStatus[] dirEntries = listStatus(client, absolute);
    if (dirEntries != null && dirEntries.length > 0 && !(recursive)) {
        throw new IOException("Directory: " + file + " is not empty.");
    }
    for (FileStatus dirEntry : dirEntries) {
        delete(client, new Path(absolute, dirEntry.getPath()), recursive);
    }
    return client.removeDirectory(pathName);
}
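Here FileNotFoundException is not propagated at all: deleting something that is already gone is reported as a plain false, in line with the Hadoop FileSystem.delete convention of returning false for a missing path. A minimal standalone sketch of that mapping; the statIsFile helper is hypothetical:

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;

public class DeleteSketch {
    // Hypothetical stand-in for getFileStatus(): throws FileNotFoundException
    // when the path does not exist, otherwise reports whether it is a file.
    static boolean statIsFile(File f) throws FileNotFoundException {
        if (!f.exists()) {
            throw new FileNotFoundException(f + " does not exist");
        }
        return f.isFile();
    }

    static boolean delete(File f) throws IOException {
        try {
            if (statIsFile(f)) {
                return f.delete();
            }
        } catch (FileNotFoundException e) {
            // The path is not there; report 'false' instead of failing, as above.
            return false;
        }
        // Directory case, simplified: no recursion in this sketch.
        return f.delete();
    }

    public static void main(String[] args) throws IOException {
        System.out.println(delete(new File("missing.tmp")));
    }
}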