Use of java.nio.file.Files in project FXGL by AlmasB:
class SaveLoadManager, method querySaveFiles.
/**
 * Asynchronously (with a progress dialog) loads save files into observable list {@link #saveFiles()}.
 * <p>
 * The loaded list is sorted most-recent-first <em>before</em> being published to
 * {@code saveFiles}, so observers receive a single list-change notification.
 * (The previous implementation called {@code setAll} and then sorted the
 * ObservableList in place, firing two change events per refresh.)
 */
public void querySaveFiles() {
    log.debug("Querying save files");
    loadSaveFilesTask().onSuccess(files -> {
        // assumes the task yields a mutable List<SaveFile> — TODO confirm against loadSaveFilesTask()
        files.sort(SaveFile.RECENT_FIRST);
        saveFiles.setAll(files);
    }).executeAsyncWithDialogFX(new ProgressDialog("Loading save files"));
}
Use of java.nio.file.Files in project meghanada-server by mopemope:
class JavaAnalyzerTest, method analyzeFail.
@Test
public void analyzeFail() throws Exception {
    // Disable unknown-tree reporting: this test intentionally feeds sources
    // that may not compile cleanly.
    System.setProperty(Source.REPORT_UNKNOWN_TREE, "false");
    final JavaAnalyzer analyzer = getAnalyzer();
    final String cp = getSystemClasspath();
    // Collect every .java file under the main source root. The walk stream
    // must be closed (try-with-resources) to release directory handles.
    List<File> files;
    try (Stream<Path> stream = Files.walk(new File("./src/main/java").getCanonicalFile().toPath(), FileVisitOption.FOLLOW_LINKS)) {
        files = stream.map(Path::toFile).filter(FileUtils::isJavaFile).collect(Collectors.toList());
    }
    // Append the test sources to the same list, avoiding a second temporary list.
    try (Stream<Path> stream = Files.walk(new File("./src/test/java").getCanonicalFile().toPath(), FileVisitOption.FOLLOW_LINKS)) {
        stream.map(Path::toFile).filter(FileUtils::isJavaFile).forEach(files::add);
    }
    final String tmp = System.getProperty("java.io.tmpdir");
    // `files` is assigned exactly once, hence effectively final — no extra
    // `finalFiles` alias is needed for the lambda capture below.
    final List<File> toCompile = files;
    timeIt(() -> {
        // Compile everything; timeIt logs the elapsed time.
        final CompileResult compileResult = analyzer.analyzeAndCompile(toCompile, cp, tmp);
        return compileResult;
    });
}
Use of java.nio.file.Files in project meghanada-server by mopemope:
class JavaAnalyzerTest, method analyzeAll.
@Test
public void analyzeAll() throws Exception {
    // Enable unknown-tree reporting so any unhandled AST node surfaces here.
    System.setProperty(Source.REPORT_UNKNOWN_TREE, "true");
    final JavaAnalyzer analyzer = getAnalyzer();
    final String cp = getClasspath();
    // Collect every .java file under the main source root. The walk stream
    // must be closed (try-with-resources) to release directory handles.
    List<File> files;
    try (Stream<Path> stream = Files.walk(new File("./src/main/java").getCanonicalFile().toPath(), FileVisitOption.FOLLOW_LINKS)) {
        files = stream.map(Path::toFile).filter(FileUtils::isJavaFile).collect(Collectors.toList());
    }
    // Append the test sources to the same list, avoiding a second temporary list.
    try (Stream<Path> stream = Files.walk(new File("./src/test/java").getCanonicalFile().toPath(), FileVisitOption.FOLLOW_LINKS)) {
        stream.map(Path::toFile).filter(FileUtils::isJavaFile).forEach(files::add);
    }
    final String tmp = System.getProperty("java.io.tmpdir");
    // `files` is assigned exactly once, hence effectively final — no extra
    // `finalFiles` alias is needed for the lambda capture below.
    final List<File> toCompile = files;
    timeIt(() -> {
        // Compile everything and dump each analyzed source; timeIt logs the
        // elapsed time.
        final CompileResult compileResult = analyzer.analyzeAndCompile(toCompile, cp, tmp);
        compileResult.getSources().values().forEach(Source::dump);
        return compileResult;
    });
}
Use of java.nio.file.Files in project fess-crawler by codelibs:
class FileSystemClient, method getResponseData.
/**
 * Builds a {@link ResponseData} for the given {@code file:} URI.
 * <p>
 * Behavior by target kind:
 * <ul>
 *   <li>unparsable URI &rarr; 404 (NOT_FOUND)</li>
 *   <li>regular readable file &rarr; 200 with metadata (owner/ACL/POSIX attributes),
 *       MIME type, last-modified, and — when {@code includeContent} — the body
 *       (in-memory if smaller than {@code maxCachedContentSize}, else as a file ref)</li>
 *   <li>regular unreadable file &rarr; 403 (FORBIDDEN)</li>
 *   <li>directory &rarr; throws {@link ChildUrlsException} carrying child URLs
 *       (used as control flow by the crawler to enqueue children)</li>
 *   <li>anything else &rarr; 404 (NOT_FOUND)</li>
 * </ul>
 *
 * @param uri            the URI to fetch (preprocessed via {@code preprocessUri})
 * @param includeContent whether to read the file body / list directory children
 * @return the populated response data (caller is responsible for closing it)
 * @throws ChildUrlsException      when {@code uri} points at a directory
 * @throws CrawlingAccessException when any other unexpected error occurs
 */
protected ResponseData getResponseData(final String uri, final boolean includeContent) {
    final ResponseData responseData = new ResponseData();
    try {
        responseData.setMethod(Constants.GET_METHOD);
        final String filePath = preprocessUri(uri);
        responseData.setUrl(filePath);
        File file = null;
        try {
            file = new File(new URI(filePath));
        } catch (final URISyntaxException e) {
            // Invalid URI is not fatal: fall through to the 404 branch below.
            logger.warn("Could not parse url: " + filePath, e);
        }
        if (file == null) {
            responseData.setHttpStatusCode(Constants.NOT_FOUND_STATUS_CODE);
            responseData.setCharSet(charset);
            responseData.setContentLength(0);
        } else if (file.isFile()) {
            // check file size
            responseData.setContentLength(file.length());
            checkMaxContentLength(responseData);
            // Owner metadata; each attribute view is best-effort — failures are
            // logged and ignored so one unsupported filesystem feature does not
            // abort the whole fetch.
            try {
                final FileOwnerAttributeView ownerAttrView = Files.getFileAttributeView(file.toPath(), FileOwnerAttributeView.class);
                if (ownerAttrView != null) {
                    UserPrincipal owner = ownerAttrView.getOwner();
                    if (owner != null) {
                        responseData.addMetaData(FS_FILE_USER, owner.getName());
                    }
                }
            } catch (Exception e) {
                logger.warn("Failed to parse FileOwnerAttributeView.", e);
            }
            // ACL-based group metadata (typically Windows/NFSv4 filesystems).
            try {
                final AclFileAttributeView aclView = Files.getFileAttributeView(file.toPath(), AclFileAttributeView.class);
                if (aclView != null) {
                    responseData.addMetaData(FILE_ATTRIBUTE_VIEW, aclView);
                    responseData.addMetaData(FS_FILE_GROUPS, aclView.getAcl().stream().map(acl -> acl.principal().getName()).toArray(n -> new String[n]));
                }
            } catch (Exception e) {
                logger.warn("Failed to parse AclFileAttributeView.", e);
            }
            // POSIX group metadata. NOTE(review): on filesystems supporting both
            // views this overwrites the FILE_ATTRIBUTE_VIEW / FS_FILE_GROUPS
            // entries set in the ACL branch above — confirm that is intended.
            try {
                final PosixFileAttributeView posixView = Files.getFileAttributeView(file.toPath(), PosixFileAttributeView.class);
                if (posixView != null) {
                    responseData.addMetaData(FILE_ATTRIBUTE_VIEW, posixView);
                    responseData.addMetaData(FS_FILE_GROUPS, new String[] { posixView.readAttributes().group().getName() });
                }
            } catch (Exception e) {
                logger.warn("Failed to parse PosixFileAttributeView.", e);
            }
            responseData.setHttpStatusCode(Constants.OK_STATUS_CODE);
            responseData.setCharSet(geCharSet(file));
            responseData.setLastModified(new Date(file.lastModified()));
            if (file.canRead()) {
                final MimeTypeHelper mimeTypeHelper = crawlerContainer.getComponent("mimeTypeHelper");
                // Sniff MIME type from content; fall back to filename-only
                // detection if the stream cannot be read.
                try (final InputStream is = new BufferedInputStream(new FileInputStream(file))) {
                    responseData.setMimeType(mimeTypeHelper.getContentType(is, file.getName()));
                } catch (final Exception e) {
                    responseData.setMimeType(mimeTypeHelper.getContentType(null, file.getName()));
                }
                // Per-MIME-type length limit (may be stricter than the global
                // checkMaxContentLength above).
                if (contentLengthHelper != null) {
                    final long maxLength = contentLengthHelper.getMaxLength(responseData.getMimeType());
                    if (responseData.getContentLength() > maxLength) {
                        throw new MaxLengthExceededException("The content length (" + responseData.getContentLength() + " byte) is over " + maxLength + " byte. The url is " + filePath);
                    }
                }
                if (includeContent) {
                    // Small files are buffered in memory; large files are passed
                    // by reference (second arg false = do not delete the file).
                    if (file.length() < maxCachedContentSize) {
                        try (InputStream contentStream = new BufferedInputStream(new FileInputStream(file))) {
                            responseData.setResponseBody(InputStreamUtil.getBytes(contentStream));
                        } catch (final Exception e) {
                            logger.warn("I/O Exception.", e);
                            responseData.setHttpStatusCode(Constants.SERVER_ERROR_STATUS_CODE);
                        }
                    } else {
                        responseData.setResponseBody(file, false);
                    }
                }
            } else {
                // Forbidden
                responseData.setHttpStatusCode(Constants.FORBIDDEN_STATUS_CODE);
                responseData.setMimeType(APPLICATION_OCTET_STREAM);
            }
        } else if (file.isDirectory()) {
            // Directories are not fetched directly: collect child URLs and
            // signal them to the crawler via ChildUrlsException.
            final Set<RequestData> requestDataSet = new HashSet<>();
            if (includeContent) {
                final File[] files = file.listFiles();
                if (files != null) {
                    for (final File f : files) {
                        final String chileUri = f.toURI().toASCIIString();
                        requestDataSet.add(RequestDataBuilder.newRequestData().get().url(chileUri).build());
                    }
                }
            }
            throw new ChildUrlsException(requestDataSet, this.getClass().getName() + "#getResponseData");
        } else {
            // Neither file nor directory (e.g. vanished between checks).
            responseData.setHttpStatusCode(Constants.NOT_FOUND_STATUS_CODE);
            responseData.setCharSet(charset);
            responseData.setContentLength(0);
        }
    } catch (final CrawlerSystemException e) {
        // Release any held resources before propagating framework exceptions
        // (including ChildUrlsException) unchanged.
        CloseableUtil.closeQuietly(responseData);
        throw e;
    } catch (final Exception e) {
        CloseableUtil.closeQuietly(responseData);
        throw new CrawlingAccessException("Could not access " + uri, e);
    }
    return responseData;
}
Use of java.nio.file.Files in project component-runtime by Talend:
class BaseSpark, method start.
/**
 * Starts a local standalone Spark cluster (one master plus {@code slaves} workers)
 * for testing and returns an {@link Instances} handle whose close action runs all
 * accumulated cleanup tasks.
 * <p>
 * Cleanup tasks are registered incrementally in {@code closingTasks} as resources
 * are created, so a partial start-up still cleans up whatever was launched.
 * Start-up failures are NOT thrown from this method: any {@link Throwable} is
 * captured and handed to the returned {@code Instances} instead.
 *
 * @return a handle wrapping the cleanup routine and any start-up error
 */
protected Instances start() {
    // prepare needed files on the file system (this sucks but is needed....)
    final Version version = Version.find(sparkVersion);
    final File sparkHome = buildSparkHome(version);
    LOGGER.info("Copied spark libraries in " + sparkHome);
    // Resolve the master host; fall back to localhost if the hostname lookup fails.
    String masterHost;
    try {
        masterHost = InetAddress.getLocalHost().getHostName();
    } catch (final UnknownHostException e) {
        masterHost = "localhost";
    }
    final int masterPort = newPort();
    final int webMasterPort = newPort();
    final Collection<Runnable> closingTasks = new ArrayList<>();
    final ClusterConfig localConfig = new ClusterConfig(masterHost, masterPort, webMasterPort, sparkHome, closingTasks, version);
    // Publish the config (presumably via a ThreadLocal named `config` — it exposes
    // set/remove) and make its removal part of the cleanup.
    config.set(localConfig);
    closingTasks.add(config::remove);
    final String host = masterHost;
    Throwable exception = null;
    try {
        // Launch the Spark master; readiness is detected by the port opening.
        final SparkProcessMonitor master = new SparkProcessMonitor(localConfig, "spark-master-monitor", () -> isOpen(host, masterPort), "org.apache.spark.deploy.master.Master", "--host", masterHost, "--port", Integer.toString(masterPort), "--webui-port", Integer.toString(webMasterPort));
        // Ensure the master is killed on JVM exit; the hook itself is unregistered
        // by a closing task so a normal close does not leave a stale hook behind.
        final Thread masterHook = new Thread(master::close);
        Runtime.getRuntime().addShutdownHook(masterHook);
        closingTasks.add(() -> Runtime.getRuntime().removeShutdownHook(masterHook));
        closingTasks.add(master::close);
        master.start();
        assertTrue("master didn't start", master.isStarted());
        LOGGER.info("Started Master on " + getSparkMaster());
        // todo: enhance it if using slaves > 1, we need to lock all ports together
        // Each worker takes two consecutive ports (service + web UI), offset from
        // a single reserved base port.
        final int firstSlavePort = newPort();
        final List<SparkProcessMonitor> slaves = IntStream.range(0, this.slaves).mapToObj(i -> {
            final int slavePort = firstSlavePort + 1 + (2 * i);
            return new SparkProcessMonitor(localConfig, "spark-slave-" + i + "-monitor", () -> isOpen(host, slavePort), "org.apache.spark.deploy.worker.Worker", "--host", host, "--port", Integer.toString(slavePort), "--webui-port", Integer.toString(slavePort + 1), getSparkMaster());
        }).collect(toList());
        // Same hook/cleanup bookkeeping for every worker as for the master.
        slaves.stream().peek(s -> closingTasks.add(s::close)).map(m -> new Thread(m::close)).forEach(t -> {
            Runtime.getRuntime().addShutdownHook(t);
            closingTasks.add(() -> Runtime.getRuntime().removeShutdownHook(t));
        });
        slaves.forEach(SparkProcessMonitor::start);
        if (slaves.stream().anyMatch(m -> !m.isStarted())) {
            fail("Some slave(s) didn't start");
        }
    } catch (final Throwable error) {
        // Deliberately broad: the error (including assertion failures above) is
        // surfaced through the returned Instances rather than thrown here.
        exception = error;
    }
    // Close action runs every cleanup task, logging (not propagating) failures so
    // one failing task does not prevent the rest from running.
    return new Instances(() -> closingTasks.forEach(r -> {
        try {
            r.run();
        } catch (final RuntimeException re) {
            LOGGER.warn(re.getMessage(), re);
        }
    }), false, exception);
}
Aggregations