Example use of java.io.FileFilter in the project intellij-community by JetBrains: class JavaBuilderUtil, method updateMappings.
/**
 * Runs dependency analysis ("differentiate") for the files compiled in this round and,
 * unless errors were detected, integrates the collected {@code delta} into the global
 * project dependency mappings. Newly affected files discovered by the analysis are
 * marked dirty so that they are recompiled in a later round.
 *
 * @param context current compile context
 * @param delta mappings delta produced while compiling {@code filesToCompile}
 * @param dirtyFilesHolder provides the set of files removed from the chunk
 * @param chunk the module chunk being compiled
 * @param filesToCompile files compiled in this round
 * @param successfullyCompiled files that compiled without errors in this round
 * @param markDirtyRound compilation round at which dirty files should be visible to builders
 * @param skipMarkingDirtyFilter files accepted by this filter are never marked dirty; may be null
 * @return true if additional compilation pass is required, false otherwise
 * @throws IOException if the build data storage fails (unwrapped from BuildDataCorruptedException)
 */
private static boolean updateMappings(CompileContext context, final Mappings delta, DirtyFilesHolder<JavaSourceRootDescriptor, ModuleBuildTarget> dirtyFilesHolder, ModuleChunk chunk, Collection<File> filesToCompile, Collection<File> successfullyCompiled, final CompilationRound markDirtyRound, @Nullable FileFilter skipMarkingDirtyFilter) throws IOException {
  try {
    boolean performIntegrate = true;
    boolean additionalPassRequired = false;
    final Set<String> removedPaths = getRemovedPaths(chunk, dirtyFilesHolder);
    final Mappings globalMappings = context.getProjectDescriptor().dataManager.getMappings();
    final boolean errorsDetected = Utils.errorsDetected(context);
    if (!isForcedRecompilationAllJavaModules(context)) {
      if (context.shouldDifferentiate(chunk)) {
        context.processMessage(new ProgressMessage("Checking dependencies... [" + chunk.getPresentableShortName() + "]"));
        final Set<File> allCompiledFiles = getFilesContainer(context, ALL_COMPILED_FILES_KEY);
        final Set<File> allAffectedFiles = getFilesContainer(context, ALL_AFFECTED_FILES_KEY);
        // mark as affected all files that were dirty before compilation
        allAffectedFiles.addAll(filesToCompile);
        // accumulate all successfully compiled in this round
        allCompiledFiles.addAll(successfullyCompiled);
        // unmark as affected all successfully compiled
        allAffectedFiles.removeAll(successfullyCompiled);
        // snapshot of the affected set BEFORE differentiation, used below to compute
        // exactly which files the dependency analysis newly discovered
        final Set<File> affectedBeforeDif = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
        affectedBeforeDif.addAll(allAffectedFiles);
        final Set<File> compiledWithErrors = getFilesContainer(context, COMPILED_WITH_ERRORS_KEY);
        // reset the error container; its content was captured above for this round only
        COMPILED_WITH_ERRORS_KEY.set(context, null);
        final ModulesBasedFileFilter moduleBasedFilter = new ModulesBasedFileFilter(context, chunk);
        // differentiateOnIncrementalMake mutates allAffectedFiles in place, adding files
        // affected by the changes in delta; returns false if incremental analysis gave up
        final boolean incremental = globalMappings.differentiateOnIncrementalMake(delta, removedPaths, filesToCompile, compiledWithErrors, allCompiledFiles, allAffectedFiles, moduleBasedFilter, CONSTANT_SEARCH_SERVICE.get(context));
        if (LOG.isDebugEnabled()) {
          LOG.debug("Differentiate Results:");
          LOG.debug("  Compiled Files:");
          for (final File c : allCompiledFiles) {
            LOG.debug("    " + c.getAbsolutePath());
          }
          LOG.debug("  Affected Files:");
          for (final File c : allAffectedFiles) {
            LOG.debug("    " + c.getAbsolutePath());
          }
          LOG.debug("End Of Differentiate Results.");
        }
        if (incremental) {
          // files added to the affected set by the differentiation step above
          final Set<File> newlyAffectedFiles = new HashSet<>(allAffectedFiles);
          newlyAffectedFiles.removeAll(affectedBeforeDif);
          final String infoMessage = "Dependency analysis found " + newlyAffectedFiles.size() + " affected files";
          LOG.info(infoMessage);
          context.processMessage(new ProgressMessage(infoMessage));
          // the caller may ask to keep certain files out of the dirty set
          removeFilesAcceptedByFilter(newlyAffectedFiles, skipMarkingDirtyFilter);
          if (!newlyAffectedFiles.isEmpty()) {
            if (LOG.isDebugEnabled()) {
              for (File file : newlyAffectedFiles) {
                LOG.debug("affected file: " + file.getPath());
              }
              // sanity check: affected files should belong to this chunk or its dependencies
              final List<Pair<File, JpsModule>> wrongFiles = checkAffectedFilesInCorrectModules(context, newlyAffectedFiles, moduleBasedFilter);
              if (!wrongFiles.isEmpty()) {
                LOG.debug("Wrong affected files for module chunk " + chunk.getName() + ": ");
                for (Pair<File, JpsModule> pair : wrongFiles) {
                  final String name = pair.second != null ? pair.second.getName() : "null";
                  LOG.debug("\t[" + name + "] " + pair.first.getPath());
                }
              }
            }
            for (File file : newlyAffectedFiles) {
              FSOperations.markDirtyIfNotDeleted(context, markDirtyRound, file);
            }
            // another pass is needed only if some of the newly dirty files are in this very chunk
            additionalPassRequired = isCompileJavaIncrementally(context) && chunkContainsAffectedFiles(context, chunk, newlyAffectedFiles);
          }
        } else {
          // non-incremental mode: incremental analysis gave up, recompile the whole chunk
          final String messageText = "Marking " + chunk.getPresentableShortName() + " and direct dependants for recompilation";
          LOG.info("Non-incremental mode: " + messageText);
          context.processMessage(new ProgressMessage(messageText));
          final boolean alreadyMarkedDirty = FSOperations.isMarkedDirty(context, chunk);
          additionalPassRequired = isCompileJavaIncrementally(context) && !alreadyMarkedDirty;
          if (alreadyMarkedDirty) {
            // need this to make sure changes data stored in Delta is complete
            globalMappings.differentiateOnNonIncrementalMake(delta, removedPaths, filesToCompile);
          } else {
            // the chunk will be fully recompiled; integrating a partial delta now would be wrong
            performIntegrate = false;
          }
          FileFilter toBeMarkedFilter = skipMarkingDirtyFilter == null ? null : new NegationFileFilter(skipMarkingDirtyFilter);
          FSOperations.markDirtyRecursively(context, markDirtyRound, chunk, toBeMarkedFilter);
        }
      } else {
        if (!errorsDetected) {
          // makes sense only if we are going to integrate changes
          globalMappings.differentiateOnNonIncrementalMake(delta, removedPaths, filesToCompile);
        }
      }
    } else {
      if (!errorsDetected) {
        // makes sense only if we are going to integrate changes
        globalMappings.differentiateOnRebuild(delta);
      }
    }
    if (errorsDetected) {
      // will be compiled during the first phase of the next make
      return false;
    }
    if (performIntegrate) {
      context.processMessage(new ProgressMessage("Updating dependency information... [" + chunk.getPresentableShortName() + "]"));
      globalMappings.integrate(delta);
    }
    return additionalPassRequired;
  } catch (BuildDataCorruptedException e) {
    // unwrap: the declared contract of this method is IOException
    throw e.getCause();
  } finally {
    // clean progress messages
    context.processMessage(new ProgressMessage(""));
  }
}
Example use of java.io.FileFilter in the project intellij-community by JetBrains: class JavaSourceRootDescriptor, method createFileFilter.
/**
 * Builds the file filter for this source root: a file is accepted when the common
 * module-builder filter accepts it and the project's compiler excludes do not exclude it.
 */
@NotNull
@Override
public FileFilter createFileFilter() {
  final JpsCompilerExcludes compilerExcludes =
    JpsJavaExtensionService.getInstance()
      .getOrCreateCompilerConfiguration(target.getModule().getProject())
      .getCompilerExcludes();
  final FileFilter moduleBuilderFilter = BuilderRegistry.getInstance().getModuleBuilderFileFilter();
  return new FileFilter() {
    @Override
    public boolean accept(File file) {
      return moduleBuilderFilter.accept(file) && !compilerExcludes.isExcluded(file);
    }
  };
}
Example use of java.io.FileFilter in the project algoliasearch-client-android by Algolia: class OfflineClient, method listIndexesOfflineSync.
/**
 * List existing offline indices.
 *
 * **Note:** This applies both to {@link MirroredIndex} and {@link OfflineIndex} instances. Only indices that
 * *actually exist* on disk are listed. If an instance was created but never synced or written to, it will not
 * appear in the list.
 *
 * @return A JSON object with an `items` attribute containing the indices details as JSON objects.
 */
private JSONObject listIndexesOfflineSync() throws AlgoliaException {
    try {
        // Each offline index is stored as a subdirectory of the application data directory.
        final File appDir = getAppDir();
        final File[] directories = appDir.listFiles(new FileFilter() {
            @Override
            public boolean accept(File pathname) {
                return pathname.isDirectory();
            }
        });
        JSONObject response = new JSONObject();
        JSONArray items = new JSONArray();
        // listFiles() returns null when the directory does not exist or cannot be read.
        if (directories != null) {
            for (File directory : directories) {
                final String name = directory.getName();
                // Only report indices that actually have data on disk.
                if (hasOfflineData(name)) {
                    items.put(new JSONObject().put("name", name));
                    // TODO: Do we need other data as in the online API?
                }
            }
        }
        response.put("items", items);
        return response;
    } catch (JSONException e) {
        // should never happen: all keys are non-null constants
        throw new RuntimeException(e);
    }
}
Example use of java.io.FileFilter in the project aries by Apache: class BundleMock, method findEntries.
/**
 * Mock implementation of {@code Bundle.findEntries}: lists the files directly under
 * {@code baseDir} (resolved against this mock's location URL) whose names match the
 * wildcard pattern {@code matchRule}.
 *
 * NOTE(review): the {@code recurse} flag is accepted but ignored — subdirectories are
 * never searched; confirm whether any caller relies on recursion.
 *
 * @param baseDir directory, relative to the bundle location, to search in
 * @param matchRule wildcard pattern such as "*.xml"; '*' matches any character sequence
 * @param recurse ignored by this mock
 * @return an enumeration of matching file URLs, or null if nothing matched
 *         (mirrors the OSGi Bundle.findEntries contract of returning null when empty)
 */
public Enumeration<URL> findEntries(String baseDir, String matchRule, boolean recurse) {
    System.err.println("findEntries: " + baseDir + ", " + matchRule + ", " + recurse);
    File base;
    try {
        // location is a URL string; escape spaces before parsing it.
        base = new File(new File(new URL(location.replaceAll(" ", "%20")).toURI()), baseDir);
        System.err.println("Base dir: " + base);
    } catch (Exception e) {
        Error err = new AssertionFailedError("Unable to findEntries from " + location);
        err.initCause(e);
        throw err;
    }
    // Translate the wildcard rule into a regular expression.
    if (matchRule.equals("*.xml"))
        matchRule = ".*\\.xml";
    else
        matchRule = matchRule.replaceAll("\\*", ".*");
    System.err.println("matchrule: " + matchRule);
    final Pattern p = Pattern.compile(matchRule);
    File[] files = base.listFiles(new FileFilter() {
        public boolean accept(File pathname) {
            return pathname.isFile() && p.matcher(pathname.getName()).matches();
        }
    });
    Vector<URL> v = new Vector<URL>();
    if (files != null) {
        for (File f : files) {
            try {
                // File.toURL() is deprecated and does not escape illegal URL characters;
                // the documented replacement is toURI().toURL().
                v.add(f.toURI().toURL());
            } catch (MalformedURLException e) {
                // Best effort in a mock: skip files whose path cannot be turned into a URL.
                e.printStackTrace();
            }
        }
    } else {
        System.err.println("no matching files");
    }
    if (v.isEmpty()) {
        return null;
    } else {
        System.err.println(v);
        return v.elements();
    }
}
Example use of java.io.FileFilter in the project GDSC-SMLM by aherbert: class CMOSAnalysis, method showDialog.
/**
 * Collects the exposure-time sub-directories under {@code directory}, validates them
 * (at least two, one with exposure time 0), and shows the analysis settings dialog.
 *
 * @return true if the directory layout is valid and the user confirmed the dialog,
 *         false otherwise (an IJ error message is shown for invalid layouts)
 */
private boolean showDialog() {
    // Determine sub-directories to process
    File dir = new File(directory);
    File[] dirs = dir.listFiles(new FileFilter() {
        public boolean accept(File pathname) {
            return pathname.isDirectory();
        }
    });
    // listFiles() returns null if 'directory' does not exist or is not a directory;
    // treat that the same as having no sub-directories instead of throwing NPE.
    if (dirs == null || dirs.length == 0) {
        IJ.error(TITLE, "No sub-directories");
        return false;
    }
    // Get only those with numbers at the end.
    // These should correspond to exposure times
    subDirs = new TurboList<SubDir>();
    Pattern p = Pattern.compile("([0-9]+)$");
    for (File path : dirs) {
        String name = path.getName();
        Matcher m = p.matcher(name);
        if (m.find()) {
            int t = Integer.parseInt(m.group(1));
            subDirs.add(new SubDir(t, path, name));
        }
    }
    if (subDirs.size() < 2) {
        IJ.error(TITLE, "Not enough sub-directories with exposure time suffix");
        return false;
    }
    // Sorted by exposure time; the first entry must be the zero-exposure (bias) series.
    Collections.sort(subDirs);
    if (subDirs.get(0).exposureTime != 0) {
        IJ.error(TITLE, "No sub-directories with exposure time 0");
        return false;
    }
    for (SubDir sd : subDirs) {
        Utils.log("Sub-directory: %s. Exposure time = %d", sd.name, sd.exposureTime);
    }
    GenericDialog gd = new GenericDialog(TITLE);
    gd.addHelp(About.HELP_URL);
    //@formatter:off
    gd.addMessage("Analyse the per-pixel offset, variance and gain of sCMOS images.\n \n" + TextUtils.wrap("See Huang et al (2013) Video-rate nanoscopy using sCMOS camera–specific " + "single-molecule localization algorithms. Nature Methods 10, 653-658 " + "(Supplementary Information).", 80));
    //@formatter:on
    gd.addNumericField("nThreads", getLastNThreads(), 0);
    gd.addCheckbox("Rolling_algorithm", rollingAlgorithm);
    gd.showDialog();
    if (gd.wasCanceled())
        return false;
    setThreads((int) gd.getNextNumber());
    rollingAlgorithm = gd.getNextBoolean();
    return true;
}
Aggregations