Use of org.opensolaris.opengrok.configuration.RuntimeEnvironment in the OpenGrok project: class FileHistoryCache, method getRepositoryHistDataDirname.
/**
 * Compute the directory under the data root where history cache data for
 * the given repository is kept.
 *
 * @param repository the repository whose cache location is wanted
 * @return absolute path of the repository's history cache directory, or
 *         {@code null} if the repository path cannot be resolved relative
 *         to the source root (I/O failure or forbidden symlink)
 */
public String getRepositoryHistDataDirname(Repository repository) {
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    String relativePath;

    try {
        relativePath = env.getPathRelativeToSourceRoot(
                new File(repository.getDirectoryName()));
    } catch (ForbiddenSymlinkException ex) {
        // Forbidden symlinks are expected in some setups; log quietly.
        LOGGER.log(Level.FINER, ex.getMessage());
        return null;
    } catch (IOException ex) {
        LOGGER.log(Level.WARNING, "Could not resolve "
                + repository.getDirectoryName() + " relative to source root", ex);
        return null;
    }

    // <dataRoot>/<historyCacheDirName><relative repo path>
    return env.getDataRootPath() + File.separatorChar
            + FileHistoryCache.historyCacheDirName + relativePath;
}
Use of org.opensolaris.opengrok.configuration.RuntimeEnvironment in the OpenGrok project: class FileHistoryCache, method hasCacheForDirectory.
/**
 * Check whether a history cache entry exists for the given directory.
 *
 * @param directory the directory to check (must be a directory)
 * @param repository the repository the directory belongs to
 * @return {@code true} if the directory is in the cache, or if no
 *         repository is associated with it (nothing to cache);
 *         {@code false} when the path is a forbidden symlink or the
 *         cache directory does not exist
 * @throws HistoryException if the repository path cannot be resolved
 *         relative to the source root
 */
@Override
public boolean hasCacheForDirectory(File directory, Repository repository) throws HistoryException {
    assert directory.isDirectory();

    Repository repos = HistoryGuru.getInstance().getRepository(directory);
    if (repos == null) {
        // No repository for this directory means there is nothing to cache.
        return true;
    }

    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    String relativePath;
    try {
        relativePath = env.getPathRelativeToSourceRoot(
                new File(repos.getDirectoryName()));
    } catch (ForbiddenSymlinkException e) {
        LOGGER.log(Level.FINER, e.getMessage());
        return false;
    } catch (IOException e) {
        throw new HistoryException("Could not resolve "
                + repos.getDirectoryName() + " relative to source root", e);
    }

    // <dataRoot>/<historyCacheDirName>/<relative repo path>
    File cacheDir = new File(
            new File(env.getDataRootFile(), FileHistoryCache.historyCacheDirName),
            relativePath);
    return cacheDir.exists();
}
Use of org.opensolaris.opengrok.configuration.RuntimeEnvironment in the OpenGrok project: class FileHistoryCache, method get.
/**
 * Get history for the given file, preferring the cache when it is up to
 * date and falling back to the repository otherwise.
 *
 * @param file the file (or directory) to get history for
 * @param repository the repository the file belongs to
 * @param withFiles whether the returned history should include file lists
 *        (currently unused here; the repository decides what it returns)
 * @return the history, or {@code null} if it cannot or should not be
 *         fetched (history unsupported, or deliberately skipped for
 *         untracked files — see comment below)
 * @throws HistoryException on a repository failure
 * @throws ForbiddenSymlinkException if the file resolves through a
 *         forbidden symlink
 */
@Override
public History get(File file, Repository repository, boolean withFiles) throws HistoryException, ForbiddenSymlinkException {
    File cache = getCachedFile(file);
    if (isUpToDate(file, cache)) {
        try {
            return readCache(cache);
        } catch (Exception e) {
            // Fix: message previously had an unbalanced quote ("...file '" + cache).
            LOGGER.log(Level.WARNING, "Error when reading cache file '" + cache + "'", e);
        }
    }
    /*
     * Some mirrors of repositories which are capable of fetching history
     * for directories may contain lots of files untracked by given SCM.
     * For these it would be waste of time to get their history
     * since the history of all files in this repository should have been
     * fetched in the first phase of indexing.
     */
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    if (isHistoryIndexDone() && repository.hasHistoryForDirectories() && !env.isFetchHistoryWhenNotInCache()) {
        return null;
    }

    final History history;
    long time;
    try {
        // Measure how long the repository takes; used below to decide
        // whether the result is worth caching.
        time = System.currentTimeMillis();
        history = repository.getHistory(file);
        time = System.currentTimeMillis() - time;
    } catch (UnsupportedOperationException e) {
        // Repository does not support history for this file/workspace.
        return null;
    }

    if (!file.isDirectory()) {
        // Cache the result when a cache entry already exists (refresh
        // invalidates it) or when retrieval took too long to repeat.
        if ((cache != null) && (cache.exists() || (time > env.getHistoryReaderTimeLimit()))) {
            storeFile(history, file, repository);
        }
    }
    return history;
}
Use of org.opensolaris.opengrok.configuration.RuntimeEnvironment in the OpenGrok project: class FileHistoryCache, method mergeOldAndNewHistory.
/**
 * Read history from cacheFile and merge it with histNew, return merged history.
 *
 * @param cacheFile file where the old history object is stored
 * @param histNew history object with new history entries
 * @param repo repository to which the pre-image of the cacheFile belongs
 * @return merged history (can be null if merge failed for some reason)
 * @throws HistoryException on tag-assignment failure
 */
private History mergeOldAndNewHistory(File cacheFile, History histNew, Repository repo) throws HistoryException {
    History histOld;
    History history = null;
    try {
        histOld = readCache(cacheFile);
        // Merge old history with the new history.
        List<HistoryEntry> listOld = histOld.getHistoryEntries();
        if (!listOld.isEmpty()) {
            RuntimeEnvironment env = RuntimeEnvironment.getInstance();
            // Prepend the new entries (they are newer) while keeping their
            // order. Fix: replaces a raw-typed ListIterator walked backwards
            // with an unchecked cast — addAll(0, ...) is equivalent.
            listOld.addAll(0, histNew.getHistoryEntries());
            history = new History(listOld);
            // Re-assign tags from scratch; a somewhat crude solution, but
            // stale per-entry tags would otherwise survive the merge.
            if (env.isTagsEnabled() && repo.hasFileBasedTags()) {
                for (HistoryEntry ent : history.getHistoryEntries()) {
                    ent.setTags(null);
                }
                repo.assignTagsInHistory(history);
            }
        }
    } catch (IOException ex) {
        LOGGER.log(Level.SEVERE, String.format("Cannot open history cache file %s", cacheFile.getPath()), ex);
    }
    return history;
}
Use of org.opensolaris.opengrok.configuration.RuntimeEnvironment in the OpenGrok project: class FileHistoryCache, method store.
/**
 * Store history for the whole repository in directory hierarchy resembling
 * the original repository structure. History of individual files will be
 * stored under this hierarchy, each file containing history of
 * corresponding source file.
 *
 * @param history history object to process into per-file histories
 * @param repository repository object
 * @throws HistoryException on failure to store the top-level history
 */
@Override
public void store(History history, Repository repository) throws HistoryException {
    final RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    String latestRev = null;

    // Return immediately when there is nothing to do.
    List<HistoryEntry> entries = history.getHistoryEntries();
    if (entries.isEmpty()) {
        return;
    }

    LOGGER.log(Level.FINE, "Storing history for repo {0}",
            repository.getDirectoryName());

    // Firstly store the history for the top-level directory.
    doFileHistory(repository.getDirectoryName(), history.getHistoryEntries(),
            env, repository, env.getSourceRootFile(), null, false);

    HashMap<String, List<HistoryEntry>> map = new HashMap<>();

    /*
     * Go through all history entries for this repository (acquired through
     * history/log command executed for top-level directory of the repo
     * and parsed into HistoryEntry structures) and create hash map which
     * maps file names into list of HistoryEntry structures corresponding
     * to changesets in which the file was modified.
     */
    for (HistoryEntry e : history.getHistoryEntries()) {
        // The history entries are sorted from newest to oldest, so the
        // first revision seen is the latest one.
        if (latestRev == null) {
            latestRev = e.getRevision();
        }
        for (String s : e.getFiles()) {
            /*
             * We do not want to generate history cache for files which
             * do not currently exist in the repository.
             */
            File test = new File(env.getSourceRootPath() + s);
            if (!test.exists()) {
                continue;
            }

            List<HistoryEntry> list = map.get(s);
            if (list == null) {
                list = new ArrayList<>();
                map.put(s, list);
            }
            /*
             * We need to do deep copy in order to have different tags
             * per each commit.
             */
            if (env.isTagsEnabled() && repository.hasFileBasedTags()) {
                list.add(new HistoryEntry(e));
            } else {
                list.add(e);
            }
        }
    }

    /*
     * Now traverse the list of files from the hash map built above
     * and for each file store its history (saved in the value of the
     * hash map entry for the file) in a file. Skip renamed files
     * which will be handled separately below.
     */
    // Fix: was RuntimeEnvironment.getInstance().getSourceRootFile();
    // env is already in scope.
    final File root = env.getSourceRootFile();
    for (Map.Entry<String, List<HistoryEntry>> map_entry : map.entrySet()) {
        try {
            if (env.isHandleHistoryOfRenamedFiles()
                    && isRenamedFile(map_entry.getKey(), env, repository, history)) {
                continue;
            }
        } catch (IOException ex) {
            LOGGER.log(Level.WARNING, "isRenamedFile() got exception ", ex);
        }
        doFileHistory(map_entry.getKey(), map_entry.getValue(), env, repository, null, root, false);
    }

    if (!env.isHandleHistoryOfRenamedFiles()) {
        finishStore(repository, latestRev);
        return;
    }

    /*
     * Now handle renamed files (in parallel).
     */
    HashMap<String, List<HistoryEntry>> renamed_map = new HashMap<>();
    for (final Map.Entry<String, List<HistoryEntry>> map_entry : map.entrySet()) {
        try {
            if (isRenamedFile(map_entry.getKey(), env, repository, history)) {
                renamed_map.put(map_entry.getKey(), map_entry.getValue());
            }
        } catch (IOException ex) {
            LOGGER.log(Level.WARNING, "isRenamedFile() got exception ", ex);
        }
    }

    // Pre-create the cache directories serially so the parallel workers
    // below do not race on mkdirs().
    for (final String file : renamed_map.keySet()) {
        File cache;
        try {
            cache = getCachedFile(new File(env.getSourceRootPath() + file));
        } catch (ForbiddenSymlinkException ex) {
            LOGGER.log(Level.FINER, ex.getMessage());
            continue;
        }
        File dir = cache.getParentFile();
        if (!dir.isDirectory() && !dir.mkdirs()) {
            LOGGER.log(Level.WARNING, "Unable to create cache directory ' {0} '.", dir);
        }
    }

    final Repository repositoryF = repository;
    final CountDownLatch latch = new CountDownLatch(renamed_map.size());
    for (final Map.Entry<String, List<HistoryEntry>> map_entry : renamed_map.entrySet()) {
        RuntimeEnvironment.getHistoryRenamedExecutor().submit(new Runnable() {
            @Override
            public void run() {
                try {
                    doFileHistory(map_entry.getKey(), map_entry.getValue(), env, repositoryF,
                            new File(env.getSourceRootPath() + map_entry.getKey()), root, true);
                } catch (Exception ex) {
                    // We want to catch any exception since we are in a thread;
                    // an escaped exception would be silently dropped.
                    LOGGER.log(Level.WARNING, "doFileHistory() got exception ", ex);
                } finally {
                    latch.countDown();
                }
            }
        });
    }

    // Wait for the renamed-file workers to finish.
    try {
        latch.await();
    } catch (InterruptedException ex) {
        LOGGER.log(Level.SEVERE, "latch exception ", ex);
        // Fix: restore the interrupt status so callers can observe it.
        Thread.currentThread().interrupt();
    }
    finishStore(repository, latestRev);
}
Aggregations