Use of org.opensolaris.opengrok.configuration.RuntimeEnvironment in the OpenGrok project — class FileHistoryCache, method store.
/**
 * Store history for the whole repository in directory hierarchy resembling
 * the original repository structure. History of individual files will be
 * stored under this hierarchy, each file containing history of
 * corresponding source file.
 *
 * @param history history object to process into per-file histories
 * @param repository repository object
 * @throws HistoryException if the per-file history cannot be stored
 */
@Override
public void store(History history, Repository repository) throws HistoryException {
    final RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    String latestRev = null;

    // Return immediately when there is nothing to do.
    List<HistoryEntry> entries = history.getHistoryEntries();
    if (entries.isEmpty()) {
        return;
    }

    LOGGER.log(Level.FINE, "Storing history for repo {0}",
            new Object[] { repository.getDirectoryName() });

    /*
     * Go through all history entries for this repository (acquired through
     * history/log command executed for top-level directory of the repo
     * and parsed into HistoryEntry structures) and create hash map which
     * maps file names into list of HistoryEntry structures corresponding
     * to changesets in which the file was modified.
     */
    HashMap<String, List<HistoryEntry>> map = new HashMap<>();
    for (HistoryEntry e : entries) {
        // The history entries are sorted from newest to oldest, so the
        // first revision seen is the latest one in the repository.
        if (latestRev == null) {
            latestRev = e.getRevision();
        }
        for (String s : e.getFiles()) {
            /*
             * We do not want to generate history cache for files which
             * do not currently exist in the repository.
             */
            File test = new File(env.getSourceRootPath() + s);
            if (!test.exists()) {
                continue;
            }
            List<HistoryEntry> list = map.get(s);
            if (list == null) {
                list = new ArrayList<>();
                map.put(s, list);
            }
            /*
             * We need to do deep copy in order to have different tags
             * per each commit.
             */
            if (env.isTagsEnabled() && repository.hasFileBasedTags()) {
                list.add(new HistoryEntry(e));
            } else {
                list.add(e);
            }
        }
    }

    /*
     * Determine the set of renamed files up front so the (potentially
     * expensive) isRenamedFile() check runs only once per file instead of
     * twice — previously it was called both when skipping renamed files in
     * the sequential pass and again when collecting them for the parallel
     * pass. A file whose check fails with an I/O error is treated as not
     * renamed and handled by the sequential pass, matching the old behavior.
     */
    final HashMap<String, List<HistoryEntry>> renamedMap = new HashMap<>();
    if (env.isHandleHistoryOfRenamedFiles()) {
        for (final Map.Entry<String, List<HistoryEntry>> mapEntry : map.entrySet()) {
            try {
                if (isRenamedFile(mapEntry.getKey(), env, repository, history)) {
                    renamedMap.put(mapEntry.getKey(), mapEntry.getValue());
                }
            } catch (IOException ex) {
                LOGGER.log(Level.WARNING, "isRenamedFile() got exception ", ex);
            }
        }
    }

    /*
     * Now traverse the list of files from the hash map built above
     * and for each file store its history (saved in the value of the
     * hash map entry for the file) in a file. Skip renamed files
     * which will be handled separately below.
     */
    final File root = env.getSourceRootFile();
    for (Map.Entry<String, List<HistoryEntry>> mapEntry : map.entrySet()) {
        if (renamedMap.containsKey(mapEntry.getKey())) {
            continue;
        }
        doFileHistory(mapEntry.getKey(), mapEntry.getValue(), env, repository, null, root, false);
    }

    if (!env.isHandleHistoryOfRenamedFiles()) {
        finishStore(repository, latestRev);
        return;
    }

    /*
     * Now handle renamed files (in parallel). Pre-create the cache
     * directories first so the worker threads do not race on mkdirs().
     */
    for (final String file : renamedMap.keySet()) {
        File cache = getCachedFile(new File(env.getSourceRootPath() + file));
        File dir = cache.getParentFile();
        if (!dir.isDirectory() && !dir.mkdirs()) {
            LOGGER.log(Level.WARNING, "Unable to create cache directory ' {0} '.", dir);
        }
    }

    final Repository repositoryF = repository;
    final CountDownLatch latch = new CountDownLatch(renamedMap.size());
    for (final Map.Entry<String, List<HistoryEntry>> mapEntry : renamedMap.entrySet()) {
        RuntimeEnvironment.getHistoryRenamedExecutor().submit(new Runnable() {
            @Override
            public void run() {
                try {
                    doFileHistory(mapEntry.getKey(), mapEntry.getValue(), env, repositoryF,
                            new File(env.getSourceRootPath() + mapEntry.getKey()), root, true);
                } catch (Exception ex) {
                    // We want to catch any exception since we are in thread.
                    LOGGER.log(Level.WARNING, "doFileHistory() got exception ", ex);
                } finally {
                    latch.countDown();
                }
            }
        });
    }

    // Wait for the executors to finish.
    try {
        latch.await();
    } catch (InterruptedException ex) {
        LOGGER.log(Level.SEVERE, "latch exception ", ex);
        // Restore the interrupt status so callers can observe it.
        Thread.currentThread().interrupt();
    }
    finishStore(repository, latestRev);
}
Use of org.opensolaris.opengrok.configuration.RuntimeEnvironment in the OpenGrok project — class FileHistoryCache, method getRepositoryHistDataDirname.
/**
 * Compute the directory under the data root where the history cache for
 * the given repository lives.
 *
 * @param repository the repository in question
 * @return the cache directory path, or {@code null} when the repository
 *         directory cannot be resolved relative to the source root
 */
public String getRepositoryHistDataDirname(Repository repository) {
    final RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    final String relativeRepoDir;
    try {
        relativeRepoDir = env.getPathRelativeToSourceRoot(new File(repository.getDirectoryName()), 0);
    } catch (IOException ex) {
        LOGGER.log(Level.WARNING, "Could not resolve " + repository.getDirectoryName() + " relative to source root", ex);
        return null;
    }
    return env.getDataRootPath() + File.separatorChar + this.historyCacheDirName + relativeRepoDir;
}
Use of org.opensolaris.opengrok.configuration.RuntimeEnvironment in the OpenGrok project — class FileHistoryCache, method hasCacheForDirectory.
/**
 * Check if the directory is in the cache.
 * @param directory the directory to check
 * @return {@code true} if the directory is in the cache
 */
@Override
public boolean hasCacheForDirectory(File directory, Repository repository) throws HistoryException {
    assert directory.isDirectory();
    final Repository repo = HistoryGuru.getInstance().getRepository(directory);
    if (repo == null) {
        // No repository associated with the directory: nothing to cache,
        // so report it as already cached.
        return true;
    }
    final RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    // <dataRoot>/<historyCacheDir>/<repo path relative to source root>
    final File cacheBase = new File(env.getDataRootFile(), this.historyCacheDirName);
    final File cacheDir;
    try {
        cacheDir = new File(cacheBase,
                env.getPathRelativeToSourceRoot(new File(repo.getDirectoryName()), 0));
    } catch (IOException e) {
        throw new HistoryException("Could not resolve " + repo.getDirectoryName() + " relative to source root", e);
    }
    return cacheDir.exists();
}
Use of org.opensolaris.opengrok.configuration.RuntimeEnvironment in the OpenGrok project — class FileHistoryCache, method get.
/**
 * Get the history for the given file, preferring the on-disk cache and
 * falling back to querying the repository when the cache is stale or
 * unreadable.
 *
 * @param file the file to get history for
 * @param repository the repository the file belongs to
 * @param withFiles whether the returned history should include file lists
 *        (currently unused here; kept for interface compatibility)
 * @return the history, or {@code null} when it cannot/should not be fetched
 * @throws HistoryException on error retrieving the history
 */
@Override
public History get(File file, Repository repository, boolean withFiles) throws HistoryException {
    File cache = getCachedFile(file);
    if (isUpToDate(file, cache)) {
        try {
            return readCache(cache);
        } catch (Exception e) {
            // Fixed unbalanced quote in the log message (was missing the
            // closing '). Fall through and refetch from the repository.
            LOGGER.log(Level.WARNING, "Error when reading cache file '" + cache + "'", e);
        }
    }
    /*
     * Some mirrors of repositories which are capable of fetching history
     * for directories may contain lots of files untracked by given SCM.
     * For these it would be waste of time to get their history
     * since the history of all files in this repository should have been
     * fetched in the first phase of indexing.
     */
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    if (isHistoryIndexDone() && repository.hasHistoryForDirectories() && !env.isFetchHistoryWhenNotInCache()) {
        return null;
    }
    final History history;
    long time;
    try {
        time = System.currentTimeMillis();
        history = repository.getHistory(file);
        time = System.currentTimeMillis() - time;
    } catch (UnsupportedOperationException e) {
        // The repository cannot provide history for this file/workspace.
        return null;
    }
    if (!file.isDirectory()) {
        // Store history only for plain files; writing a directory entry
        // would invalidate the cache entry. Cache when a cache file already
        // exists or when retrieving the history took too long.
        if ((cache != null) && (cache.exists() || (time > env.getHistoryReaderTimeLimit()))) {
            // retrieving the history takes too long, cache it!
            storeFile(history, file, repository);
        }
    }
    return history;
}
Use of org.opensolaris.opengrok.configuration.RuntimeEnvironment in the OpenGrok project — class MonotoneHistoryParser, method processStream.
/**
 * Process the output from the mtn (Monotone) log command and insert the
 * HistoryEntries into the history field. (The previous Javadoc wrongly
 * said "hg log" — this parser handles Monotone output.)
 *
 * Parser states: 0 = expect Revision, 1 = expect Author, 2 = expect Date,
 * 3/4 = file lists (Modified/Added/Deleted), 5 = changelog message.
 *
 * @param input The output from the process
 * @throws java.io.IOException If an error occurs while reading the stream
 */
@Override
public void processStream(InputStream input) throws IOException {
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    DateFormat df = repository.getDateFormat();
    BufferedReader in = new BufferedReader(new InputStreamReader(input));
    String s;
    HistoryEntry entry = null;
    int state = 0;
    while ((s = in.readLine()) != null) {
        s = s.trim();
        // Changeset separator line; use a prefix check with
        // the minimum amount for maximum compatibility between monotone versions.
        if (s.startsWith("-----------------------------------------------------------------")) {
            // Flush the previous entry if it got past the Date field.
            if (entry != null && state > 2) {
                entries.add(entry);
            }
            entry = new HistoryEntry();
            entry.setActive(true);
            state = 0;
            continue;
        }
        switch(state) {
            case 0:
                if (s.startsWith("Revision:")) {
                    String rev = s.substring("Revision:".length()).trim();
                    entry.setRevision(rev);
                    ++state;
                }
                break;
            case 1:
                if (s.startsWith("Author:")) {
                    entry.setAuthor(s.substring("Author:".length()).trim());
                    ++state;
                }
                break;
            case 2:
                if (s.startsWith("Date:")) {
                    Date date = new Date();
                    try {
                        // Use the same constant as the startsWith() check
                        // above (was the lowercase "date:"; identical length,
                        // but inconsistent and fragile).
                        date = df.parse(s.substring("Date:".length()).trim());
                    } catch (ParseException pe) {
                        // Propagate parse failures; a changeset without a
                        // valid date would corrupt the history.
                        throw new IOException("Could not parse date: " + s, pe);
                    }
                    entry.setDate(date);
                    ++state;
                }
                break;
            case 3:
                if (s.startsWith("Modified ") || s.startsWith("Added ") || s.startsWith("Deleted ")) {
                    ++state;
                } else if (s.equalsIgnoreCase("ChangeLog:")) {
                    state = 5;
                }
                break;
            case 4:
                if (s.startsWith("Modified ") || s.startsWith("Added ") || s.startsWith("Deleted ")) {
                    //NOPMD
                    /* swallow section headers between file lists */
                } else if (s.equalsIgnoreCase("ChangeLog:")) {
                    state = 5;
                } else {
                    String[] files = s.split(" ");
                    for (String f : files) {
                        File file = new File(mydir, f);
                        try {
                            entry.addFile(env.getPathRelativeToSourceRoot(file, 0));
                        } catch (FileNotFoundException e) {
                            // NOPMD
                            // If the file is not located under the source root, ignore it
                        }
                    }
                }
                break;
            case 5:
                entry.appendMessage(s);
                break;
            default:
                LOGGER.warning("Unknown parser state: " + state);
                break;
        }
    }
    // Flush the trailing entry (no separator follows the last changeset).
    if (entry != null && state > 2) {
        entries.add(entry);
    }
}
Aggregations of RuntimeEnvironment usages end here.