Use of java.text.ParseException in project OpenGrok by OpenGrok: class BazaarHistoryParser, method processStream.
/**
* Process the output from the log command and insert the HistoryEntries
* into the history field.
*
* @param input The output from the process
* @throws java.io.IOException If an error occurs while reading the stream
*/
@Override
public void processStream(InputStream input) throws IOException {
DateFormat df = repository.getDateFormat();
RuntimeEnvironment env = RuntimeEnvironment.getInstance();
BufferedReader in = new BufferedReader(new InputStreamReader(input));
String s;
HistoryEntry entry = null;
int state = 0;
while ((s = in.readLine()) != null) {
if ("------------------------------------------------------------".equals(s)) {
if (entry != null && state > 2) {
entries.add(entry);
}
entry = new HistoryEntry();
entry.setActive(true);
state = 0;
continue;
}
switch(state) {
case 0:
// First, go on until revno is found.
if (s.startsWith("revno:")) {
String[] rev = s.substring("revno:".length()).trim().split(" ");
entry.setRevision(rev[0]);
++state;
}
break;
case 1:
// Then, look for committer.
if (s.startsWith("committer:")) {
entry.setAuthor(s.substring("committer:".length()).trim());
++state;
}
break;
case 2:
// And then, look for timestamp.
if (s.startsWith("timestamp:")) {
try {
Date date = df.parse(s.substring("timestamp:".length()).trim());
entry.setDate(date);
} catch (ParseException e) {
// Wrap the parse failure so callers only have to handle IOException.
throw new IOException("Failed to parse history timestamp: " + s, e);
}
++state;
}
break;
case 3:
// message.
if (s.startsWith("modified:") || s.startsWith("added:") || s.startsWith("removed:")) {
++state;
} else if (s.startsWith(" ")) {
// Commit messages returned by bzr log -v are prefixed
// with two blanks.
entry.appendMessage(s.substring(2));
}
break;
case 4:
// files. (Except the labels.)
if (!(s.startsWith("modified:") || s.startsWith("added:") || s.startsWith("removed:"))) {
// The list of files is prefixed with blanks.
s = s.trim();
int idx = s.indexOf(" => ");
if (idx != -1) {
s = s.substring(idx + 4);
}
File f = new File(myDir, s);
String name = env.getPathRelativeToSourceRoot(f, 0);
entry.addFile(name);
}
break;
default:
LOGGER.log(Level.WARNING, "Unknown parser state: {0}", state);
break;
}
}
if (entry != null && state > 2) {
entries.add(entry);
}
}
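The parser converts the checked ParseException from DateFormat into an IOException so that processStream keeps a single exception type in its signature. Below is a minimal standalone sketch of that pattern; the SimpleDateFormat pattern for bzr timestamps is an assumption for illustration and is not taken from the OpenGrok sources.

import java.io.IOException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

public class BzrTimestampExample {

    // Assumed shape of a "timestamp:" line in `bzr log` output,
    // e.g. "timestamp: Wed 2009-06-24 15:13:00 +0200".
    private static final DateFormat BZR_DATE =
            new SimpleDateFormat("EEE yyyy-MM-dd HH:mm:ss Z", Locale.US);

    /** Parse the value of a "timestamp:" line, wrapping parse errors in IOException. */
    static Date parseTimestamp(String line) throws IOException {
        String value = line.substring("timestamp:".length()).trim();
        try {
            return BZR_DATE.parse(value);
        } catch (ParseException e) {
            // Same wrapping as BazaarHistoryParser: callers only deal with IOException.
            throw new IOException("Failed to parse history timestamp: " + line, e);
        }
    }

    public static void main(String[] args) throws IOException {
        System.out.println(parseTimestamp("timestamp: Wed 2009-06-24 15:13:00 +0200"));
    }
}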
Use of java.text.ParseException in project OpenGrok by OpenGrok: class MercurialHistoryParser, method processStream.
/**
* Process the output from the hg log command and insert the HistoryEntries
* into the history field.
*
* @param input The output from the process
* @throws java.io.IOException If an error occurs while reading the stream
*/
@Override
public void processStream(InputStream input) throws IOException {
RuntimeEnvironment env = RuntimeEnvironment.getInstance();
DateFormat df = repository.getDateFormat();
BufferedReader in = new BufferedReader(new InputStreamReader(input));
entries = new ArrayList<HistoryEntry>();
String s;
HistoryEntry entry = null;
while ((s = in.readLine()) != null) {
if (s.startsWith(MercurialRepository.CHANGESET)) {
entry = new HistoryEntry();
entries.add(entry);
entry.setActive(true);
entry.setRevision(s.substring(MercurialRepository.CHANGESET.length()).trim());
} else if (s.startsWith(MercurialRepository.USER) && entry != null) {
entry.setAuthor(s.substring(MercurialRepository.USER.length()).trim());
} else if (s.startsWith(MercurialRepository.DATE) && entry != null) {
Date date = new Date();
try {
date = df.parse(s.substring(MercurialRepository.DATE.length()).trim());
} catch (ParseException pe) {
// Wrap the parse failure so callers only have to handle IOException.
throw new IOException("Could not parse date: " + s, pe);
}
entry.setDate(date);
} else if (s.startsWith(MercurialRepository.FILES) && entry != null) {
String[] strings = s.split(" ");
for (int ii = 1; ii < strings.length; ++ii) {
if (strings[ii].length() > 0) {
File f = new File(mydir, strings[ii]);
try {
entry.addFile(env.getPathRelativeToSourceRoot(f, 0));
} catch (FileNotFoundException e) {
// NOPMD
// If the file is not located under the source root,
// ignore it (bug #11664).
}
}
}
} else if (s.startsWith(MercurialRepository.FILE_COPIES) && entry != null && isDir) {
/*
* 'file_copies:' should be present only for directories but
* we use isDir to be on the safe side.
*/
s = s.replaceFirst(MercurialRepository.FILE_COPIES, "");
String[] splitArray = s.split("\\)");
for (String part : splitArray) {
/*
* This will fail for file names containing ' ('.
*/
String[] move = part.split(" \\(");
File f = new File(mydir + move[0]);
if (!move[0].isEmpty() && f.exists() && !renamedFiles.contains(move[0])) {
renamedFiles.add(move[0]);
}
}
} else if (s.startsWith(DESC_PREFIX) && entry != null) {
entry.setMessage(decodeDescription(s));
} else if (s.equals(MercurialRepository.END_OF_ENTRY) && entry != null) {
entry = null;
} else if (s.length() > 0) {
LOGGER.log(Level.WARNING, "Invalid/unexpected output {0} from hg log for repo {1}", new Object[] { s, repository.getDirectoryName() });
}
}
}
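repository.getDateFormat() supplies the DateFormat used above. Since java.text.DateFormat instances are not thread-safe, a repository read by several indexing threads should not hand all of them one shared instance. One defensive pattern (a sketch only, not OpenGrok's actual implementation) keeps a per-thread instance; the hg-style date pattern below is likewise an assumption:

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

public class PerThreadDateFormat {

    // Hypothetical hg-style date such as "Mon Sep 22 17:00:00 2008 +0200";
    // the pattern is for illustration only.
    private static final ThreadLocal<DateFormat> HG_DATE =
            ThreadLocal.withInitial(() ->
                    new SimpleDateFormat("EEE MMM dd HH:mm:ss yyyy Z", Locale.US));

    static Date parse(String value) throws ParseException {
        // Each thread parses with its own DateFormat instance, avoiding the
        // data races a single shared instance would cause.
        return HG_DATE.get().parse(value);
    }

    public static void main(String[] args) throws ParseException {
        System.out.println(parse("Mon Sep 22 17:00:00 2008 +0200"));
    }
}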
Use of java.text.ParseException in project OpenGrok by OpenGrok: class ClearCaseHistoryParser, method processStream.
/**
* Process the output from the log command and insert the HistoryEntries
* into the history field.
*
* @param input The output from the process
* @throws java.io.IOException If an error occurs while reading the stream
*/
@Override
public void processStream(InputStream input) throws IOException {
DateFormat df = repository.getDateFormat();
BufferedReader in = new BufferedReader(new InputStreamReader(input));
List<HistoryEntry> entries = new ArrayList<HistoryEntry>();
String s;
HistoryEntry entry = null;
while ((s = in.readLine()) != null) {
if (!"create version".equals(s) && !"create directory version".equals(s)) {
// skip this history entry
while ((s = in.readLine()) != null) {
if (".".equals(s)) {
break;
}
}
continue;
}
entry = new HistoryEntry();
if ((s = in.readLine()) != null) {
try {
entry.setDate(df.parse(s));
} catch (ParseException pe) {
// Wrap the parse failure so callers only have to handle IOException.
throw new IOException("Could not parse date: " + s, pe);
}
}
if ((s = in.readLine()) != null) {
entry.setAuthor(s);
}
if ((s = in.readLine()) != null) {
s = s.replace('\\', '/');
entry.setRevision(s);
}
StringBuffer message = new StringBuffer();
String glue = "";
while ((s = in.readLine()) != null && !".".equals(s)) {
if ("".equals(s)) {
// avoid empty lines in comments
continue;
}
message.append(glue);
message.append(s.trim());
glue = "\n";
}
entry.setMessage(message.toString());
entry.setActive(true);
entries.add(entry);
}
history = new History();
history.setHistoryEntries(entries);
}
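All three parsers fail fast by rethrowing the ParseException as an IOException. Keep in mind that a lenient DateFormat can "succeed" on malformed input and silently produce a wrong date instead of throwing; disabling leniency makes such input visible as a ParseException. A small sketch, with an illustrative format pattern rather than the one ClearCaseRepository really uses:

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Locale;

public class StrictDateParsing {

    public static void main(String[] args) {
        SimpleDateFormat df = new SimpleDateFormat("yyyyMMdd.HHmmss", Locale.US);
        df.setLenient(false); // reject out-of-range fields instead of rolling them over

        try {
            // Day-of-month 99 is invalid; a lenient parser would quietly roll it
            // over into April 2017, a strict one throws ParseException instead.
            System.out.println(df.parse("20170199.120000"));
        } catch (ParseException pe) {
            System.err.println("Rejected malformed date: " + pe.getMessage());
        }
    }
}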
Use of java.text.ParseException in project OpenGrok by OpenGrok: class Indexer, method main.
/**
* Program entry point
*
* @param argv argument vector
*/
@SuppressWarnings("PMD.UseStringBufferForStringAppends")
public static void main(String[] argv) {
//this won't count JVM creation though
Statistics stats = new Statistics();
boolean runIndex = true;
boolean update = true;
boolean optimizedChanged = false;
ArrayList<String> zapCache = new ArrayList<>();
CommandLineOptions cmdOptions = new CommandLineOptions();
if (argv.length == 0) {
System.err.println(cmdOptions.getUsage());
System.exit(1);
} else {
Executor.registerErrorHandler();
boolean searchRepositories = false;
ArrayList<String> subFiles = new ArrayList<>();
ArrayList<String> subFilesList = new ArrayList<>();
ArrayList<String> repositories = new ArrayList<>();
HashSet<String> allowedSymlinks = new HashSet<>();
String configFilename = null;
String configHost = null;
boolean addProjects = false;
boolean refreshHistory = false;
String defaultProject = null;
boolean listFiles = false;
boolean listRepos = false;
boolean createDict = false;
int noThreads = 2 + (2 * Runtime.getRuntime().availableProcessors());
String host = null;
int port = 0;
// Parse command line options:
Getopt getopt = new Getopt(argv, cmdOptions.getCommandString());
try {
getopt.parse();
} catch (ParseException ex) {
System.err.println("OpenGrok: " + ex.getMessage());
System.err.println(cmdOptions.getUsage());
System.exit(1);
}
try {
Configuration cfg = null;
int cmd;
// will try to overwrite options..
while ((cmd = getopt.getOpt()) != -1) {
if (cmd == 'R') {
cfg = Configuration.read(new File(getopt.getOptarg()));
break;
}
}
if (cfg == null) {
cfg = new Configuration();
}
// Now we can handle all the other options..
getopt.reset();
while ((cmd = getopt.getOpt()) != -1) {
switch(cmd) {
case 'A':
{
String[] arg = getopt.getOptarg().split(":");
boolean prefix = false;
if (arg.length != 2) {
A_usage();
}
if (arg[0].endsWith(".")) {
arg[0] = arg[0].substring(0, arg[0].lastIndexOf('.')).toUpperCase();
prefix = true;
} else if (arg[0].startsWith(".")) {
arg[0] = arg[0].substring(arg[0].lastIndexOf('.') + 1).toUpperCase();
} else {
A_usage();
}
if (arg[1].equals("-")) {
if (prefix) {
AnalyzerGuru.addPrefix(arg[0], null);
} else {
AnalyzerGuru.addExtension(arg[0], null);
}
break;
}
if (prefix) {
try {
AnalyzerGuru.addPrefix(arg[0], AnalyzerGuru.findFactory(arg[1]));
} catch (ClassNotFoundException | IllegalAccessException | InstantiationException e) {
LOGGER.log(Level.SEVERE, "Unable to use {0} as a FileAnalyzerFactory", arg[1]);
LOGGER.log(Level.SEVERE, "Stack: ", e.fillInStackTrace());
System.exit(1);
}
} else {
try {
AnalyzerGuru.addExtension(arg[0], AnalyzerGuru.findFactory(arg[1]));
} catch (ClassNotFoundException | IllegalAccessException | InstantiationException e) {
LOGGER.log(Level.SEVERE, "Unable to use {0} as a FileAnalyzerFactory", arg[1]);
LOGGER.log(Level.SEVERE, "Stack: ", e.fillInStackTrace());
System.exit(1);
}
}
}
break;
case 'a':
if (getopt.getOptarg().equalsIgnoreCase(ON)) {
cfg.setAllowLeadingWildcard(true);
} else if (getopt.getOptarg().equalsIgnoreCase(OFF)) {
cfg.setAllowLeadingWildcard(false);
} else {
System.err.println("ERROR: You should pass either \"on\" or \"off\" as argument to -a");
System.err.println(" Ex: \"-a on\" will allow a search to start with a wildcard");
System.err.println(" \"-a off\" will disallow a search to start with a wildcard");
System.exit(1);
}
break;
case 'B':
cfg.setUserPage(getopt.getOptarg());
break;
case 'C':
cfg.setPrintProgress(true);
break;
case 'c':
cfg.setCtags(getopt.getOptarg());
break;
case 'd':
{
File dataRoot = new File(getopt.getOptarg());
if (!dataRoot.exists() && !dataRoot.mkdirs()) {
System.err.println("ERROR: Cannot create data root");
System.exit(1);
}
if (!dataRoot.isDirectory()) {
System.err.println("ERROR: Data root must be a directory");
System.exit(1);
}
cfg.setDataRoot(dataRoot.getCanonicalPath());
break;
}
case 'e':
cfg.setGenerateHtml(false);
break;
case 'G':
cfg.setTagsEnabled(true);
break;
case 'H':
refreshHistory = true;
break;
case 'h':
repositories.add(getopt.getOptarg());
break;
case 'I':
cfg.getIncludedNames().add(getopt.getOptarg());
break;
case 'i':
cfg.getIgnoredNames().add(getopt.getOptarg());
break;
case 'K':
listRepos = true;
break;
case 'k':
zapCache.add(getopt.getOptarg());
break;
case 'L':
cfg.setWebappLAF(getopt.getOptarg());
break;
case 'l':
if (getopt.getOptarg().equalsIgnoreCase(ON)) {
cfg.setUsingLuceneLocking(true);
} else if (getopt.getOptarg().equalsIgnoreCase(OFF)) {
cfg.setUsingLuceneLocking(false);
} else {
System.err.println("ERROR: You should pass either \"on\" or \"off\" as argument to -l");
System.err.println(" Ex: \"-l on\" will enable locks in Lucene");
System.err.println(" \"-l off\" will disable locks in Lucene");
}
break;
case 'm':
{
try {
cfg.setRamBufferSize(Double.parseDouble(getopt.getOptarg()));
} catch (NumberFormatException exp) {
System.err.println("ERROR: Failed to parse argument to \"-m\": " + exp.getMessage());
System.exit(1);
}
break;
}
case 'N':
allowedSymlinks.add(getopt.getOptarg());
break;
case 'n':
runIndex = false;
break;
case 'O':
{
boolean oldval = cfg.isOptimizeDatabase();
if (getopt.getOptarg().equalsIgnoreCase(ON)) {
cfg.setOptimizeDatabase(true);
} else if (getopt.getOptarg().equalsIgnoreCase(OFF)) {
cfg.setOptimizeDatabase(false);
} else {
System.err.println("ERROR: You should pass either \"on\" or \"off\" as argument to -O");
System.err.println(" Ex: \"-O on\" will optimize the database as part of the index generation");
System.err.println(" \"-O off\" disable optimization of the index database");
}
if (oldval != cfg.isOptimizeDatabase()) {
optimizedChanged = true;
}
break;
}
case 'o':
String CTagsExtraOptionsFile = getopt.getOptarg();
File CTagsFile = new File(CTagsExtraOptionsFile);
if (!(CTagsFile.isFile() && CTagsFile.canRead())) {
System.err.println("ERROR: File '" + CTagsExtraOptionsFile + "' not found for the -o option");
System.exit(1);
}
System.err.println("INFO: file with extra " + "options for ctags: " + CTagsExtraOptionsFile);
cfg.setCTagsExtraOptionsFile(CTagsExtraOptionsFile);
break;
case 'P':
addProjects = true;
break;
case 'p':
defaultProject = getopt.getOptarg();
break;
case 'Q':
if (getopt.getOptarg().equalsIgnoreCase(ON)) {
cfg.setQuickContextScan(true);
} else if (getopt.getOptarg().equalsIgnoreCase(OFF)) {
cfg.setQuickContextScan(false);
} else {
System.err.println("ERROR: You should pass either \"on\" or \"off\" as argument to -Q");
System.err.println(" Ex: \"-Q on\" will just scan a \"chunk\" of the file and insert \"[..all..]\"");
System.err.println(" \"-Q off\" will try to build a more accurate list by reading the complete file.");
}
break;
case 'q':
cfg.setVerbose(false);
LoggerUtil.setBaseConsoleLogLevel(Level.WARNING);
break;
case 'R':
// already handled
break;
case 'r':
if (getopt.getOptarg().equalsIgnoreCase(ON)) {
cfg.setRemoteScmSupported(Configuration.RemoteSCM.ON);
} else if (getopt.getOptarg().equalsIgnoreCase(OFF)) {
cfg.setRemoteScmSupported(Configuration.RemoteSCM.OFF);
} else if (getopt.getOptarg().equalsIgnoreCase(DIRBASED)) {
cfg.setRemoteScmSupported(Configuration.RemoteSCM.DIRBASED);
} else if (getopt.getOptarg().equalsIgnoreCase(UIONLY)) {
cfg.setRemoteScmSupported(Configuration.RemoteSCM.UIONLY);
} else {
System.err.println("ERROR: You should pass either \"on\" or \"off\" or \"uionly\" as argument to -r");
System.err.println(" Ex: \"-r on\" will allow retrieval for remote SCM systems");
System.err.println(" \"-r off\" will ignore SCM for remote systems");
System.err.println(" \"-r dirbased\" will allow retrieval during history index " + "only for repositories which allow getting history for directories");
System.err.println(" \"-r uionly\" will support remote SCM for UI only");
}
break;
case 'S':
searchRepositories = true;
break;
case 's':
{
File sourceRoot = new File(getopt.getOptarg());
if (!sourceRoot.isDirectory()) {
System.err.println("ERROR: Source root " + getopt.getOptarg() + " must be a directory");
System.exit(1);
}
cfg.setSourceRoot(sourceRoot.getCanonicalPath());
break;
}
case 'T':
try {
noThreads = Integer.parseInt(getopt.getOptarg());
} catch (NumberFormatException exp) {
System.err.println("ERROR: Failed to parse argument to \"-T\": " + exp.getMessage());
System.exit(1);
}
break;
case 't':
try {
int tmp = Integer.parseInt(getopt.getOptarg());
cfg.setTabSize(tmp);
} catch (NumberFormatException exp) {
System.err.println("ERROR: Failed to parse argument to \"-t\": " + exp.getMessage());
System.exit(1);
}
break;
case 'U':
configHost = getopt.getOptarg();
break;
case 'V':
System.out.println(Info.getFullVersion());
System.exit(0);
break;
case 'v':
cfg.setVerbose(true);
LoggerUtil.setBaseConsoleLogLevel(Level.INFO);
break;
case 'W':
configFilename = getopt.getOptarg();
break;
case 'w':
{
String webapp = getopt.getOptarg();
if (webapp.charAt(0) != '/' && !webapp.startsWith("http")) {
webapp = "/" + webapp;
}
if (webapp.endsWith("/")) {
cfg.setUrlPrefix(webapp + "s?");
} else {
cfg.setUrlPrefix(webapp + "/s?");
}
}
break;
case 'X':
cfg.setUserPageSuffix(getopt.getOptarg());
break;
case 'z':
try {
cfg.setScanningDepth(Integer.parseInt(getopt.getOptarg()));
} catch (NumberFormatException exp) {
System.err.println("ERROR: Failed to parse argument to \"-z\": " + exp.getMessage());
System.exit(1);
}
break;
case '?':
System.err.println(cmdOptions.getUsage());
System.exit(0);
break;
default:
System.err.println("Internal Error - Unimplemented cmdline option: " + (char) cmd);
System.exit(1);
}
}
if (configHost != null) {
String[] configHostArray = configHost.split(":");
if (configHostArray.length == 2) {
host = configHostArray[0];
try {
port = Integer.parseInt(configHostArray[1]);
} catch (NumberFormatException ex) {
System.err.println("Failed to parse: " + configHost);
System.exit(1);
}
} else {
System.err.println("Syntax error: ");
for (String s : configHostArray) {
System.err.println(s);
}
System.exit(1);
}
}
List<Class<? extends Repository>> repositoryClasses = RepositoryFactory.getRepositoryClasses();
for (Class<? extends Repository> clazz : repositoryClasses) {
try {
Field f = clazz.getDeclaredField("CMD_PROPERTY_KEY");
Object key = f.get(null);
if (key != null) {
cfg.setRepoCmd(clazz.getCanonicalName(), System.getProperty(key.toString()));
}
} catch (Exception e) {
// don't care
}
}
//logging starts here
if (cfg.isVerbose()) {
String fn = LoggerUtil.getFileHandlerPattern();
if (fn != null) {
System.out.println("Logging filehandler pattern: " + fn);
}
}
// automatically allow symlinks that are directly in source root
String file = cfg.getSourceRoot();
if (file != null) {
File sourceRootFile = new File(file);
File[] projectDirs = sourceRootFile.listFiles();
if (projectDirs != null) {
for (File projectDir : projectDirs) {
if (!projectDir.getCanonicalPath().equals(projectDir.getAbsolutePath())) {
allowedSymlinks.add(projectDir.getAbsolutePath());
}
}
}
}
allowedSymlinks.addAll(cfg.getAllowedSymlinks());
cfg.setAllowedSymlinks(allowedSymlinks);
// Assemble the unprocessed command line arguments (possibly
// a list of paths). This will be used to perform more fine
// grained checking in invalidateRepositories().
int optind = getopt.getOptind();
if (optind != -1) {
while (optind < argv.length) {
subFilesList.add(cfg.getSourceRoot() + argv[optind++]);
}
}
// Set updated configuration in RuntimeEnvironment.
RuntimeEnvironment env = RuntimeEnvironment.getInstance();
env.setConfiguration(cfg, subFilesList);
/*
* Add paths to directories under source root. If projects
* are enabled the path should correspond to a project because
* project path is necessary to correctly set index directory
* (otherwise the index files will end up in index data root
* directory and not per project data root directory).
* For the check we need to have 'env' already set.
*/
for (String path : subFilesList) {
String srcPath = env.getSourceRootPath();
if (srcPath == null) {
System.err.println("Error getting source root from environment. Exiting.");
System.exit(1);
}
path = path.substring(srcPath.length());
if (env.hasProjects()) {
// The paths need to correspond to a project.
if (Project.getProject(path) != null) {
subFiles.add(path);
} else {
System.err.println("The path " + path + " does not correspond to a project");
}
} else {
subFiles.add(path);
}
}
if (!subFilesList.isEmpty() && subFiles.isEmpty()) {
System.err.println("None of the paths were added, exiting");
System.exit(1);
}
// Get history first.
getInstance().prepareIndexer(env, searchRepositories, addProjects, defaultProject, configFilename, refreshHistory, listFiles, createDict, subFiles, repositories, zapCache, listRepos);
if (listRepos || !zapCache.isEmpty()) {
return;
}
// And now index it all.
if (runIndex || (optimizedChanged && env.isOptimizeDatabase())) {
IndexChangedListener progress = new DefaultIndexChangedListener();
getInstance().doIndexerExecution(update, noThreads, subFiles, progress);
}
// or send new configuration to the web application in the case of full reindex.
if (host != null) {
if (!subFiles.isEmpty()) {
getInstance().refreshSearcherManagers(env, subFiles, host, port);
} else {
getInstance().sendToConfigHost(env, host, port);
}
}
} catch (IndexerException ex) {
LOGGER.log(Level.SEVERE, "Exception running indexer", ex);
System.err.println(cmdOptions.getUsage());
System.exit(1);
} catch (Throwable e) {
System.err.println("Exception: " + e.getLocalizedMessage());
LOGGER.log(Level.SEVERE, "Unexpected Exception", e);
System.exit(1);
} finally {
stats.report(LOGGER);
}
}
}
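In Indexer.main the ParseException has nothing to do with dates: the Getopt wrapper signals a malformed command line by throwing java.text.ParseException, and main converts it into a usage message and a non-zero exit status. The sketch below shows the same convention with a hypothetical, much smaller option parser:

import java.text.ParseException;

public class MiniGetopt {

    /**
     * Accept only recognized single-letter options; the error offset passed to
     * ParseException is the index of the offending argument.
     */
    static void parse(String[] argv, String allowed) throws ParseException {
        for (int i = 0; i < argv.length; i++) {
            String a = argv[i];
            if (a.length() != 2 || a.charAt(0) != '-' || allowed.indexOf(a.charAt(1)) < 0) {
                throw new ParseException("Unknown option: " + a, i);
            }
        }
    }

    public static void main(String[] argv) {
        try {
            parse(argv, "HPSv");
        } catch (ParseException ex) {
            // Same shape as Indexer.main: report the problem, print usage, exit non-zero.
            System.err.println("OpenGrok: " + ex.getMessage());
            System.err.println("Usage: MiniGetopt [-H] [-P] [-S] [-v]");
            System.exit(1);
        }
    }
}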
Use of java.text.ParseException in project OpenGrok by OpenGrok: class Results, method prettyPrint.
/**
* Prints out results in html form. The following search helper fields are
* required to be properly initialized: <ul>
* <li>{@link SearchHelper#dataRoot}</li>
* <li>{@link SearchHelper#contextPath}</li>
* <li>{@link SearchHelper#searcher}</li> <li>{@link SearchHelper#hits}</li>
* <li>{@link SearchHelper#historyContext} (ignored if {@code null})</li>
* <li>{@link SearchHelper#sourceContext} (ignored if {@code null})</li>
* <li>{@link SearchHelper#summarizer} (if sourceContext is not
* {@code null})</li> <li>{@link SearchHelper#compressed} (if sourceContext
* is not {@code null})</li> <li>{@link SearchHelper#sourceRoot} (if
* sourceContext or historyContext is not {@code null})</li> </ul>
*
* @param out write destination
* @param sh search helper which has all required fields set
* @param start index of the first hit to print
* @param end index of the last hit to print
* @throws HistoryException
* @throws IOException
* @throws ClassNotFoundException
*/
public static void prettyPrint(Writer out, SearchHelper sh, int start, int end) throws HistoryException, IOException, ClassNotFoundException {
Project p;
String ctxE = Util.URIEncodePath(sh.contextPath);
String xrefPrefix = sh.contextPath + Prefix.XREF_P;
String morePrefix = sh.contextPath + Prefix.MORE_P;
String xrefPrefixE = ctxE + Prefix.XREF_P;
File xrefDataDir = new File(sh.dataRoot, Prefix.XREF_P.toString());
for (Map.Entry<String, ArrayList<Document>> entry : createMap(sh.searcher, sh.hits, start, end).entrySet()) {
String parent = entry.getKey();
out.write("<tr class=\"dir\"><td colspan=\"3\"><a href=\"");
out.write(xrefPrefixE);
out.write(Util.URIEncodePath(parent));
out.write("/\">");
// htmlize ???
out.write(parent);
out.write("/</a>");
if (sh.desc != null) {
out.write(" - <i>");
// htmlize ???
out.write(sh.desc.get(parent));
out.write("</i>");
}
JSONArray messages;
if ((p = Project.getProject(parent)) != null && (messages = Util.messagesToJson(p, RuntimeEnvironment.MESSAGES_MAIN_PAGE_TAG)).size() > 0) {
out.write(" <a ");
out.write("href=\"" + xrefPrefix + "/" + p.getName() + "\">");
out.write("<span class=\"important-note important-note-rounded\" data-messages='" + messages + "'>!</span>");
out.write("</a>");
}
out.write("</td></tr>");
for (Document doc : entry.getValue()) {
String rpath = doc.get(QueryBuilder.PATH);
String rpathE = Util.URIEncodePath(rpath);
DateFormat df;
out.write("<tr>");
Util.writeHAD(out, sh.contextPath, rpathE, false);
out.write("<td class=\"f\"><a href=\"");
out.write(xrefPrefixE);
out.write(rpathE);
out.write("\"");
if (RuntimeEnvironment.getInstance().isLastEditedDisplayMode()) {
try {
// insert last edited date if possible
df = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
String dd = df.format(DateTools.stringToDate(doc.get("date")));
out.write(" class=\"result-annotate\" title=\"");
out.write("Last modified: ");
out.write(dd);
out.write("\"");
} catch (ParseException ex) {
LOGGER.log(Level.WARNING, "An error parsing date information", ex);
}
}
out.write(">");
// htmlize ???
out.write(rpath.substring(rpath.lastIndexOf('/') + 1));
out.write("</a>");
out.write("</td><td><tt class=\"con\">");
if (sh.sourceContext != null) {
Genre genre = Genre.get(doc.get("t"));
Definitions tags = null;
IndexableField tagsField = doc.getField(QueryBuilder.TAGS);
if (tagsField != null) {
tags = Definitions.deserialize(tagsField.binaryValue().bytes);
}
Scopes scopes;
IndexableField scopesField = doc.getField(QueryBuilder.SCOPES);
if (scopesField != null) {
scopes = Scopes.deserialize(scopesField.binaryValue().bytes);
} else {
scopes = new Scopes();
}
if (Genre.XREFABLE == genre && sh.summarizer != null) {
String xtags = getTags(xrefDataDir, rpath, sh.compressed);
// FIXME use Highlighter from lucene contrib here,
// instead of summarizer, we'd also get rid of
// apache lucene in whole source ...
out.write(sh.summarizer.getSummary(xtags).toString());
} else if (Genre.HTML == genre && sh.summarizer != null) {
String htags = getTags(sh.sourceRoot, rpath, false);
out.write(sh.summarizer.getSummary(htags).toString());
} else {
FileReader r = genre == Genre.PLAIN ? new FileReader(new File(sh.sourceRoot, rpath)) : null;
sh.sourceContext.getContext(r, out, xrefPrefix, morePrefix, rpath, tags, true, sh.builder.isDefSearch(), null, scopes);
}
}
if (sh.historyContext != null) {
sh.historyContext.getContext(new File(sh.sourceRoot, rpath), rpath, out, sh.contextPath);
}
out.write("</tt></td></tr>\n");
}
}
}
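prettyPrint reads the stored "date" field back through Lucene's DateTools.stringToDate and only logs a ParseException, so one missing or corrupt date cannot break the whole results page. The round trip looks roughly like the following sketch (the DateTools.Resolution chosen at index time is an assumption):

import java.text.DateFormat;
import java.text.ParseException;
import java.util.Date;
import org.apache.lucene.document.DateTools;

public class LastModifiedColumn {

    public static void main(String[] args) {
        // At index time the modification time is stored as a sortable string.
        String stored = DateTools.timeToString(System.currentTimeMillis(),
                DateTools.Resolution.MILLISECOND);

        // At search time the string is turned back into a Date for display.
        try {
            Date lastModified = DateTools.stringToDate(stored);
            DateFormat df = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
            System.out.println("Last modified: " + df.format(lastModified));
        } catch (ParseException ex) {
            // Mirror Results.prettyPrint: log the problem and keep rendering
            // the row without the "Last modified" annotation.
            System.err.println("An error parsing date information: " + ex);
        }
    }
}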