Example 1 with Statistics

Use of org.opensolaris.opengrok.util.Statistics in project OpenGrok by OpenGrok.

In class HistoryGuru, method createCacheReal:

private void createCacheReal(Collection<Repository> repositories) {
    Statistics elapsed = new Statistics();
    ExecutorService executor = RuntimeEnvironment.getHistoryExecutor();
    // Since we know each repository object from the repositories
    // collection is unique, we can abuse HashMap to create a list of
    // repository,revision tuples with repository as key (as the revision
    // string does not have to be unique - surely it is not unique
    // for the initial index case).
    HashMap<Repository, String> repos2process = new HashMap<>();
    // Collect the repository/latest-revision pairs up front so that we
    // do not have to deal with latch decrementing in the cycle below.
    for (final Repository repo : repositories) {
        final String latestRev;
        try {
            latestRev = historyCache.getLatestCachedRevision(repo);
            repos2process.put(repo, latestRev);
        } catch (HistoryException he) {
            LOGGER.log(Level.WARNING, String.format("Failed to retrieve latest cached revision for %s", repo.getDirectoryName()), he);
        }
    }
    LOGGER.log(Level.INFO, "Creating historycache for {0} repositories", repos2process.size());
    final CountDownLatch latch = new CountDownLatch(repos2process.size());
    for (final Map.Entry<Repository, String> entry : repos2process.entrySet()) {
        executor.submit(new Runnable() {

            @Override
            public void run() {
                try {
                    createCache(entry.getKey(), entry.getValue());
                } catch (Exception ex) {
                    // We want to catch any exception since we are in a thread.
                    LOGGER.log(Level.WARNING, "createCacheReal() got exception {0}", ex);
                } finally {
                    latch.countDown();
                }
            }
        });
    }
    /*
         * Wait until the history of all repositories is done. This is necessary
         * since the next phase, index generation, needs the history to be ready
         * as it is recorded in the Lucene index.
         */
    try {
        latch.await();
    } catch (InterruptedException ex) {
        LOGGER.log(Level.SEVERE, "latch exception {0}", ex);
    }
    executor.shutdown();
    while (!executor.isTerminated()) {
        try {
            // Wait forever
            executor.awaitTermination(999, TimeUnit.DAYS);
        } catch (InterruptedException exp) {
            LOGGER.log(Level.WARNING, "Received interrupt while waiting for executor to finish", exp);
        }
    }
    RuntimeEnvironment.freeHistoryExecutor();
    try {
        /* Thread pool for handling renamed files needs to be destroyed too. */
        RuntimeEnvironment.destroyRenamedHistoryExecutor();
    } catch (InterruptedException ex) {
        LOGGER.log(Level.SEVERE, "destroying of renamed thread pool failed", ex);
    }
    // The cache has been populated. Now, optimize how it is stored on
    // disk to enhance performance and save space.
    try {
        historyCache.optimize();
    } catch (HistoryException he) {
        LOGGER.log(Level.WARNING, "Failed optimizing the history cache database", he);
    }
    elapsed.report(LOGGER, "Done historycache for all repositories");
    historyCache.setHistoryIndexDone();
}
Also used : HashMap(java.util.HashMap) CountDownLatch(java.util.concurrent.CountDownLatch) Statistics(org.opensolaris.opengrok.util.Statistics) IOException(java.io.IOException) ExecutorService(java.util.concurrent.ExecutorService) Map(java.util.Map)
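
Example 1 pairs the history executor with a CountDownLatch sized to the number of repositories, so createCacheReal can block until every per-repository task has finished before the executor is shut down. Below is a minimal, self-contained sketch of that coordination pattern; LatchFanOut and runAll are illustrative names, not OpenGrok API, and a lambda stands in for the anonymous Runnable.

import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class LatchFanOut {

    public static void runAll(List<Runnable> tasks) throws InterruptedException {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        // Size the latch to the number of tasks; await() then returns only
        // after every task has reached its finally block.
        final CountDownLatch latch = new CountDownLatch(tasks.size());
        for (final Runnable task : tasks) {
            executor.submit(() -> {
                try {
                    task.run();
                } finally {
                    // Count down even on failure so await() cannot hang.
                    latch.countDown();
                }
            });
        }
        latch.await();
        executor.shutdown();
    }
}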

Example 2 with Statistics

Use of org.opensolaris.opengrok.util.Statistics in project OpenGrok by OpenGrok.

In class Indexer, method main:

/**
     * Program entry point
     *
     * @param argv argument vector
     */
@SuppressWarnings("PMD.UseStringBufferForStringAppends")
public static void main(String[] argv) {
    // This won't count JVM creation, though.
    Statistics stats = new Statistics();
    boolean runIndex = true;
    boolean update = true;
    boolean optimizedChanged = false;
    ArrayList<String> zapCache = new ArrayList<>();
    CommandLineOptions cmdOptions = new CommandLineOptions();
    if (argv.length == 0) {
        System.err.println(cmdOptions.getUsage());
        System.exit(1);
    } else {
        Executor.registerErrorHandler();
        boolean searchRepositories = false;
        ArrayList<String> subFiles = new ArrayList<>();
        ArrayList<String> subFilesList = new ArrayList<>();
        ArrayList<String> repositories = new ArrayList<>();
        HashSet<String> allowedSymlinks = new HashSet<>();
        String configFilename = null;
        String configHost = null;
        boolean addProjects = false;
        boolean refreshHistory = false;
        String defaultProject = null;
        boolean listFiles = false;
        boolean listRepos = false;
        boolean createDict = false;
        int noThreads = 2 + (2 * Runtime.getRuntime().availableProcessors());
        String host = null;
        int port = 0;
        // Parse command line options:
        Getopt getopt = new Getopt(argv, cmdOptions.getCommandString());
        try {
            getopt.parse();
        } catch (ParseException ex) {
            System.err.println("OpenGrok: " + ex.getMessage());
            System.err.println(cmdOptions.getUsage());
            System.exit(1);
        }
        try {
            Configuration cfg = null;
            int cmd;
            // We need to read the configuration file (-R) first, since the
            // remaining options will try to overwrite its settings.
            while ((cmd = getopt.getOpt()) != -1) {
                if (cmd == 'R') {
                    cfg = Configuration.read(new File(getopt.getOptarg()));
                    break;
                }
            }
            if (cfg == null) {
                cfg = new Configuration();
            }
            // Now we can handle all the other options..
            getopt.reset();
            while ((cmd = getopt.getOpt()) != -1) {
                switch(cmd) {
                    case 'A':
                        {
                            String[] arg = getopt.getOptarg().split(":");
                            boolean prefix = false;
                            if (arg.length != 2) {
                                A_usage();
                            }
                            if (arg[0].endsWith(".")) {
                                arg[0] = arg[0].substring(0, arg[0].lastIndexOf('.')).toUpperCase();
                                prefix = true;
                            } else if (arg[0].startsWith(".")) {
                                arg[0] = arg[0].substring(arg[0].lastIndexOf('.') + 1).toUpperCase();
                            } else {
                                A_usage();
                            }
                            if (arg[1].equals("-")) {
                                if (prefix) {
                                    AnalyzerGuru.addPrefix(arg[0], null);
                                } else {
                                    AnalyzerGuru.addExtension(arg[0], null);
                                }
                                break;
                            }
                            if (prefix) {
                                try {
                                    AnalyzerGuru.addPrefix(arg[0], AnalyzerGuru.findFactory(arg[1]));
                                } catch (ClassNotFoundException | IllegalAccessException | InstantiationException e) {
                                    LOGGER.log(Level.SEVERE, "Unable to use {0} as a FileAnalyzerFactory", arg[1]);
                                    LOGGER.log(Level.SEVERE, "Stack: ", e.fillInStackTrace());
                                    System.exit(1);
                                }
                            } else {
                                try {
                                    AnalyzerGuru.addExtension(arg[0], AnalyzerGuru.findFactory(arg[1]));
                                } catch (ClassNotFoundException | IllegalAccessException | InstantiationException e) {
                                    LOGGER.log(Level.SEVERE, "Unable to use {0} as a FileAnalyzerFactory", arg[1]);
                                    LOGGER.log(Level.SEVERE, "Stack: ", e.fillInStackTrace());
                                    System.exit(1);
                                }
                            }
                        }
                        break;
                    case 'a':
                        if (getopt.getOptarg().equalsIgnoreCase(ON)) {
                            cfg.setAllowLeadingWildcard(true);
                        } else if (getopt.getOptarg().equalsIgnoreCase(OFF)) {
                            cfg.setAllowLeadingWildcard(false);
                        } else {
                            System.err.println("ERROR: You should pass either \"on\" or \"off\" as argument to -a");
                            System.err.println("       Ex: \"-a on\" will allow a search to start with a wildcard");
                            System.err.println("           \"-a off\" will disallow a search to start with a wildcard");
                            System.exit(1);
                        }
                        break;
                    case 'B':
                        cfg.setUserPage(getopt.getOptarg());
                        break;
                    case 'C':
                        cfg.setPrintProgress(true);
                        break;
                    case 'c':
                        cfg.setCtags(getopt.getOptarg());
                        break;
                    case 'd':
                        {
                            File dataRoot = new File(getopt.getOptarg());
                            if (!dataRoot.exists() && !dataRoot.mkdirs()) {
                                System.err.println("ERROR: Cannot create data root");
                                System.exit(1);
                            }
                            if (!dataRoot.isDirectory()) {
                                System.err.println("ERROR: Data root must be a directory");
                                System.exit(1);
                            }
                            cfg.setDataRoot(dataRoot.getCanonicalPath());
                            break;
                        }
                    case 'e':
                        cfg.setGenerateHtml(false);
                        break;
                    case 'G':
                        cfg.setTagsEnabled(true);
                        break;
                    case 'H':
                        refreshHistory = true;
                        break;
                    case 'h':
                        repositories.add(getopt.getOptarg());
                        break;
                    case 'I':
                        cfg.getIncludedNames().add(getopt.getOptarg());
                        break;
                    case 'i':
                        cfg.getIgnoredNames().add(getopt.getOptarg());
                        break;
                    case 'K':
                        listRepos = true;
                        break;
                    case 'k':
                        zapCache.add(getopt.getOptarg());
                        break;
                    case 'L':
                        cfg.setWebappLAF(getopt.getOptarg());
                        break;
                    case 'l':
                        if (getopt.getOptarg().equalsIgnoreCase(ON)) {
                            cfg.setUsingLuceneLocking(true);
                        } else if (getopt.getOptarg().equalsIgnoreCase(OFF)) {
                            cfg.setUsingLuceneLocking(false);
                        } else {
                            System.err.println("ERROR: You should pass either \"on\" or \"off\" as argument to -l");
                            System.err.println("       Ex: \"-l on\" will enable locks in Lucene");
                            System.err.println("           \"-l off\" will disable locks in Lucene");
                        }
                        break;
                    case 'm':
                        {
                            try {
                                cfg.setRamBufferSize(Double.parseDouble(getopt.getOptarg()));
                            } catch (NumberFormatException exp) {
                                System.err.println("ERROR: Failed to parse argument to \"-m\": " + exp.getMessage());
                                System.exit(1);
                            }
                            break;
                        }
                    case 'N':
                        allowedSymlinks.add(getopt.getOptarg());
                        break;
                    case 'n':
                        runIndex = false;
                        break;
                    case 'O':
                        {
                            boolean oldval = cfg.isOptimizeDatabase();
                            if (getopt.getOptarg().equalsIgnoreCase(ON)) {
                                cfg.setOptimizeDatabase(true);
                            } else if (getopt.getOptarg().equalsIgnoreCase(OFF)) {
                                cfg.setOptimizeDatabase(false);
                            } else {
                                System.err.println("ERROR: You should pass either \"on\" or \"off\" as argument to -O");
                                System.err.println("       Ex: \"-O on\" will optimize the database as part of the index generation");
                                System.err.println("           \"-O off\" disable optimization of the index database");
                            }
                            if (oldval != cfg.isOptimizeDatabase()) {
                                optimizedChanged = true;
                            }
                            break;
                        }
                    case 'o':
                        String CTagsExtraOptionsFile = getopt.getOptarg();
                        File CTagsFile = new File(CTagsExtraOptionsFile);
                        if (!(CTagsFile.isFile() && CTagsFile.canRead())) {
                            System.err.println("ERROR: File '" + CTagsExtraOptionsFile + "' not found for the -o option");
                            System.exit(1);
                        }
                        System.err.println("INFO: file with extra " + "options for ctags: " + CTagsExtraOptionsFile);
                        cfg.setCTagsExtraOptionsFile(CTagsExtraOptionsFile);
                        break;
                    case 'P':
                        addProjects = true;
                        break;
                    case 'p':
                        defaultProject = getopt.getOptarg();
                        break;
                    case 'Q':
                        if (getopt.getOptarg().equalsIgnoreCase(ON)) {
                            cfg.setQuickContextScan(true);
                        } else if (getopt.getOptarg().equalsIgnoreCase(OFF)) {
                            cfg.setQuickContextScan(false);
                        } else {
                            System.err.println("ERROR: You should pass either \"on\" or \"off\" as argument to -Q");
                            System.err.println("       Ex: \"-Q on\" will just scan a \"chunk\" of the file and insert \"[..all..]\"");
                            System.err.println("           \"-Q off\" will try to build a more accurate list by reading the complete file.");
                        }
                        break;
                    case 'q':
                        cfg.setVerbose(false);
                        LoggerUtil.setBaseConsoleLogLevel(Level.WARNING);
                        break;
                    case 'R':
                        // already handled
                        break;
                    case 'r':
                        if (getopt.getOptarg().equalsIgnoreCase(ON)) {
                            cfg.setRemoteScmSupported(Configuration.RemoteSCM.ON);
                        } else if (getopt.getOptarg().equalsIgnoreCase(OFF)) {
                            cfg.setRemoteScmSupported(Configuration.RemoteSCM.OFF);
                        } else if (getopt.getOptarg().equalsIgnoreCase(DIRBASED)) {
                            cfg.setRemoteScmSupported(Configuration.RemoteSCM.DIRBASED);
                        } else if (getopt.getOptarg().equalsIgnoreCase(UIONLY)) {
                            cfg.setRemoteScmSupported(Configuration.RemoteSCM.UIONLY);
                        } else {
                            System.err.println("ERROR: You should pass either \"on\" or \"off\" or \"uionly\" as argument to -r");
                            System.err.println("       Ex: \"-r on\" will allow retrieval for remote SCM systems");
                            System.err.println("           \"-r off\" will ignore SCM for remote systems");
                            System.err.println("           \"-r dirbased\" will allow retrieval during history index " + "only for repositories which allow getting history for directories");
                            System.err.println("           \"-r uionly\" will support remote SCM for UI only");
                        }
                        break;
                    case 'S':
                        searchRepositories = true;
                        break;
                    case 's':
                        {
                            File sourceRoot = new File(getopt.getOptarg());
                            if (!sourceRoot.isDirectory()) {
                                System.err.println("ERROR: Source root " + getopt.getOptarg() + " must be a directory");
                                System.exit(1);
                            }
                            cfg.setSourceRoot(sourceRoot.getCanonicalPath());
                            break;
                        }
                    case 'T':
                        try {
                            noThreads = Integer.parseInt(getopt.getOptarg());
                        } catch (NumberFormatException exp) {
                            System.err.println("ERROR: Failed to parse argument to \"-T\": " + exp.getMessage());
                            System.exit(1);
                        }
                        break;
                    case 't':
                        try {
                            int tmp = Integer.parseInt(getopt.getOptarg());
                            cfg.setTabSize(tmp);
                        } catch (NumberFormatException exp) {
                            System.err.println("ERROR: Failed to parse argument to \"-t\": " + exp.getMessage());
                            System.exit(1);
                        }
                        break;
                    case 'U':
                        configHost = getopt.getOptarg();
                        break;
                    case 'V':
                        System.out.println(Info.getFullVersion());
                        System.exit(0);
                        break;
                    case 'v':
                        cfg.setVerbose(true);
                        LoggerUtil.setBaseConsoleLogLevel(Level.INFO);
                        break;
                    case 'W':
                        configFilename = getopt.getOptarg();
                        break;
                    case 'w':
                        {
                            String webapp = getopt.getOptarg();
                            if (webapp.charAt(0) != '/' && !webapp.startsWith("http")) {
                                webapp = "/" + webapp;
                            }
                            if (webapp.endsWith("/")) {
                                cfg.setUrlPrefix(webapp + "s?");
                            } else {
                                cfg.setUrlPrefix(webapp + "/s?");
                            }
                        }
                        break;
                    case 'X':
                        cfg.setUserPageSuffix(getopt.getOptarg());
                        break;
                    case 'z':
                        try {
                            cfg.setScanningDepth(Integer.parseInt(getopt.getOptarg()));
                        } catch (NumberFormatException exp) {
                            System.err.println("ERROR: Failed to parse argument to \"-z\": " + exp.getMessage());
                            System.exit(1);
                        }
                        break;
                    case '?':
                        System.err.println(cmdOptions.getUsage());
                        System.exit(0);
                        break;
                    default:
                        System.err.println("Internal Error - Unimplemented cmdline option: " + (char) cmd);
                        System.exit(1);
                }
            }
            if (configHost != null) {
                String[] configHostArray = configHost.split(":");
                if (configHostArray.length == 2) {
                    host = configHostArray[0];
                    try {
                        port = Integer.parseInt(configHostArray[1]);
                    } catch (NumberFormatException ex) {
                        System.err.println("Failed to parse: " + configHost);
                        System.exit(1);
                    }
                } else {
                    System.err.println("Syntax error: ");
                    for (String s : configHostArray) {
                        System.err.println(s);
                    }
                    System.exit(1);
                }
            }
            List<Class<? extends Repository>> repositoryClasses = RepositoryFactory.getRepositoryClasses();
            for (Class<? extends Repository> clazz : repositoryClasses) {
                try {
                    Field f = clazz.getDeclaredField("CMD_PROPERTY_KEY");
                    Object key = f.get(null);
                    if (key != null) {
                        cfg.setRepoCmd(clazz.getCanonicalName(), System.getProperty(key.toString()));
                    }
                } catch (Exception e) {
                // don't care
                }
            }
            // Logging starts here.
            if (cfg.isVerbose()) {
                String fn = LoggerUtil.getFileHandlerPattern();
                if (fn != null) {
                    System.out.println("Logging filehandler pattern: " + fn);
                }
            }
            // automatically allow symlinks that are directly in source root
            String file = cfg.getSourceRoot();
            if (file != null) {
                File sourceRootFile = new File(file);
                File[] projectDirs = sourceRootFile.listFiles();
                if (projectDirs != null) {
                    for (File projectDir : projectDirs) {
                        if (!projectDir.getCanonicalPath().equals(projectDir.getAbsolutePath())) {
                            allowedSymlinks.add(projectDir.getAbsolutePath());
                        }
                    }
                }
            }
            allowedSymlinks.addAll(cfg.getAllowedSymlinks());
            cfg.setAllowedSymlinks(allowedSymlinks);
            // Assemble the unprocessed command line arguments (possibly
            // a list of paths). This will be used to perform more fine
            // grained checking in invalidateRepositories().
            int optind = getopt.getOptind();
            if (optind != -1) {
                while (optind < argv.length) {
                    subFilesList.add(cfg.getSourceRoot() + argv[optind++]);
                }
            }
            // Set updated configuration in RuntimeEnvironment.
            RuntimeEnvironment env = RuntimeEnvironment.getInstance();
            env.setConfiguration(cfg, subFilesList);
            /*
                 * Add paths to directories under source root. If projects
                 * are enabled the path should correspond to a project because
                 * project path is necessary to correctly set index directory
                 * (otherwise the index files will end up in index data root
                 * directory and not per project data root directory).
                 * For the check we need to have 'env' already set.
                 */
            for (String path : subFilesList) {
                String srcPath = env.getSourceRootPath();
                if (srcPath == null) {
                    System.err.println("Error getting source root from environment. Exiting.");
                    System.exit(1);
                }
                path = path.substring(srcPath.length());
                if (env.hasProjects()) {
                    // The paths need to correspond to a project.
                    if (Project.getProject(path) != null) {
                        subFiles.add(path);
                    } else {
                        System.err.println("The path " + path + " does not correspond to a project");
                    }
                } else {
                    subFiles.add(path);
                }
            }
            if (!subFilesList.isEmpty() && subFiles.isEmpty()) {
                System.err.println("None of the paths were added, exiting");
                System.exit(1);
            }
            // Get history first.
            getInstance().prepareIndexer(env, searchRepositories, addProjects, defaultProject, configFilename, refreshHistory, listFiles, createDict, subFiles, repositories, zapCache, listRepos);
            if (listRepos || !zapCache.isEmpty()) {
                return;
            }
            // And now index it all.
            if (runIndex || (optimizedChanged && env.isOptimizeDatabase())) {
                IndexChangedListener progress = new DefaultIndexChangedListener();
                getInstance().doIndexerExecution(update, noThreads, subFiles, progress);
            }
            // Finally, refresh the web application's searchers in the case of a
            // partial reindex, or send the new configuration to the web
            // application in the case of a full reindex.
            if (host != null) {
                if (!subFiles.isEmpty()) {
                    getInstance().refreshSearcherManagers(env, subFiles, host, port);
                } else {
                    getInstance().sendToConfigHost(env, host, port);
                }
            }
        } catch (IndexerException ex) {
            LOGGER.log(Level.SEVERE, "Exception running indexer", ex);
            System.err.println(cmdOptions.getUsage());
            System.exit(1);
        } catch (Throwable e) {
            System.err.println("Exception: " + e.getLocalizedMessage());
            LOGGER.log(Level.SEVERE, "Unexpected Exception", e);
            System.exit(1);
        } finally {
            stats.report(LOGGER);
        }
    }
}
Also used : Configuration(org.opensolaris.opengrok.configuration.Configuration) ArrayList(java.util.ArrayList) Getopt(org.opensolaris.opengrok.util.Getopt) Field(java.lang.reflect.Field) HashSet(java.util.HashSet) RuntimeEnvironment(org.opensolaris.opengrok.configuration.RuntimeEnvironment) Statistics(org.opensolaris.opengrok.util.Statistics) HistoryException(org.opensolaris.opengrok.history.HistoryException) ParseException(java.text.ParseException) IOException(java.io.IOException) Repository(org.opensolaris.opengrok.history.Repository) File(java.io.File)
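
The only Statistics-specific code in this long entry point is the bracket around it: the object is created before any work starts and reported in the finally block, so the elapsed time is logged even when the run aborts. Here is a minimal sketch of that pattern, assuming (as the example suggests) that Statistics starts timing at construction and that report(Logger) logs the total; TimedMain and doWork are illustrative names.

import java.util.logging.Logger;

import org.opensolaris.opengrok.util.Statistics;

public class TimedMain {

    private static final Logger LOGGER = Logger.getLogger(TimedMain.class.getName());

    public static void main(String[] args) {
        // The clock starts when the Statistics object is constructed.
        Statistics stats = new Statistics();
        try {
            doWork(args); // placeholder for the real work
        } finally {
            // Runs even if doWork() throws, so the elapsed time is always logged.
            stats.report(LOGGER);
        }
    }

    private static void doWork(String[] args) {
        // ... the indexing run would happen here ...
    }
}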

Example 3 with Statistics

Use of org.opensolaris.opengrok.util.Statistics in project OpenGrok by OpenGrok.

In class HistoryGuru, method createCache:

private void createCache(Repository repository, String sinceRevision) {
    String path = repository.getDirectoryName();
    String type = repository.getClass().getSimpleName();
    if (repository.isWorking()) {
        boolean verbose = RuntimeEnvironment.getInstance().isVerbose();
        Statistics elapsed = new Statistics();
        if (verbose) {
            LOGGER.log(Level.INFO, "Creating historycache for {0} ({1})", new Object[] { path, type });
        }
        try {
            repository.createCache(historyCache, sinceRevision);
        } catch (Exception e) {
            LOGGER.log(Level.WARNING, "An error occurred while creating cache for " + path + " (" + type + ")", e);
        }
        if (verbose) {
            elapsed.report(LOGGER, "Done historycache for " + path);
        }
    } else {
        LOGGER.log(Level.WARNING, "Skipping creation of historycache of {0} repository in {1}: Missing SCM dependencies?", new Object[] { type, path });
    }
}
Also used : Statistics(org.opensolaris.opengrok.util.Statistics) IOException(java.io.IOException)

Example 4 with Statistics

use of org.opensolaris.opengrok.util.Statistics in project OpenGrok by OpenGrok.

the class HistoryGuru method invalidateRepositories.

/**
     * Invalidate list of known repositories.
     *
     * @param repos The new repositories
     */
public void invalidateRepositories(Collection<? extends RepositoryInfo> repos) {
    if (repos == null || repos.isEmpty()) {
        repositories.clear();
    } else {
        Map<String, Repository> newrepos = Collections.synchronizedMap(new HashMap<>(repos.size()));
        Statistics elapsed = new Statistics();
        boolean verbose = RuntimeEnvironment.getInstance().isVerbose();
        if (verbose) {
            LOGGER.log(Level.FINE, "invalidating repositories");
        }
        /*
             * getRepository() below does various checks of the repository
             * which involve executing commands and I/O, so run the checks
             * in parallel to speed up the process.
             */
        final CountDownLatch latch = new CountDownLatch(repos.size());
        final ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors(), new ThreadFactory() {

            @Override
            public Thread newThread(Runnable runnable) {
                Thread thread = Executors.defaultThreadFactory().newThread(runnable);
                thread.setName("invalidate-repos-" + thread.getId());
                return thread;
            }
        });
        for (RepositoryInfo i : repos) {
            executor.submit(new Runnable() {

                @Override
                public void run() {
                    try {
                        Repository r = RepositoryFactory.getRepository(i);
                        if (r == null) {
                            LOGGER.log(Level.WARNING, "Failed to instantiate internal repository data for {0} in {1}", new Object[] { i.getType(), i.getDirectoryName() });
                        } else {
                            newrepos.put(r.getDirectoryName(), r);
                        }
                    } catch (Exception ex) {
                        // We want to catch any exception since we are in a thread.
                        LOGGER.log(Level.WARNING, "Could not create " + i.getType() + " for '" + i.getDirectoryName() + "'", ex);
                    } finally {
                        latch.countDown();
                    }
                }
            });
        }
        // Wait until all repositories are validated.
        try {
            latch.await();
        } catch (InterruptedException ex) {
            LOGGER.log(Level.SEVERE, "latch exception {0}", ex);
        }
        executor.shutdown();
        repositories = newrepos;
        if (verbose) {
            elapsed.report(LOGGER, "done invalidating repositories");
        }
    }
}
Also used : ThreadFactory(java.util.concurrent.ThreadFactory) CountDownLatch(java.util.concurrent.CountDownLatch) Statistics(org.opensolaris.opengrok.util.Statistics) IOException(java.io.IOException) ExecutorService(java.util.concurrent.ExecutorService)
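
Example 4 wraps the default thread factory only to give the validation threads a recognizable name, which makes thread dumps easier to read. The same idea as a compact, self-contained sketch; NamedPool and its parameters are illustrative rather than OpenGrok API, and Example 4 would obtain its pool roughly as create("invalidate-repos", Runtime.getRuntime().availableProcessors()).

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class NamedPool {

    public static ExecutorService create(String namePrefix, int size) {
        return Executors.newFixedThreadPool(size, runnable -> {
            // Reuse the default factory and only override the thread name.
            Thread thread = Executors.defaultThreadFactory().newThread(runnable);
            thread.setName(namePrefix + "-" + thread.getId());
            return thread;
        });
    }
}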

Example 5 with Statistics

Use of org.opensolaris.opengrok.util.Statistics in project OpenGrok by OpenGrok.

In class Indexer, method doIndexerExecution:

/*
     * This is the second phase of the indexer which generates the Lucene index
     * by passing source code files through Exuberant ctags, generating xrefs
     * and storing data from the source files in the index (along with history,
     * if any).
     */
public void doIndexerExecution(final boolean update, int noThreads, List<String> subFiles, IndexChangedListener progress) throws IOException {
    Statistics elapsed = new Statistics();
    RuntimeEnvironment env = RuntimeEnvironment.getInstance().register();
    LOGGER.info("Starting indexing");
    ExecutorService executor = Executors.newFixedThreadPool(noThreads);
    if (subFiles == null || subFiles.isEmpty()) {
        if (update) {
            IndexDatabase.updateAll(executor, progress);
        } else if (env.isOptimizeDatabase()) {
            IndexDatabase.optimizeAll(executor);
        }
    } else {
        List<IndexDatabase> dbs = new ArrayList<>();
        for (String path : subFiles) {
            Project project = Project.getProject(path);
            if (project == null && env.hasProjects()) {
                LOGGER.log(Level.WARNING, "Could not find a project for \"{0}\"", path);
            } else {
                IndexDatabase db;
                if (project == null) {
                    db = new IndexDatabase();
                } else {
                    db = new IndexDatabase(project);
                }
                int idx = dbs.indexOf(db);
                if (idx != -1) {
                    db = dbs.get(idx);
                }
                if (db.addDirectory(path)) {
                    if (idx == -1) {
                        dbs.add(db);
                    }
                } else {
                    LOGGER.log(Level.WARNING, "Directory does not exist \"{0}\"", path);
                }
            }
        }
        for (final IndexDatabase db : dbs) {
            final boolean optimize = env.isOptimizeDatabase();
            db.addIndexChangedListener(progress);
            executor.submit(new Runnable() {

                @Override
                public void run() {
                    try {
                        if (update) {
                            db.update();
                        } else if (optimize) {
                            db.optimize();
                        }
                    } catch (Throwable e) {
                        LOGGER.log(Level.SEVERE, "An error occurred while " + (update ? "updating" : "optimizing") + " index", e);
                    }
                }
            });
        }
    }
    executor.shutdown();
    while (!executor.isTerminated()) {
        try {
            // Wait forever
            executor.awaitTermination(999, TimeUnit.DAYS);
        } catch (InterruptedException exp) {
            LOGGER.log(Level.WARNING, "Received interrupt while waiting for executor to finish", exp);
        }
    }
    try {
        // It can happen that the history index was not built in prepareIndexer()
        // but via db.update() above, in which case we must make sure the
        // thread pool for renamed file handling is destroyed.
        RuntimeEnvironment.destroyRenamedHistoryExecutor();
    } catch (InterruptedException ex) {
        LOGGER.log(Level.SEVERE, "destroying of renamed thread pool failed", ex);
    }
    elapsed.report(LOGGER, "Done indexing data of all repositories");
}
Also used : RuntimeEnvironment(org.opensolaris.opengrok.configuration.RuntimeEnvironment) ArrayList(java.util.ArrayList) Statistics(org.opensolaris.opengrok.util.Statistics) Project(org.opensolaris.opengrok.configuration.Project) ExecutorService(java.util.concurrent.ExecutorService)
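
Examples 1 and 5 drain their pools the same way: shutdown() stops new submissions, and an awaitTermination() loop with a 999-day timeout effectively waits forever for the in-flight tasks. That pattern in isolation, as a small sketch with an illustrative class name:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

public class AwaitShutdown {

    public static void shutdownAndWait(ExecutorService executor) {
        executor.shutdown(); // no new tasks are accepted from this point on
        while (!executor.isTerminated()) {
            try {
                // Effectively "wait forever" for the in-flight tasks to finish.
                executor.awaitTermination(999, TimeUnit.DAYS);
            } catch (InterruptedException ex) {
                // Both examples simply log here and retry the wait.
            }
        }
    }
}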

Aggregations

Statistics (org.opensolaris.opengrok.util.Statistics): 5
IOException (java.io.IOException): 4
ExecutorService (java.util.concurrent.ExecutorService): 3
ArrayList (java.util.ArrayList): 2
CountDownLatch (java.util.concurrent.CountDownLatch): 2
RuntimeEnvironment (org.opensolaris.opengrok.configuration.RuntimeEnvironment): 2
File (java.io.File): 1
Field (java.lang.reflect.Field): 1
ParseException (java.text.ParseException): 1
HashMap (java.util.HashMap): 1
HashSet (java.util.HashSet): 1
Map (java.util.Map): 1
ThreadFactory (java.util.concurrent.ThreadFactory): 1
Configuration (org.opensolaris.opengrok.configuration.Configuration): 1
Project (org.opensolaris.opengrok.configuration.Project): 1
HistoryException (org.opensolaris.opengrok.history.HistoryException): 1
Repository (org.opensolaris.opengrok.history.Repository): 1
Getopt (org.opensolaris.opengrok.util.Getopt): 1