Example 61 with Project

use of org.opensolaris.opengrok.configuration.Project in project OpenGrok by OpenGrok.

the class Indexer method prepareIndexer.

/*
     * This is the first phase of the indexing where history cache is being
     * generated for repositories (at least for those which support getting
     * history per directory).
     *
     * PMD wants us to use length() > 0 && charAt(0) instead of startsWith()
     * for performance. We prefer clarity over performance here, so silence it.
     */
@SuppressWarnings("PMD.SimplifyStartsWith")
public void prepareIndexer(RuntimeEnvironment env, boolean searchRepositories, boolean addProjects, Set<String> defaultProjects, boolean listFiles, boolean createDict, List<String> subFiles, List<String> repositories, List<String> zapCache, boolean listRepoPaths) throws IndexerException, IOException {
    if (env.getDataRootPath() == null) {
        throw new IndexerException("ERROR: Please specify a DATA ROOT path");
    }
    if (env.getSourceRootFile() == null) {
        throw new IndexerException("ERROR: Please specify a SRC_ROOT with option -s!");
    }
    if (zapCache == null) {
        throw new IndexerException("Internal error, zapCache shouldn't be null");
    }
    if (zapCache.isEmpty() && !env.validateExuberantCtags()) {
        throw new IndexerException("Didn't find Exuberant Ctags");
    }
    if (searchRepositories || listRepoPaths || !zapCache.isEmpty()) {
        LOGGER.log(Level.INFO, "Scanning for repositories...");
        long start = System.currentTimeMillis();
        if (env.isHistoryEnabled()) {
            env.setRepositories(env.getSourceRootPath());
        }
        long time = (System.currentTimeMillis() - start) / 1000;
        LOGGER.log(Level.INFO, "Done scanning for repositories ({0}s)", time);
        if (listRepoPaths || !zapCache.isEmpty()) {
            List<RepositoryInfo> repos = env.getRepositories();
            String prefix = env.getSourceRootPath();
            if (listRepoPaths) {
                if (repos.isEmpty()) {
                    System.out.println("No repositories found.");
                    return;
                }
                System.out.println("Repositories in " + prefix + ":");
                for (RepositoryInfo info : env.getRepositories()) {
                    String dir = info.getDirectoryName();
                    System.out.println(dir.substring(prefix.length()));
                }
            }
            if (!zapCache.isEmpty()) {
                HashSet<String> toZap = new HashSet<>(zapCache.size() << 1);
                boolean all = false;
                for (String repo : zapCache) {
                    if ("*".equals(repo)) {
                        all = true;
                        break;
                    }
                    if (repo.startsWith(prefix)) {
                        repo = repo.substring(prefix.length());
                    }
                    toZap.add(repo);
                }
                if (all) {
                    toZap.clear();
                    for (RepositoryInfo info : env.getRepositories()) {
                        toZap.add(info.getDirectoryName().substring(prefix.length()));
                    }
                }
                try {
                    HistoryGuru.getInstance().removeCache(toZap);
                } catch (HistoryException e) {
                    LOGGER.log(Level.WARNING, "Clearing history cache failed: {0}", e.getLocalizedMessage());
                }
            }
            return;
        }
    }
    if (addProjects) {
        File[] files = env.getSourceRootFile().listFiles();
        Map<String, Project> projects = env.getProjects();
        // Keep a copy of the old project list so that we can preserve
        // the customization of existing projects.
        Map<String, Project> oldProjects = new HashMap<>();
        for (Project p : projects.values()) {
            oldProjects.put(p.getName(), p);
        }
        projects.clear();
        // Add a project for each top-level directory in source root.
        for (File file : files) {
            String name = file.getName();
            String path = "/" + name;
            if (oldProjects.containsKey(name)) {
                // This is an existing project. Reuse the old project,
                // possibly with customizations, instead of creating a
                // new one with default values.
                Project p = oldProjects.get(name);
                p.setPath(path);
                p.setName(name);
                p.completeWithDefaults(env.getConfiguration());
                projects.put(name, p);
            } else if (!name.startsWith(".") && file.isDirectory()) {
                // Found a new directory with no matching project, so
                // create a new project with default properties.
                projects.put(name, new Project(name, path, env.getConfiguration()));
            }
        }
    }
    if (defaultProjects != null && !defaultProjects.isEmpty()) {
        Set<Project> projects = new TreeSet<>();
        for (String projectPath : defaultProjects) {
            if (projectPath.equals("__all__")) {
                projects.addAll(env.getProjects().values());
                break;
            }
            for (Project p : env.getProjectList()) {
                if (p.getPath().equals(projectPath)) {
                    projects.add(p);
                    break;
                }
            }
        }
        if (!projects.isEmpty()) {
            env.setDefaultProjects(projects);
        }
    }
    if (env.isHistoryEnabled()) {
        if (repositories != null && !repositories.isEmpty()) {
            LOGGER.log(Level.INFO, "Generating history cache for repositories: " + repositories.stream().collect(Collectors.joining(",")));
            HistoryGuru.getInstance().createCache(repositories);
            LOGGER.info("Done...");
        } else {
            LOGGER.log(Level.INFO, "Generating history cache for all repositories ...");
            HistoryGuru.getInstance().createCache();
            LOGGER.info("Done...");
        }
    }
    if (listFiles) {
        for (String file : IndexDatabase.getAllFiles(subFiles)) {
            LOGGER.fine(file);
        }
    }
    if (createDict) {
        IndexDatabase.listFrequentTokens(subFiles);
    }
}
Also used : RepositoryInfo(org.opensolaris.opengrok.history.RepositoryInfo) HashMap(java.util.HashMap) HistoryException(org.opensolaris.opengrok.history.HistoryException) Project(org.opensolaris.opengrok.configuration.Project) TreeSet(java.util.TreeSet) File(java.io.File) HashSet(java.util.HashSet)
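
A minimal driver sketch for this first phase, using only the calls that appear in these examples; the source and data root paths, the chosen flag values and the surrounding main method are illustrative assumptions, not OpenGrok's actual command-line wiring:

import java.util.ArrayList;
import java.util.List;
import org.opensolaris.opengrok.configuration.RuntimeEnvironment;
import org.opensolaris.opengrok.index.Indexer;

public class PrepareIndexerSketch {
    public static void main(String[] args) throws Exception {
        RuntimeEnvironment env = RuntimeEnvironment.getInstance();
        // Hypothetical roots; substitute real paths.
        env.setSourceRoot("/var/opengrok/src");
        env.setDataRoot("/var/opengrok/data");
        env.setHistoryEnabled(true);
        List<String> subFiles = new ArrayList<>();   // empty: refresh everything
        List<String> repos = new ArrayList<>();      // empty: all repositories
        Indexer.getInstance().prepareIndexer(env,
                true,              // searchRepositories: scan source root for repositories
                true,              // addProjects: one project per top-level directory
                null,              // defaultProjects: none
                false,             // listFiles
                false,             // createDict
                subFiles,
                repos,
                new ArrayList<>(), // zapCache: nothing to clear
                false);            // listRepoPaths
    }
}

Passing a non-empty zapCache (or the single entry "*") would instead clear the history cache for the named repositories and return before any project handling, as the branch at the top of the method shows.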

Example 62 with Project

use of org.opensolaris.opengrok.configuration.Project in project OpenGrok by OpenGrok.

the class Indexer method doIndexerExecution.

/**
 * This is the second phase of the indexer which generates the Lucene index
 * by passing source code files through ctags, generating xrefs
 * and storing data from the source files in the index (along with history,
 * if any).
 *
 * @param update if set to true, the index database is updated, otherwise optimized
 * @param subFiles index just some subdirectories
 * @param progress object to receive notifications as indexer progress is made
 * @throws IOException if an I/O error occurs
 */
public void doIndexerExecution(final boolean update, List<String> subFiles, IndexChangedListener progress) throws IOException {
    Statistics elapsed = new Statistics();
    RuntimeEnvironment env = RuntimeEnvironment.getInstance().register();
    LOGGER.info("Starting indexing");
    IndexerParallelizer parallelizer = new IndexerParallelizer(env);
    if (subFiles == null || subFiles.isEmpty()) {
        if (update) {
            IndexDatabase.updateAll(parallelizer, progress);
        } else if (env.isOptimizeDatabase()) {
            IndexDatabase.optimizeAll(parallelizer);
        }
    } else {
        List<IndexDatabase> dbs = new ArrayList<>();
        for (String path : subFiles) {
            Project project = Project.getProject(path);
            if (project == null && env.hasProjects()) {
                LOGGER.log(Level.WARNING, "Could not find a project for \"{0}\"", path);
            } else {
                IndexDatabase db;
                if (project == null) {
                    db = new IndexDatabase();
                } else {
                    db = new IndexDatabase(project);
                }
                int idx = dbs.indexOf(db);
                if (idx != -1) {
                    db = dbs.get(idx);
                }
                if (db.addDirectory(path)) {
                    if (idx == -1) {
                        dbs.add(db);
                    }
                } else {
                    LOGGER.log(Level.WARNING, "Directory does not exist \"{0}\"", path);
                }
            }
        }
        for (final IndexDatabase db : dbs) {
            final boolean optimize = env.isOptimizeDatabase();
            db.addIndexChangedListener(progress);
            parallelizer.getFixedExecutor().submit(new Runnable() {

                @Override
                public void run() {
                    try {
                        if (update) {
                            db.update(parallelizer);
                        } else if (optimize) {
                            db.optimize();
                        }
                    } catch (Throwable e) {
                        LOGGER.log(Level.SEVERE, "An error occurred while " + (update ? "updating" : "optimizing") + " index", e);
                    }
                }
            });
        }
    }
    parallelizer.getFixedExecutor().shutdown();
    while (!parallelizer.getFixedExecutor().isTerminated()) {
        try {
            // Wait forever
            parallelizer.getFixedExecutor().awaitTermination(999, TimeUnit.DAYS);
        } catch (InterruptedException exp) {
            LOGGER.log(Level.WARNING, "Received interrupt while waiting for executor to finish", exp);
        }
    }
    try {
        // It can happen that history index is not done in prepareIndexer()
        // but via db.update() above in which case we must make sure the
        // thread pool for renamed file handling is destroyed.
        RuntimeEnvironment.destroyRenamedHistoryExecutor();
    } catch (InterruptedException ex) {
        LOGGER.log(Level.SEVERE, "destroying of renamed thread pool failed", ex);
    }
    try {
        parallelizer.close();
    } catch (Exception ex) {
        LOGGER.log(Level.SEVERE, "parallelizer.close() failed", ex);
    }
    elapsed.report(LOGGER, "Done indexing data of all repositories");
}
Also used : RuntimeEnvironment(org.opensolaris.opengrok.configuration.RuntimeEnvironment) ArrayList(java.util.ArrayList) Statistics(org.opensolaris.opengrok.util.Statistics) HistoryException(org.opensolaris.opengrok.history.HistoryException) ConnectException(java.net.ConnectException) ParseException(java.text.ParseException) IOException(java.io.IOException) InvocationTargetException(java.lang.reflect.InvocationTargetException) Project(org.opensolaris.opengrok.configuration.Project)
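
Chained together, the two phases give the usual incremental run: prepareIndexer() first, then doIndexerExecution() with update set to true. The fragment below sketches that sequence under the same assumptions as the previous snippet (hypothetical roots, imports as above, no progress listener); it is not the indexer's real main() flow:

RuntimeEnvironment env = RuntimeEnvironment.getInstance();
env.setSourceRoot("/var/opengrok/src");   // hypothetical paths
env.setDataRoot("/var/opengrok/data");

// Phase 1: discover repositories, rebuild the project list, generate the history cache.
Indexer.getInstance().prepareIndexer(env, true, true, null, false, false,
        new ArrayList<>(), new ArrayList<>(), new ArrayList<>(), false);

// Phase 2: update the Lucene index; null subFiles and a null listener mean
// "index all projects, no progress notifications".
Indexer.getInstance().doIndexerExecution(true, null, null);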

Example 63 with Project

use of org.opensolaris.opengrok.configuration.Project in project OpenGrok by OpenGrok.

the class ProjectMessageTest method testValidate.

@Test
public void testValidate() {
    Message m = new ProjectMessage();
    Assert.assertFalse(MessageTest.assertValid(m));
    m.addTag("foo");
    Assert.assertFalse(MessageTest.assertValid(m));
    m.setText("text");
    Assert.assertFalse(MessageTest.assertValid(m));
    m.setText(null);
    Assert.assertFalse(MessageTest.assertValid(m));
    m.setTags(new TreeSet<String>());
    m.addTag("mercurial");
    m.setText("add");
    Assert.assertTrue(MessageTest.assertValid(m));
    m.setText("indexed");
    Assert.assertTrue(MessageTest.assertValid(m));
    // The deletion will validate even though the project is not present
    // in the project map in the configuration. This is because such extended
    // validation is performed only when the message is being applied.
    m.setText("delete");
    Assert.assertTrue(MessageTest.assertValid(m));
    try {
        m.apply(env);
        Assert.fail("apply() should have failed since the project is not in the project map");
    } catch (Exception ex) {
        System.err.println("got expected exception: " + ex);
    }
    // Now add the project to the map and re-apply the message. This time
    // it should not end up with an exception.
    String projectName = "mercurial";
    env.getProjects().put(projectName, new Project(projectName, "/" + projectName));
    try {
        m.apply(env);
    } catch (Exception ex) {
        Assert.fail("apply() should not have thrown an exception: " + ex);
    }
}
Also used : Project(org.opensolaris.opengrok.configuration.Project) IOException(java.io.IOException) Test(org.junit.Test) MercurialRepositoryTest(org.opensolaris.opengrok.history.MercurialRepositoryTest)
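
To make the validate-versus-apply distinction above concrete, here is a condensed sketch, assumed to live inside a test method that declares throws Exception and reuses the env and Project calls shown in these examples; the exact exception type of the first apply() is deliberately not asserted since it is not shown here:

Message m = new ProjectMessage();
m.setText("delete");
m.addTag("mercurial");

try {
    // Extended validation runs only at apply time, so this is expected
    // to fail while "mercurial" is missing from the project map.
    m.apply(env);
} catch (Exception expected) {
    // Register the project, after which apply() should succeed.
    env.getProjects().put("mercurial", new Project("mercurial", "/mercurial"));
    m.apply(env);
}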

Example 64 with Project

use of org.opensolaris.opengrok.configuration.Project in project OpenGrok by OpenGrok.

the class ProjectMessageTest method testDelete.

/**
 * This test needs to perform indexing so that it can verify that
 * handling of the delete message removes the index data.
 * @throws Exception on error
 */
@Test
public void testDelete() throws Exception {
    String[] projectsToDelete = { "git", "svn" };
    assertTrue("No point in running indexer tests without valid ctags", RuntimeEnvironment.getInstance().validateExuberantCtags());
    // Add a group matching the project to be added.
    String groupName = "gitgroup";
    Group group = new Group(groupName, "git.*");
    env.getGroups().add(group);
    Assert.assertTrue(env.hasGroups());
    Assert.assertEquals(1, env.getGroups().stream().filter(g -> g.getName().equals(groupName)).collect(Collectors.toSet()).size());
    Assert.assertEquals(0, group.getRepositories().size());
    Assert.assertEquals(0, group.getProjects().size());
    // Firstly add some projects.
    Message m = new ProjectMessage();
    m.setText("add");
    m.addTag("mercurial");
    m.addTag("git");
    m.addTag("svn");
    Assert.assertEquals(0, env.getProjects().size());
    Assert.assertEquals(0, env.getRepositories().size());
    Assert.assertEquals(0, env.getProjectRepositoriesMap().size());
    m.apply(env);
    Assert.assertEquals(3, env.getProjects().size());
    Assert.assertEquals(3, env.getRepositories().size());
    Assert.assertEquals(3, env.getProjectRepositoriesMap().size());
    // Check the group was populated properly.
    Assert.assertEquals(1, group.getRepositories().size());
    Assert.assertEquals(0, group.getProjects().size());
    Assert.assertEquals(1, group.getRepositories().stream().filter(p -> p.getName().equals("git")).collect(Collectors.toSet()).size());
    // Run the indexer (à la 'indexpart') so that the data directory is populated.
    ArrayList<String> subFiles = new ArrayList<>();
    subFiles.add("/git");
    subFiles.add("/mercurial");
    subFiles.add("/svn");
    ArrayList<String> repos = new ArrayList<>();
    repos.add("/git");
    repos.add("/mercurial");
    repos.add("/svn");
    // This is necessary so that the repositories in HistoryGuru get populated.
    // When 'indexpart' is run, this is called from setConfiguration() because
    // the -R option is present.
    HistoryGuru.getInstance().invalidateRepositories(env.getRepositories());
    env.setHistoryEnabled(true);
    Indexer.getInstance().prepareIndexer(env,
            false,             // don't search for repositories
            false,             // don't scan and add projects
            null,              // no default project
            false,             // don't list files
            false,             // don't create dictionary
            subFiles,          // subFiles - needed when refreshing history partially
            repos,             // repositories - needed when refreshing history partially
            new ArrayList<>(), // don't zap cache
            false);            // don't list repos
    Indexer.getInstance().doIndexerExecution(true, null, null);
    // Then remove multiple projects.
    m.setText("delete");
    m.setTags(new TreeSet<String>());
    for (String p : projectsToDelete) {
        m.addTag(p);
    }
    m.apply(env);
    Assert.assertEquals(1, env.getProjects().size());
    Assert.assertEquals(1, env.getRepositories().size());
    Assert.assertEquals(1, env.getProjectRepositoriesMap().size());
    // Test data removal.
    File dataRoot = env.getDataRootFile();
    for (String projectName : projectsToDelete) {
        for (String dirName : new String[] { "historycache", IndexDatabase.XREF_DIR, IndexDatabase.INDEX_DIR }) {
            File dir = new File(dataRoot, dirName + File.separator + projectName);
            Assert.assertFalse(dir.exists());
        }
    }
    // Check that HistoryGuru no longer maintains the removed projects.
    for (String p : projectsToDelete) {
        Assert.assertFalse(HistoryGuru.getInstance().getRepositories().stream().map(ri -> ri.getDirectoryName()).collect(Collectors.toSet()).contains(repository.getSourceRoot() + File.separator + p));
    }
    // Check the group no longer contains the removed project.
    Assert.assertEquals(0, group.getRepositories().size());
    Assert.assertEquals(0, group.getProjects().size());
}
Also used : IOUtils.removeRecursive(org.opensolaris.opengrok.util.IOUtils.removeRecursive) HistoryGuru(org.opensolaris.opengrok.history.HistoryGuru) TreeSet(java.util.TreeSet) ArrayList(java.util.ArrayList) Indexer(org.opensolaris.opengrok.index.Indexer) RepositoryInstalled(org.opensolaris.opengrok.condition.RepositoryInstalled) GitRepository(org.opensolaris.opengrok.history.GitRepository) Project(org.opensolaris.opengrok.configuration.Project) RuntimeEnvironment(org.opensolaris.opengrok.configuration.RuntimeEnvironment) After(org.junit.After) Assume(org.junit.Assume) ConditionalRun(org.opensolaris.opengrok.condition.ConditionalRun) Group(org.opensolaris.opengrok.configuration.Group) Before(org.junit.Before) MercurialRepository(org.opensolaris.opengrok.history.MercurialRepository) SubversionRepository(org.opensolaris.opengrok.history.SubversionRepository) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) Assert.assertTrue(org.junit.Assert.assertTrue) IOException(java.io.IOException) Test(org.junit.Test) Collectors(java.util.stream.Collectors) File(java.io.File) TestRepository(org.opensolaris.opengrok.util.TestRepository) List(java.util.List) IndexDatabase(org.opensolaris.opengrok.index.IndexDatabase) RepositoryFactory(org.opensolaris.opengrok.history.RepositoryFactory) MercurialRepositoryTest(org.opensolaris.opengrok.history.MercurialRepositoryTest) Assert(org.junit.Assert) RepositoryInfo(org.opensolaris.opengrok.history.RepositoryInfo) Group(org.opensolaris.opengrok.configuration.Group) ArrayList(java.util.ArrayList) File(java.io.File) Test(org.junit.Test) MercurialRepositoryTest(org.opensolaris.opengrok.history.MercurialRepositoryTest)

Example 65 with Project

use of org.opensolaris.opengrok.configuration.Project in project OpenGrok by OpenGrok.

the class IndexerTest method testIncrementalIndexAddRemoveFile.

/**
 * Test IndexChangedListener behavior when a file is added to and then
 * removed from a project's source directory.
 * @throws Exception on error
 */
@Test
public void testIncrementalIndexAddRemoveFile() throws Exception {
    RuntimeEnvironment env = RuntimeEnvironment.getInstance();
    env.setSourceRoot(repository.getSourceRoot());
    env.setDataRoot(repository.getDataRoot());
    if (env.validateExuberantCtags()) {
        String ppath = "/bug3430";
        Project project = new Project("bug3430", ppath);
        IndexDatabase idb = new IndexDatabase(project);
        assertNotNull(idb);
        MyIndexChangeListener listener = new MyIndexChangeListener();
        idb.addIndexChangedListener(listener);
        idb.update(parallelizer);
        assertEquals(1, listener.files.size());
        listener.reset();
        repository.addDummyFile(ppath);
        idb.update(parallelizer);
        assertEquals("No new file added", 1, listener.files.size());
        repository.removeDummyFile(ppath);
        idb.update(parallelizer);
        assertEquals("Added files changed unexpectedly", 1, listener.files.size());
        assertEquals("Didn't remove the dummy file", 1, listener.removedFiles.size());
        assertEquals("Should have added then removed the same file", listener.files.peek(), listener.removedFiles.peek());
    } else {
        System.out.println("Skipping test. Could not find a usable ctags in PATH.");
    }
}
Also used : Project(org.opensolaris.opengrok.configuration.Project) RuntimeEnvironment(org.opensolaris.opengrok.configuration.RuntimeEnvironment) Test(org.junit.Test)
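
Stripped of the assertions, the minimal wiring this test exercises looks roughly like the fragment below; it assumes the environment's source and data roots are already set as in the test, and a listener can be attached via addIndexChangedListener() exactly as shown above:

RuntimeEnvironment env = RuntimeEnvironment.getInstance();
IndexerParallelizer parallelizer = new IndexerParallelizer(env);

Project project = new Project("bug3430", "/bug3430");
IndexDatabase idb = new IndexDatabase(project);   // index database scoped to one project
idb.update(parallelizer);                         // (re)index just this project's files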

Aggregations

Project (org.opensolaris.opengrok.configuration.Project) 79
Test (org.junit.Test) 40
RuntimeEnvironment (org.opensolaris.opengrok.configuration.RuntimeEnvironment) 31
File (java.io.File) 20
ArrayList (java.util.ArrayList) 20
Group (org.opensolaris.opengrok.configuration.Group) 17
RepositoryInfo (org.opensolaris.opengrok.history.RepositoryInfo) 14
IOException (java.io.IOException) 12
TreeSet (java.util.TreeSet) 12
HistoryException (org.opensolaris.opengrok.history.HistoryException) 8
List (java.util.List) 6
ParseException (java.text.ParseException) 5
HashMap (java.util.HashMap) 5
Map (java.util.Map) 5
Set (java.util.Set) 5
Collectors (java.util.stream.Collectors) 5
HttpServletRequest (javax.servlet.http.HttpServletRequest) 5
Repository (org.opensolaris.opengrok.history.Repository) 5
TestRepository (org.opensolaris.opengrok.util.TestRepository) 5
ConnectException (java.net.ConnectException) 4