Use of org.apache.archiva.repository.Repository in project archiva by apache.
The class MavenIndexManager, method mergeContexts:
@Override
public ArchivaIndexingContext mergeContexts(Repository destinationRepo, List<ArchivaIndexingContext> contexts, boolean packIndex) throws UnsupportedOperationException, IndexCreationFailedException, IllegalArgumentException {
    if (!destinationRepo.supportsFeature(IndexCreationFeature.class)) {
        throw new IllegalArgumentException("The given repository does not support the indexcreation feature");
    }
    Path mergedIndexDirectory = null;
    try {
        mergedIndexDirectory = Files.createTempDirectory("archivaMergedIndex");
    } catch (IOException e) {
        log.error("Could not create temporary directory for merged index: {}", e.getMessage(), e);
        throw new IndexCreationFailedException("IO error while creating temporary directory for merged index: " + e.getMessage(), e);
    }
    IndexCreationFeature indexCreationFeature = destinationRepo.getFeature(IndexCreationFeature.class);
    if (indexCreationFeature.getLocalIndexPath() == null) {
        throw new IllegalArgumentException("The given repository does not have a local index path");
    }
    StorageAsset destinationPath = indexCreationFeature.getLocalIndexPath();
    String tempRepoId = mergedIndexDirectory.getFileName().toString();
    try {
        Path indexLocation = destinationPath.getFilePath();
        List<IndexingContext> members = contexts.stream().filter(ctx -> ctx.supports(IndexingContext.class)).map(ctx -> {
            try {
                return ctx.getBaseContext(IndexingContext.class);
            } catch (UnsupportedBaseContextException e) {
                // does not happen here
                return null;
            }
        }).filter(Objects::nonNull).collect(Collectors.toList());
        ContextMemberProvider memberProvider = new StaticContextMemberProvider(members);
        IndexingContext mergedCtx = indexer.createMergedIndexingContext(tempRepoId, tempRepoId, mergedIndexDirectory.toFile(), indexLocation.toFile(), true, memberProvider);
        mergedCtx.optimize();
        if (packIndex) {
            IndexPackingRequest request = new IndexPackingRequest(mergedCtx, mergedCtx.acquireIndexSearcher().getIndexReader(), indexLocation.toFile());
            indexPacker.packIndex(request);
        }
        return new MavenIndexContext(destinationRepo, mergedCtx);
    } catch (IOException e) {
        throw new IndexCreationFailedException("IO Error during index merge: " + e.getMessage(), e);
    }
}
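A minimal usage sketch follows. It assumes an injected index-manager bean (here called indexManager) implementing mergeContexts as shown above, the repositoryRegistry and log fields seen in the other snippets, and purely illustrative repository ids ("internal", "snapshots", "merged-group"):

private void mergeGroupIndex() {
    // Collect the indexing contexts of the member repositories (ids are hypothetical).
    List<ArchivaIndexingContext> memberContexts = new ArrayList<>();
    for (String memberId : Arrays.asList("internal", "snapshots")) {
        Repository member = repositoryRegistry.getRepository(memberId);
        if (member != null && member.getIndexingContext() != null) {
            memberContexts.add(member.getIndexingContext());
        }
    }
    // Merge them into the destination repository's index and pack the result.
    Repository groupRepo = repositoryRegistry.getRepository("merged-group");
    try {
        ArchivaIndexingContext merged = indexManager.mergeContexts(groupRepo, memberContexts, true);
        log.info("Merged index written to {}", merged.getPath());
    } catch (IndexCreationFailedException e) {
        log.error("Index merge failed: {}", e.getMessage(), e);
    }
}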
Use of org.apache.archiva.repository.Repository in project archiva by apache.
The class MavenIndexManager, method reset:
@Override
public ArchivaIndexingContext reset(ArchivaIndexingContext context) throws IndexUpdateFailedException {
    ArchivaIndexingContext ctx;
    executeUpdateFunction(context, indexingContext -> {
        try {
            indexingContext.close(true);
        } catch (IOException e) {
            log.warn("Index close failed", e);
        }
        org.apache.archiva.repository.storage.util.StorageUtil.deleteRecursively(context.getPath());
    });
    try {
        Repository repo = context.getRepository();
        ctx = createContext(context.getRepository());
        if (repo instanceof EditableRepository) {
            ((EditableRepository) repo).setIndexingContext(ctx);
        }
    } catch (IndexCreationFailedException e) {
        throw new IndexUpdateFailedException("Could not create index: " + e.getMessage());
    }
    return ctx;
}
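A short sketch of how reset might be called, again assuming the indexManager bean and the repositoryRegistry and log fields from the snippets above; the repository id is illustrative:

private void resetRepositoryIndex() {
    // Drop and re-create the index of a single repository (id is hypothetical).
    Repository repo = repositoryRegistry.getRepository("internal");
    if (repo != null && repo.getIndexingContext() != null) {
        try {
            ArchivaIndexingContext fresh = indexManager.reset(repo.getIndexingContext());
            log.info("Recreated index for {} at {}", repo.getId(), fresh.getPath());
        } catch (IndexUpdateFailedException e) {
            log.error("Index reset failed for {}: {}", repo.getId(), e.getMessage(), e);
        }
    }
}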
Use of org.apache.archiva.repository.Repository in project archiva by apache.
The class MavenRepositorySearch, method addIndexingContexts:
/**
 * @param selectedRepos the ids of the repositories selected for the search
 * @return the ids of the indexing contexts that were added
 */
private List<String> addIndexingContexts(List<String> selectedRepos) {
    Set<String> indexingContextIds = new HashSet<>();
    for (String repo : selectedRepos) {
        try {
            Repository rRepo = repositoryRegistry.getRepository(repo);
            if (rRepo != null) {
                if (rRepo.getType().equals(RepositoryType.MAVEN)) {
                    assert rRepo.getIndexingContext() != null;
                    IndexingContext context = rRepo.getIndexingContext().getBaseContext(IndexingContext.class);
                    if (context.isSearchable()) {
                        indexingContextIds.addAll(getRemoteIndexingContextIds(repo));
                        indexingContextIds.add(context.getId());
                    } else {
                        log.warn("indexingContext with id {} not searchable", rRepo.getId());
                    }
                }
            } else {
                log.warn("Repository '{}' not found in configuration.", repo);
            }
        } catch (RepositorySearchException e) {
            log.warn("RepositorySearchException occurred while accessing index of repository '{}' : {}", repo, e.getMessage());
            continue;
        } catch (UnsupportedBaseContextException e) {
            log.error("Fatal situation: Maven repository without IndexingContext found.");
            continue;
        }
    }
    return new ArrayList<>(indexingContextIds);
}
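Since addIndexingContexts is private, the sketch below only illustrates the unwrapping pattern it relies on: resolving a repository from the registry and obtaining the Maven Indexer IndexingContext behind it. The repository id is illustrative:

private void inspectBaseContext() {
    Repository rRepo = repositoryRegistry.getRepository("internal");
    if (rRepo != null && rRepo.getType().equals(RepositoryType.MAVEN) && rRepo.getIndexingContext() != null) {
        try {
            // Unwrap the Maven Indexer context that backs the Archiva indexing context.
            IndexingContext context = rRepo.getIndexingContext().getBaseContext(IndexingContext.class);
            log.info("Context {} searchable: {}", context.getId(), context.isSearchable());
        } catch (UnsupportedBaseContextException e) {
            log.error("Repository {} does not expose a Maven IndexingContext", rRepo.getId());
        }
    }
}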
Use of org.apache.archiva.repository.Repository in project archiva by apache.
The class NewVersionsOfArtifactRssFeedProcessor, method processNewVersionsOfArtifact:
private SyndFeed processNewVersionsOfArtifact(String groupId, String artifactId) throws FeedException {
    List<ArtifactMetadata> artifacts = new ArrayList<>();
    try (RepositorySession session = repositorySessionFactory.createSession()) {
        final MetadataRepository metadataRepository = session.getRepository();
        for (Repository repo : repositoryRegistry.getRepositories()) {
            final String repoId = repo.getId();
            Collection<String> versions = metadataRepository.getProjectVersions(session, repoId, groupId, artifactId);
            for (String version : versions) {
                artifacts.addAll(metadataRepository.getArtifacts(session, repoId, groupId, artifactId, version));
            }
        }
    } catch (MetadataRepositoryException e) {
        throw new FeedException("Unable to construct feed, metadata could not be retrieved: " + e.getMessage(), e);
    } catch (MetadataResolutionException e) {
        throw new FeedException("Unable to construct feed, metadata could not be retrieved: " + e.getMessage(), e);
    }
    long tmp = 0;
    RssFeedEntry entry = null;
    List<RssFeedEntry> entries = new ArrayList<>();
    String description = "";
    int idx = 0;
    for (ArtifactMetadata artifact : artifacts) {
        long whenGathered = artifact.getWhenGathered().toInstant().toEpochMilli();
        if (tmp != whenGathered) {
            if (entry != null) {
                entry.setDescription(description);
                entries.add(entry);
                entry = null;
            }
            entry = new RssFeedEntry(this.getTitle() + "'" + groupId + ":" + artifactId + "'" + " as of " + new Date(whenGathered));
            entry.setPublishedDate(Date.from(artifact.getWhenGathered().toInstant()));
            description = this.getDescription() + "'" + groupId + ":" + artifactId + "'" + ": \n" + artifact.getId() + " | ";
        } else {
            description = description + artifact.getId() + " | ";
        }
        if (idx == (artifacts.size() - 1)) {
            entry.setDescription(description);
            entries.add(entry);
        }
        tmp = whenGathered;
        idx++;
    }
    String key = groupId + ":" + artifactId;
    return generator.generateFeed(getTitle() + "'" + key + "'", "New versions of artifact " + "'" + key + "' found during repository scan.", entries);
}
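The timestamp-grouping loop above is compact but easy to misread. The sketch below re-expresses the same idea with an explicit map keyed by the whenGathered timestamp, using the groupId, artifactId and artifacts variables of the method above and only the RssFeedEntry calls already shown; it is an illustration, not the method's actual implementation:

// One RssFeedEntry per gathering timestamp, listing every artifact id gathered at that time.
Map<Long, List<ArtifactMetadata>> byGatherTime = new TreeMap<>();
for (ArtifactMetadata artifact : artifacts) {
    long whenGathered = artifact.getWhenGathered().toInstant().toEpochMilli();
    byGatherTime.computeIfAbsent(whenGathered, k -> new ArrayList<>()).add(artifact);
}
List<RssFeedEntry> entries = new ArrayList<>();
for (Map.Entry<Long, List<ArtifactMetadata>> group : byGatherTime.entrySet()) {
    RssFeedEntry entry = new RssFeedEntry(getTitle() + "'" + groupId + ":" + artifactId + "' as of " + new Date(group.getKey()));
    entry.setPublishedDate(new Date(group.getKey()));
    StringBuilder description = new StringBuilder(getDescription() + "'" + groupId + ":" + artifactId + "': \n");
    for (ArtifactMetadata artifact : group.getValue()) {
        description.append(artifact.getId()).append(" | ");
    }
    entry.setDescription(description.toString());
    entries.add(entry);
}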
Use of org.apache.archiva.repository.Repository in project archiva by apache.
The class NewVersionsOfArtifactRssFeedProcessorTest, method setUp:
@Before
@Override
public void setUp() throws Exception {
    super.setUp();
    newVersionsProcessor = new NewVersionsOfArtifactRssFeedProcessor();
    newVersionsProcessor.setGenerator(new RssFeedGenerator());
    metadataRepository = mock(MetadataRepository.class);
    sessionFactory = mock(RepositorySessionFactory.class);
    session = mock(RepositorySession.class);
    when(sessionFactory.createSession()).thenReturn(session);
    when(session.getRepository()).thenReturn(metadataRepository);
    repositoryRegistry = mock(ArchivaRepositoryRegistry.class);
    List<Repository> reg = new ArrayList<>();
    reg.add(new BasicManagedRepository(TEST_REPO, TEST_REPO, new FilesystemStorage(Paths.get("target/test-storage"), new DefaultFileLockManager())));
    when(repositoryRegistry.getRepositories()).thenReturn(reg);
    newVersionsProcessor.setRepositorySessionFactory(sessionFactory);
    newVersionsProcessor.setRepositoryRegistry(repositoryRegistry);
}
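Building on the mocks created in setUp(), a test could stub the metadata lookups that processNewVersionsOfArtifact performs. The sketch below is an assumption-laden illustration: the groupId, artifactId and version values are invented, and it presumes ArtifactMetadata exposes setters matching the getters used by the processor:

// Stub one known version so the processor produces a single feed entry.
when(metadataRepository.getProjectVersions(session, TEST_REPO, "org.apache.archiva", "artifact-two"))
    .thenReturn(Collections.singletonList("1.0.1"));
ArtifactMetadata artifact = new ArtifactMetadata();
artifact.setId("artifact-two-1.0.1.jar");       // assumed setter, mirrors getId()
artifact.setWhenGathered(ZonedDateTime.now());  // assumed setter, mirrors getWhenGathered()
when(metadataRepository.getArtifacts(session, TEST_REPO, "org.apache.archiva", "artifact-two", "1.0.1"))
    .thenReturn(Collections.singletonList(artifact));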