Use of org.apache.archiva.metadata.repository.MetadataRepository in project Archiva by Apache.
The class DuplicateArtifactsConsumerTest, method setUp:
@Before
@Override
public void setUp() throws Exception {
    super.setUp();
    assertNotNull(consumer);
    config = new BasicManagedRepository(TEST_REPO, TEST_REPO, Paths.get("target"));
    config.setLocation(Paths.get("target/test-repository").toAbsolutePath().toUri());
    metadataRepository = mock(MetadataRepository.class);
    RepositorySession session = mock(RepositorySession.class);
    when(session.getRepository()).thenReturn(metadataRepository);
    RepositorySessionFactory factory = applicationContext.getBean(RepositorySessionFactory.class);
    // (RepositorySessionFactory) lookup( RepositorySessionFactory.class );
    when(factory.createSession()).thenReturn(session);
    when(pathTranslator.getArtifactForPath(TEST_REPO, TEST_FILE)).thenReturn(TEST_METADATA);
}
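The setup above stubs a MetadataRepository and wires it into a mocked RepositorySession returned by the Spring-provided RepositorySessionFactory bean, so the consumer under test never touches a real metadata store. A minimal sketch of what the wiring guarantees (the assertion is illustrative and not part of the original test):

// Illustrative only: the mocked factory always returns the mocked session,
// which in turn hands back the mocked MetadataRepository.
RepositorySession obtained = factory.createSession();
assertSame(metadataRepository, obtained.getRepository());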
Use of org.apache.archiva.metadata.repository.MetadataRepository in project Archiva by Apache.
The class CleanupReleasedSnapshotsRepositoryPurge, method process:
@Override
public void process(String path) throws RepositoryPurgeException {
    try {
        Path artifactFile = Paths.get(repository.getRepoRoot(), path);
        if (!Files.exists(artifactFile)) {
            // Nothing to do here, file doesn't exist, skip it.
            return;
        }
        ArtifactReference artifactRef = repository.toArtifactReference(path);
        if (!VersionUtil.isSnapshot(artifactRef.getVersion())) {
            // Nothing to do here, not a snapshot, skip it.
            return;
        }
        ProjectReference reference = new ProjectReference();
        reference.setGroupId(artifactRef.getGroupId());
        reference.setArtifactId(artifactRef.getArtifactId());
        // Gather the released versions
        List<String> releasedVersions = new ArrayList<>();
        Collection<org.apache.archiva.repository.ManagedRepository> repos = repositoryRegistry.getManagedRepositories();
        for (org.apache.archiva.repository.ManagedRepository repo : repos) {
            if (repo.getActiveReleaseSchemes().contains(ReleaseScheme.RELEASE)) {
                try {
                    ManagedRepositoryContent repoContent = repo.getContent();
                    for (String version : repoContent.getVersions(reference)) {
                        if (!VersionUtil.isSnapshot(version)) {
                            releasedVersions.add(version);
                        }
                    }
                } catch (RepositoryException e) {
                    // swallow: skip repositories whose content cannot be read
                }
            }
        }
        Collections.sort(releasedVersions, VersionComparator.getInstance());
        // Now clean out any version that is earlier than the highest released version.
        boolean needsMetadataUpdate = false;
        VersionedReference versionRef = new VersionedReference();
        versionRef.setGroupId(artifactRef.getGroupId());
        versionRef.setArtifactId(artifactRef.getArtifactId());
        MetadataRepository metadataRepository = repositorySession.getRepository();
        if (releasedVersions.contains(VersionUtil.getReleaseVersion(artifactRef.getVersion()))) {
            versionRef.setVersion(artifactRef.getVersion());
            repository.deleteVersion(versionRef);
            for (RepositoryListener listener : listeners) {
                listener.deleteArtifact(metadataRepository, repository.getId(), artifactRef.getGroupId(),
                                        artifactRef.getArtifactId(), artifactRef.getVersion(),
                                        artifactFile.getFileName().toString());
            }
            metadataRepository.removeProjectVersion(repository.getId(), artifactRef.getGroupId(),
                                                    artifactRef.getArtifactId(), artifactRef.getVersion());
            needsMetadataUpdate = true;
        }
        if (needsMetadataUpdate) {
            updateMetadata(artifactRef);
        }
    } catch (LayoutException e) {
        log.debug("Not processing file that is not an artifact: {}", e.getMessage());
    } catch (ContentNotFoundException e) {
        throw new RepositoryPurgeException(e.getMessage(), e);
    } catch (MetadataRepositoryException e) {
        log.error("Could not remove metadata during cleanup of released snapshots of {}", path, e);
    }
}
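The decision above rests on two VersionUtil calls already used in the method: isSnapshot filters out non-snapshot paths, and getReleaseVersion maps a snapshot version onto the release version that must exist before the snapshot is purged. A small sketch with made-up version strings:

// Illustrative values only.
String version = "2.1-SNAPSHOT";
if (VersionUtil.isSnapshot(version)) {
    String released = VersionUtil.getReleaseVersion(version);   // expected "2.1"
    // process(...) deletes the snapshot only if "2.1" is among the gathered releasedVersions
}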
Use of org.apache.archiva.metadata.repository.MetadataRepository in project Archiva by Apache.
The class ArchivaMetadataCreationConsumer, method processFile:
@Override
public void processFile(String path) throws ConsumerException {
    RepositorySession repositorySession = repositorySessionFactory.createSession();
    try {
        // note that we do minimal processing including checksums and POM information for performance of
        // the initial scan. Any request for this information will be intercepted and populated on-demand
        // or picked up by subsequent scans
        ArtifactMetadata artifact = repositoryStorage.readArtifactMetadataFromPath(repoId, path);
        ProjectMetadata project = new ProjectMetadata();
        project.setNamespace(artifact.getNamespace());
        project.setId(artifact.getProject());
        String projectVersion = VersionUtil.getBaseVersion(artifact.getVersion());
        MetadataRepository metadataRepository = repositorySession.getRepository();
        boolean createVersionMetadata = false;
        // FIXME: maybe not too efficient since it may have already been read and stored for this artifact
        ProjectVersionMetadata versionMetadata = null;
        try {
            ReadMetadataRequest readMetadataRequest = new ReadMetadataRequest().repositoryId(repoId)
                .namespace(artifact.getNamespace()).projectId(artifact.getProject()).projectVersion(projectVersion);
            versionMetadata = repositoryStorage.readProjectVersionMetadata(readMetadataRequest);
            createVersionMetadata = true;
        } catch (RepositoryStorageMetadataNotFoundException e) {
            log.warn("Missing or invalid POM for artifact:{} (repository:{}); creating empty metadata", path, repoId);
            versionMetadata = new ProjectVersionMetadata();
            versionMetadata.setId(projectVersion);
            versionMetadata.setIncomplete(true);
            createVersionMetadata = true;
        } catch (RepositoryStorageMetadataInvalidException e) {
            log.warn("Error occurred resolving POM for artifact:{} (repository:{}); message: {}",
                     new Object[] { path, repoId, e.getMessage() });
        }
        // read the metadata and update it if it is newer or doesn't exist
        artifact.setWhenGathered(whenGathered);
        metadataRepository.updateArtifact(repoId, project.getNamespace(), project.getId(), projectVersion, artifact);
        if (createVersionMetadata) {
            metadataRepository.updateProjectVersion(repoId, project.getNamespace(), project.getId(), versionMetadata);
        }
        metadataRepository.updateProject(repoId, project);
        repositorySession.save();
    } catch (MetadataRepositoryException e) {
        log.warn("Error occurred persisting metadata for artifact:{} (repository:{}); message: {}", path, repoId, e.getMessage(), e);
        repositorySession.revert();
    } catch (RepositoryStorageRuntimeException e) {
        log.warn("Error occurred persisting metadata for artifact:{} (repository:{}); message: {}", path, repoId, e.getMessage(), e);
        repositorySession.revert();
    } finally {
        repositorySession.close();
    }
}
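Every example on this page follows the session lifecycle that processFile makes explicit: obtain a RepositorySession from the factory, work against its MetadataRepository, then save, revert on failure, and always close. Reduced to a skeleton (a sketch using only calls that appear above):

RepositorySession session = repositorySessionFactory.createSession();
try {
    MetadataRepository metadataRepository = session.getRepository();
    // ... read or update metadata here ...
    session.save();      // persist changes made through the metadata repository
} catch (MetadataRepositoryException e) {
    session.revert();    // discard partially written changes
} finally {
    session.close();
}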
Use of org.apache.archiva.metadata.repository.MetadataRepository in project Archiva by Apache.
The class AbstractRepositoryPurge, method purge:
/**
 * Purge the repo. Update db and index of removed artifacts.
 *
 * @param references the artifact references to remove
 */
protected void purge(Set<ArtifactReference> references) {
    if (references != null && !references.isEmpty()) {
        MetadataRepository metadataRepository = repositorySession.getRepository();
        Map<ArtifactInfo, ArtifactMetadata> metaRemovalList = new HashMap<>();
        Map<String, Collection<ArtifactMetadata>> metaResolved = new HashMap<>();
        for (ArtifactReference reference : references) {
            String baseVersion = VersionUtil.getBaseVersion(reference.getVersion());
            // Needed for tracking in the hashmap
            String metaBaseId = reference.getGroupId() + "/" + reference.getArtifactId() + "/" + baseVersion;
            if (!metaResolved.containsKey(metaBaseId)) {
                try {
                    metaResolved.put(metaBaseId, metadataRepository.getArtifacts(repository.getId(),
                        reference.getGroupId(), reference.getArtifactId(), baseVersion));
                } catch (MetadataResolutionException e) {
                    log.error("Error during metadata retrieval {}: {}", metaBaseId, e.getMessage());
                }
            }
            Path artifactFile = repository.toFile(reference);
            for (RepositoryListener listener : listeners) {
                listener.deleteArtifact(metadataRepository, repository.getId(), reference.getGroupId(),
                    reference.getArtifactId(), reference.getVersion(), artifactFile.getFileName().toString());
            }
            try {
                Files.delete(artifactFile);
                log.debug("File deleted: {}", artifactFile.toAbsolutePath());
            } catch (IOException e) {
                log.error("Could not delete file {}: {}", artifactFile.toAbsolutePath(), e.getMessage(), e);
                continue;
            }
            try {
                repository.deleteArtifact(reference);
            } catch (ContentNotFoundException e) {
                log.warn("skip error deleting artifact {}: {}", reference, e.getMessage());
            }
            boolean snapshotVersion = VersionUtil.isSnapshot(reference.getVersion());
            // If this is a snapshot, we have to search for artifacts with the same version and remove all of them.
            if (snapshotVersion) {
                Collection<ArtifactMetadata> artifacts = metaResolved.get(metaBaseId);
                if (artifacts != null) {
                    // clean up snapshot metadata
                    for (ArtifactMetadata artifactMetadata : artifacts) {
                        // Artifact metadata and reference version should match.
                        if (artifactMetadata.getVersion().equals(reference.getVersion())) {
                            ArtifactInfo info = new ArtifactInfo(artifactMetadata.getNamespace(),
                                artifactMetadata.getProject(), artifactMetadata.getProjectVersion(),
                                artifactMetadata.getVersion());
                            if (StringUtils.isNotBlank(reference.getClassifier())) {
                                info.setClassifier(reference.getClassifier());
                                metaRemovalList.put(info, artifactMetadata);
                            } else {
                                // metadataRepository.removeArtifact( artifactMetadata, baseVersion );
                                metaRemovalList.put(info, artifactMetadata);
                            }
                        }
                    }
                }
            } else {
                // otherwise we delete the artifact version
                ArtifactInfo info = new ArtifactInfo(reference.getGroupId(), reference.getArtifactId(),
                    baseVersion, reference.getVersion());
                for (ArtifactMetadata metadata : metaResolved.get(metaBaseId)) {
                    metaRemovalList.put(info, metadata);
                }
            }
            triggerAuditEvent(repository.getRepository().getId(), ArtifactReference.toKey(reference),
                AuditEvent.PURGE_ARTIFACT);
            purgeSupportFiles(artifactFile);
        }
        purgeMetadata(metadataRepository, metaRemovalList);
        repositorySession.save();
    }
}
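purge resolves artifact metadata once per groupId/artifactId/baseVersion and reuses the result for every reference in the set; metaBaseId is only the cache key for that lookup. A small sketch of how the key collapses several builds of one snapshot onto a single resolution (coordinates are made up for illustration):

// Hypothetical coordinates: two timestamped builds of the same snapshot
// resolve to the same base version and therefore the same cache key.
String key1 = "org.example" + "/" + "my-lib" + "/" + VersionUtil.getBaseVersion("1.0-20180101.120000-1");
String key2 = "org.example" + "/" + "my-lib" + "/" + VersionUtil.getBaseVersion("1.0-20180102.130000-2");
// key1.equals(key2), so metadataRepository.getArtifacts(...) is called only once for both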
Use of org.apache.archiva.metadata.repository.MetadataRepository in project Archiva by Apache.
The class DefaultManagedRepositoryAdmin, method deleteManagedRepository:
private Boolean deleteManagedRepository(ManagedRepositoryConfiguration repository, boolean deleteContent,
                                        Configuration config, boolean stagedOne) throws RepositoryAdminException {
    if (!stagedOne) {
        RepositorySession repositorySession = getRepositorySessionFactory().createSession();
        try {
            MetadataRepository metadataRepository = repositorySession.getRepository();
            metadataRepository.removeRepository(repository.getId());
            // invalidate cache
            namespacesCache.remove(repository.getId());
            log.debug("call repositoryStatisticsManager.deleteStatistics");
            getRepositoryStatisticsManager().deleteStatistics(metadataRepository, repository.getId());
            repositorySession.save();
        } catch (MetadataRepositoryException e) {
            // throw new RepositoryAdminException( e.getMessage(), e );
            log.warn("skip error during removing repository from MetadataRepository:{}", e.getMessage(), e);
        } finally {
            repositorySession.close();
        }
    }
    if (deleteContent) {
        // TODO could be async ? as directory can be huge
        Path dir = Paths.get(repository.getLocation());
        org.apache.archiva.common.utils.FileUtils.deleteQuietly(dir);
    }
    // olamy: copy list for reading as a unit test in webapp fail with ConcurrentModificationException
    List<ProxyConnectorConfiguration> proxyConnectors = new ArrayList<>(config.getProxyConnectors());
    for (ProxyConnectorConfiguration proxyConnector : proxyConnectors) {
        if (StringUtils.equals(proxyConnector.getSourceRepoId(), repository.getId())) {
            config.removeProxyConnector(proxyConnector);
        }
    }
    Map<String, List<String>> repoToGroupMap = config.getRepositoryToGroupMap();
    if (repoToGroupMap != null) {
        if (repoToGroupMap.containsKey(repository.getId())) {
            List<String> repoGroups = repoToGroupMap.get(repository.getId());
            for (String repoGroup : repoGroups) {
                // copy to prevent UnsupportedOperationException
                RepositoryGroupConfiguration repositoryGroupConfiguration = config.findRepositoryGroupById(repoGroup);
                List<String> repos = new ArrayList<>(repositoryGroupConfiguration.getRepositories());
                config.removeRepositoryGroup(repositoryGroupConfiguration);
                repos.remove(repository.getId());
                repositoryGroupConfiguration.setRepositories(repos);
                config.addRepositoryGroup(repositoryGroupConfiguration);
            }
        }
    }
    try {
        removeRepositoryRoles(repository);
    } catch (RoleManagerException e) {
        throw new RepositoryAdminException("fail to remove repository roles for repository " + repository.getId()
            + " : " + e.getMessage(), e);
    }
    try {
        final RepositoryRegistry reg = getRepositoryRegistry();
        if (reg.getManagedRepository(repository.getId()) != null) {
            reg.removeRepository(reg.getManagedRepository(repository.getId()));
        }
    } catch (RepositoryException e) {
        throw new RepositoryAdminException("Removal of repository " + repository.getId() + " failed: " + e.getMessage());
    }
    saveConfiguration(config);
    return Boolean.TRUE;
}
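Unlike the per-artifact updates in the earlier examples, this method drops everything the MetadataRepository holds for one repository id before touching the configuration. The metadata portion reduces to the calls below (a sketch using only methods shown above; repositoryId stands in for repository.getId()):

// Sketch of the repository-wide metadata cleanup, extracted from the method above.
MetadataRepository metadataRepository = repositorySession.getRepository();
metadataRepository.removeRepository(repositoryId);                  // drop all stored metadata for the repository
getRepositoryStatisticsManager().deleteStatistics(metadataRepository, repositoryId);  // drop its scan statistics
repositorySession.save();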