Use of org.apache.archiva.metadata.repository.MetadataRepository in project archiva by apache.
The class AuditManagerTest, method setUp.
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    auditManager = new DefaultAuditManager();
    metadataRepositoryControl = EasyMock.createControl();
    metadataRepository = metadataRepositoryControl.createMock(MetadataRepository.class);
    ManagedRepositoryConfiguration repository = new ManagedRepositoryConfiguration();
    repository.setId(TEST_REPO_ID);
    repository.setLocation("");
}
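A hedged sketch of how a test built on this setup might drive the mock: EasyMock records expectations, switches to replay mode, exercises the code, and verifies. The facet id and facet name below are illustrative placeholders; the removeMetadataFacet(repoId, facetId, name) signature matches the one used in RepositoryProblemEventListener further down this page.
// Illustrative test sketch; the facet id and name are placeholder values.
@Test
public void testMockedFacetRemoval() throws Exception {
    // Record phase: declare the void call we expect on the mock.
    metadataRepository.removeMetadataFacet(TEST_REPO_ID, "org.apache.archiva.audit", "some/facet/name");
    metadataRepositoryControl.replay();
    // Exercise phase: stand-in for the production code that should trigger the call.
    metadataRepository.removeMetadataFacet(TEST_REPO_ID, "org.apache.archiva.audit", "some/facet/name");
    // Verify phase: fails the test if the expected call never happened.
    metadataRepositoryControl.verify();
}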
Use of org.apache.archiva.metadata.repository.MetadataRepository in project archiva by apache.
The class ArchivaRepositoryScanningTaskExecutor, method executeTask.
@SuppressWarnings("unchecked")
@Override
public void executeTask(RepositoryTask task) throws TaskExecutionException {
    try {
        // TODO: replace this whole class with the prescribed content scanning service/action
        // - scan repository for artifacts that do not have corresponding metadata or have been updated and
        //   send events for each
        // - scan metadata for artifacts that have been removed and send events for each
        // - scan metadata for missing plugin data
        // - store information so that it can restart upon failure (publish event on the server recovery
        //   queue, remove it on successful completion)
        this.task = task;
        String repoId = task.getRepositoryId();
        if (StringUtils.isBlank(repoId)) {
            throw new TaskExecutionException("Unable to execute RepositoryTask with blank repository Id.");
        }
        ManagedRepository arepo = repositoryRegistry.getManagedRepository(repoId);
        // execute consumers on resource file if set
        if (task.getResourceFile() != null) {
            log.debug("Executing task from queue with job name: {}", task);
            consumers.executeConsumers(arepo, task.getResourceFile(), task.isUpdateRelatedArtifacts());
        } else {
            log.info("Executing task from queue with job name: {}", task);
            // otherwise, execute consumers on the whole repository
            if (arepo == null) {
                throw new TaskExecutionException("Unable to execute RepositoryTask with invalid repository id: " + repoId);
            }
            long sinceWhen = RepositoryScanner.FRESH_SCAN;
            long previousFileCount = 0;
            RepositorySession repositorySession = repositorySessionFactory.createSession();
            MetadataRepository metadataRepository = repositorySession.getRepository();
            try {
                if (!task.isScanAll()) {
                    RepositoryStatistics previousStats = repositoryStatisticsManager.getLastStatistics(metadataRepository, repoId);
                    if (previousStats != null) {
                        sinceWhen = previousStats.getScanStartTime().getTime();
                        previousFileCount = previousStats.getTotalFileCount();
                    }
                }
                RepositoryScanStatistics stats;
                try {
                    stats = repoScanner.scan(arepo, sinceWhen);
                } catch (RepositoryScannerException e) {
                    throw new TaskExecutionException("Repository error when executing repository job.", e);
                }
                log.info("Finished first scan: {}", stats.toDump(arepo));
                // further statistics will be populated by the following method
                Date endTime = new Date(stats.getWhenGathered().getTime() + stats.getDuration());
                log.info("Gathering repository statistics");
                repositoryStatisticsManager.addStatisticsAfterScan(metadataRepository, repoId, stats.getWhenGathered(), endTime,
                        stats.getTotalFileCount(), stats.getTotalFileCount() - previousFileCount);
                repositorySession.save();
            } catch (MetadataRepositoryException e) {
                throw new TaskExecutionException("Unable to store updated statistics: " + e.getMessage(), e);
            } finally {
                repositorySession.close();
            }
            // log.info( "Scanning for removed repository content" );
            // metadataRepository.findAllProjects();
            // FIXME: do something
            log.info("Finished repository task: {}", task);
            this.task = null;
        }
    } catch (RepositoryAdminException e) {
        log.error(e.getMessage(), e);
        throw new TaskExecutionException(e.getMessage(), e);
    }
}
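A hedged caller sketch: queuing a full rescan of one repository might look like the following. The setter names are assumptions mirroring the getters read above (getRepositoryId, isScanAll), and the taskExecutor field is illustrative.
// Hypothetical caller sketch: queue a full rescan of one repository.
// Setter names mirror the getters used by executeTask and are assumptions.
RepositoryTask task = new RepositoryTask();
task.setRepositoryId("internal"); // must be a registered managed repository id
task.setScanAll(true);            // ignore previous scan statistics, rescan everything
taskExecutor.executeTask(task);   // throws TaskExecutionException on failure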
Use of org.apache.archiva.metadata.repository.MetadataRepository in project archiva by apache.
The class AbstractArchivaRepositoryScanningTaskExecutorTest, method setUp.
@Before
@Override
public void setUp() throws Exception {
    super.setUp();
    Path sourceRepoDir = Paths.get("src/test/repositories/default-repository");
    repoDir = Paths.get("target/default-repository");
    org.apache.archiva.common.utils.FileUtils.deleteDirectory(repoDir);
    assertFalse("Default Test Repository should not exist.", Files.exists(repoDir));
    Files.createDirectories(repoDir);
    FileUtils.copyDirectoryStructure(sourceRepoDir.toFile(), repoDir.toFile());
    // set the timestamps to a time well in the past
    Calendar cal = Calendar.getInstance();
    cal.add(Calendar.YEAR, -1);
    try (Stream<Path> stream = Files.walk(repoDir, FileVisitOption.FOLLOW_LINKS)) {
        stream.forEach(path -> {
            try {
                Files.setLastModifiedTime(path, FileTime.fromMillis(cal.getTimeInMillis()));
            } catch (IOException e) {
                e.printStackTrace();
            }
        });
    }
    // remove any leftover .svn directories, deepest paths first
    // (the walk stream is closed via try-with-resources, matching the walk above)
    PathMatcher m = FileSystems.getDefault().getPathMatcher("glob:**/.svn");
    try (Stream<Path> stream = Files.walk(repoDir, FileVisitOption.FOLLOW_LINKS)) {
        stream.filter(Files::isDirectory)
              .sorted(Comparator.reverseOrder())
              .filter(m::matches)
              .forEach(org.apache.archiva.common.utils.FileUtils::deleteQuietly);
    }
    assertTrue("Default Test Repository should exist.", Files.exists(repoDir) && Files.isDirectory(repoDir));
    assertNotNull(archivaConfig);
    // Create the test repository configuration and register it
    ManagedRepositoryConfiguration repositoryConfiguration = new ManagedRepositoryConfiguration();
    repositoryConfiguration.setId(TEST_REPO_ID);
    repositoryConfiguration.setName("Test Repository");
    repositoryConfiguration.setLocation(repoDir.toAbsolutePath().toString());
    for (ManagedRepository repo : repositoryRegistry.getManagedRepositories()) {
        repositoryRegistry.removeRepository(repo);
    }
    repositoryRegistry.putRepository(repositoryConfiguration);
    metadataRepository = mock(MetadataRepository.class);
    factory.setRepository(metadataRepository);
}
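For readers unfamiliar with NIO globs, a minimal self-contained sketch of the matcher used above; the example paths are illustrative only.
import java.nio.file.FileSystems;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;

// "glob:**/.svn" matches any path whose final element is ".svn", at any depth.
PathMatcher m = FileSystems.getDefault().getPathMatcher("glob:**/.svn");
System.out.println(m.matches(Paths.get("target/default-repository/org/.svn")));    // true
System.out.println(m.matches(Paths.get("target/default-repository/org/pom.xml"))); // false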
Use of org.apache.archiva.metadata.repository.MetadataRepository in project archiva by apache.
The class RepositoryProblemEventListener, method addArtifact.
@Override
public void addArtifact(RepositorySession session, String repoId, String namespace, String projectId, ProjectVersionMetadata metadata) {
    // Remove problems associated with this version on successful addition
    // TODO: this removes all problems - do we need something that just removes the problems we know are corrected?
    String name = RepositoryProblemFacet.createName(namespace, projectId, metadata.getId(), null);
    try {
        MetadataRepository metadataRepository = session.getRepository();
        metadataRepository.removeMetadataFacet(repoId, RepositoryProblemFacet.FACET_ID, name);
        session.markDirty();
    } catch (MetadataRepositoryException e) {
        log.warn("Unable to remove repository problem facets for the version being corrected in the repository: {}", e.getMessage(), e);
    }
}
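A hedged caller sketch: the listener only marks the session dirty, leaving the commit to the session owner. The listener field and the metadata values below are illustrative assumptions.
// Hypothetical caller sketch; the listener marks the session dirty and the
// session owner commits the facet removal via save().
RepositorySession session = repositorySessionFactory.createSession();
try {
    ProjectVersionMetadata versionMetadata = new ProjectVersionMetadata();
    versionMetadata.setId("1.0"); // illustrative project version id
    listener.addArtifact(session, "internal", "org.example", "example-artifact", versionMetadata);
    session.save(); // persists the removal the listener marked dirty
} finally {
    session.close();
}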
Use of org.apache.archiva.metadata.repository.MetadataRepository in project archiva by apache.
The class DuplicateArtifactsConsumer, method processFile.
@Override
public void processFile(String path) throws ConsumerException {
    Path artifactFile = this.repositoryDir.resolve(path);
    // TODO: would be quicker to somehow make sure it ran after the update database consumer, or as a part of that
    // perhaps could use an artifact context that is retained for all consumers? First in can set the SHA-1
    // alternatively this could come straight from the storage resolver, which could populate the artifact metadata
    // in the later parse call with the desired checksum and use that
    String checksumSha1;
    ChecksummedFile checksummedFile = new ChecksummedFile(artifactFile);
    try {
        checksumSha1 = checksummedFile.calculateChecksum(ChecksumAlgorithm.SHA1);
    } catch (IOException e) {
        throw new ConsumerException(e.getMessage(), e);
    }
    MetadataRepository metadataRepository = repositorySession.getRepository();
    Collection<ArtifactMetadata> results;
    try {
        results = metadataRepository.getArtifactsByChecksum(repoId, checksumSha1);
    } catch (MetadataRepositoryException e) {
        repositorySession.close();
        throw new ConsumerException(e.getMessage(), e);
    }
    if (CollectionUtils.isNotEmpty(results)) {
        ArtifactMetadata originalArtifact;
        try {
            originalArtifact = pathTranslator.getArtifactForPath(repoId, path);
        } catch (Exception e) {
            log.warn("Not reporting problem for invalid artifact in checksum check: {}", e.getMessage());
            return;
        }
        for (ArtifactMetadata dupArtifact : results) {
            String id = path.substring(path.lastIndexOf('/') + 1);
            if (dupArtifact.getId().equals(id)
                    && dupArtifact.getNamespace().equals(originalArtifact.getNamespace())
                    && dupArtifact.getProject().equals(originalArtifact.getProject())
                    && dupArtifact.getVersion().equals(originalArtifact.getVersion())) {
                // Skip reference to itself.
                log.debug("Not counting duplicate for artifact {} for path {}", dupArtifact, path);
                continue;
            }
            RepositoryProblemFacet problem = new RepositoryProblemFacet();
            problem.setRepositoryId(repoId);
            problem.setNamespace(originalArtifact.getNamespace());
            problem.setProject(originalArtifact.getProject());
            problem.setVersion(originalArtifact.getVersion());
            problem.setId(id);
            // FIXME: need to get the right storage resolver for the repository the dupe artifact is in, it might be
            // a different type
            // FIXME: we need the project version here, not the artifact version
            problem.setMessage("Duplicate Artifact Detected: " + path + " <--> "
                    + pathTranslator.toPath(dupArtifact.getNamespace(), dupArtifact.getProject(), dupArtifact.getVersion(), dupArtifact.getId()));
            problem.setProblem("duplicate-artifact");
            try {
                metadataRepository.addMetadataFacet(repoId, problem);
            } catch (MetadataRepositoryException e) {
                throw new ConsumerException(e.getMessage(), e);
            }
        }
    }
}
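The self-reference check above compares four coordinates inline. As a sketch, an extracted helper (the name is illustrative) states the intent in one place:
// Illustrative helper: true when the candidate row is the artifact we started
// from (same id, namespace, project, and version), i.e. not a real duplicate.
private static boolean isSameArtifact(ArtifactMetadata candidate, ArtifactMetadata original, String id) {
    return candidate.getId().equals(id)
        && candidate.getNamespace().equals(original.getNamespace())
        && candidate.getProject().equals(original.getProject())
        && candidate.getVersion().equals(original.getVersion());
}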