Example usage of org.apache.archiva.consumers.KnownRepositoryContentConsumer in the Apache Archiva project.
From the class RepositoryPurgeConsumerTest, method testConsumerByRetentionCount:
@Test
public void testConsumerByRetentionCount() throws Exception {
    KnownRepositoryContentConsumer purgeConsumer = applicationContext.getBean(
        "knownRepositoryContentConsumer#repo-purge-consumer-by-retention-count",
        KnownRepositoryContentConsumer.class);
    org.apache.archiva.repository.ManagedRepository repoConfiguration =
        getRepoConfiguration(TEST_REPO_ID, TEST_REPO_NAME);
    ArtifactCleanupFeature cleanupFeature =
        repoConfiguration.getFeature(ArtifactCleanupFeature.class).get();
    // Disable the age-based rule so only the retention-count purge can fire.
    cleanupFeature.setRetentionPeriod(Period.ofDays(0));
    cleanupFeature.setRetentionCount(TEST_RETENTION_COUNT);
    addRepoToConfiguration("retention-count", repoConfiguration);
    purgeConsumer.beginScan(repoConfiguration, null);

    String repoRoot = prepareTestRepos();
    String projectNs = "org.jruby.plugins";
    String projectName = "jruby-rake-plugin";
    String projectVersion = "1.0RC1-SNAPSHOT";
    String projectPath = projectNs.replaceAll("\\.", "/");
    String projectRoot = repoRoot + "/" + projectPath + "/" + projectName;
    String versionRoot = projectRoot + "/" + projectVersion;

    Path repo = getTestRepoRootPath();
    Path versionDir = repo.resolve(projectPath).resolve(projectName).resolve(projectVersion);

    // Stub the metadata repository with the artifact metadata read from disk.
    List<ArtifactMetadata> artifacts =
        getArtifactMetadataFromDir(TEST_REPO_ID, projectName, repo, versionDir);
    when(metadataRepository.getArtifacts(TEST_REPO_ID, projectNs, projectName, projectVersion))
        .thenReturn(artifacts);

    Set<String> deletedVersions = new HashSet<>();
    deletedVersions.add("1.0RC1-20070504.153317-1");
    deletedVersions.add("1.0RC1-20070504.160758-2");

    purgeConsumer.processFile(PATH_TO_BY_RETENTION_COUNT_ARTIFACT);

    // The project version itself must survive; exactly two artifacts are purged.
    verify(metadataRepository, never())
        .removeProjectVersion(eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion));
    ArgumentCaptor<ArtifactMetadata> removedArtifacts = ArgumentCaptor.forClass(ArtifactMetadata.class);
    verify(metadataRepository, times(2)).removeArtifact(removedArtifacts.capture(), eq(projectVersion));
    for (ArtifactMetadata removed : removedArtifacts.getAllValues()) {
        assertTrue(removed.getId().startsWith(projectName));
        assertTrue(deletedVersions.contains(removed.getVersion()));
    }

    // Every file belonging to the two oldest snapshot builds must be gone.
    String[] fullSuffixSet = {
        ".jar", "-javadoc.jar", "-javadoc.zip", ".jar.md5", ".jar.sha1",
        ".pom", ".pom.md5", ".pom.sha1"
    };
    for (String suffix : fullSuffixSet) {
        assertDeleted(versionRoot + "/jruby-rake-plugin-1.0RC1-20070504.153317-1" + suffix);
    }
    for (String suffix : fullSuffixSet) {
        assertDeleted(versionRoot + "/jruby-rake-plugin-1.0RC1-20070504.160758-2" + suffix);
    }

    // The two most recent snapshot builds must remain untouched.
    for (String suffix : fullSuffixSet) {
        assertExists(versionRoot + "/jruby-rake-plugin-1.0RC1-20070505.090015-3" + suffix);
    }
    String[] noJavadocSuffixSet = {
        ".jar", ".jar.md5", ".jar.sha1", ".pom", ".pom.md5", ".pom.sha1"
    };
    for (String suffix : noJavadocSuffixSet) {
        assertExists(versionRoot + "/jruby-rake-plugin-1.0RC1-20070506.090132-4" + suffix);
    }

    removeRepoFromConfiguration("retention-count", repoConfiguration);
}
Example usage of org.apache.archiva.consumers.KnownRepositoryContentConsumer in the Apache Archiva project.
From the class DefaultLegacyRepositoryConverter, method convertLegacyRepository:
@Override
public void convertLegacyRepository(Path legacyRepositoryDirectory, Path repositoryDirectory,
                                    List<String> fileExclusionPatterns)
    throws RepositoryConversionException {
    try {
        String defaultRepositoryUrl = PathUtil.toUrl(repositoryDirectory);

        // Wrap the legacy directory in a repository descriptor using the "legacy" layout
        // so the scanner can walk it.
        BasicManagedRepository legacyRepository =
            new BasicManagedRepository("legacy", "Legacy Repository", repositoryDirectory.getParent());
        legacyRepository.setLocation(legacyRepositoryDirectory.toAbsolutePath().toUri());
        legacyRepository.setLayout("legacy");

        // Target repository uses the default (maven2) layout.
        ArtifactRepository repository = artifactRepositoryFactory.createArtifactRepository(
            "default", defaultRepositoryUrl, defaultLayout, null, null);

        // The converter consumer performs the actual per-artifact conversion during the scan.
        legacyConverterConsumer.setExcludes(fileExclusionPatterns);
        legacyConverterConsumer.setDestinationRepository(repository);

        List<KnownRepositoryContentConsumer> knownConsumers = new ArrayList<>(1);
        knownConsumers.add(legacyConverterConsumer);
        List<InvalidRepositoryContentConsumer> invalidConsumers = Collections.emptyList();
        List<String> ignoredContent = new ArrayList<>(Arrays.asList(RepositoryScanner.IGNORABLE_CONTENT));

        repoScanner.scan(legacyRepository, knownConsumers, invalidConsumers, ignoredContent,
                         RepositoryScanner.FRESH_SCAN);
    } catch (RepositoryScannerException e) {
        // Fixed typo in the original message ("convering"); keep the cause attached.
        throw new RepositoryConversionException("Error converting legacy repository.", e);
    }
}
Example usage of org.apache.archiva.consumers.KnownRepositoryContentConsumer in the Apache Archiva project.
From the class RepositoryContentConsumers, method executeConsumers:
/**
 * A convenience method to execute all of the active selected consumers for a
 * particular arbitrary file.
 * NOTE: Make sure that there is no repository scanning task executing before invoking this so as to prevent
 * the index writer/reader of the current index-content consumer executing from getting closed. For an example,
 * see ArchivaDavResource#executeConsumers( File ).
 *
 * @param repository the repository configuration to use.
 * @param localFile the local file to execute the consumers against.
 * @param updateRelatedArtifacts when {@code false}, the "create-missing-checksums" and
 *        "metadata-updater" consumers are excluded from the run (e.g. for webdav deploys,
 *        where checksums and metadata are uploaded by maven itself).
 * @throws RepositoryAdminException if consumer selection or execution fails.
 */
public void executeConsumers(ManagedRepository repository, Path localFile, boolean updateRelatedArtifacts)
    throws RepositoryAdminException {
    List<KnownRepositoryContentConsumer> selectedKnownConsumers = null;
    // Run the repository consumers
    try {
        Closure<RepositoryContentConsumer> triggerBeginScan =
            new TriggerBeginScanClosure(repository, getStartTime(), false);

        selectedKnownConsumers = getSelectedKnownConsumers();
        // Do not create missing / fix invalid checksums and update metadata when deploying
        // from webdav, since these are uploaded by maven.
        // NOTE(review): this mutates the list returned by getSelectedKnownConsumers();
        // presumed to be a caller-owned copy — confirm against its implementation.
        if (!updateRelatedArtifacts) {
            selectedKnownConsumers.removeIf(
                consumer -> "create-missing-checksums".equals(consumer.getId())
                    || "metadata-updater".equals(consumer.getId()));
        }
        List<InvalidRepositoryContentConsumer> selectedInvalidConsumers = getSelectedInvalidConsumers();

        // Notify all consumers that a (single-file) scan is beginning.
        IterableUtils.forEach(selectedKnownConsumers, triggerBeginScan);
        IterableUtils.forEach(selectedInvalidConsumers, triggerBeginScan);

        // "Process the file if the consumer has it in the includes list, and not in the excludes list."
        Path repoPath = PathUtil.getPathFromUri(repository.getLocation());
        BaseFile baseFile = new BaseFile(repoPath.toString(), localFile.toFile());
        ConsumerWantsFilePredicate predicate = new ConsumerWantsFilePredicate(repository);
        predicate.setBasefile(baseFile);
        predicate.setCaseSensitive(false);

        ConsumerProcessFileClosure closure = new ConsumerProcessFileClosure();
        closure.setBasefile(baseFile);
        closure.setExecuteOnEntireRepo(false);

        Closure<RepositoryContentConsumer> processIfWanted = IfClosure.ifClosure(predicate, closure);
        IterableUtils.forEach(selectedKnownConsumers, processIfWanted);

        if (predicate.getWantedFileCount() <= 0) {
            // Nothing known processed this file. It is invalid!
            IterableUtils.forEach(selectedInvalidConsumers, closure);
        }

        TriggerScanCompletedClosure scanCompletedClosure = new TriggerScanCompletedClosure(repository, false);
        IterableUtils.forEach(selectedKnownConsumers, scanCompletedClosure);
    } finally {
        /* TODO: This is never called by the repository scanner instance, so not calling here either - but it probably should be?
        IterableUtils.forEach( availableKnownConsumers, triggerCompleteScan );
        IterableUtils.forEach( availableInvalidConsumers, triggerCompleteScan );
        */
        releaseSelectedKnownConsumers(selectedKnownConsumers);
    }
}
Example usage of org.apache.archiva.consumers.KnownRepositoryContentConsumer in the Apache Archiva project.
From the class RepositoryScannerTest, method testDefaultRepositoryMetadataScanner:
@Test
public void testDefaultRepositoryMetadataScanner() throws Exception {
    // Metadata files expected in the default-layout test repository; only the
    // count is asserted against the scan statistics below.
    String[] expectedMetadataFiles = {
        "org/apache/maven/some-ejb/1.0/maven-metadata.xml",
        "org/apache/maven/update/test-not-updated/maven-metadata.xml",
        "org/apache/maven/update/test-updated/maven-metadata.xml",
        "org/apache/maven/maven-metadata.xml",
        "org/apache/testgroup/discovery/1.0/maven-metadata.xml",
        "org/apache/testgroup/discovery/maven-metadata.xml",
        "javax/sql/jdbc/2.0/maven-metadata-repository.xml",
        "javax/sql/jdbc/maven-metadata-repository.xml",
        "javax/sql/maven-metadata-repository.xml",
        "javax/maven-metadata.xml"
    };
    List<String> actualMetadataPaths = new ArrayList<>();
    for (String metadataPath : expectedMetadataFiles) {
        actualMetadataPaths.add(metadataPath);
    }

    ManagedRepository repository = createDefaultRepository();

    // One known consumer that only matches metadata files, plus a catch-all
    // invalid consumer for anything the known consumer rejects.
    KnownScanConsumer metadataConsumer = new KnownScanConsumer();
    metadataConsumer.setIncludes(new String[] { "**/maven-metadata*.xml" });
    List<KnownRepositoryContentConsumer> knownConsumers = new ArrayList<>();
    knownConsumers.add(metadataConsumer);

    InvalidScanConsumer badconsumer = new InvalidScanConsumer();
    List<InvalidRepositoryContentConsumer> invalidConsumers = new ArrayList<>();
    invalidConsumers.add(badconsumer);

    RepositoryScanner scanner = lookupRepositoryScanner();
    RepositoryScanStatistics stats = scanner.scan(repository, knownConsumers, invalidConsumers,
        getIgnoreList(), RepositoryScanner.FRESH_SCAN);

    assertNotNull("Stats should not be null.", stats);
    assertMinimumHits("Stats.totalFileCount", actualMetadataPaths.size(), stats.getTotalFileCount());
    assertMinimumHits("Processed Count", actualMetadataPaths.size(), metadataConsumer.getProcessCount());
}
Example usage of org.apache.archiva.consumers.KnownRepositoryContentConsumer in the Apache Archiva project.
From the class RepositoryScannerTest, method testLegacyRepositoryArtifactScanner:
@Test
public void testLegacyRepositoryArtifactScanner() throws Exception {
    // Artifact files expected in the legacy-layout test repository; only the
    // count is asserted against the scan statistics below.
    String[] expectedArtifactFiles = {
        "invalid/jars/1.0/invalid-1.0.jar",
        "invalid/jars/invalid-1.0.rar",
        "invalid/jars/invalid.jar",
        "invalid/invalid-1.0.jar",
        "javax.sql/jars/jdbc-2.0.jar",
        "org.apache.maven/jars/some-ejb-1.0-client.jar",
        "org.apache.maven/jars/testing-1.0.jar",
        "org.apache.maven/jars/testing-1.0-sources.jar",
        "org.apache.maven/jars/testing-UNKNOWN.jar",
        "org.apache.maven/jars/testing-1.0.zip",
        "org.apache.maven/jars/testing-1.0-20050611.112233-1.jar",
        "org.apache.maven/jars/testing-1.0.tar.gz",
        "org.apache.maven.update/jars/test-not-updated-1.0.jar",
        "org.apache.maven.update/jars/test-updated-1.0.jar"
    };
    List<String> actualArtifactPaths = new ArrayList<>();
    for (String artifactPath : expectedArtifactFiles) {
        actualArtifactPaths.add(artifactPath);
    }

    ManagedRepository repository = createLegacyRepository();

    // One known consumer matching the artifact patterns, plus a catch-all
    // invalid consumer for anything the known consumer rejects.
    KnownScanConsumer artifactConsumer = new KnownScanConsumer();
    artifactConsumer.setIncludes(ARTIFACT_PATTERNS);
    List<KnownRepositoryContentConsumer> knownConsumers = new ArrayList<>();
    knownConsumers.add(artifactConsumer);

    InvalidScanConsumer badconsumer = new InvalidScanConsumer();
    List<InvalidRepositoryContentConsumer> invalidConsumers = new ArrayList<>();
    invalidConsumers.add(badconsumer);

    RepositoryScanner scanner = lookupRepositoryScanner();
    RepositoryScanStatistics stats = scanner.scan(repository, knownConsumers, invalidConsumers,
        getIgnoreList(), RepositoryScanner.FRESH_SCAN);

    assertNotNull("Stats should not be null.", stats);
    assertMinimumHits("Stats.totalFileCount", actualArtifactPaths.size(), stats.getTotalFileCount());
    assertMinimumHits("Processed Count", actualArtifactPaths.size(), artifactConsumer.getProcessCount());
}
Aggregations