Use of org.apache.archiva.repository.storage.StorageAsset in the Apache Archiva project.
The class DefaultBrowseService, method artifactAvailable.
/**
 * Checks whether the given artifact is available in any of the selected managed
 * repositories, fetching it (and its POM) from the remote proxies when it is
 * missing locally.
 * <p>
 * For SNAPSHOT versions the locally deployed, timestamped build is also
 * considered, resolved through the repository's maven-metadata.xml.
 *
 * @param groupId      the artifact namespace / group id
 * @param artifactId   the artifact id
 * @param version      the artifact version (may end in -SNAPSHOT)
 * @param classifier   optional classifier; null or empty means "no classifier"
 * @param repositoryId the repository to restrict the lookup to; resolution of the
 *                     candidate repositories is delegated to {@code getSelectedRepos}
 * @return an {@link AvailabilityStatus} carrying {@code true} once the artifact is found
 * @throws ArchivaRestServiceException wrapping any {@link RepositoryException}
 */
@Override
public AvailabilityStatus artifactAvailable(String groupId, String artifactId, String version, String classifier, String repositoryId) throws ArchivaRestServiceException {
    List<String> selectedRepos = getSelectedRepos(repositoryId);
    boolean snapshot = VersionUtil.isSnapshot(version);
    try {
        for (String repoId : selectedRepos) {
            org.apache.archiva.repository.ManagedRepository managedRepo = repositoryRegistry.getManagedRepository(repoId);
            if (!proxyRegistry.hasHandler(managedRepo.getType())) {
                throw new RepositoryException("No proxy handler found for repository type " + managedRepo.getType());
            }
            RepositoryProxyHandler proxyHandler = proxyRegistry.getHandler(managedRepo.getType()).get(0);
            // Skip repositories whose active release scheme does not match the artifact kind.
            // NOTE(review): the second clause skips release artifacts whenever the SNAPSHOT
            // scheme is active, even if RELEASE is active as well — confirm this is intended
            // (one would expect a !contains(ReleaseScheme.RELEASE) check here).
            if ((snapshot && !managedRepo.getActiveReleaseSchemes().contains(ReleaseScheme.SNAPSHOT)) || (!snapshot && managedRepo.getActiveReleaseSchemes().contains(ReleaseScheme.SNAPSHOT))) {
                continue;
            }
            ManagedRepositoryContent managedRepositoryContent = getManagedRepositoryContent(repoId);
            // FIXME default to jar which can be wrong for war zip etc....
            ArchivaItemSelector itemSelector = ArchivaItemSelector.builder().withNamespace(groupId).withProjectId(artifactId).withVersion(version).withClassifier(StringUtils.isEmpty(classifier) ? "" : classifier).withType("jar").withArtifactId(artifactId).build();
            org.apache.archiva.repository.content.Artifact archivaArtifact = managedRepositoryContent.getItem(itemSelector).adapt(org.apache.archiva.repository.content.Artifact.class);
            StorageAsset file = archivaArtifact.getAsset();
            if (file != null && file.exists()) {
                return new AvailabilityStatus(true);
            }
            // in case of SNAPSHOT we can have timestamped version locally !
            // Guard on file != null: the asset may be absent and the metadata lookup
            // below dereferences it (previously a potential NullPointerException).
            if (file != null && StringUtils.endsWith(version, VersionUtil.SNAPSHOT)) {
                StorageAsset metadataFile = file.getStorage().getAsset(file.getParent().getPath() + "/" + MetadataTools.MAVEN_METADATA);
                if (metadataFile.exists()) {
                    MetadataReader metadataReader = repositoryRegistry.getMetadataReader(managedRepositoryContent.getRepository().getType());
                    ArchivaRepositoryMetadata archivaRepositoryMetadata = metadataReader.read(metadataFile);
                    int buildNumber = archivaRepositoryMetadata.getSnapshotVersion().getBuildNumber();
                    String timeStamp = archivaRepositoryMetadata.getSnapshotVersion().getTimestamp();
                    // rebuild file name with timestamped version and build number
                    String timeStampFileName = //
                        new StringBuilder(artifactId).append('-').append(//
                            StringUtils.remove(version, "-" + VersionUtil.SNAPSHOT)).append('-').append(//
                            timeStamp).append('-').append(//
                            Integer.toString(buildNumber)).append(//
                            (StringUtils.isEmpty(classifier) ? "" : "-" + classifier)).append(".jar").toString();
                    StorageAsset timeStampFile = file.getStorage().getAsset(file.getParent().getPath() + "/" + timeStampFileName);
                    log.debug("try to find timestamped snapshot version file: {}", timeStampFile.getPath());
                    if (timeStampFile.exists()) {
                        return new AvailabilityStatus(true);
                    }
                }
            }
            // Not found locally: try to pull the artifact in through the proxy chain.
            String path = managedRepositoryContent.toPath(archivaArtifact);
            file = proxyHandler.fetchFromProxies(managedRepositoryContent.getRepository(), path);
            if (file != null && file.exists()) {
                // download pom now
                String pomPath = StringUtils.substringBeforeLast(path, ".jar") + ".pom";
                proxyHandler.fetchFromProxies(managedRepositoryContent.getRepository(), pomPath);
                return new AvailabilityStatus(true);
            }
        }
    } catch (RepositoryException e) {
        log.error(e.getMessage(), e);
        throw new ArchivaRestServiceException(e.getMessage(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
    }
    return new AvailabilityStatus(false);
}
Use of org.apache.archiva.repository.storage.StorageAsset in the Apache Archiva project.
The class MetadataUpdaterConsumer, method updateVersionMetadata.
/**
 * Refreshes the version-level Maven metadata for the given artifact, unless the
 * existing metadata file is already newer than the start of the current scan.
 * Failures are logged and reported through the consumer error mechanism rather
 * than propagated.
 *
 * @param artifact the artifact whose version metadata should be updated
 * @param path     repository-relative path of the artifact, used for error reporting
 */
private void updateVersionMetadata(Artifact artifact, String path) {
    try {
        final String metadataPath = this.metadataTools.toPath(artifact.getVersion());
        final StorageAsset versionMetadata = this.repositoryDir.resolve(metadataPath);
        final boolean upToDate = versionMetadata.exists()
            && versionMetadata.getModificationTime().toEpochMilli() >= this.scanStartTimestamp;
        if (upToDate) {
            // Metadata was written after the scan started; nothing to do.
            log.debug("Skipping uptodate metadata: {}", metadataPath);
        } else {
            metadataTools.updateMetadata(this.repository, metadataPath);
            log.debug("Updated metadata: {}", metadataPath);
        }
    } catch (RepositoryMetadataException e) {
        log.error("Unable to write version metadata for artifact [{}]: ", path, e);
        triggerConsumerError(TYPE_METADATA_WRITE_FAILURE, "Unable to write version metadata for artifact [" + path + "]: " + e.getMessage());
    }
}
Use of org.apache.archiva.repository.storage.StorageAsset in the Apache Archiva project.
The class AbstractRepositoryPurge, method purgeSupportFiles.
/**
 * <p>
 * Finds and deletes the support files that accompany the given artifact file.
 * </p>
 * <p>
 * Support files are sibling assets whose names start with the artifact's file
 * name, e.g. ".sha1", ".md5", ".asc" checksum and signature files.
 * </p>
 *
 * @param artifactFile the artifact file whose support files should be removed
 */
private void purgeSupportFiles(StorageAsset artifactFile) {
    final StorageAsset directory = artifactFile.getParent();
    if (directory.exists()) {
        final String artifactName = artifactFile.getName();
        StorageUtil.walk(directory, asset -> {
            // Match any non-directory sibling sharing the artifact's name prefix.
            final boolean isSupportFile = !asset.isContainer() && asset.getName().startsWith(artifactName);
            if (isSupportFile) {
                deleteSilently(asset);
            }
        });
    }
}
Use of org.apache.archiva.repository.storage.StorageAsset in the Apache Archiva project.
The class CleanupReleasedSnapshotsRepositoryPurge, method process.
/**
 * Purges a SNAPSHOT artifact version when a corresponding (or higher) release
 * version exists in any managed repository that serves releases, notifying the
 * registered listeners, removing the project version from the metadata
 * repository and refreshing the on-disk repository metadata afterwards.
 * <p>
 * Non-artifact paths and already-released versions are silently skipped.
 *
 * @param path repository-relative path of the artifact to examine
 * @throws RepositoryPurgeException declared by the purge interface; this
 *                                  implementation logs recoverable failures instead of throwing
 */
@Override
public void process(String path) throws RepositoryPurgeException {
    try {
        StorageAsset artifactFile = repository.getRepository().getRoot().resolve(path);
        BaseRepositoryContentLayout layout = repository.getLayout(BaseRepositoryContentLayout.class);
        if (!artifactFile.exists()) {
            // Nothing to do here, file doesn't exist, skip it.
            return;
        }
        Artifact artifactRef = layout.getArtifact(path);
        if (!VersionUtil.isSnapshot(artifactRef.getVersion().getId())) {
            // Nothing to do here, not a snapshot, skip it.
            return;
        }
        ItemSelector projectSelector = ArchivaItemSelector.builder().withNamespace(artifactRef.getNamespace().getId()).withProjectId(artifactRef.getId()).build();
        // Gather the released versions
        List<String> releasedVersions = new ArrayList<>();
        Collection<org.apache.archiva.repository.ManagedRepository> repos = repositoryRegistry.getManagedRepositories();
        for (org.apache.archiva.repository.ManagedRepository repo : repos) {
            if (repo.getActiveReleaseSchemes().contains(ReleaseScheme.RELEASE)) {
                BaseRepositoryContentLayout repoContent = repo.getContent().getLayout(BaseRepositoryContentLayout.class);
                Project proj = repoContent.getProject(projectSelector);
                for (Version version : repoContent.getVersions(proj)) {
                    if (!VersionUtil.isSnapshot(version.getId())) {
                        releasedVersions.add(version.getId());
                    }
                }
            }
        }
        Collections.sort(releasedVersions, VersionComparator.getInstance());
        // Now clean out any version that is earlier than the highest released version.
        boolean needsMetadataUpdate = false;
        ArchivaItemSelector.Builder versionSelectorBuilder = ArchivaItemSelector.builder().withNamespace(artifactRef.getNamespace().getId()).withProjectId(artifactRef.getId()).withArtifactId(artifactRef.getId());
        MetadataRepository metadataRepository = repositorySession.getRepository();
        if (releasedVersions.contains(VersionUtil.getReleaseVersion(artifactRef.getVersion().getId()))) {
            ArchivaItemSelector selector = versionSelectorBuilder.withVersion(artifactRef.getVersion().getId()).build();
            Version version = layout.getVersion(selector);
            if (version.exists()) {
                repository.deleteItem(version);
            }
            for (RepositoryListener listener : listeners) {
                listener.deleteArtifact(metadataRepository, repository.getId(), artifactRef.getNamespace().getId(), artifactRef.getId(), artifactRef.getVersion().getId(), artifactFile.getName());
            }
            metadataRepository.removeProjectVersion(repositorySession, repository.getId(), artifactRef.getNamespace().getId(), artifactRef.getId(), artifactRef.getVersion().getId());
            needsMetadataUpdate = true;
        }
        if (needsMetadataUpdate) {
            updateMetadata(artifactRef);
        }
    } catch (LayoutException e) {
        log.debug("Not processing file that is not an artifact: {}", e.getMessage());
    } catch (MetadataRepositoryException e) {
        log.error("Could not remove metadata during cleanup of released snapshots of {}", path, e);
    } catch (ContentAccessException e) {
        // Log via the configured logger instead of printStackTrace() so the
        // failure is captured with context, consistent with the other handlers.
        log.error("Could not access repository content during cleanup of released snapshots of {}", path, e);
    } catch (ItemNotFoundException e) {
        log.error("Could not find item to delete {}", e.getMessage(), e);
    }
}
Use of org.apache.archiva.repository.storage.StorageAsset in the Apache Archiva project.
The class RepositoryTaskAdministrationTest, method testScheduleIndexScanWithFile.
/**
 * Scheduling an index scan for a single existing file must queue exactly one
 * file-scoped indexing task (and no full repository scan task) carrying the
 * requested resource path.
 */
@Test
public void testScheduleIndexScanWithFile() throws RepositoryAdminException, TaskQueueException {
    // Arrange: a registered repository whose asset for the path exists.
    ManagedRepository managedRepo = mock(ManagedRepository.class, RETURNS_DEEP_STUBS);
    when(registry.getManagedRepository("internal")).thenReturn(managedRepo);
    StorageAsset asset = mock(StorageAsset.class);
    when(asset.getFilePath()).thenReturn(Paths.get("abc/def/ghij.pom"));
    when(asset.exists()).thenReturn(true);
    when(registry.getManagedRepository("internal").getAsset("abc/def/ghij.pom")).thenReturn(asset);

    // Act.
    taskAdministration.scheduleIndexScan("internal", "abc/def/ghij.pom");

    // Assert: one indexing task queued, no repository scan task.
    ArgumentCaptor<ArtifactIndexingTask> taskCaptor = ArgumentCaptor.forClass(ArtifactIndexingTask.class);
    verify(repositoryArchivaTaskScheduler, times(0)).queueTask(any());
    verify(indexingArchivaTaskScheduler, times(1)).queueTask(taskCaptor.capture());
    ArtifactIndexingTask queuedTask = taskCaptor.getValue();
    assertFalse(queuedTask.isExecuteOnEntireRepo());
    assertEquals("abc/def/ghij.pom", queuedTask.getResourceFile().toString());
}
Aggregations