use of org.apache.archiva.repository.content.Artifact in project archiva by apache.
the class ManagedDefaultTransferTest method testGetDefaultLayoutAlreadyPresentOlderThanRemotePolicyIgnored.
/**
 * <p>
 * Request a file that exists both locally and remotely.
 * </p>
 * <p>
 * All policies are set to IGNORE.
 * </p>
 * <p>
 * The managed file is older than the remote file.
 * </p>
 * <p>
 * The transfer should have occurred, as the managed file is older than the remote one.
 * </p>
 *
 * @throws Exception
 */
@Test
public void testGetDefaultLayoutAlreadyPresentOlderThanRemotePolicyIgnored() throws Exception {
    String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
    setupTestableManagedRepository(path);
    Path expectedFile = managedDefaultDir.resolve(path);
    Path remoteFile = Paths.get(REPOPATH_PROXIED1, path);
    // Set the managed file to be older than the remote file.
    setManagedOlderThanRemote(expectedFile, remoteFile);
    BaseRepositoryContentLayout layout = managedDefaultRepository.getLayout(BaseRepositoryContentLayout.class);
    Artifact artifact = layout.getArtifact(path);
    assertTrue(Files.exists(expectedFile));
    // Configure Connector (usually done within archiva.xml configuration)
    saveConnector(ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
            SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false);
    // Attempt the proxy fetch.
    StorageAsset downloadedFile = proxyHandler.fetchFromProxies(managedDefaultRepository.getRepository(), artifact);
    Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
    assertFileEquals(expectedFile, downloadedFile.getFilePath(), proxiedFile);
    assertNoTempFiles(expectedFile);
}
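The helper setManagedOlderThanRemote is not part of this snippet. A minimal sketch of what such a helper might do, assuming it simply rewrites last-modified timestamps so the managed copy predates the remote copy (the method body, the one-hour offset, and the extra FileTime/TimeUnit imports are assumptions, not the actual Archiva test code):

// Hypothetical sketch, not the Archiva helper: make the managed copy look older than the remote copy.
private void setManagedOlderThanRemote(Path managedFile, Path remoteFile) throws IOException {
    FileTime remoteTime = Files.getLastModifiedTime(remoteFile);
    // Push the managed file's timestamp one hour behind the remote file's timestamp.
    FileTime olderTime = FileTime.fromMillis(remoteTime.toMillis() - TimeUnit.HOURS.toMillis(1));
    Files.setLastModifiedTime(managedFile, olderTime);
    assertTrue(Files.getLastModifiedTime(managedFile).compareTo(remoteTime) < 0);
}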
use of org.apache.archiva.repository.content.Artifact in project archiva by apache.
the class ManagedDefaultTransferTest method testGetInSecondProxiedRepo.
@Test
public void testGetInSecondProxiedRepo() throws Exception {
    String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
    setupTestableManagedRepository(path);
    Path expectedFile = managedDefaultDir.resolve(path);
    BaseRepositoryContentLayout layout = managedDefaultRepository.getLayout(BaseRepositoryContentLayout.class);
    Artifact artifact = layout.getArtifact(path);
    assertNotExistsInManagedDefaultRepo(expectedFile);
    // Configure Connector (usually done within archiva.xml configuration)
    saveConnector(ID_DEFAULT_MANAGED, ID_PROXIED1, false);
    saveConnector(ID_DEFAULT_MANAGED, ID_PROXIED2, false);
    // Attempt the proxy fetch.
    StorageAsset downloadedFile = proxyHandler.fetchFromProxies(managedDefaultRepository.getRepository(), artifact);
    Path proxied2File = Paths.get(REPOPATH_PROXIED2, path);
    assertFileEquals(expectedFile, downloadedFile.getFilePath(), proxied2File);
    assertNoTempFiles(expectedFile);
}
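The test relies on the proxy handler trying the configured connectors in order and returning the artifact from the first remote repository that can supply it. A simplified sketch of that fall-through, where remotes and transferFile are hypothetical stand-ins for the real handler internals:

// Illustrative only: the real handler also applies checksum, release/snapshot and cached-failure
// policies around each transfer attempt.
StorageAsset fetchFromProxies(List<RemoteRepository> remotes, Artifact artifact) {
    for (RemoteRepository remote : remotes) {                          // connectors are tried in configuration order
        try {
            StorageAsset downloaded = transferFile(remote, artifact);  // hypothetical helper
            if (downloaded != null) {
                return downloaded;                                     // first remote that has the artifact wins
            }
        } catch (Exception e) {
            // a failure on one remote only skips that remote; the next connector is still tried
        }
    }
    return null;                                                       // not available from any proxied repository
}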
use of org.apache.archiva.repository.content.Artifact in project archiva by apache.
the class ManagedDefaultTransferTest method testGetInSecondProxiedRepoFirstFails.
@Test
public void testGetInSecondProxiedRepoFirstFails() throws Exception {
    String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
    setupTestableManagedRepository(path);
    Path expectedFile = managedDefaultDir.resolve(path);
    BaseRepositoryContentLayout layout = managedDefaultRepository.getLayout(BaseRepositoryContentLayout.class);
    Artifact artifact = layout.getArtifact(path);
    assertNotExistsInManagedDefaultRepo(expectedFile);
    // Configure Repository (usually done within archiva.xml configuration)
    saveRemoteRepositoryConfig("badproxied", "Bad Proxied", "http://bad.machine.com/repo/", "default");
    doThrow(new ResourceDoesNotExistException("transfer failed")).when(wagonMock).get(eq(path), any());
    // Configure Connector (usually done within archiva.xml configuration)
    saveConnector(ID_DEFAULT_MANAGED, "badproxied", false);
    saveConnector(ID_DEFAULT_MANAGED, ID_PROXIED2, false);
    // Attempt the proxy fetch.
    StorageAsset downloadedFile = proxyHandler.fetchFromProxies(managedDefaultRepository.getRepository(), artifact);
    verify(wagonMock, atLeastOnce()).get(eq(path), any());
    Path proxied2File = Paths.get(REPOPATH_PROXIED2, path);
    assertFileEquals(expectedFile, downloadedFile.getFilePath(), proxied2File);
    assertNoTempFiles(expectedFile);
}
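The same failover can be exercised with other Wagon failure modes. A hedged variation of the stubbing above, assuming wagonMock is a Maven Wagon mock (TransferFailedException is Wagon's exception for network-level failures):

// Hypothetical alternative stubbing: a network failure on the first proxy should likewise make
// the handler fall through to the second proxied repository.
doThrow(new TransferFailedException("connection refused")).when(wagonMock).get(eq(path), any());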
use of org.apache.archiva.repository.content.Artifact in project archiva by apache.
the class ArchivaDavResourceFactory method processRepository.
private DavResource processRepository(final DavServletRequest request, ArchivaDavResourceLocator archivaLocator,
                                      String activePrincipal, ManagedRepositoryContent managedRepositoryContent,
                                      org.apache.archiva.repository.ManagedRepository managedRepository) throws DavException {
    DavResource resource = null;
    if (isAuthorized(request, managedRepositoryContent.getId())) {
        boolean readMethod = WebdavMethodUtil.isReadMethod(request.getMethod());
        // Maven-centric part: ask for path evaluation if the version is a -SNAPSHOT
        // MRM-1846: check for a read method to prevent an issue with maven 2.2.1 and uniqueVersion=false
        String path = readMethod
                ? evaluatePathWithVersion(archivaLocator, managedRepositoryContent, request.getContextPath())
                : getLogicalResource(archivaLocator, managedRepository, false);
        if (path.startsWith("/")) {
            path = path.substring(1);
        }
        LogicalResource logicalResource = new LogicalResource(path);
        StorageAsset repoAsset = managedRepository.getAsset(path);
        // Path resourceFile = Paths.get( managedRepositoryContent.getRepoRoot(), path );
        try {
            resource = new ArchivaDavResource(repoAsset, path, managedRepository, request.getRemoteAddr(), activePrincipal,
                    request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners, scheduler);
        } catch (LayoutException e) {
            log.error("Incompatible layout: {}", e.getMessage(), e);
            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
        }
        if (WebdavMethodUtil.isReadMethod(request.getMethod())) {
            if (archivaLocator.getHref(false).endsWith("/") && !repoAsset.isContainer()) {
                // force a resource not found
                throw new DavException(HttpServletResponse.SC_NOT_FOUND, "Resource does not exist");
            } else {
                if (!resource.isCollection()) {
                    boolean previouslyExisted = repoAsset.exists();
                    boolean fromProxy = fetchContentFromProxies(managedRepository, request, logicalResource);
                    StorageAsset resourceAsset = null;
                    // legacy layout format.
                    try {
                        // Perform an adjustment of the resource to the managed
                        // repository expected path.
                        // String localResourcePath = managedRepository.getRequestInfo().toNativePath( logicalResource.getPath() );
                        resourceAsset = managedRepository.getAsset(logicalResource.getPath());
                        resource = new ArchivaDavResource(resourceAsset, logicalResource.getPath(), managedRepository,
                                request.getRemoteAddr(), activePrincipal, request.getDavSession(), archivaLocator, this,
                                mimeTypes, auditListeners, scheduler);
                    } catch (LayoutException e) {
                        if (resourceAsset == null || !resourceAsset.exists()) {
                            throw new DavException(HttpServletResponse.SC_NOT_FOUND, e);
                        }
                    }
                    if (fromProxy) {
                        String action = (previouslyExisted ? AuditEvent.MODIFY_FILE : AuditEvent.CREATE_FILE) + PROXIED_SUFFIX;
                        log.debug("Proxied artifact '{}' in repository '{}' (current user '{}')",
                                resourceAsset.getName(), managedRepositoryContent.getId(), activePrincipal);
                        triggerAuditEvent(request.getRemoteAddr(), archivaLocator.getRepositoryId(),
                                logicalResource.getPath(), action, activePrincipal);
                    }
                    if (!resourceAsset.exists()) {
                        throw new DavException(HttpServletResponse.SC_NOT_FOUND, "Resource does not exist");
                    }
                }
            }
        }
        if (request.getMethod().equals(HTTP_PUT_METHOD)) {
            String resourcePath = logicalResource.getPath();
            RepositoryRequestInfo repositoryRequestInfo = managedRepository.getRequestInfo();
            // We assume that release artifacts can be deployed only to repositories enabled for releases.
            if (managedRepositoryContent.getRepository().getActiveReleaseSchemes().contains(ReleaseScheme.RELEASE)
                    && !repositoryRequestInfo.isMetadata(resourcePath) && !repositoryRequestInfo.isSupportFile(resourcePath)) {
                // ArtifactReference artifact = null;
                Artifact artifact = null;
                try {
                    BaseRepositoryContentLayout layout = managedRepositoryContent.getLayout(BaseRepositoryContentLayout.class);
                    ContentItem artifactItem = managedRepositoryContent.toItem(resourcePath);
                    artifact = layout.adaptItem(Artifact.class, artifactItem);
                    if (!VersionUtil.isSnapshot(artifact.getVersion().getId())) {
                        // Check whether the artifact already exists and whether re-deployment to the repository is allowed.
                        if (artifactItem.exists() && managedRepositoryContent.getRepository().blocksRedeployments()) {
                            log.warn("Overwriting released artifacts in repository '{}' is not allowed.", managedRepositoryContent.getId());
                            throw new DavException(HttpServletResponse.SC_CONFLICT, "Overwriting released artifacts is not allowed.");
                        }
                    }
                } catch (LayoutException e) {
                    log.warn("Artifact path '{}' is invalid.", resourcePath);
                } catch (ContentAccessException e) {
                    e.printStackTrace();
                }
            }
            /*
             * Create parent directories that don't exist when writing a file. This actually makes this
             * implementation non-compliant with the WebDAV RFC - but we have enough knowledge about how the
             * collection is being used to do this reasonably, and some versions of Maven's WebDAV don't correctly
             * create the collections themselves.
             */
            StorageAsset rootDirectory = managedRepositoryContent.getRepository().getRoot();
            StorageAsset destDir = rootDirectory.resolve(logicalResource.getPath()).getParent();
            if (!destDir.exists()) {
                try {
                    destDir.create();
                } catch (IOException e) {
                    log.error("Could not create directory {}: {}", destDir, e.getMessage(), e);
                    throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Could not create directory " + destDir);
                }
                String relPath = PathUtil.getRelative(rootDirectory.getPath(), destDir.getPath());
                log.debug("Creating destination directory '{}' (current user '{}')", destDir.getName(), activePrincipal);
                triggerAuditEvent(request.getRemoteAddr(), managedRepositoryContent.getId(), relPath, AuditEvent.CREATE_DIR, activePrincipal);
            }
        }
    }
    return resource;
}
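The HTTP PUT branch above is where Artifact is used for deployment checks. A condensed restatement of just that guard, with the WebDAV plumbing stripped away; checkReleaseRedeployment is an invented name for illustration, not an Archiva method:

// Illustrative condensation of the redeployment guard above, not the actual Archiva code.
private void checkReleaseRedeployment(ManagedRepositoryContent content, String resourcePath) throws DavException {
    try {
        BaseRepositoryContentLayout layout = content.getLayout(BaseRepositoryContentLayout.class);
        ContentItem item = content.toItem(resourcePath);
        Artifact artifact = layout.adaptItem(Artifact.class, item);
        // Only released (non-SNAPSHOT) artifacts are protected against redeployment.
        if (!VersionUtil.isSnapshot(artifact.getVersion().getId())
                && item.exists()
                && content.getRepository().blocksRedeployments()) {
            throw new DavException(HttpServletResponse.SC_CONFLICT, "Overwriting released artifacts is not allowed.");
        }
    } catch (LayoutException e) {
        // Not a recognizable artifact path; nothing to check.
    } catch (ContentAccessException e) {
        // Access problem while checking; the original method only logs this case.
    }
}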
use of org.apache.archiva.repository.content.Artifact in project archiva by apache.
the class DaysOldRepositoryPurge method process.
@Override
public void process(String path) throws RepositoryPurgeException {
    try {
        ContentItem item = repository.toItem(path);
        Artifact artifactItem = repository.getLayout(BaseRepositoryContentLayout.class).adaptItem(Artifact.class, item);
        if (!artifactItem.exists()) {
            return;
        }
        // ArtifactReference artifact = repository.toArtifactReference( path );
        Calendar olderThanThisDate = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        olderThanThisDate.add(Calendar.DATE, -retentionPeriod);
        ArchivaItemSelector selector = ArchivaItemSelector.builder()
                .withNamespace(artifactItem.getVersion().getProject().getNamespace().getId())
                .withProjectId(artifactItem.getVersion().getProject().getId())
                .withVersion(artifactItem.getVersion().getId())
                .withClassifier("*")
                .includeRelatedArtifacts()
                .build();
        List<String> artifactVersions;
        try (Stream<? extends Artifact> stream = repository.getLayout(BaseRepositoryContentLayout.class).newArtifactStream(selector)) {
            artifactVersions = stream.map(a -> a.getArtifactVersion())
                    .filter(StringUtils::isNotEmpty)
                    .distinct()
                    .collect(Collectors.toList());
        }
        Collections.sort(artifactVersions, VersionComparator.getInstance());
        if (retentionCount > artifactVersions.size()) {
            // Done. nothing to do here. skip it.
            return;
        }
        int countToPurge = artifactVersions.size() - retentionCount;
        ArchivaItemSelector.Builder artifactSelectorBuilder = ArchivaItemSelector.builder()
                .withNamespace(artifactItem.getVersion().getProject().getNamespace().getId())
                .withProjectId(artifactItem.getVersion().getProject().getId())
                .withVersion(artifactItem.getVersion().getId())
                .withArtifactId(artifactItem.getId())
                .withClassifier("*")
                .includeRelatedArtifacts();
        Set<Artifact> artifactsToDelete = new HashSet<>();
        for (String version : artifactVersions) {
            if (countToPurge-- <= 0) {
                break;
            }
            ArchivaItemSelector artifactSelector = artifactSelectorBuilder.withArtifactVersion(version).build();
            try {
                // Is this a generic snapshot "1.0-SNAPSHOT" ?
                if (VersionUtil.isGenericSnapshot(version)) {
                    List<? extends Artifact> artifactList = repository.getLayout(BaseRepositoryContentLayout.class).getArtifacts(artifactSelector);
                    if (artifactList.size() > 0 && artifactList.get(0).getAsset().getModificationTime().toEpochMilli() < olderThanThisDate.getTimeInMillis()) {
                        artifactsToDelete.addAll(artifactList);
                    }
                } else if (VersionUtil.isUniqueSnapshot(version)) {
                    // Is this a timestamp snapshot "1.0-20070822.123456-42" ?
                    Calendar timestampCal = uniqueSnapshotToCalendar(version);
                    if (timestampCal.getTimeInMillis() < olderThanThisDate.getTimeInMillis()) {
                        artifactsToDelete.addAll(repository.getLayout(BaseRepositoryContentLayout.class).getArtifacts(artifactSelector));
                    }
                }
            } catch (IllegalArgumentException e) {
                log.error("Bad selector for artifact: {}", e.getMessage(), e);
                // continue
            }
        }
        purge(artifactsToDelete);
    } catch (LayoutException e) {
        log.debug("Not processing file that is not an artifact: {}", e.getMessage());
    } catch (ContentAccessException e) {
        e.printStackTrace();
    }
}
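uniqueSnapshotToCalendar is not shown here. A plausible sketch, assuming the timestamp is encoded between the last two dashes of the version as yyyyMMdd.HHmmss (the usual Maven unique-snapshot layout, e.g. 1.0-20070822.123456-42) and that java.text.SimpleDateFormat and ParseException are available alongside the Calendar and TimeZone classes already used above:

// Hypothetical parser for a Maven unique-snapshot version such as "1.0-20070822.123456-42".
// Assumes the yyyyMMdd.HHmmss timestamp sits between the last two '-' separators.
private Calendar uniqueSnapshotToCalendar(String version) {
    int buildNumberSep = version.lastIndexOf('-');
    int timestampSep = version.lastIndexOf('-', buildNumberSep - 1);
    String timestamp = version.substring(timestampSep + 1, buildNumberSep); // e.g. "20070822.123456"
    Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    try {
        SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd.HHmmss");
        format.setTimeZone(TimeZone.getTimeZone("UTC"));
        cal.setTime(format.parse(timestamp));
    } catch (ParseException e) {
        // Fall back to "now", which effectively exempts the version from age-based purging.
    }
    return cal;
}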