Example use of org.apache.archiva.repository.content.BaseRepositoryContentLayout in the Apache Archiva project — class ManagedDefaultTransferTest, method testGetInSecondProxiedRepo:
@Test
public void testGetInSecondProxiedRepo() throws Exception {
    final String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
    setupTestableManagedRepository(path);

    final Path expectedTarget = managedDefaultDir.resolve(path);
    assertNotExistsInManagedDefaultRepo(expectedTarget);

    final BaseRepositoryContentLayout contentLayout =
        managedDefaultRepository.getLayout(BaseRepositoryContentLayout.class);
    final Artifact requestedArtifact = contentLayout.getArtifact(path);

    // Wire up both proxy connectors (normally configured via archiva.xml).
    saveConnector(ID_DEFAULT_MANAGED, ID_PROXIED1, false);
    saveConnector(ID_DEFAULT_MANAGED, ID_PROXIED2, false);

    // Fetch through the proxy chain; this artifact is served by the second proxy.
    final StorageAsset fetched =
        proxyHandler.fetchFromProxies(managedDefaultRepository.getRepository(), requestedArtifact);

    // Downloaded content must match the copy held by the second proxied repository.
    final Path sourceInProxy2 = Paths.get(REPOPATH_PROXIED2, path);
    assertFileEquals(expectedTarget, fetched.getFilePath(), sourceInProxy2);
    assertNoTempFiles(expectedTarget);
}
Example use of org.apache.archiva.repository.content.BaseRepositoryContentLayout in the Apache Archiva project — class ManagedDefaultTransferTest, method testGetInSecondProxiedRepoFirstFails:
@Test
public void testGetInSecondProxiedRepoFirstFails() throws Exception {
    final String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
    setupTestableManagedRepository(path);

    final Path expectedTarget = managedDefaultDir.resolve(path);
    assertNotExistsInManagedDefaultRepo(expectedTarget);

    final BaseRepositoryContentLayout contentLayout =
        managedDefaultRepository.getLayout(BaseRepositoryContentLayout.class);
    final Artifact requestedArtifact = contentLayout.getArtifact(path);

    // Register a remote repository whose transfers always fail, backed by a wagon
    // mock that throws on every get() (normally configured via archiva.xml).
    saveRemoteRepositoryConfig("badproxied", "Bad Proxied", "http://bad.machine.com/repo/", "default");
    doThrow(new ResourceDoesNotExistException("transfer failed")).when(wagonMock).get(eq(path), any());

    // Connector order matters: the failing proxy is consulted before the good one.
    saveConnector(ID_DEFAULT_MANAGED, "badproxied", false);
    saveConnector(ID_DEFAULT_MANAGED, ID_PROXIED2, false);

    final StorageAsset fetched =
        proxyHandler.fetchFromProxies(managedDefaultRepository.getRepository(), requestedArtifact);

    // The bad proxy must actually have been attempted before falling through.
    verify(wagonMock, atLeastOnce()).get(eq(path), any());

    // Downloaded content must match the copy held by the second proxied repository.
    final Path sourceInProxy2 = Paths.get(REPOPATH_PROXIED2, path);
    assertFileEquals(expectedTarget, fetched.getFilePath(), sourceInProxy2);
    assertNoTempFiles(expectedTarget);
}
Example use of org.apache.archiva.repository.content.BaseRepositoryContentLayout in the Apache Archiva project — class ErrorHandlingTest, method confirmFailures:
/**
 * Fetches {@code path} through the proxy handler and asserts that the download fails
 * with a {@link ProxyDownloadException} reporting exactly the given repository ids.
 *
 * @param path repository path of the artifact expected to fail
 * @param ids  ids of the remote repositories expected in the exception's failure map
 * @throws LayoutException if the path cannot be parsed by the repository layout
 */
private void confirmFailures(String path, String[] ids) throws LayoutException {
    // Attempt the proxy fetch; it must throw rather than return a file.
    StorageAsset downloadedFile = null;
    try {
        BaseRepositoryContentLayout layout = managedDefaultRepository.getLayout(BaseRepositoryContentLayout.class);
        downloadedFile = proxyHandler.fetchFromProxies(managedDefaultRepository.getRepository(), layout.getArtifact(path));
        fail("Proxy should not have succeeded");
    } catch (ProxyDownloadException e) {
        // Exactly the expected repositories failed — no more, no fewer.
        assertEquals(ids.length, e.getFailures().size());
        for (String id : ids) {
            // containsKey is the idiomatic (and cheaper) form of keySet().contains(id).
            assertTrue(e.getFailures().containsKey(id));
        }
    }
    assertNotDownloaded(downloadedFile);
}
Example use of org.apache.archiva.repository.content.BaseRepositoryContentLayout in the Apache Archiva project — class DefaultRepositoriesService, method removeProjectVersion:
/**
 * Removes a project version from the given repository: deletes the version directory
 * on disk (via the repository layout) and purges the corresponding entries from the
 * metadata repository.
 *
 * @param repositoryId id of the managed repository to delete from
 * @param namespace    group id of the project
 * @param projectId    artifact id of the project
 * @param version      version to remove (generic snapshots handled here; others delegate
 *                     to {@code deleteArtifact})
 * @return {@link ActionStatus#SUCCESS} when the removal completed
 * @throws ArchivaRestServiceException on missing/invalid parameters (400), missing
 *                                     permission (403), or repository/metadata errors (500)
 */
@Override
public ActionStatus removeProjectVersion(String repositoryId, String namespace, String projectId, String version) throws ArchivaRestServiceException {
    // If this is not a generic snapshot we can use the standard artifact deletion path.
    if (!VersionUtil.isGenericSnapshot(version)) {
        Artifact artifact = new Artifact(namespace, projectId, version);
        artifact.setRepositoryId(repositoryId);
        artifact.setContext(repositoryId);
        return deleteArtifact(artifact);
    }
    if (StringUtils.isEmpty(repositoryId)) {
        throw new ArchivaRestServiceException("repositoryId cannot be null", 400, null);
    }
    if (!getPermissionStatus(repositoryId).isAuthorizedToDeleteArtifacts()) {
        throw new ArchivaRestServiceException("not authorized to delete artifacts", 403, null);
    }
    if (StringUtils.isEmpty(namespace)) {
        throw new ArchivaRestServiceException("groupId cannot be null", 400, null);
    }
    if (StringUtils.isEmpty(projectId)) {
        throw new ArchivaRestServiceException("artifactId cannot be null", 400, null);
    }
    if (StringUtils.isEmpty(version)) {
        throw new ArchivaRestServiceException("version cannot be null", 400, null);
    }
    RepositorySession repositorySession;
    try {
        repositorySession = repositorySessionFactory.createSession();
    } catch (MetadataRepositoryException e) {
        // Fail fast: proceeding with a null session previously caused an NPE in the
        // try/finally below (printStackTrace swallowed the error).
        throw new ArchivaRestServiceException("Could not create metadata session: " + e.getMessage(), 500, e);
    }
    try {
        ManagedRepositoryContent repository = getManagedRepositoryContent(repositoryId);
        BaseRepositoryContentLayout layout = repository.getLayout(BaseRepositoryContentLayout.class);
        ArchivaItemSelector selector = ArchivaItemSelector.builder().withNamespace(namespace).withProjectId(projectId).withVersion(version).build();
        Version versionItem = layout.getVersion(selector);
        // Delete the on-disk version directory if it is present.
        if (versionItem != null && versionItem.exists()) {
            repository.deleteItem(versionItem);
        }
        // Purge all timestamped artifacts of this version from the metadata store,
        // then drop the project-version record itself.
        MetadataRepository metadataRepository = repositorySession.getRepository();
        Collection<ArtifactMetadata> artifacts = metadataRepository.getArtifacts(repositorySession, repositoryId, namespace, projectId, version);
        for (ArtifactMetadata artifactMetadata : artifacts) {
            metadataRepository.removeTimestampedArtifact(repositorySession, artifactMetadata, version);
        }
        metadataRepository.removeProjectVersion(repositorySession, repositoryId, namespace, projectId, version);
    } catch (MetadataRepositoryException | MetadataResolutionException | RepositoryException | ItemNotFoundException | LayoutException e) {
        throw new ArchivaRestServiceException("Repository exception: " + e.getMessage(), 500, e);
    } finally {
        // Always persist and release the session, even when the deletion failed.
        try {
            repositorySession.save();
        } catch (MetadataSessionException e) {
            log.error("Session save failed {}", e.getMessage());
        }
        repositorySession.close();
    }
    return ActionStatus.SUCCESS;
}
Example use of org.apache.archiva.repository.content.BaseRepositoryContentLayout in the Apache Archiva project — class ArchivaDavResourceFactory, method processRepository:
/**
 * Resolves a WebDAV resource inside a managed repository for the current request.
 * For read methods, missing content may be fetched from configured proxies first;
 * for PUT, release re-deployment rules are enforced and missing parent collections
 * are created.
 *
 * @param request                  the incoming DAV request
 * @param archivaLocator           locator describing the requested repository path
 * @param activePrincipal          the authenticated user (for auditing)
 * @param managedRepositoryContent content layer of the target managed repository
 * @param managedRepository        the target managed repository
 * @return the resolved DAV resource, or {@code null} when not authorized
 * @throws DavException on authorization failure, missing resource, layout problems,
 *                      or I/O errors while creating parent directories
 */
private DavResource processRepository(final DavServletRequest request, ArchivaDavResourceLocator archivaLocator, String activePrincipal, ManagedRepositoryContent managedRepositoryContent, org.apache.archiva.repository.ManagedRepository managedRepository) throws DavException {
    DavResource resource = null;
    if (isAuthorized(request, managedRepositoryContent.getId())) {
        boolean readMethod = WebdavMethodUtil.isReadMethod(request.getMethod());
        // Maven Centric part ask evaluation if -SNAPSHOT
        // MRM-1846 test if read method to prevent issue with maven 2.2.1 and uniqueVersion false
        String path = readMethod ? evaluatePathWithVersion(archivaLocator, managedRepositoryContent, request.getContextPath()) : getLogicalResource(archivaLocator, managedRepository, false);
        if (path.startsWith("/")) {
            path = path.substring(1);
        }
        LogicalResource logicalResource = new LogicalResource(path);
        StorageAsset repoAsset = managedRepository.getAsset(path);
        // Path resourceFile = Paths.get( managedRepositoryContent.getRepoRoot(), path );
        try {
            resource = new ArchivaDavResource(repoAsset, path, managedRepository, request.getRemoteAddr(), activePrincipal, request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners, scheduler);
        } catch (LayoutException e) {
            log.error("Incompatible layout: {}", e.getMessage(), e);
            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
        }
        if (WebdavMethodUtil.isReadMethod(request.getMethod())) {
            if (archivaLocator.getHref(false).endsWith("/") && !repoAsset.isContainer()) {
                // A trailing slash requests a collection; a non-container asset is a mismatch,
                // so force a resource not found
                throw new DavException(HttpServletResponse.SC_NOT_FOUND, "Resource does not exist");
            } else {
                if (!resource.isCollection()) {
                    boolean previouslyExisted = repoAsset.exists();
                    // Attempt to fill in missing content from the configured proxies.
                    boolean fromProxy = fetchContentFromProxies(managedRepository, request, logicalResource);
                    StorageAsset resourceAsset = null;
                    // legacy layout format.
                    try {
                        // Perform an adjustment of the resource to the managed
                        // repository expected path.
                        // String localResourcePath = managedRepository.getRequestInfo().toNativePath( logicalResource.getPath() );
                        resourceAsset = managedRepository.getAsset(logicalResource.getPath());
                        resource = new ArchivaDavResource(resourceAsset, logicalResource.getPath(), managedRepository, request.getRemoteAddr(), activePrincipal, request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners, scheduler);
                    } catch (LayoutException e) {
                        // Only fatal when the asset is genuinely absent; otherwise keep the
                        // previously-constructed resource.
                        if (resourceAsset == null || !resourceAsset.exists()) {
                            throw new DavException(HttpServletResponse.SC_NOT_FOUND, e);
                        }
                    }
                    if (fromProxy) {
                        String action = (previouslyExisted ? AuditEvent.MODIFY_FILE : AuditEvent.CREATE_FILE) + PROXIED_SUFFIX;
                        log.debug("Proxied artifact '{}' in repository '{}' (current user '{}')", resourceAsset.getName(), managedRepositoryContent.getId(), activePrincipal);
                        triggerAuditEvent(request.getRemoteAddr(), archivaLocator.getRepositoryId(), logicalResource.getPath(), action, activePrincipal);
                    }
                    if (!resourceAsset.exists()) {
                        throw new DavException(HttpServletResponse.SC_NOT_FOUND, "Resource does not exist");
                    }
                }
            }
        }
        if (request.getMethod().equals(HTTP_PUT_METHOD)) {
            String resourcePath = logicalResource.getPath();
            RepositoryRequestInfo repositoryRequestInfo = managedRepository.getRequestInfo();
            // we suppose that release-artifacts can be deployed only to repos enabled for releases
            if (managedRepositoryContent.getRepository().getActiveReleaseSchemes().contains(ReleaseScheme.RELEASE) && !repositoryRequestInfo.isMetadata(resourcePath) && !repositoryRequestInfo.isSupportFile(resourcePath)) {
                // ArtifactReference artifact = null;
                Artifact artifact = null;
                try {
                    BaseRepositoryContentLayout layout = managedRepositoryContent.getLayout(BaseRepositoryContentLayout.class);
                    ContentItem artifactItem = managedRepositoryContent.toItem(resourcePath);
                    artifact = layout.adaptItem(Artifact.class, artifactItem);
                    if (!VersionUtil.isSnapshot(artifact.getVersion().getId())) {
                        // check if artifact already exists and if artifact re-deployment to the repository is allowed
                        if (artifactItem.exists() && managedRepositoryContent.getRepository().blocksRedeployments()) {
                            log.warn("Overwriting released artifacts in repository '{}' is not allowed.", managedRepositoryContent.getId());
                            throw new DavException(HttpServletResponse.SC_CONFLICT, "Overwriting released artifacts is not allowed.");
                        }
                    }
                } catch (LayoutException e) {
                    log.warn("Artifact path '{}' is invalid.", resourcePath);
                } catch (ContentAccessException e) {
                    // Best-effort check: log (was printStackTrace) and fall through to the upload.
                    log.error("Could not access content item for '{}': {}", resourcePath, e.getMessage(), e);
                }
            }
            /*
             * Create parent directories that don't exist when writing a file This actually makes this
             * implementation not compliant to the WebDAV RFC - but we have enough knowledge about how the
             * collection is being used to do this reasonably and some versions of Maven's WebDAV don't correctly
             * create the collections themselves.
             */
            StorageAsset rootDirectory = managedRepositoryContent.getRepository().getRoot();
            StorageAsset destDir = rootDirectory.resolve(logicalResource.getPath()).getParent();
            if (!destDir.exists()) {
                try {
                    destDir.create();
                } catch (IOException e) {
                    log.error("Could not create directory {}: {}", destDir, e.getMessage(), e);
                    throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Could not create directory " + destDir);
                }
                String relPath = PathUtil.getRelative(rootDirectory.getPath(), destDir.getPath());
                log.debug("Creating destination directory '{}' (current user '{}')", destDir.getName(), activePrincipal);
                triggerAuditEvent(request.getRemoteAddr(), managedRepositoryContent.getId(), relPath, AuditEvent.CREATE_DIR, activePrincipal);
            }
        }
    }
    return resource;
}
Aggregations