
Example 16 with ArchivaRepositoryMetadata

Use of org.apache.archiva.model.ArchivaRepositoryMetadata in project archiva by apache.

From the class MetadataTools, the method updateMetadata:

public void updateMetadata(ManagedRepositoryContent managedRepository, String logicalResource) throws RepositoryMetadataException {
    final Path metadataFile = Paths.get(managedRepository.getRepoRoot(), logicalResource);
    ArchivaRepositoryMetadata metadata = null;
    // Gather and merge all metadata available
    List<ArchivaRepositoryMetadata> metadatas = getMetadatasForManagedRepository(managedRepository, logicalResource);
    for (ArchivaRepositoryMetadata proxiedMetadata : metadatas) {
        if (metadata == null) {
            metadata = proxiedMetadata;
            continue;
        }
        metadata = RepositoryMetadataMerge.merge(metadata, proxiedMetadata);
    }
    if (metadata == null) {
        log.debug("No metadata to update for {}", logicalResource);
        return;
    }
    Set<String> availableVersions = new HashSet<String>();
    List<String> metadataAvailableVersions = metadata.getAvailableVersions();
    if (metadataAvailableVersions != null) {
        availableVersions.addAll(metadataAvailableVersions);
    }
    availableVersions = findPossibleVersions(availableVersions, metadataFile.getParent());
    if (availableVersions.size() > 0) {
        updateMetadataVersions(availableVersions, metadata);
    }
    RepositoryMetadataWriter.write(metadata, metadataFile);
    ChecksummedFile checksum = new ChecksummedFile(metadataFile);
    checksum.fixChecksums(algorithms);
}
Also used : Path(java.nio.file.Path) ArchivaRepositoryMetadata(org.apache.archiva.model.ArchivaRepositoryMetadata) ChecksummedFile(org.apache.archiva.checksum.ChecksummedFile) HashSet(java.util.HashSet) LinkedHashSet(java.util.LinkedHashSet)
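The updateMetadata method above folds every available metadata document into a single result: the first document seeds the accumulator and each subsequent one is merged in. A minimal, self-contained sketch of that fold pattern follows; the VersionList class is a made-up stand-in for ArchivaRepositoryMetadata that tracks only available versions, and its merge method only approximates what RepositoryMetadataMerge.merge does.

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Hypothetical stand-in for ArchivaRepositoryMetadata; only the version list matters here.
class VersionList {
    final Set<String> versions = new LinkedHashSet<>();

    VersionList(List<String> initial) {
        versions.addAll(initial);
    }

    // Keeps the union of available versions, roughly what RepositoryMetadataMerge.merge does for this field.
    static VersionList merge(VersionList target, VersionList source) {
        target.versions.addAll(source.versions);
        return target;
    }
}

class MergeSketch {
    static VersionList mergeAll(List<VersionList> candidates) {
        VersionList merged = null;
        for (VersionList candidate : candidates) {
            // The first document becomes the seed; later ones are folded in.
            merged = (merged == null) ? candidate : VersionList.merge(merged, candidate);
        }
        // Null when there was nothing to merge, mirroring the early return in updateMetadata.
        return merged;
    }

    public static void main(String[] args) {
        List<VersionList> fromProxies = new ArrayList<>();
        fromProxies.add(new VersionList(List.of("1.0", "1.1")));
        fromProxies.add(new VersionList(List.of("1.1", "2.0-SNAPSHOT")));
        System.out.println(mergeAll(fromProxies).versions); // [1.0, 1.1, 2.0-SNAPSHOT]
    }
}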

Example 17 with ArchivaRepositoryMetadata

Use of org.apache.archiva.model.ArchivaRepositoryMetadata in project archiva by apache.

From the class ArchivaDavResourceFactory, the method createResource:

@Override
public DavResource createResource(final DavResourceLocator locator, final DavServletRequest request, final DavServletResponse response) throws DavException {
    ArchivaDavResourceLocator archivaLocator = checkLocatorIsInstanceOfRepositoryLocator(locator);
    RepositoryGroupConfiguration repoGroupConfig = archivaConfiguration.getConfiguration().getRepositoryGroupsAsMap().get(archivaLocator.getRepositoryId());
    String activePrincipal = getActivePrincipal(request);
    List<String> resourcesInAbsolutePath = new ArrayList<>();
    boolean readMethod = WebdavMethodUtil.isReadMethod(request.getMethod());
    DavResource resource;
    if (repoGroupConfig != null) {
        if (!readMethod) {
            throw new DavException(HttpServletResponse.SC_METHOD_NOT_ALLOWED, "Write method not allowed for repository groups.");
        }
        log.debug("Repository group '{}' accessed by '{}", repoGroupConfig.getId(), activePrincipal);
        // handle browse requests for virtual repos
        if (getLogicalResource(archivaLocator, null, true).endsWith("/")) {
            DavResource davResource = getResourceFromGroup(request, repoGroupConfig.getRepositories(), archivaLocator, repoGroupConfig);
            setHeaders(response, locator, davResource, true);
            return davResource;
        } else {
            // make a copy to avoid potential concurrent modifications (eg. by configuration)
            // TODO: ultimately, locking might be more efficient than copying in this fashion since updates are
            // infrequent
            List<String> repositories = new ArrayList<>(repoGroupConfig.getRepositories());
            resource = processRepositoryGroup(request, archivaLocator, repositories, activePrincipal, resourcesInAbsolutePath, repoGroupConfig);
        }
    } else {
        try {
            RemoteRepository remoteRepository = remoteRepositoryAdmin.getRemoteRepository(archivaLocator.getRepositoryId());
            if (remoteRepository != null) {
                String logicalResource = getLogicalResource(archivaLocator, null, false);
                IndexingContext indexingContext = remoteRepositoryAdmin.createIndexContext(remoteRepository);
                Path resourceFile = StringUtils.equals(logicalResource, "/") ? Paths.get(indexingContext.getIndexDirectoryFile().getParent()) : Paths.get(indexingContext.getIndexDirectoryFile().getParent(), logicalResource);
                resource = new ArchivaDavResource(resourceFile.toAbsolutePath().toString(),
                    locator.getResourcePath(), null, request.getRemoteAddr(), activePrincipal,
                    request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners,
                    scheduler, fileLockManager);
                setHeaders(response, locator, resource, false);
                return resource;
            }
        } catch (RepositoryAdminException e) {
            log.debug("RepositoryException remote repository with d'{}' not found, msg: {}", archivaLocator.getRepositoryId(), e.getMessage());
        }
        ManagedRepository repo = repositoryRegistry.getManagedRepository(archivaLocator.getRepositoryId());
        if (repo == null) {
            throw new DavException(HttpServletResponse.SC_NOT_FOUND, "Invalid repository: " + archivaLocator.getRepositoryId());
        }
        ManagedRepositoryContent managedRepositoryContent = repo.getContent();
        if (managedRepositoryContent == null) {
            log.error("Inconsistency detected. Repository content not found for '{}'", archivaLocator.getRepositoryId());
            throw new DavException(HttpServletResponse.SC_NOT_FOUND, "Invalid repository: " + archivaLocator.getRepositoryId());
        }
        log.debug("Managed repository '{}' accessed by '{}'", managedRepositoryContent.getId(), activePrincipal);
        resource = processRepository(request, archivaLocator, activePrincipal, managedRepositoryContent, repo);
        String logicalResource = getLogicalResource(archivaLocator, null, false);
        resourcesInAbsolutePath.add(Paths.get(managedRepositoryContent.getRepoRoot(), logicalResource).toAbsolutePath().toString());
    }
    String requestedResource = request.getRequestURI();
    // merge metadata only when requested via the repo group
    if ((repositoryRequest.isMetadata(requestedResource) || repositoryRequest.isMetadataSupportFile(requestedResource)) && repoGroupConfig != null) {
        // this should only be at the project level not version level!
        if (isProjectReference(requestedResource)) {
            ArchivaDavResource res = (ArchivaDavResource) resource;
            String filePath = StringUtils.substringBeforeLast(res.getLocalResource().toAbsolutePath().toString().replace('\\', '/'), "/");
            filePath = filePath + "/maven-metadata-" + repoGroupConfig.getId() + ".xml";
            // for MRM-872 handle checksums of the merged metadata files
            if (repositoryRequest.isSupportFile(requestedResource)) {
                Path metadataChecksum = Paths.get(filePath + "." + StringUtils.substringAfterLast(requestedResource, "."));
                if (Files.exists(metadataChecksum)) {
                    LogicalResource logicalResource = new LogicalResource(getLogicalResource(archivaLocator, null, false));
                    resource = new ArchivaDavResource(metadataChecksum.toAbsolutePath().toString(), logicalResource.getPath(), null, request.getRemoteAddr(), activePrincipal, request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners, scheduler, fileLockManager);
                }
            } else {
                if (resourcesInAbsolutePath != null && resourcesInAbsolutePath.size() > 1) {
                    // merge the metadata of all repos under group
                    ArchivaRepositoryMetadata mergedMetadata = new ArchivaRepositoryMetadata();
                    for (String resourceAbsPath : resourcesInAbsolutePath) {
                        try {
                            Path metadataFile = Paths.get(resourceAbsPath);
                            ArchivaRepositoryMetadata repoMetadata = MavenMetadataReader.read(metadataFile);
                            mergedMetadata = RepositoryMetadataMerge.merge(mergedMetadata, repoMetadata);
                        } catch (XMLException e) {
                            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error occurred while reading metadata file.");
                        } catch (RepositoryMetadataException r) {
                            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error occurred while merging metadata file.");
                        }
                    }
                    try {
                        Path resourceFile = writeMergedMetadataToFile(mergedMetadata, filePath);
                        LogicalResource logicalResource = new LogicalResource(getLogicalResource(archivaLocator, null, false));
                        resource = new ArchivaDavResource(resourceFile.toAbsolutePath().toString(), logicalResource.getPath(), null, request.getRemoteAddr(), activePrincipal, request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners, scheduler, fileLockManager);
                    } catch (RepositoryMetadataException r) {
                        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error occurred while writing metadata file.");
                    } catch (IOException ie) {
                        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error occurred while generating checksum files.");
                    } catch (DigesterException de) {
                        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error occurred while generating checksum files." + de.getMessage());
                    }
                }
            }
        }
    }
    setHeaders(response, locator, resource, false);
    // compatibility with MRM-440 to ensure browsing the repository works ok
    if (resource.isCollection() && !request.getRequestURI().endsWith("/")) {
        throw new BrowserRedirectException(resource.getHref());
    }
    resource.addLockManager(lockManager);
    return resource;
}
Also used : RepositoryGroupConfiguration(org.apache.archiva.configuration.RepositoryGroupConfiguration) Path(java.nio.file.Path) DavResource(org.apache.jackrabbit.webdav.DavResource) ManagedRepository(org.apache.archiva.repository.ManagedRepository) DavException(org.apache.jackrabbit.webdav.DavException) ArrayList(java.util.ArrayList) RemoteRepository(org.apache.archiva.admin.model.beans.RemoteRepository) IOException(java.io.IOException) RepositoryAdminException(org.apache.archiva.admin.model.RepositoryAdminException) XMLException(org.apache.archiva.xml.XMLException) RepositoryMetadataException(org.apache.archiva.repository.metadata.RepositoryMetadataException) ManagedRepositoryContent(org.apache.archiva.repository.ManagedRepositoryContent) DigesterException(org.codehaus.plexus.digest.DigesterException) IndexingContext(org.apache.maven.index.context.IndexingContext) ArchivaRepositoryMetadata(org.apache.archiva.model.ArchivaRepositoryMetadata)
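Two details in createResource are easy to miss: metadata is merged only when the request comes in through a repository group, and for MRM-872 a request for a checksum support file (maven-metadata.xml.sha1 or .md5) is redirected to the checksum of the merged maven-metadata-<repositoryGroupId>.xml. A hedged sketch of that path mapping follows; the class and method names are invented for illustration and this is not Archiva's API.

import java.nio.file.Path;
import java.nio.file.Paths;

class MergedMetadataPathSketch {
    // Maps a requested support file (e.g. ".../maven-metadata.xml.sha1") to the checksum file
    // of the merged group metadata, mirroring the MRM-872 branch above. Names are illustrative.
    static Path mergedChecksumPath(String artifactDirectory, String repoGroupId, String requestedResource) {
        // Extension of the requested support file: "sha1", "md5", ...
        String extension = requestedResource.substring(requestedResource.lastIndexOf('.') + 1);
        String mergedFileName = "maven-metadata-" + repoGroupId + ".xml." + extension;
        return Paths.get(artifactDirectory, mergedFileName);
    }

    public static void main(String[] args) {
        Path checksum = mergedChecksumPath("/repos/group/org/example/lib", "internal",
                "/org/example/lib/maven-metadata.xml.sha1");
        // Prints: /repos/group/org/example/lib/maven-metadata-internal.xml.sha1
        System.out.println(checksum);
    }
}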

Example 18 with ArchivaRepositoryMetadata

Use of org.apache.archiva.model.ArchivaRepositoryMetadata in project archiva by apache.

From the class DefaultFileUploadService, the method updateProjectMetadata:

/**
 * Update artifact level metadata. If it does not exist, create the metadata and fix checksums if necessary.
 */
private void updateProjectMetadata(String targetPath, Date lastUpdatedTimestamp, String timestamp, int buildNumber, boolean fixChecksums, FileMetadata fileMetadata, String groupId, String artifactId, String version, String packaging) throws RepositoryMetadataException {
    List<String> availableVersions = new ArrayList<>();
    String latestVersion = version;
    Path projectDir = Paths.get(targetPath).getParent();
    Path projectMetadataFile = projectDir.resolve(MetadataTools.MAVEN_METADATA);
    ArchivaRepositoryMetadata projectMetadata = getMetadata(projectMetadataFile);
    if (Files.exists(projectMetadataFile)) {
        availableVersions = projectMetadata.getAvailableVersions();
        Collections.sort(availableVersions, VersionComparator.getInstance());
        if (!availableVersions.contains(version)) {
            availableVersions.add(version);
        }
        latestVersion = availableVersions.get(availableVersions.size() - 1);
    } else {
        availableVersions.add(version);
        projectMetadata.setGroupId(groupId);
        projectMetadata.setArtifactId(artifactId);
    }
    if (projectMetadata.getGroupId() == null) {
        projectMetadata.setGroupId(groupId);
    }
    if (projectMetadata.getArtifactId() == null) {
        projectMetadata.setArtifactId(artifactId);
    }
    projectMetadata.setLatestVersion(latestVersion);
    projectMetadata.setLastUpdatedTimestamp(lastUpdatedTimestamp);
    projectMetadata.setAvailableVersions(availableVersions);
    if (!VersionUtil.isSnapshot(version)) {
        projectMetadata.setReleasedVersion(latestVersion);
    }
    RepositoryMetadataWriter.write(projectMetadata, projectMetadataFile);
    if (fixChecksums) {
        fixChecksums(projectMetadataFile);
    }
}
Also used : Path(java.nio.file.Path) CopyOnWriteArrayList(java.util.concurrent.CopyOnWriteArrayList) ArrayList(java.util.ArrayList) ArchivaRepositoryMetadata(org.apache.archiva.model.ArchivaRepositoryMetadata)
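updateProjectMetadata keeps availableVersions sorted and treats the last element as the latest version, recording a releasedVersion only for non-snapshot uploads. A small sketch of that selection logic is shown below; it substitutes natural string ordering for Archiva's VersionComparator, which is a simplification that breaks down for versions such as 1.10 vs 1.9.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

class LatestVersionSketch {
    // Natural string order stands in for Archiva's VersionComparator here (a simplification:
    // a plain string sort places "1.10" before "1.9").
    static String latest(List<String> knownVersions, String uploadedVersion) {
        List<String> versions = new ArrayList<>(knownVersions);
        if (!versions.contains(uploadedVersion)) {
            versions.add(uploadedVersion);
        }
        Collections.sort(versions);
        // The last element after sorting is taken as the latest, as in updateProjectMetadata.
        return versions.get(versions.size() - 1);
    }

    public static void main(String[] args) {
        System.out.println(latest(List.of("1.0", "1.1"), "1.2")); // 1.2
    }
}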

Example 19 with ArchivaRepositoryMetadata

Use of org.apache.archiva.model.ArchivaRepositoryMetadata in project archiva by apache.

From the class DefaultFileUploadService, the method saveFile:

protected void saveFile(String repositoryId, FileMetadata fileMetadata, boolean generatePom, String groupId, String artifactId, String version, String packaging) throws ArchivaRestServiceException {
    try {
        ManagedRepository repoConfig = managedRepositoryAdmin.getManagedRepository(repositoryId);
        ArtifactReference artifactReference = new ArtifactReference();
        artifactReference.setArtifactId(artifactId);
        artifactReference.setGroupId(groupId);
        artifactReference.setVersion(version);
        artifactReference.setClassifier(fileMetadata.getClassifier());
        artifactReference.setType(StringUtils.isEmpty(fileMetadata.getPackaging()) ? packaging : fileMetadata.getPackaging());
        ManagedRepositoryContent repository = repositoryFactory.getManagedRepositoryContent(repositoryId);
        String artifactPath = repository.toPath(artifactReference);
        int lastIndex = artifactPath.lastIndexOf('/');
        String path = artifactPath.substring(0, lastIndex);
        Path targetPath = Paths.get(repoConfig.getLocation(), path);
        log.debug("artifactPath: {} found targetPath: {}", artifactPath, targetPath);
        Date lastUpdatedTimestamp = Calendar.getInstance().getTime();
        int newBuildNumber = -1;
        String timestamp = null;
        Path versionMetadataFile = targetPath.resolve(MetadataTools.MAVEN_METADATA);
        ArchivaRepositoryMetadata versionMetadata = getMetadata(versionMetadataFile);
        if (VersionUtil.isSnapshot(version)) {
            TimeZone timezone = TimeZone.getTimeZone("UTC");
            DateFormat fmt = new SimpleDateFormat("yyyyMMdd.HHmmss");
            fmt.setTimeZone(timezone);
            timestamp = fmt.format(lastUpdatedTimestamp);
            if (versionMetadata.getSnapshotVersion() != null) {
                newBuildNumber = versionMetadata.getSnapshotVersion().getBuildNumber() + 1;
            } else {
                newBuildNumber = 1;
            }
        }
        if (!Files.exists(targetPath)) {
            Files.createDirectories(targetPath);
        }
        String filename = artifactPath.substring(lastIndex + 1);
        if (VersionUtil.isSnapshot(version)) {
            filename = filename.replaceAll(VersionUtil.SNAPSHOT, timestamp + "-" + newBuildNumber);
        }
        boolean fixChecksums = !(archivaAdministration.getKnownContentConsumers().contains("create-missing-checksums"));
        try {
            Path targetFile = targetPath.resolve(filename);
            if (Files.exists(targetFile) && !VersionUtil.isSnapshot(version) && repoConfig.isBlockRedeployments()) {
                throw new ArchivaRestServiceException("Overwriting released artifacts in repository '" + repoConfig.getId() + "' is not allowed.", Response.Status.BAD_REQUEST.getStatusCode(), null);
            } else {
                copyFile(Paths.get(fileMetadata.getServerFileName()), targetPath, filename, fixChecksums);
                triggerAuditEvent(repository.getId(), path + "/" + filename, AuditEvent.UPLOAD_FILE);
                queueRepositoryTask(repository.getId(), targetFile);
            }
        } catch (IOException ie) {
            log.error("IOException copying file: {}", ie.getMessage(), ie);
            throw new ArchivaRestServiceException("Overwriting released artifacts in repository '" + repoConfig.getId() + "' is not allowed.", Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie);
        }
        if (generatePom) {
            String pomFilename = filename;
            if (StringUtils.isNotEmpty(fileMetadata.getClassifier())) {
                pomFilename = StringUtils.remove(pomFilename, "-" + fileMetadata.getClassifier());
            }
            pomFilename = FilenameUtils.removeExtension(pomFilename) + ".pom";
            try {
                Path generatedPomFile = createPom(targetPath, pomFilename, fileMetadata, groupId, artifactId, version, packaging);
                triggerAuditEvent(repoConfig.getId(), path + "/" + pomFilename, AuditEvent.UPLOAD_FILE);
                if (fixChecksums) {
                    fixChecksums(generatedPomFile);
                }
                queueRepositoryTask(repoConfig.getId(), generatedPomFile);
            } catch (IOException ie) {
                throw new ArchivaRestServiceException("Error encountered while writing pom file: " + ie.getMessage(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie);
            }
        }
        // explicitly update only if metadata-updater consumer is not enabled!
        if (!archivaAdministration.getKnownContentConsumers().contains("metadata-updater")) {
            updateProjectMetadata(targetPath.toAbsolutePath().toString(), lastUpdatedTimestamp, timestamp, newBuildNumber, fixChecksums, fileMetadata, groupId, artifactId, version, packaging);
            if (VersionUtil.isSnapshot(version)) {
                updateVersionMetadata(versionMetadata, versionMetadataFile, lastUpdatedTimestamp, timestamp, newBuildNumber, fixChecksums, fileMetadata, groupId, artifactId, version, packaging);
            }
        }
    } catch (RepositoryNotFoundException re) {
        throw new ArchivaRestServiceException("Target repository cannot be found: " + re.getMessage(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), re);
    } catch (RepositoryException rep) {
        throw new ArchivaRestServiceException("Repository exception: " + rep.getMessage(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), rep);
    } catch (RepositoryAdminException e) {
        throw new ArchivaRestServiceException("RepositoryAdmin exception: " + e.getMessage(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
    } catch (IOException e) {
        throw new ArchivaRestServiceException("Repository exception " + e.getMessage(), Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
    }
}
Also used : Path(java.nio.file.Path) ManagedRepository(org.apache.archiva.admin.model.beans.ManagedRepository) RepositoryException(org.apache.archiva.repository.RepositoryException) IOException(java.io.IOException) RepositoryNotFoundException(org.apache.archiva.repository.RepositoryNotFoundException) RepositoryAdminException(org.apache.archiva.admin.model.RepositoryAdminException) Date(java.util.Date) TimeZone(java.util.TimeZone) DateFormat(java.text.DateFormat) SimpleDateFormat(java.text.SimpleDateFormat) ArchivaRestServiceException(org.apache.archiva.rest.api.services.ArchivaRestServiceException) ManagedRepositoryContent(org.apache.archiva.repository.ManagedRepositoryContent) ArchivaRepositoryMetadata(org.apache.archiva.model.ArchivaRepositoryMetadata) ArtifactReference(org.apache.archiva.model.ArtifactReference)
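For snapshot uploads, saveFile replaces the literal SNAPSHOT token in the filename with a UTC timestamp plus an incremented build number. The sketch below isolates just that renaming step; the helper and class names are invented for illustration, but the yyyyMMdd.HHmmss format and UTC time zone match the code above.

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

class SnapshotFilenameSketch {
    // Builds the timestamped snapshot filename the way saveFile does: UTC timestamp + build number.
    static String timestampedName(String filename, Date lastUpdated, int buildNumber) {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyyMMdd.HHmmss");
        fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
        String timestamp = fmt.format(lastUpdated);
        // e.g. "lib-1.0-SNAPSHOT.jar" -> "lib-1.0-20240101.120000-3.jar"
        return filename.replace("SNAPSHOT", timestamp + "-" + buildNumber);
    }

    public static void main(String[] args) {
        System.out.println(timestampedName("lib-1.0-SNAPSHOT.jar", new Date(), 3));
    }
}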

Example 20 with ArchivaRepositoryMetadata

Use of org.apache.archiva.model.ArchivaRepositoryMetadata in project archiva by apache.

From the class MetadataTransferTest, the method assertRepoProjectMetadata:

/**
 * Ensures that the repository-specific maven metadata file exists and contains the
 * expected list of versions.
 *
 * @param proxiedRepoId         id of the proxied repository whose repository-specific metadata is checked
 * @param requestedResource     logical path of the requested metadata resource
 * @param expectedProxyVersions versions expected to appear in the metadata, or null if none
 */
private void assertRepoProjectMetadata(String proxiedRepoId, String requestedResource, String[] expectedProxyVersions) throws Exception {
    String proxiedFile = metadataTools.getRepositorySpecificName(proxiedRepoId, requestedResource);
    Path actualFile = managedDefaultDir.resolve(proxiedFile);
    assertTrue(Files.exists(actualFile));
    ProjectReference metadata = createProjectReference(requestedResource);
    // Build expected metadata XML
    StringWriter expectedMetadataXml = new StringWriter();
    ArchivaRepositoryMetadata m = new ArchivaRepositoryMetadata();
    m.setGroupId(metadata.getGroupId());
    m.setArtifactId(metadata.getArtifactId());
    if (expectedProxyVersions != null) {
        m.getAvailableVersions().addAll(Arrays.asList(expectedProxyVersions));
    }
    RepositoryMetadataWriter.write(m, expectedMetadataXml);
    // Compare the file to the actual contents.
    assertMetadataEquals(expectedMetadataXml.toString(), actualFile);
}
Also used : Path(java.nio.file.Path) ProjectReference(org.apache.archiva.model.ProjectReference) StringWriter(java.io.StringWriter) ArchivaRepositoryMetadata(org.apache.archiva.model.ArchivaRepositoryMetadata)
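assertMetadataEquals is not shown in this example. One plausible, simplified way to compare the expected XML against the file on disk is whitespace-normalized string equality, sketched below; the real test helper may well perform a more structural XML comparison.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

class MetadataCompareSketch {
    // Collapses whitespace between elements so formatting differences do not fail the comparison.
    static String normalize(String xml) {
        return xml.trim().replaceAll(">\\s+<", "><").replaceAll("\\s+", " ");
    }

    static boolean sameMetadata(String expectedXml, Path actualFile) throws IOException {
        String actualXml = new String(Files.readAllBytes(actualFile));
        return normalize(expectedXml).equals(normalize(actualXml));
    }

    public static void main(String[] args) throws IOException {
        Path tmp = Files.createTempFile("maven-metadata", ".xml");
        Files.write(tmp, "<metadata>\n  <groupId>org.example</groupId>\n</metadata>".getBytes());
        System.out.println(sameMetadata("<metadata><groupId>org.example</groupId></metadata>", tmp)); // true
    }
}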

Aggregations

ArchivaRepositoryMetadata (org.apache.archiva.model.ArchivaRepositoryMetadata): 37 usages
Path (java.nio.file.Path): 29 usages
StringWriter (java.io.StringWriter): 8 usages
XMLException (org.apache.archiva.xml.XMLException): 8 usages
ArrayList (java.util.ArrayList): 7 usages
SnapshotVersion (org.apache.archiva.model.SnapshotVersion): 7 usages
Test (org.junit.Test): 7 usages
IOException (java.io.IOException): 5 usages
RepositoryAdminException (org.apache.archiva.admin.model.RepositoryAdminException): 5 usages
ManagedRepository (org.apache.archiva.admin.model.beans.ManagedRepository): 4 usages
Plugin (org.apache.archiva.model.Plugin): 4 usages
ManagedRepositoryContent (org.apache.archiva.repository.ManagedRepositoryContent): 4 usages
Date (java.util.Date): 3 usages
ChecksummedFile (org.apache.archiva.checksum.ChecksummedFile): 3 usages
ArtifactReference (org.apache.archiva.model.ArtifactReference): 3 usages
RepositoryException (org.apache.archiva.repository.RepositoryException): 3 usages
ArchivaRestServiceException (org.apache.archiva.rest.api.services.ArchivaRestServiceException): 3 usages
DateFormat (java.text.DateFormat): 2 usages
SimpleDateFormat (java.text.SimpleDateFormat): 2 usages
TimeZone (java.util.TimeZone): 2 usages