Use of org.apache.archiva.repository.metadata.RepositoryMetadataException in project archiva by apache.
The class CleanupReleasedSnapshotsRepositoryPurge, method updateMetadata.
/*
 * TODO: Uses a deprecated API, but if we use the API with a location string, it does not work as expected
 * -> not sure what needs to be changed here.
 */
@SuppressWarnings("deprecation")
private void updateMetadata(ArtifactReference artifact) {
    VersionedReference versionRef = new VersionedReference();
    versionRef.setGroupId(artifact.getGroupId());
    versionRef.setArtifactId(artifact.getArtifactId());
    versionRef.setVersion(artifact.getVersion());

    ProjectReference projectRef = new ProjectReference();
    projectRef.setGroupId(artifact.getGroupId());
    projectRef.setArtifactId(artifact.getArtifactId());

    try {
        metadataTools.updateMetadata(repository, versionRef);
    } catch (ContentNotFoundException e) {
        // Ignore. (Just means we have no snapshot versions left to reference.)
    } catch (RepositoryMetadataException e) {
        // Ignore.
    } catch (IOException e) {
        // Ignore.
    } catch (LayoutException e) {
        // Ignore.
    }

    try {
        metadataTools.updateMetadata(repository, projectRef);
    } catch (ContentNotFoundException e) {
        // Ignore. (Just means we have no snapshot versions left to reference.)
    } catch (RepositoryMetadataException e) {
        // Ignore.
    } catch (IOException e) {
        // Ignore.
    } catch (LayoutException e) {
        // Ignore.
    }
}
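
All four catch blocks ignore the exception, so a Java 7+ multi-catch would state that intent once per call. A minimal sketch, not from the Archiva source, assuming none of the four exception types extends another (a requirement of multi-catch):

    // Sketch only: collapses the identical ignore-only handlers above.
    try {
        metadataTools.updateMetadata(repository, versionRef);
    } catch (ContentNotFoundException | RepositoryMetadataException
            | IOException | LayoutException e) {
        // Ignore: either no snapshot versions are left to reference, or the
        // metadata could not be rewritten; the purge continues either way.
    }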
Use of org.apache.archiva.repository.metadata.RepositoryMetadataException in project archiva by apache.
The class ArchivaDavResourceFactory, method createResource.
@Override
public DavResource createResource(final DavResourceLocator locator, final DavServletRequest request,
                                  final DavServletResponse response) throws DavException {
    ArchivaDavResourceLocator archivaLocator = checkLocatorIsInstanceOfRepositoryLocator(locator);
    RepositoryGroupConfiguration repoGroupConfig =
        archivaConfiguration.getConfiguration().getRepositoryGroupsAsMap().get(archivaLocator.getRepositoryId());
    String activePrincipal = getActivePrincipal(request);
    List<String> resourcesInAbsolutePath = new ArrayList<>();
    boolean readMethod = WebdavMethodUtil.isReadMethod(request.getMethod());
    DavResource resource;
    if (repoGroupConfig != null) {
        if (!readMethod) {
            throw new DavException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
                "Write method not allowed for repository groups.");
        }
        log.debug("Repository group '{}' accessed by '{}'", repoGroupConfig.getId(), activePrincipal);
        // handle browse requests for virtual repos
        if (getLogicalResource(archivaLocator, null, true).endsWith("/")) {
            DavResource davResource =
                getResourceFromGroup(request, repoGroupConfig.getRepositories(), archivaLocator, repoGroupConfig);
            setHeaders(response, locator, davResource, true);
            return davResource;
        } else {
            // make a copy to avoid potential concurrent modifications (e.g. by configuration)
            // TODO: ultimately, locking might be more efficient than copying in this fashion, since
            // updates are infrequent
            List<String> repositories = new ArrayList<>(repoGroupConfig.getRepositories());
            resource = processRepositoryGroup(request, archivaLocator, repositories, activePrincipal,
                resourcesInAbsolutePath, repoGroupConfig);
        }
    } else {
        try {
            RemoteRepository remoteRepository =
                remoteRepositoryAdmin.getRemoteRepository(archivaLocator.getRepositoryId());
            if (remoteRepository != null) {
                String logicalResource = getLogicalResource(archivaLocator, null, false);
                IndexingContext indexingContext = remoteRepositoryAdmin.createIndexContext(remoteRepository);
                Path resourceFile = StringUtils.equals(logicalResource, "/")
                    ? Paths.get(indexingContext.getIndexDirectoryFile().getParent())
                    : Paths.get(indexingContext.getIndexDirectoryFile().getParent(), logicalResource);
                resource = new ArchivaDavResource(resourceFile.toAbsolutePath().toString(),
                    locator.getResourcePath(), null, request.getRemoteAddr(), activePrincipal,
                    request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners,
                    scheduler, fileLockManager);
                setHeaders(response, locator, resource, false);
                return resource;
            }
        } catch (RepositoryAdminException e) {
            log.debug("RepositoryAdminException: remote repository with id '{}' not found, msg: {}",
                archivaLocator.getRepositoryId(), e.getMessage());
        }
        ManagedRepository repo = repositoryRegistry.getManagedRepository(archivaLocator.getRepositoryId());
        if (repo == null) {
            throw new DavException(HttpServletResponse.SC_NOT_FOUND,
                "Invalid repository: " + archivaLocator.getRepositoryId());
        }
        ManagedRepositoryContent managedRepositoryContent = repo.getContent();
        if (managedRepositoryContent == null) {
            log.error("Inconsistency detected. Repository content not found for '{}'",
                archivaLocator.getRepositoryId());
            throw new DavException(HttpServletResponse.SC_NOT_FOUND,
                "Invalid repository: " + archivaLocator.getRepositoryId());
        }
        log.debug("Managed repository '{}' accessed by '{}'", managedRepositoryContent.getId(), activePrincipal);
        resource = processRepository(request, archivaLocator, activePrincipal, managedRepositoryContent, repo);
        String logicalResource = getLogicalResource(archivaLocator, null, false);
        resourcesInAbsolutePath.add(
            Paths.get(managedRepositoryContent.getRepoRoot(), logicalResource).toAbsolutePath().toString());
    }
    String requestedResource = request.getRequestURI();
    // merge metadata only when requested via the repo group
    if ((repositoryRequest.isMetadata(requestedResource)
            || repositoryRequest.isMetadataSupportFile(requestedResource)) && repoGroupConfig != null) {
        // this should only happen at the project level, not the version level!
        if (isProjectReference(requestedResource)) {
            ArchivaDavResource res = (ArchivaDavResource) resource;
            String filePath = StringUtils.substringBeforeLast(
                res.getLocalResource().toAbsolutePath().toString().replace('\\', '/'), "/");
            filePath = filePath + "/maven-metadata-" + repoGroupConfig.getId() + ".xml";
            // for MRM-872, handle checksums of the merged metadata files
            if (repositoryRequest.isSupportFile(requestedResource)) {
                Path metadataChecksum =
                    Paths.get(filePath + "." + StringUtils.substringAfterLast(requestedResource, "."));
                if (Files.exists(metadataChecksum)) {
                    LogicalResource logicalResource =
                        new LogicalResource(getLogicalResource(archivaLocator, null, false));
                    resource = new ArchivaDavResource(metadataChecksum.toAbsolutePath().toString(),
                        logicalResource.getPath(), null, request.getRemoteAddr(), activePrincipal,
                        request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners,
                        scheduler, fileLockManager);
                }
            } else {
                if (resourcesInAbsolutePath != null && resourcesInAbsolutePath.size() > 1) {
                    // merge the metadata of all repos under the group
                    ArchivaRepositoryMetadata mergedMetadata = new ArchivaRepositoryMetadata();
                    for (String resourceAbsPath : resourcesInAbsolutePath) {
                        try {
                            Path metadataFile = Paths.get(resourceAbsPath);
                            ArchivaRepositoryMetadata repoMetadata = MavenMetadataReader.read(metadataFile);
                            mergedMetadata = RepositoryMetadataMerge.merge(mergedMetadata, repoMetadata);
                        } catch (XMLException e) {
                            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                "Error occurred while reading metadata file.");
                        } catch (RepositoryMetadataException r) {
                            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                "Error occurred while merging metadata file.");
                        }
                    }
                    try {
                        Path resourceFile = writeMergedMetadataToFile(mergedMetadata, filePath);
                        LogicalResource logicalResource =
                            new LogicalResource(getLogicalResource(archivaLocator, null, false));
                        resource = new ArchivaDavResource(resourceFile.toAbsolutePath().toString(),
                            logicalResource.getPath(), null, request.getRemoteAddr(), activePrincipal,
                            request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners,
                            scheduler, fileLockManager);
                    } catch (RepositoryMetadataException r) {
                        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                            "Error occurred while writing metadata file.");
                    } catch (IOException ie) {
                        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                            "Error occurred while generating checksum files.");
                    } catch (DigesterException de) {
                        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                            "Error occurred while generating checksum files: " + de.getMessage());
                    }
                }
            }
        }
    }
    setHeaders(response, locator, resource, false);
    // compatibility with MRM-440 to ensure browsing the repository works ok
    if (resource.isCollection() && !request.getRequestURI().endsWith("/")) {
        throw new BrowserRedirectException(resource.getHref());
    }
    resource.addLockManager(lockManager);
    return resource;
}
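
The merge branch repeats one pattern: any XMLException or RepositoryMetadataException raised while reading or merging group metadata is translated into an HTTP 500 DavException. A minimal sketch of that mapping, extracted into a hypothetical helper (mergeGroupMetadata is not an Archiva method; the calls it makes mirror the ones in the snippet above):

    // Hypothetical helper, not in ArchivaDavResourceFactory: isolates the
    // read-and-merge loop and its exception-to-DavException translation.
    private ArchivaRepositoryMetadata mergeGroupMetadata(List<String> metadataPaths)
            throws DavException {
        ArchivaRepositoryMetadata merged = new ArchivaRepositoryMetadata();
        for (String metadataPath : metadataPaths) {
            try {
                merged = RepositoryMetadataMerge.merge(merged,
                    MavenMetadataReader.read(Paths.get(metadataPath)));
            } catch (XMLException e) {
                throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                    "Error occurred while reading metadata file.");
            } catch (RepositoryMetadataException e) {
                throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                    "Error occurred while merging metadata file.");
            }
        }
        return merged;
    }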
Use of org.apache.archiva.repository.metadata.RepositoryMetadataException in project archiva by apache.
The class DefaultRepositoryProxyConnectors, method fetchMetadataFromProxies.
@Override
public ProxyFetchResult fetchMetadataFromProxies(ManagedRepositoryContent repository, String logicalPath) {
    Path localFile = Paths.get(repository.getRepoRoot(), logicalPath);
    Properties requestProperties = new Properties();
    requestProperties.setProperty("filetype", "metadata");
    boolean metadataNeedsUpdating = false;
    long originalTimestamp = getLastModified(localFile);
    List<ProxyConnector> connectors = new ArrayList<>(getProxyConnectors(repository));
    for (ProxyConnector connector : connectors) {
        if (connector.isDisabled()) {
            continue;
        }
        RemoteRepositoryContent targetRepository = connector.getTargetRepository();
        Path localRepoFile = toLocalRepoFile(repository, targetRepository, logicalPath);
        long originalMetadataTimestamp = getLastModified(localRepoFile);
        try {
            transferFile(connector, targetRepository, logicalPath, repository, localRepoFile,
                requestProperties, true);
            if (hasBeenUpdated(localRepoFile, originalMetadataTimestamp)) {
                metadataNeedsUpdating = true;
            }
        } catch (NotFoundException e) {
            log.debug("Metadata {} not found on remote repository '{}'.", logicalPath,
                targetRepository.getRepository().getId(), e);
        } catch (NotModifiedException e) {
            log.debug("Metadata {} not updated on remote repository '{}'.", logicalPath,
                targetRepository.getRepository().getId(), e);
        } catch (ProxyException | RepositoryAdminException e) {
            log.warn("Transfer error from repository {} for versioned metadata {}, continuing to next repository. Error message: {}",
                targetRepository.getRepository().getId(), logicalPath, e.getMessage());
            log.debug("Full stack trace", e);
        }
    }
    if (hasBeenUpdated(localFile, originalTimestamp)) {
        metadataNeedsUpdating = true;
    }
    if (metadataNeedsUpdating || !Files.exists(localFile)) {
        try {
            metadataTools.updateMetadata(repository, logicalPath);
        } catch (RepositoryMetadataException e) {
            log.warn("Unable to update metadata {}: {}", localFile.toAbsolutePath(), e.getMessage(), e);
        }
    }
    if (fileExists(localFile)) {
        return new ProxyFetchResult(localFile, metadataNeedsUpdating);
    }
    return new ProxyFetchResult(null, false);
}
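
The update decision hinges on two small helpers, getLastModified and hasBeenUpdated, which are not shown above. A plausible sketch of their contract, for illustration only (the real Archiva implementations may differ): a missing file reports timestamp 0, and "updated" means strictly newer than the recorded original.

    // Assumed contract, not copied from Archiva.
    private long getLastModified(Path file) {
        try {
            return Files.exists(file) ? Files.getLastModifiedTime(file).toMillis() : 0;
        } catch (IOException e) {
            return 0; // treat unreadable files as never modified
        }
    }

    private boolean hasBeenUpdated(Path file, long originalTimestamp) {
        return getLastModified(file) > originalTimestamp;
    }

Under that contract, a metadata file that a proxy fetch just created or refreshed flips metadataNeedsUpdating, which in turn triggers the metadataTools.updateMetadata call that can raise RepositoryMetadataException.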
Use of org.apache.archiva.repository.metadata.RepositoryMetadataException in project archiva by apache.
The class MetadataUpdaterConsumer, method updateProjectMetadata.
private void updateProjectMetadata(ArtifactReference artifact, String path) {
    ProjectReference projectRef = new ProjectReference();
    projectRef.setGroupId(artifact.getGroupId());
    projectRef.setArtifactId(artifact.getArtifactId());
    try {
        String metadataPath = this.metadataTools.toPath(projectRef);
        Path projectMetadata = this.repositoryDir.resolve(metadataPath);
        if (Files.exists(projectMetadata)
                && (Files.getLastModifiedTime(projectMetadata).toMillis() >= this.scanStartTimestamp)) {
            // This metadata is up to date. Skip it.
            log.debug("Skipping up-to-date metadata: {}", this.metadataTools.toPath(projectRef));
            return;
        }
        metadataTools.updateMetadata(this.repository, metadataPath);
        log.debug("Updated metadata: {}", this.metadataTools.toPath(projectRef));
    } catch (RepositoryMetadataException e) {
        log.error("Unable to write project metadata for artifact [{}]:", path, e);
        triggerConsumerError(TYPE_METADATA_WRITE_FAILURE,
            "Unable to write project metadata for artifact [" + path + "]: " + e.getMessage());
    } catch (IOException e) {
        log.warn("Project metadata not written due to IO warning: ", e);
        triggerConsumerWarning(TYPE_METADATA_IO,
            "Project metadata not written due to IO warning: " + e.getMessage());
    }
}
Use of org.apache.archiva.repository.metadata.RepositoryMetadataException in project archiva by apache.
The class MetadataUpdaterConsumer, method updateVersionMetadata.
private void updateVersionMetadata(ArtifactReference artifact, String path) {
    VersionedReference versionRef = new VersionedReference();
    versionRef.setGroupId(artifact.getGroupId());
    versionRef.setArtifactId(artifact.getArtifactId());
    versionRef.setVersion(artifact.getVersion());
    try {
        String metadataPath = this.metadataTools.toPath(versionRef);
        Path projectMetadata = this.repositoryDir.resolve(metadataPath);
        if (Files.exists(projectMetadata)
                && (Files.getLastModifiedTime(projectMetadata).toMillis() >= this.scanStartTimestamp)) {
            // This metadata is up to date. Skip it.
            log.debug("Skipping up-to-date metadata: {}", this.metadataTools.toPath(versionRef));
            return;
        }
        metadataTools.updateMetadata(this.repository, metadataPath);
        log.debug("Updated metadata: {}", this.metadataTools.toPath(versionRef));
    } catch (RepositoryMetadataException e) {
        log.error("Unable to write version metadata for artifact [{}]:", path, e);
        triggerConsumerError(TYPE_METADATA_WRITE_FAILURE,
            "Unable to write version metadata for artifact [" + path + "]: " + e.getMessage());
    } catch (IOException e) {
        log.warn("Version metadata not written due to IO warning: ", e);
        triggerConsumerWarning(TYPE_METADATA_IO,
            "Version metadata not written due to IO warning: " + e.getMessage());
    }
}
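
updateProjectMetadata and updateVersionMetadata differ only in the reference they build and the wording of their diagnostics; everything after toPath() is the same skip-if-fresh pattern. A refactoring sketch, not part of the Archiva source (updateMetadataIfStale is a hypothetical name), that both methods could delegate to once they have converted their reference to a metadata path:

    // Hypothetical shared helper; "kind" would be "project" or "version".
    private void updateMetadataIfStale(String metadataPath, String kind, String artifactPath) {
        try {
            Path metadataFile = this.repositoryDir.resolve(metadataPath);
            if (Files.exists(metadataFile)
                    && Files.getLastModifiedTime(metadataFile).toMillis() >= this.scanStartTimestamp) {
                log.debug("Skipping up-to-date metadata: {}", metadataPath);
                return;
            }
            metadataTools.updateMetadata(this.repository, metadataPath);
            log.debug("Updated metadata: {}", metadataPath);
        } catch (RepositoryMetadataException e) {
            log.error("Unable to write {} metadata for artifact [{}]:", kind, artifactPath, e);
            triggerConsumerError(TYPE_METADATA_WRITE_FAILURE,
                "Unable to write " + kind + " metadata for artifact [" + artifactPath + "]: " + e.getMessage());
        } catch (IOException e) {
            log.warn("{} metadata not written due to IO warning: ", kind, e);
            triggerConsumerWarning(TYPE_METADATA_IO,
                kind + " metadata not written due to IO warning: " + e.getMessage());
        }
    }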