Use of org.apache.archiva.metadata.repository.MetadataRepositoryException in the Apache Archiva project: the removeMetadataFacet method of the JcrMetadataRepository class.
/**
 * Removes the named metadata facet node for the given repository, then walks
 * upward deleting any container nodes left empty by the removal.
 *
 * @param repositoryId the repository whose facet is removed
 * @param facetId      the facet type identifier
 * @param name         the facet instance name (path segment under the facet type)
 * @throws MetadataRepositoryException if the underlying JCR operation fails
 */
@Override
public void removeMetadataFacet(String repositoryId, String facetId, String name) throws MetadataRepositoryException {
    try {
        Node root = getJcrSession().getRootNode();
        String path = getFacetPath(repositoryId, facetId, name);
        if (root.hasNode(path)) {
            Node node = root.getNode(path);
            do {
                // also remove empty container nodes
                Node parent = node.getParent();
                node.remove();
                node = parent;
                // FIX: stop before reaching the root (depth 0). The previous
                // condition (!node.hasNodes()) would, once every ancestor was
                // empty, attempt to remove the JCR root node itself, which
                // always fails with a RepositoryException.
            } while (!node.hasNodes() && node.getDepth() > 0);
        }
    } catch (RepositoryException e) {
        throw new MetadataRepositoryException(e.getMessage(), e);
    }
}
Use of org.apache.archiva.metadata.repository.MetadataRepositoryException in the Apache Archiva project: the removeNamespace method of the JcrMetadataRepository class.
/**
 * Deletes the namespace node for the given repository and namespace id,
 * provided the node exists and actually carries the namespace node type.
 *
 * @param repositoryId the repository containing the namespace
 * @param projectId    the namespace identifier to remove
 * @throws MetadataRepositoryException if the underlying JCR operation fails
 */
@Override
public void removeNamespace(String repositoryId, String projectId) throws MetadataRepositoryException {
    try {
        final Node rootNode = getJcrSession().getRootNode();
        final String namespacePath = getNamespacePath(repositoryId, projectId);
        // Nothing to do when the namespace node was never created.
        if (!rootNode.hasNode(namespacePath)) {
            return;
        }
        final Node namespaceNode = rootNode.getNode(namespacePath);
        // Only delete genuine namespace nodes; leave any other node type untouched.
        if (namespaceNode.isNodeType(NAMESPACE_NODE_TYPE)) {
            namespaceNode.remove();
        }
    } catch (RepositoryException e) {
        throw new MetadataRepositoryException(e.getMessage(), e);
    }
}
Use of org.apache.archiva.metadata.repository.MetadataRepositoryException in the Apache Archiva project: the getArtifactsByChecksum method of the JcrMetadataRepository class.
/**
 * Finds all artifacts in the given repository whose SHA-1 or MD5 digest
 * equals the supplied checksum, using a bound-parameter JCR-SQL2 query.
 *
 * @param repositoryId the repository to search
 * @param checksum     the hex digest to match against either checksum property
 * @return the matching artifacts (empty list when nothing matches)
 * @throws MetadataRepositoryException if the underlying JCR query fails
 */
@Override
public List<ArtifactMetadata> getArtifactsByChecksum(String repositoryId, String checksum) throws MetadataRepositoryException {
    // Match the checksum against either digest property of the artifact nodes.
    final String sql = getArtifactQuery(repositoryId) + " AND ([sha1] = $checksum OR [md5] = $checksum)";
    final List<ArtifactMetadata> matches = new ArrayList<>();
    try {
        Query query = getJcrSession().getWorkspace().getQueryManager().createQuery(sql, Query.JCR_SQL2);
        // Bind rather than concatenate so the checksum needs no escaping.
        query.bindValue("checksum", getJcrSession().getValueFactory().createValue(checksum));
        for (Node artifactNode : JcrUtils.getNodes(query.execute())) {
            matches.add(getArtifactFromNode(repositoryId, artifactNode));
        }
    } catch (RepositoryException e) {
        throw new MetadataRepositoryException(e.getMessage(), e);
    }
    return matches;
}
Use of org.apache.archiva.metadata.repository.MetadataRepositoryException in the Apache Archiva project: the addArtifact method of the RepositoryProblemEventListener class.
/**
 * Reacts to a successful version addition by clearing the repository-problem
 * facet recorded for that version, since the addition supersedes it.
 * A failure to clear the facet is logged but never propagated.
 *
 * @param session   the active repository session (marked dirty on success)
 * @param repoId    the repository the version was added to
 * @param namespace the namespace of the project
 * @param projectId the project identifier
 * @param metadata  the version metadata that was just added
 */
@Override
public void addArtifact(RepositorySession session, String repoId, String namespace, String projectId, ProjectVersionMetadata metadata) {
    // Remove problems associated with this version on successful addition
    // TODO: this removes all problems - do we need something that just remove the problems we know are corrected?
    final String facetName = RepositoryProblemFacet.createName(namespace, projectId, metadata.getId(), null);
    try {
        session.getRepository().removeMetadataFacet(repoId, RepositoryProblemFacet.FACET_ID, facetName);
        session.markDirty();
    } catch (MetadataRepositoryException e) {
        log.warn("Unable to remove repository problem facets for the version being corrected in the repository: {}", e.getMessage(), e);
    }
}
Use of org.apache.archiva.metadata.repository.MetadataRepositoryException in the Apache Archiva project: the processFile method of the DuplicateArtifactsConsumer class.
/**
 * Scans the artifact at the given repository-relative path for duplicates:
 * computes its SHA-1, looks up all artifacts in the metadata repository with
 * the same checksum, and records a "duplicate-artifact" problem facet for
 * each match that is not the artifact itself.
 *
 * @param path the artifact path relative to the repository root
 * @throws ConsumerException if the checksum cannot be computed, the checksum
 *                           lookup fails, or a problem facet cannot be stored
 */
@Override
public void processFile(String path) throws ConsumerException {
    Path artifactFile = this.repositoryDir.resolve(path);
    // TODO: would be quicker to somehow make sure it ran after the update database consumer, or as a part of that
    // perhaps could use an artifact context that is retained for all consumers? First in can set the SHA-1
    // alternatively this could come straight from the storage resolver, which could populate the artifact metadata
    // in the later parse call with the desired checksum and use that
    String checksumSha1;
    ChecksummedFile checksummedFile = new ChecksummedFile(artifactFile);
    try {
        checksumSha1 = checksummedFile.calculateChecksum(ChecksumAlgorithm.SHA1);
    } catch (IOException e) {
        throw new ConsumerException(e.getMessage(), e);
    }
    MetadataRepository metadataRepository = repositorySession.getRepository();
    Collection<ArtifactMetadata> results;
    try {
        results = metadataRepository.getArtifactsByChecksum(repoId, checksumSha1);
    } catch (MetadataRepositoryException e) {
        // NOTE(review): this failure path closes the shared repositorySession,
        // while the addMetadataFacet failure below does not — confirm callers
        // reopen the session / whether this asymmetry is intentional.
        repositorySession.close();
        throw new ConsumerException(e.getMessage(), e);
    }
    if (CollectionUtils.isNotEmpty(results)) {
        ArtifactMetadata originalArtifact;
        try {
            originalArtifact = pathTranslator.getArtifactForPath(repoId, path);
        } catch (Exception e) {
            // An unparseable path means we cannot attribute the problem to a
            // project/version, so skip reporting rather than fail the scan.
            log.warn("Not reporting problem for invalid artifact in checksum check: {}", e.getMessage());
            return;
        }
        for (ArtifactMetadata dupArtifact : results) {
            // The artifact id is the final path segment of the scanned file.
            String id = path.substring(path.lastIndexOf('/') + 1);
            if (dupArtifact.getId().equals(id) && dupArtifact.getNamespace().equals(originalArtifact.getNamespace()) && dupArtifact.getProject().equals(originalArtifact.getProject()) && dupArtifact.getVersion().equals(originalArtifact.getVersion())) {
                // Skip reference to itself.
                log.debug("Not counting duplicate for artifact {} for path {}", dupArtifact, path);
                continue;
            }
            // Record one problem facet per duplicate, keyed by the scanned
            // artifact's coordinates and naming both paths in the message.
            RepositoryProblemFacet problem = new RepositoryProblemFacet();
            problem.setRepositoryId(repoId);
            problem.setNamespace(originalArtifact.getNamespace());
            problem.setProject(originalArtifact.getProject());
            problem.setVersion(originalArtifact.getVersion());
            problem.setId(id);
            // FIXME: need to get the right storage resolver for the repository the dupe artifact is in, it might be
            // a different type
            // FIXME: we need the project version here, not the artifact version
            problem.setMessage("Duplicate Artifact Detected: " + path + " <--> " + pathTranslator.toPath(dupArtifact.getNamespace(), dupArtifact.getProject(), dupArtifact.getVersion(), dupArtifact.getId()));
            problem.setProblem("duplicate-artifact");
            try {
                metadataRepository.addMetadataFacet(repoId, problem);
            } catch (MetadataRepositoryException e) {
                throw new ConsumerException(e.getMessage(), e);
            }
        }
    }
}
Aggregations