Use of org.apache.archiva.metadata.model.facets.RepositoryProblemFacet in project archiva by apache.
The class Maven2RepositoryStorage, method readProjectVersionMetadata:
@Override
public ProjectVersionMetadata readProjectVersionMetadata(ReadMetadataRequest readMetadataRequest)
        throws RepositoryStorageMetadataNotFoundException, RepositoryStorageMetadataInvalidException,
        RepositoryStorageRuntimeException {
    try {
        ManagedRepository managedRepository = repositoryRegistry.getManagedRepository(readMetadataRequest.getRepositoryId());
        boolean isReleases = managedRepository.getActiveReleaseSchemes().contains(ReleaseScheme.RELEASE);
        boolean isSnapshots = managedRepository.getActiveReleaseSchemes().contains(ReleaseScheme.SNAPSHOT);
        String artifactVersion = readMetadataRequest.getProjectVersion();
        // olamy: in case of browsing via the ui we can mix repos (parent of a SNAPSHOT can come from release repo)
        if (!readMetadataRequest.isBrowsingRequest()) {
            if (VersionUtil.isSnapshot(artifactVersion)) {
                // skygo trying to improve speed by honoring managed configuration MRM-1658
                if (isReleases && !isSnapshots) {
                    throw new RepositoryStorageRuntimeException("lookforsnaponreleaseonly", "managed repo is configured for release only");
                }
            } else {
                if (!isReleases && isSnapshots) {
                    throw new RepositoryStorageRuntimeException("lookforsreleaseonsneponly", "managed repo is configured for snapshot only");
                }
            }
        }
        Path basedir = Paths.get(managedRepository.getLocation());
        if (VersionUtil.isSnapshot(artifactVersion)) {
            Path metadataFile = pathTranslator.toFile(basedir, readMetadataRequest.getNamespace(),
                    readMetadataRequest.getProjectId(), artifactVersion, METADATA_FILENAME);
            try {
                ArchivaRepositoryMetadata metadata = MavenMetadataReader.read(metadataFile);
                // re-adjust to timestamp if present, otherwise retain the original -SNAPSHOT filename
                SnapshotVersion snapshotVersion = metadata.getSnapshotVersion();
                if (snapshotVersion != null) {
                    // remove "SNAPSHOT" (8 characters) from the end, keeping the trailing '-'
                    artifactVersion = artifactVersion.substring(0, artifactVersion.length() - 8);
                    artifactVersion = artifactVersion + snapshotVersion.getTimestamp() + "-" + snapshotVersion.getBuildNumber();
                }
            } catch (XMLException e) {
                // unable to parse metadata - log it, and continue with the version as the original SNAPSHOT version
                LOGGER.warn("Invalid metadata: {} - {}", metadataFile, e.getMessage());
            }
        }
        // TODO: won't work well with some other layouts, might need to convert artifact parts to ID by path translator
        String id = readMetadataRequest.getProjectId() + "-" + artifactVersion + ".pom";
        Path file = pathTranslator.toFile(basedir, readMetadataRequest.getNamespace(),
                readMetadataRequest.getProjectId(), readMetadataRequest.getProjectVersion(), id);
        if (!Files.exists(file)) {
            // metadata could not be resolved
            throw new RepositoryStorageMetadataNotFoundException("The artifact's POM file '" + file.toAbsolutePath() + "' was missing");
        }
        // TODO: this is a workaround until we can properly resolve using proxies as well - this doesn't cache
        // anything locally!
        List<RemoteRepository> remoteRepositories = new ArrayList<>();
        Map<String, NetworkProxy> networkProxies = new HashMap<>();
        Map<String, List<ProxyConnector>> proxyConnectorsMap = proxyConnectorAdmin.getProxyConnectorAsMap();
        List<ProxyConnector> proxyConnectors = proxyConnectorsMap.get(readMetadataRequest.getRepositoryId());
        if (proxyConnectors != null) {
            for (ProxyConnector proxyConnector : proxyConnectors) {
                RemoteRepository remoteRepoConfig = repositoryRegistry.getRemoteRepository(proxyConnector.getTargetRepoId());
                if (remoteRepoConfig != null) {
                    remoteRepositories.add(remoteRepoConfig);
                    NetworkProxy networkProxyConfig = networkProxyAdmin.getNetworkProxy(proxyConnector.getProxyId());
                    if (networkProxyConfig != null) {
                        // key/value: remote repo ID/proxy info
                        networkProxies.put(proxyConnector.getTargetRepoId(), networkProxyConfig);
                    }
                }
            }
        }
        // can have released parent pom
        if (readMetadataRequest.isBrowsingRequest()) {
            remoteRepositories.addAll(repositoryRegistry.getRemoteRepositories());
        }
        ModelBuildingRequest req = new DefaultModelBuildingRequest()
                .setProcessPlugins(false)
                .setPomFile(file.toFile())
                .setTwoPhaseBuilding(false)
                .setValidationLevel(ModelBuildingRequest.VALIDATION_LEVEL_MINIMAL);
        // MRM-1607. olamy: this will resolve jdk profiles on the current running archiva jvm
        req.setSystemProperties(System.getProperties());
        // MRM-1411
        req.setModelResolver(new RepositoryModelResolver(managedRepository, pathTranslator, wagonFactory,
                remoteRepositories, networkProxies, managedRepository));
        Model model;
        try {
            model = builder.build(req).getEffectiveModel();
        } catch (ModelBuildingException e) {
            String msg = "The artifact's POM file '" + file + "' was invalid: " + e.getMessage();
            List<ModelProblem> modelProblems = e.getProblems();
            for (ModelProblem problem : modelProblems) {
                // but setTwoPhaseBuilding(true) fixes that
                if ((problem.getException() instanceof FileNotFoundException
                        || problem.getException() instanceof NoSuchFileException)
                        && e.getModelId() != null && !e.getModelId().equals(problem.getModelId())) {
                    LOGGER.warn("The artifact's parent POM file '{}' cannot be resolved. "
                            + "Using defaults for project version metadata.", file);
                    ProjectVersionMetadata metadata = new ProjectVersionMetadata();
                    metadata.setId(readMetadataRequest.getProjectVersion());
                    MavenProjectFacet facet = new MavenProjectFacet();
                    facet.setGroupId(readMetadataRequest.getNamespace());
                    facet.setArtifactId(readMetadataRequest.getProjectId());
                    facet.setPackaging("jar");
                    metadata.addFacet(facet);
                    String errMsg = "Error in resolving artifact's parent POM file. "
                            + (problem.getException() == null ? problem.getMessage() : problem.getException().getMessage());
                    RepositoryProblemFacet repoProblemFacet = new RepositoryProblemFacet();
                    repoProblemFacet.setRepositoryId(readMetadataRequest.getRepositoryId());
                    repoProblemFacet.setId(readMetadataRequest.getRepositoryId());
                    repoProblemFacet.setMessage(errMsg);
                    repoProblemFacet.setProblem(errMsg);
                    repoProblemFacet.setProject(readMetadataRequest.getProjectId());
                    repoProblemFacet.setVersion(readMetadataRequest.getProjectVersion());
                    repoProblemFacet.setNamespace(readMetadataRequest.getNamespace());
                    metadata.addFacet(repoProblemFacet);
                    return metadata;
                }
            }
            throw new RepositoryStorageMetadataInvalidException("invalid-pom", msg, e);
        }
        // Check if the POM is in the correct location
        boolean correctGroupId = readMetadataRequest.getNamespace().equals(model.getGroupId());
        boolean correctArtifactId = readMetadataRequest.getProjectId().equals(model.getArtifactId());
        boolean correctVersion = readMetadataRequest.getProjectVersion().equals(model.getVersion());
        if (!correctGroupId || !correctArtifactId || !correctVersion) {
            StringBuilder message = new StringBuilder("Incorrect POM coordinates in '" + file + "':");
            if (!correctGroupId) {
                message.append("\nIncorrect group ID: ").append(model.getGroupId());
            }
            if (!correctArtifactId) {
                message.append("\nIncorrect artifact ID: ").append(model.getArtifactId());
            }
            if (!correctVersion) {
                message.append("\nIncorrect version: ").append(model.getVersion());
            }
            throw new RepositoryStorageMetadataInvalidException("mislocated-pom", message.toString());
        }
        ProjectVersionMetadata metadata = new ProjectVersionMetadata();
        metadata.setCiManagement(convertCiManagement(model.getCiManagement()));
        metadata.setDescription(model.getDescription());
        metadata.setId(readMetadataRequest.getProjectVersion());
        metadata.setIssueManagement(convertIssueManagement(model.getIssueManagement()));
        metadata.setLicenses(convertLicenses(model.getLicenses()));
        metadata.setMailingLists(convertMailingLists(model.getMailingLists()));
        metadata.setDependencies(convertDependencies(model.getDependencies()));
        metadata.setName(model.getName());
        metadata.setOrganization(convertOrganization(model.getOrganization()));
        metadata.setScm(convertScm(model.getScm()));
        metadata.setUrl(model.getUrl());
        metadata.setProperties(model.getProperties());
        MavenProjectFacet facet = new MavenProjectFacet();
        facet.setGroupId(model.getGroupId() != null ? model.getGroupId() : model.getParent().getGroupId());
        facet.setArtifactId(model.getArtifactId());
        facet.setPackaging(model.getPackaging());
        if (model.getParent() != null) {
            MavenProjectParent parent = new MavenProjectParent();
            parent.setGroupId(model.getParent().getGroupId());
            parent.setArtifactId(model.getParent().getArtifactId());
            parent.setVersion(model.getParent().getVersion());
            facet.setParent(parent);
        }
        metadata.addFacet(facet);
        return metadata;
    } catch (RepositoryAdminException e) {
        throw new RepositoryStorageRuntimeException("repo-admin", e.getMessage(), e);
    }
}
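As a side note on the snapshot handling above: the substring call strips the literal "SNAPSHOT" (8 characters) while keeping the trailing hyphen, then appends the timestamp and build number taken from the repository metadata. A minimal standalone sketch of that string arithmetic; the concrete values are made up for illustration and are not taken from the Archiva source:

String artifactVersion = "1.0-SNAPSHOT";      // hypothetical input version
String timestamp = "20100309.002023";         // would come from snapshotVersion.getTimestamp()
int buildNumber = 2;                          // would come from snapshotVersion.getBuildNumber()
// remove "SNAPSHOT" (8 characters) from the end, keeping "1.0-"
String base = artifactVersion.substring(0, artifactVersion.length() - 8);
String resolved = base + timestamp + "-" + buildNumber;
// resolved is now "1.0-20100309.002023-2", the timestamped version used to locate the POM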
Use of org.apache.archiva.metadata.model.facets.RepositoryProblemFacet in project archiva by apache.
The class DuplicateArtifactsConsumerTest, method testConsumerArtifactDuplicated:
// TODO: Doesn't currently work
// public void testConsumerArtifactNotDuplicatedForOtherSnapshots()
// throws ConsumerException
// {
// when( metadataRepository.getArtifactsByChecksum( TEST_REPO, TEST_CHECKSUM ) ).thenReturn( Arrays.asList(
// TEST_METADATA, createMetadata( "1.0-20100309.002023-2" ) ) );
//
// consumer.beginScan( config, new Date() );
// consumer.processFile( TEST_FILE );
// consumer.completeScan();
//
// verify( metadataRepository, never() ).addMetadataFacet( eq( TEST_REPO ), Matchers.<MetadataFacet>anyObject() );
// }
@Test
public void testConsumerArtifactDuplicated() throws Exception {
    when(metadataRepository.getArtifactsByChecksum(TEST_REPO, TEST_CHECKSUM))
            .thenReturn(Arrays.asList(TEST_METADATA, createMetadata("1.0")));

    consumer.beginScan(config, new Date());
    consumer.processFile(TEST_FILE);
    consumer.completeScan();

    ArgumentCaptor<RepositoryProblemFacet> argument = ArgumentCaptor.forClass(RepositoryProblemFacet.class);
    verify(metadataRepository).addMetadataFacet(eq(TEST_REPO), argument.capture());
    RepositoryProblemFacet problem = argument.getValue();
    assertProblem(problem);
}
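The assertProblem helper is not part of this excerpt. A plausible sketch of what such a helper might verify, assuming standard JUnit assertions and RepositoryProblemFacet getters mirroring the setters used elsewhere in this page; this is an assumption, not the project's actual test code:

private static void assertProblem(RepositoryProblemFacet problem) {
    // hypothetical checks; the real helper may verify different or additional fields
    assertEquals(TEST_REPO, problem.getRepositoryId());
    assertNotNull(problem.getProblem());
    assertNotNull(problem.getMessage());
}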
Use of org.apache.archiva.metadata.model.facets.RepositoryProblemFacet in project archiva by apache.
The class RepositoryProblemEventListener, method addArtifactProblem:
@Override
public void addArtifactProblem(RepositorySession session, String repoId, String namespace, String projectId,
                               String projectVersion, RepositoryStorageMetadataException exception) {
    RepositoryProblemFacet problem = new RepositoryProblemFacet();
    problem.setMessage(exception.getMessage());
    problem.setProject(projectId);
    problem.setNamespace(namespace);
    problem.setRepositoryId(repoId);
    problem.setVersion(projectVersion);
    problem.setProblem(exception.getId());
    try {
        session.getRepository().addMetadataFacet(repoId, problem);
        session.markDirty();
    } catch (MetadataRepositoryException e) {
        log.warn("Unable to add repository problem facets for the version being removed: {}", e.getMessage(), e);
    }
}
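A hedged sketch of how a caller might feed a storage failure into this listener, assuming the not-found and invalid exceptions seen above extend RepositoryStorageMetadataException (consistent with the listener's signature). The storage, listener, session, and request variables are placeholders, not taken from the Archiva source:

try {
    storage.readProjectVersionMetadata(request);
} catch (RepositoryStorageMetadataException e) {
    // the exception's id and message become the facet's problem and message fields
    listener.addArtifactProblem(session, request.getRepositoryId(), request.getNamespace(),
            request.getProjectId(), request.getProjectVersion(), e);
}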
Use of org.apache.archiva.metadata.model.facets.RepositoryProblemFacet in project archiva by apache.
The class DefaultReportRepositoriesService, method getHealthReport:
@Override
public List<RepositoryProblemFacet> getHealthReport(String repository, String groupId, int rowCount)
        throws ArchivaRestServiceException {
    RepositorySession repositorySession = repositorySessionFactory.createSession();
    try {
        List<String> observableRepositories = getObservableRepos();
        if (!ALL_REPOSITORIES.equals(repository) && !observableRepositories.contains(repository)) {
            throw new ArchivaRestServiceException(
                    "${$.i18n.prop('report.repository.illegal-access', " + repository + ")}", "repositoryId",
                    new IllegalAccessException());
        }
        if (!ALL_REPOSITORIES.equals(repository)) {
            observableRepositories = Collections.singletonList(repository);
        }
        List<RepositoryProblemFacet> problemArtifacts = new ArrayList<>();
        MetadataRepository metadataRepository = repositorySession.getRepository();
        for (String repoId : observableRepositories) {
            for (String name : metadataRepository.getMetadataFacets(repoId, RepositoryProblemFacet.FACET_ID)) {
                RepositoryProblemFacet metadataFacet = (RepositoryProblemFacet) metadataRepository.getMetadataFacet(
                        repoId, RepositoryProblemFacet.FACET_ID, name);
                if (StringUtils.isEmpty(groupId) || groupId.equals(metadataFacet.getNamespace())) {
                    problemArtifacts.add(metadataFacet);
                }
            }
        }
        return problemArtifacts;
    } catch (MetadataRepositoryException e) {
        throw new ArchivaRestServiceException(e.getMessage(), e);
    } finally {
        repositorySession.close();
    }
}
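For illustration, a caller might filter the health report by group ID as below. The reportRepositoriesService variable, the repository and group values, the log object, and the facet getters are assumptions mirroring the setters used elsewhere on this page, not the project's documented API usage:

List<RepositoryProblemFacet> problems = reportRepositoriesService.getHealthReport("internal", "org.apache.archiva", 100);
for (RepositoryProblemFacet problem : problems) {
    // each facet identifies the offending artifact and what went wrong
    log.info("{}: {}/{}:{} - {}", problem.getRepositoryId(), problem.getNamespace(),
            problem.getProject(), problem.getVersion(), problem.getMessage());
}

Note that the rowCount parameter is accepted by the signature but is not applied anywhere in the body shown above.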
Use of org.apache.archiva.metadata.model.facets.RepositoryProblemFacet in project archiva by apache.
The class DuplicateArtifactsConsumerTest, method testConsumerArtifactDuplicatedButSelfNotInMetadataRepository:
@Test
public void testConsumerArtifactDuplicatedButSelfNotInMetadataRepository() throws Exception {
    when(metadataRepository.getArtifactsByChecksum(TEST_REPO, TEST_CHECKSUM))
            .thenReturn(Arrays.asList(createMetadata("1.0")));

    consumer.beginScan(config, new Date());
    consumer.processFile(TEST_FILE);
    consumer.completeScan();

    ArgumentCaptor<RepositoryProblemFacet> argument = ArgumentCaptor.forClass(RepositoryProblemFacet.class);
    verify(metadataRepository).addMetadataFacet(eq(TEST_REPO), argument.capture());
    RepositoryProblemFacet problem = argument.getValue();
    assertProblem(problem);
}
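The createMetadata helper used by both tests is also not shown in this excerpt. One plausible shape, assuming the usual setters on org.apache.archiva.metadata.model.ArtifactMetadata; this is an assumption for illustration, not the project's actual helper:

private static ArtifactMetadata createMetadata(String version) {
    // hypothetical helper; field values are illustrative only
    ArtifactMetadata metadata = new ArtifactMetadata();
    metadata.setId("test-artifact-" + version + ".jar");
    metadata.setNamespace("com.example.test");
    metadata.setProject("test-artifact");
    metadata.setProjectVersion(version);
    metadata.setVersion(version);
    metadata.setRepositoryId(TEST_REPO);
    return metadata;
}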