Use of org.eclipse.tycho.p2.target.facade.TargetDefinition.Repository in project tycho by eclipse.
Example: class BaselineValidator, method validateAndReplace.
/**
 * Compares the p2 artifacts produced by the current reactor build against a baseline
 * fetched from the given repositories and, depending on {@code baselineMode} /
 * {@code baselineReplace}, either fails the build, warns, and/or replaces the reactor
 * artifacts with their baseline counterparts.
 *
 * @param project             the Maven project whose artifacts are being validated
 * @param execution           the current mojo execution (used when computing the delta)
 * @param reactorMetadata     p2 artifact metadata produced by the reactor, keyed by
 *                            classifier ({@code null} key = main artifact)
 * @param baselineRepositories repositories to resolve the baseline from; entries without
 *                            a URL are skipped
 * @param baselineMode        how to react to differences (e.g. fail, failCommon, warn)
 * @param baselineReplace     which reactor artifacts to replace with baseline versions
 * @return the effective artifact metadata after validation — either the unchanged
 *         {@code reactorMetadata} or a new map reflecting baseline replacements
 * @throws IOException            if copying a baseline file over a reactor file fails
 * @throws MojoExecutionException if the delta is non-empty and the mode demands failure
 */
public Map<String, IP2Artifact> validateAndReplace(MavenProject project, MojoExecution execution, Map<String, IP2Artifact> reactorMetadata, List<Repository> baselineRepositories, BaselineMode baselineMode, BaselineReplace baselineReplace) throws IOException, MojoExecutionException {
    Map<String, IP2Artifact> result = reactorMetadata;
    // 'disable', 'fail', 'failCommon', 'none', 'all' are presumably statically imported
    // BaselineMode/BaselineReplace enum constants — TODO confirm against the imports.
    if (baselineMode != disable && baselineRepositories != null && !baselineRepositories.isEmpty()) {
        // Convert the POM repository definitions into resolver locations, dropping
        // entries that have no URL configured.
        List<MavenRepositoryLocation> _repositories = new ArrayList<>();
        for (Repository repository : baselineRepositories) {
            if (repository.getUrl() != null) {
                _repositories.add(new MavenRepositoryLocation(repository.getId(), repository.getUrl()));
            }
        }
        // Baseline artifacts are downloaded below target/baseline.
        File baselineBasedir = new File(project.getBuild().getDirectory(), "baseline");
        BaselineService baselineService = getService(BaselineService.class);
        // Resolve the baseline versions of the reactor artifacts; null means no
        // baseline version of this project exists in the given repositories.
        Map<String, IP2Artifact> baselineMetadata = baselineService.getProjectBaseline(_repositories, reactorMetadata, baselineBasedir);
        if (baselineMetadata != null) {
            // Non-null delta = reactor and baseline differ in some way.
            CompoundArtifactDelta delta = getDelta(baselineService, baselineMetadata, reactorMetadata, execution);
            if (delta != null) {
                // Optional debug aid: dump per-classifier comparison details to
                // target/artifactcomparison when the system property is set.
                if (System.getProperties().containsKey("tycho.debug.artifactcomparator")) {
                    File logdir = new File(project.getBuild().getDirectory(), "artifactcomparison");
                    log.info("Artifact comparison detailed log directory " + logdir.getAbsolutePath());
                    for (Map.Entry<String, ArtifactDelta> classifier : delta.getMembers().entrySet()) {
                        if (classifier.getValue() instanceof CompoundArtifactDelta) {
                            ((CompoundArtifactDelta) classifier.getValue()).writeDetails(new File(logdir, classifier.getKey()));
                        }
                    }
                }
                // 'fail' always fails on a delta; 'failCommon' tolerates deltas that
                // consist solely of artifacts missing from the baseline.
                if (baselineMode == fail || (baselineMode == failCommon && !isMissingOnlyDelta(delta))) {
                    throw new MojoExecutionException(delta.getDetailedMessage());
                } else {
                    log.warn(project.toString() + ": " + delta.getDetailedMessage());
                }
            }
            if (baselineReplace != none) {
                result = new LinkedHashMap<>();
                // replace reactor artifacts with baseline
                ArrayList<String> replaced = new ArrayList<>();
                for (Map.Entry<String, IP2Artifact> artifact : baselineMetadata.entrySet()) {
                    File baseLineFile = artifact.getValue().getLocation();
                    String classifier = artifact.getKey();
                    File reactorFile = reactorMetadata.get(classifier).getLocation();
                    if (baseLineFile.isFile() && baseLineFile.length() == 0L) {
                        // workaround for possibly corrupted download - bug 484003
                        log.error("baseline file " + baseLineFile.getAbsolutePath() + " is empty. Will not replace " + reactorFile);
                    } else {
                        // Overwrite the reactor build output with the baseline bytes
                        // and record the baseline metadata as the effective artifact.
                        FileUtils.copyFile(baseLineFile, reactorFile);
                        result.put(classifier, artifact.getValue());
                        // null classifier (main artifact) is reported separately below.
                        if (classifier != null) {
                            replaced.add(classifier);
                        }
                    }
                }
                // un-attach and delete artifacts present in reactor but not in baseline
                ArrayList<String> removed = new ArrayList<>();
                ArrayList<String> inconsistent = new ArrayList<>();
                for (Map.Entry<String, IP2Artifact> entry : reactorMetadata.entrySet()) {
                    String classifier = entry.getKey();
                    IP2Artifact artifact = entry.getValue();
                    // The main artifact (null classifier) is never un-attached here.
                    if (classifier == null || artifact == null) {
                        continue;
                    }
                    if (baselineReplace == all && !baselineMetadata.containsKey(classifier)) {
                        // Remove the matching attached artifact from the project and
                        // delete its file, since the baseline does not contain it.
                        List<Artifact> attachedArtifacts = project.getAttachedArtifacts();
                        ListIterator<Artifact> iterator = attachedArtifacts.listIterator();
                        while (iterator.hasNext()) {
                            if (classifier.equals(iterator.next().getClassifier())) {
                                iterator.remove();
                                break;
                            }
                        }
                        artifact.getLocation().delete();
                        removed.add(classifier);
                    } else {
                        // NOTE(review): a classifier present in BOTH reactor and baseline
                        // also takes this branch, so the reactor IP2Artifact overwrites
                        // the baseline entry put into 'result' by the replace loop above
                        // (the file on disk was already overwritten with the baseline
                        // copy) — confirm this is intended.
                        // NOTE(review): 'inconsistent' is collected but never included in
                        // the summary logged below — confirm whether that is deliberate.
                        inconsistent.add(classifier);
                        result.put(classifier, artifact);
                    }
                }
                // Summarize what was replaced/removed for the build log.
                if (log.isInfoEnabled()) {
                    StringBuilder msg = new StringBuilder();
                    msg.append(project.toString());
                    msg.append("\n The main artifact has been replaced with the baseline version.\n");
                    if (!replaced.isEmpty()) {
                        msg.append(" The following attached artifacts have been replaced with the baseline version: ");
                        msg.append(replaced.toString());
                        msg.append("\n");
                    }
                    if (!removed.isEmpty()) {
                        msg.append(" The following attached artifacts are not present in the baseline and have been removed: ");
                        msg.append(removed.toString());
                        msg.append("\n");
                    }
                    log.info(msg.toString());
                }
            }
        } else {
            log.info("No baseline version " + project);
        }
    }
    return result;
}
Use of org.eclipse.tycho.p2.target.facade.TargetDefinition.Repository in project tycho by eclipse.
Example: class VerifyIntegrityRepositoryMojo, method execute.
/**
 * Verifies the integrity of the p2 repository produced in the build's
 * {@code repository} output directory, failing the build if verification
 * reports the repository as invalid or if verification itself errors out.
 *
 * @throws MojoFailureException   if the repository exists but is invalid
 * @throws MojoExecutionException if the verification run itself fails
 */
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    File repoLocation = getBuildDirectory().getChild("repository");
    URI repoUri = repoLocation.toURI();
    logger.info("Verifying p2 repositories in " + repoLocation);
    // The same URI serves as both metadata and artifact repository location.
    VerifierService verifierService = p2.getService(VerifierService.class);
    try {
        boolean valid = verifierService.verify(repoUri, repoUri, getBuildDirectory());
        if (!valid) {
            throw new MojoFailureException("The repository is invalid.");
        }
    } catch (FacadeException e) {
        throw new MojoExecutionException("Verification failed", e);
    }
}
Use of org.eclipse.tycho.p2.target.facade.TargetDefinition.Repository in project tycho by eclipse.
Example: class TargetPlatformBundlePublisherTest, method testPomDependencyOnBundle.
/**
 * Verifies that publishing a POM dependency which is an OSGi bundle yields an
 * installable unit carrying the original GAV properties, and that the published
 * artifact is readable back from the resulting artifact repository.
 */
@Test
public void testPomDependencyOnBundle() throws Exception {
    String expectedSymbolicName = "org.eclipse.osgi";
    String expectedBundleVersion = "3.5.2.R35x_v20100126";
    // Seed the local Maven repository with the prepared bundle repo fixture.
    FileUtils.copyDirectory(resourceFile("platformbuilder/pom-dependencies/bundle-repo"), localRepositoryRoot);
    File jarInLocalRepo = new File(localRepositoryRoot, RepositoryLayoutHelper.getRelativePath(GROUP_ID, ARTIFACT_ID, VERSION, null, "jar"));
    IArtifactFacade mavenArtifact = new ArtifactMock(jarInLocalRepo, GROUP_ID, ARTIFACT_ID, VERSION, "jar");
    IInstallableUnit publishedIU = subject.attemptToPublishBundle(mavenArtifact);
    // The published IU must expose the bundle identity and the Maven GAV.
    assertThat(publishedIU, is(unit(expectedSymbolicName, expectedBundleVersion)));
    assertThat(publishedIU.getProperties(), containsGAV(GROUP_ID, ARTIFACT_ID, VERSION));
    assertThat(publishedIU.getArtifacts().size(), is(1));
    IArtifactKey publishedKey = publishedIU.getArtifacts().iterator().next();
    IRawArtifactProvider publishedRepo = subject.getArtifactRepoOfPublishedBundles();
    // The repository must contain exactly one descriptor for that key, again
    // carrying the GAV properties.
    assertThat(publishedRepo, contains(publishedKey));
    IArtifactDescriptor[] descriptors = publishedRepo.getArtifactDescriptors(publishedKey);
    assertThat(descriptors.length, is(1));
    assertThat(descriptors[0].getProperties(), containsGAV(GROUP_ID, ARTIFACT_ID, VERSION));
    assertThat(descriptors[0].getProperties(), hasProperty("download.md5", "6303323acc98658c0fed307c84db4411"));
    // test that reading the artifact succeeds (because the way it is added to the repository is a bit special)
    assertThat(artifactMD5Of(publishedKey, publishedRepo), is("6303323acc98658c0fed307c84db4411"));
}
Use of org.eclipse.tycho.p2.target.facade.TargetDefinition.Repository in project tycho by eclipse.
Example: class TargetDefinitionResolverExecutionEnvironmentTest, method testRestrictedExecutionEnvironment.
/**
 * Verifies that when resolving against a restricted execution environment
 * (CDC-1.0/Foundation-1.0), a package import is satisfied by a bundle from the
 * repository rather than by the execution environment's "a.jre" unit.
 */
@Test
public void testRestrictedExecutionEnvironment() throws Exception {
    subject = targetResolverForEE("CDC-1.0/Foundation-1.0");
    TargetDefinition targetDefinition = definitionWith(new AlternatePackageProviderLocationStub());
    Collection<IInstallableUnit> resolvedUnits = subject.resolveContent(targetDefinition).getUnits();
    // expect that resolver included a bundle providing org.w3c.dom (here javax.xml)...
    assertThat(resolvedUnits, hasItem(unit("javax.xml", "0.0.1.SNAPSHOT")));
    // ... and did not match the import against the "a.jre" IU also in the repository
    assertThat(resolvedUnits, not(hasItem(unitWithId("a.jre"))));
}
Use of org.eclipse.tycho.p2.target.facade.TargetDefinition.Repository in project tycho by eclipse.
Example: class PomDependencyProcessor, method collectPomDependencies.
/**
 * Builds a {@link PomDependencyCollector} for the given transitive POM
 * dependencies of a project. Dependencies that were built by Tycho (both
 * p2metadata.xml and p2artifacts.xml attachments present) are added with their
 * existing p2 metadata; plain bundles are published on the fly; artifacts with
 * only one of the two p2 attachments are rejected as inconsistent.
 *
 * @param project                  the project whose dependencies are collected
 * @param transitivePomDependencies the resolved transitive POM dependencies
 * @return a collector containing p2 data for all usable dependencies
 */
PomDependencyCollector collectPomDependencies(MavenProject project, Collection<Artifact> transitivePomDependencies) {
    final TychoRepositoryIndex localP2ArtifactsIndex = localRepoIndices.getArtifactsIndex();
    PomDependencyCollector collector = resolverFactory.newPomDependencyCollector();
    collector.setProjectLocation(project.getBasedir());
    for (Artifact dependency : transitivePomDependencies) {
        // System-scoped dependencies are deliberately skipped.
        if (Artifact.SCOPE_SYSTEM.equals(dependency.getScope())) {
            // ignore
            continue;
        }
        P2DataArtifacts p2Data = new P2DataArtifacts(dependency);
        p2Data.resolve(session, project.getRemoteArtifactRepositories());
        boolean hasMetadataXml = p2Data.p2MetadataXml.isAvailable();
        boolean hasArtifactsXml = p2Data.p2ArtifactsXml.isAvailable();
        if (hasMetadataXml && hasArtifactsXml) {
            /*
             * The POM dependency has (probably) been built by Tycho, so we can re-use the
             * existing p2 data in the target platform. The data is stored in the attached
             * artifacts p2metadata.xml and p2artifacts.xml, which are now present in the local
             * Maven repository.
             */
            if (logger.isDebugEnabled()) {
                logger.debug("P2TargetPlatformResolver: Using existing metadata of " + dependency.toString());
            }
            collector.addArtifactWithExistingMetadata(new ArtifactFacade(dependency), new ArtifactFacade(p2Data.p2MetadataXml.artifact));
            /*
             * Since the p2artifacts.xml exists on disk, we can add the artifact to the (global)
             * p2 artifact repository view of local Maven repository. Then, the artifact is
             * available in the build.
             */
            // TODO this should happen in resolution context
            localP2ArtifactsIndex.addGav(new GAV(dependency.getGroupId(), dependency.getArtifactId(), dependency.getBaseVersion()));
        } else if (hasMetadataXml != hasArtifactsXml) {
            // Exactly one of the two p2 attachments exists: inconsistent state.
            failDueToPartialP2Data(dependency, p2Data);
        } else {
            /*
             * The POM dependency has not been built by Tycho. If the dependency is a bundle,
             * run the p2 bundle publisher on it and add the result to the resolution context.
             */
            if (logger.isDebugEnabled()) {
                logger.debug("P2resolver.addMavenArtifact " + dependency.toString());
            }
            collector.publishAndAddArtifactIfBundleArtifact(new ArtifactFacade(dependency));
        }
    }
    return collector;
}
Aggregations