Use of hudson.model.UpdateSite.Plugin in project plugin-compat-tester by jenkinsci.
Class PluginCompatTester, method updateAllDependents.
/**
 * Search the dependents of a given plugin to determine if we need to use the bundled version.
 * This helps in cases where tests fail because a declared dependency version is too old, and it
 * more accurately represents the full set of upgraded plugins for provided war files.
 */
private void updateAllDependents(String parent, Plugin dependent, Map<String, VersionNumber> pluginList, Map<String, VersionNumber> adding, Map<String, VersionNumber> replacing, Map<String, Plugin> otherPlugins, List<String> inTest, List<String> toConvertFromTest) {
    // Check if this exists with an undesired scope
    String pluginName = dependent.name;
    if (inTest.contains(pluginName)) {
        // This is now required in the compile scope. For example: copyartifact's dependency matrix-project requires junit
        System.out.println("Converting " + pluginName + " from the test scope since it was a dependency of " + parent);
        toConvertFromTest.add(pluginName);
        replacing.put(pluginName, new VersionNumber(dependent.version));
    } else {
        System.out.println("Adding " + pluginName + " since it was a dependency of " + parent);
        adding.put(pluginName, new VersionNumber(dependent.version));
    }
    // Also check any dependencies
    for (Map.Entry<String, String> dependency : dependent.dependencies.entrySet()) {
        String depPlugin = dependency.getKey();
        if (pluginList.containsKey(depPlugin)) {
            // already handled
            continue;
        }
        // We ignore the declared dependency version and go with the bundled version:
        Plugin depBundledP = otherPlugins.get(depPlugin);
        if (depBundledP != null) {
            updateAllDependents(pluginName, depBundledP, pluginList, adding, replacing, otherPlugins, inTest, toConvertFromTest);
        }
    }
}
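In effect, updateAllDependents() walks the bundled dependency graph recursively and sorts each plugin it finds into either the adding map or the replacing map, depending on whether that plugin currently sits in the test scope. The following self-contained sketch models only that bookkeeping over plain maps; the PluginModel record and the sample data are hypothetical stand-ins, since the real method operates on hudson.model.UpdateSite.Plugin instances built from update-center JSON.

import java.util.*;

public class DependentWalkSketch {

    // Hypothetical stand-in for UpdateSite.Plugin: name, version and direct dependencies.
    record PluginModel(String name, String version, Map<String, String> dependencies) {}

    static void updateAllDependents(String parent, PluginModel dependent,
            Map<String, String> pluginList, Map<String, String> adding, Map<String, String> replacing,
            Map<String, PluginModel> otherPlugins, List<String> inTest, List<String> toConvertFromTest) {
        String name = dependent.name();
        if (inTest.contains(name)) {
            // Needed in the compile scope now, so promote it out of the test scope.
            toConvertFromTest.add(name);
            replacing.put(name, dependent.version());
        } else {
            adding.put(name, dependent.version());
        }
        for (String dep : dependent.dependencies().keySet()) {
            if (pluginList.containsKey(dep)) {
                continue; // already a declared dependency, handled elsewhere
            }
            PluginModel bundled = otherPlugins.get(dep);
            if (bundled != null) {
                updateAllDependents(name, bundled, pluginList, adding, replacing, otherPlugins, inTest, toConvertFromTest);
            }
        }
    }

    public static void main(String[] args) {
        // matrix-project is bundled and pulls in junit, which the plugin under test only had in the test scope.
        Map<String, PluginModel> bundled = Map.of(
                "matrix-project", new PluginModel("matrix-project", "1.12", Map.of("junit", "1.6")),
                "junit", new PluginModel("junit", "1.20", Map.of()));
        Map<String, String> adding = new HashMap<>();
        Map<String, String> replacing = new HashMap<>();
        List<String> convert = new ArrayList<>();
        updateAllDependents("copyartifact", bundled.get("matrix-project"),
                new HashMap<>(), adding, replacing, bundled, List.of("junit"), convert);
        // Prints: adding={matrix-project=1.12} replacing={junit=1.20} convert=[junit]
        System.out.println("adding=" + adding + " replacing=" + replacing + " convert=" + convert);
    }
}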
Use of hudson.model.UpdateSite.Plugin in project plugin-compat-tester by jenkinsci.
Class PluginCompatTester, method checkDefinedDeps.
private void checkDefinedDeps(Map<String, VersionNumber> pluginList, Map<String, VersionNumber> adding, Map<String, VersionNumber> replacing, Map<String, Plugin> otherPlugins, List<String> inTest, List<String> toConvertFromTest) {
    for (Map.Entry<String, VersionNumber> pluginDep : pluginList.entrySet()) {
        String plugin = pluginDep.getKey();
        Plugin bundledP = otherPlugins.get(plugin);
        if (bundledP != null) {
            VersionNumber bundledV = new VersionNumber(bundledP.version);
            if (bundledV.isNewerThan(pluginDep.getValue())) {
                assert !adding.containsKey(plugin);
                replacing.put(plugin, bundledV);
            }
            // Also check any dependencies, so if we are upgrading cloudbees-folder, we also add an explicit dep on a bundled credentials.
            for (Map.Entry<String, String> dependency : bundledP.dependencies.entrySet()) {
                String depPlugin = dependency.getKey();
                if (pluginList.containsKey(depPlugin)) {
                    // already handled
                    continue;
                }
                // We ignore the declared dependency version and go with the bundled version:
                Plugin depBundledP = otherPlugins.get(depPlugin);
                if (depBundledP != null) {
                    updateAllDependents(plugin, depBundledP, pluginList, adding, replacing, otherPlugins, inTest, toConvertFromTest);
                }
            }
        }
    }
}
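The replace decision above hinges on hudson.util.VersionNumber ordering: a declared dependency is only overridden when the bundled version is strictly newer. A minimal sketch of that comparison, using only the VersionNumber class (the version strings are arbitrary examples):

import hudson.util.VersionNumber;

public class VersionCheckSketch {
    public static void main(String[] args) {
        VersionNumber declared = new VersionNumber("2.30");
        VersionNumber bundled = new VersionNumber("2.45");
        // Mirrors the check in checkDefinedDeps: only a strictly newer bundled version lands in `replacing`.
        if (bundled.isNewerThan(declared)) {
            System.out.println("replace declared " + declared + " with bundled " + bundled);
        }
    }
}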
Use of hudson.model.UpdateSite.Plugin in project plugin-compat-tester by jenkinsci.
Class ExampleMultiParent, method action.
/*
 * No check implementation is required because transformedPlugins
 * returns your specific list.
 */

/**
 * Point to the shared location. Check if this needs to be downloaded.
 */
public Map<String, Object> action(Map<String, Object> moreInfo) throws Exception {
    PluginCompatTesterConfig config = (PluginCompatTesterConfig) moreInfo.get("config");
    Plugin currentPlugin = (Plugin) moreInfo.get("plugin");
    // Determine if we need to run the download; only run for the first identified plugin in the series
    if (firstRun) {
        System.out.println("Preparing for Multimodule checkout.");
        // Check out into the parent directory. All other processing happens in the child directory
        File parentPath = new File(config.workDirectory.getAbsolutePath() + "/" + parentName);
        System.out.println("Checking out from SCM connection URL : " + parentUrl + " (" + parentName + "-" + currentPlugin.version + ")");
        ScmManager scmManager = SCMManagerFactory.getInstance().createScmManager();
        ScmRepository repository = scmManager.makeScmRepository(parentUrl);
        CheckOutScmResult result = scmManager.checkOut(repository, new ScmFileSet(parentPath), new ScmTag(parentName + "-" + currentPlugin.version));
        if (!result.isSuccess()) {
            throw new RuntimeException(result.getProviderMessage() + "||" + result.getCommandOutput());
        }
    }
    // Checkout already happened, don't run through again
    moreInfo.put("runCheckout", false);
    firstRun = false;
    // Change the "download" directory; after download, it's simply used for reference
    File childPath = new File(config.workDirectory.getAbsolutePath() + "/" + parentName + "/" + currentPlugin.name);
    moreInfo.put("checkoutDir", childPath);
    return moreInfo;
}
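The hook communicates with the tester purely through the moreInfo map: it reads the "config" and "plugin" entries and writes "runCheckout" and "checkoutDir" back, so the main loop skips its own SCM checkout and builds from the shared location. As a rough illustration of that contract only, here is a hypothetical hook that redirects the checkout directory; the class name and the shared path are made up, and real hooks are discovered and invoked by PluginCompatTesterHooks rather than called directly.

import java.io.File;
import java.util.Map;

// Hypothetical hook following the same moreInfo contract as ExampleMultiParent above:
// read what the tester put into the map, adjust it, and hand it back.
public class RedirectCheckoutHookSketch {

    public Map<String, Object> action(Map<String, Object> moreInfo) {
        File sharedCheckout = new File("/tmp/shared-checkout"); // assumed, pre-populated location
        moreInfo.put("runCheckout", false);          // tell the tester to skip its own SCM checkout
        moreInfo.put("checkoutDir", sharedCheckout); // and to build from this directory instead
        return moreInfo;
    }
}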
Use of hudson.model.UpdateSite.Plugin in project plugin-compat-tester by jenkinsci.
Class PluginCompatTester, method testPlugins.
public PluginCompatReport testPlugins() throws PlexusContainerException, IOException, MavenEmbedderException {
    File war = config.getWar();
    if (war != null) {
        populateSplits(war);
    } else {
        // TODO find a way to load the local version of jenkins.war acc. to UC metadata
        splits = HISTORICAL_SPLITS;
        splitCycles = HISTORICAL_SPLIT_CYCLES;
    }
    PluginCompatTesterHooks pcth = new PluginCompatTesterHooks(config.getHookPrefixes());
    // Provide the XSL stylesheet along with the XML report file
    if (config.reportFile != null) {
        if (config.isProvideXslReport()) {
            File xslFilePath = PluginCompatReport.getXslFilepath(config.reportFile);
            FileUtils.copyStreamToFile(new RawInputStreamFacade(getXslTransformerResource().getInputStream()), xslFilePath);
        }
    }
    DataImporter dataImporter = null;
    if (config.getGaeBaseUrl() != null && config.getGaeSecurityToken() != null) {
        dataImporter = new DataImporter(config.getGaeBaseUrl(), config.getGaeSecurityToken());
    }
    // Determine the plugin data
    // Used to track real plugin groupIds from WARs
    HashMap<String, String> pluginGroupIds = new HashMap<String, String>();
    UpdateSite.Data data = config.getWar() == null ? extractUpdateCenterData(pluginGroupIds) : scanWAR(config.getWar(), pluginGroupIds);
    final Map<String, Plugin> pluginsToCheck;
    final List<String> pluginsToInclude = config.getIncludePlugins();
    if (data.plugins.isEmpty() && pluginsToInclude != null && !pluginsToInclude.isEmpty()) {
        // Update Center returns empty info OR the "-war" option is specified for a WAR without bundled plugins
        // TODO: Ideally we should do this tweak in any case, so that we can test custom plugins with Jenkins cores before unbundling
        // But it will require us to always poll the update center...
        System.out.println("WAR file does not contain plugin info, will try to extract it from UC for included plugins");
        pluginsToCheck = new HashMap<>(pluginsToInclude.size());
        UpdateSite.Data ucData = extractUpdateCenterData(pluginGroupIds);
        for (String plugin : pluginsToInclude) {
            UpdateSite.Plugin pluginData = ucData.plugins.get(plugin);
            if (pluginData != null) {
                System.out.println("Adding " + plugin + " to the test scope");
                pluginsToCheck.put(plugin, pluginData);
            }
        }
    } else {
        pluginsToCheck = data.plugins;
    }
    if (pluginsToCheck.isEmpty()) {
        throw new IOException("List of plugins to check is empty, it is not possible to run PCT");
    }
    PluginCompatReport report = PluginCompatReport.fromXml(config.reportFile);
    SortedSet<MavenCoordinates> testedCores = config.getWar() == null ? generateCoreCoordinatesToTest(data, report) : coreVersionFromWAR(data);
    MavenRunner.Config mconfig = new MavenRunner.Config();
    mconfig.userSettingsFile = config.getM2SettingsFile();
    // TODO REMOVE
    mconfig.userProperties.put("failIfNoTests", "false");
    mconfig.userProperties.put("argLine", "-XX:MaxPermSize=128m");
    String mavenPropertiesFilePath = this.config.getMavenPropertiesFile();
    if (StringUtils.isNotBlank(mavenPropertiesFilePath)) {
        File file = new File(mavenPropertiesFilePath);
        if (file.exists()) {
            FileInputStream fileInputStream = null;
            try {
                fileInputStream = new FileInputStream(file);
                Properties properties = new Properties();
                properties.load(fileInputStream);
                for (Map.Entry<Object, Object> entry : properties.entrySet()) {
                    mconfig.userProperties.put((String) entry.getKey(), (String) entry.getValue());
                }
            } finally {
                IOUtils.closeQuietly(fileInputStream);
            }
        } else {
            System.out.println("File " + mavenPropertiesFilePath + " does not exist");
        }
    }
    SCMManagerFactory.getInstance().start();
    for (MavenCoordinates coreCoordinates : testedCores) {
        System.out.println("Starting plugin tests on core coordinates : " + coreCoordinates.toString());
        for (Plugin plugin : pluginsToCheck.values()) {
            if (config.getIncludePlugins() == null || config.getIncludePlugins().contains(plugin.name.toLowerCase())) {
                PluginInfos pluginInfos = new PluginInfos(plugin.name, plugin.version, plugin.url);
                if (config.getExcludePlugins() != null && config.getExcludePlugins().contains(plugin.name.toLowerCase())) {
                    System.out.println("Plugin " + plugin.name + " is in excluded plugins => test skipped !");
                    continue;
                }
                String errorMessage = null;
                TestStatus status = null;
                MavenCoordinates actualCoreCoordinates = coreCoordinates;
                PluginRemoting remote;
                if (localCheckoutProvided() && onlyOnePluginIncluded()) {
                    // Only one plugin and checkout directory provided
                    remote = new PluginRemoting(new File(config.getLocalCheckoutDir(), "pom.xml"));
                } else if (localCheckoutProvided()) {
                    // A local directory was provided for more than one plugin, so each plugin lives in localCheckoutDir/plugin-name
                    // If there is no subdirectory for the plugin, it will be cloned from SCM
                    File pomFile = new File(new File(config.getLocalCheckoutDir(), plugin.name), "pom.xml");
                    if (pomFile.exists()) {
                        remote = new PluginRemoting(pomFile);
                    } else {
                        remote = new PluginRemoting(plugin.url);
                    }
                } else {
                    // Only one plugin but no checkout directory provided, or
                    // more than one plugin and no local checkout directory provided
                    remote = new PluginRemoting(plugin.url);
                }
                PomData pomData;
                try {
                    pomData = remote.retrievePomData();
                    System.out.println("detected parent POM " + pomData.parent.toGAV());
                    if ((pomData.parent.groupId.equals(PluginCompatTesterConfig.DEFAULT_PARENT_GROUP) && pomData.parent.artifactId.equals(PluginCompatTesterConfig.DEFAULT_PARENT_ARTIFACT)
                            || pomData.parent.groupId.equals("org.jvnet.hudson.plugins"))
                            && coreCoordinates.version.matches("1[.][0-9]+[.][0-9]+")
                            && new VersionNumber(coreCoordinates.version).compareTo(new VersionNumber("1.485")) < 0) {
                        // TODO unless 1.480.3+
                        System.out.println("Cannot test against " + coreCoordinates.version + " due to lack of deployed POM for " + coreCoordinates.toGAV());
                        actualCoreCoordinates = new MavenCoordinates(coreCoordinates.groupId, coreCoordinates.artifactId, coreCoordinates.version.replaceFirst("[.][0-9]+$", ""));
                    }
                } catch (Throwable t) {
                    status = TestStatus.INTERNAL_ERROR;
                    errorMessage = t.getMessage();
                    pomData = null;
                }
                if (!config.isSkipTestCache() && report.isCompatTestResultAlreadyInCache(pluginInfos, actualCoreCoordinates, config.getTestCacheTimeout(), config.getCacheThresholStatus())) {
                    System.out.println("Cache activated for plugin " + pluginInfos.pluginName + " => test skipped !");
                    // Don't do anything : we are in the cached interval ! :-)
                    continue;
                }
                List<String> warningMessages = new ArrayList<String>();
                if (errorMessage == null) {
                    try {
                        TestExecutionResult result = testPluginAgainst(actualCoreCoordinates, plugin, mconfig, pomData, pluginsToCheck, pluginGroupIds, pcth);
                        // If no PomExecutionException, everything went well...
                        status = TestStatus.SUCCESS;
                        warningMessages.addAll(result.pomWarningMessages);
                    } catch (PomExecutionException e) {
                        if (!e.succeededPluginArtifactIds.contains("maven-compiler-plugin")) {
                            status = TestStatus.COMPILATION_ERROR;
                        } else if (!e.succeededPluginArtifactIds.contains("maven-surefire-plugin")) {
                            status = TestStatus.TEST_FAILURES;
                        } else {
                            // Can this really happen ???
                            status = TestStatus.SUCCESS;
                        }
                        errorMessage = e.getErrorMessage();
                        warningMessages.addAll(e.getPomWarningMessages());
                    } catch (Error e) {
                        // Rethrow the error ... something is wrong !
                        throw e;
                    } catch (Throwable t) {
                        status = TestStatus.INTERNAL_ERROR;
                        errorMessage = t.getMessage();
                    }
                }
                File buildLogFile = createBuildLogFile(config.reportFile, plugin.name, plugin.version, actualCoreCoordinates);
                String buildLogFilePath = "";
                if (buildLogFile.exists()) {
                    buildLogFilePath = createBuildLogFilePathFor(pluginInfos.pluginName, pluginInfos.pluginVersion, actualCoreCoordinates);
                }
                PluginCompatResult result = new PluginCompatResult(actualCoreCoordinates, status, errorMessage, warningMessages, buildLogFilePath);
                report.add(pluginInfos, result);
                // Adding result to GAE
                if (dataImporter != null) {
                    dataImporter.importPluginCompatResult(result, pluginInfos, config.reportFile.getParentFile());
                    // TODO: import log files
                }
                if (config.reportFile != null) {
                    if (!config.reportFile.exists()) {
                        FileUtils.fileWrite(config.reportFile.getAbsolutePath(), "");
                    }
                    report.save(config.reportFile);
                }
            } else {
                System.out.println("Plugin " + plugin.name + " not in included plugins => test skipped !");
            }
        }
    }
    // Generating HTML report if needed
    if (config.reportFile != null) {
        if (config.isGenerateHtmlReport()) {
            generateHtmlReportFile();
        }
    }
    return report;
}
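Taken as a whole, testPlugins() is the method the PCT command line ultimately drives: it resolves the plugin set (from the WAR or the update center), loops over every core/plugin pair, runs the Maven build, and accumulates the results into a PluginCompatReport. A rough sketch of driving it programmatically follows; the PluginCompatTesterConfig constructor arguments and the setWar/setIncludePlugins setters are assumptions based on the getters referenced above, so treat this as an outline rather than the exact API.

import java.io.File;
import java.util.Arrays;

public class PctDriverSketch {
    public static void main(String[] args) throws Exception {
        // Assumed wiring; the real CLI builds the config from command-line options.
        PluginCompatTesterConfig config = new PluginCompatTesterConfig(
                new File("work"),                // working directory for checkouts and builds
                new File("out/report.xml"),      // XML report fed into the XSL/HTML generation above
                new File(System.getProperty("user.home"), ".m2/settings.xml"));
        config.setWar(new File("jenkins.war"));                        // test the plugins bundled in this WAR
        config.setIncludePlugins(Arrays.asList("git", "credentials")); // restrict the run to a few plugins

        PluginCompatReport report = new PluginCompatTester(config).testPlugins();
        System.out.println("PCT run finished, report saved to " + config.reportFile);
    }
}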
Use of hudson.model.UpdateSite.Plugin in project plugin-compat-tester by jenkinsci.
Class PluginCompatTester, method addSplitPluginDependencies.
private void addSplitPluginDependencies(String thisPlugin, MavenRunner.Config mconfig, File pluginCheckoutDir, MavenPom pom, Map<String, Plugin> otherPlugins, Map<String, String> pluginGroupIds, String coreVersion) throws PomExecutionException, IOException {
    File tmp = File.createTempFile("dependencies", ".log");
    VersionNumber coreDep = null;
    Map<String, VersionNumber> pluginDeps = new HashMap<String, VersionNumber>();
    Map<String, VersionNumber> pluginDepsTest = new HashMap<String, VersionNumber>();
    try {
        runner.run(mconfig, pluginCheckoutDir, tmp, "dependency:resolve");
        Reader r = new FileReader(tmp);
        try {
            BufferedReader br = new BufferedReader(r);
            Pattern p = Pattern.compile("\\[INFO\\] ([^:]+):([^:]+):([a-z-]+):(([^:]+):)?([^:]+):(provided|compile|runtime|system)");
            Pattern p2 = Pattern.compile("\\[INFO\\] ([^:]+):([^:]+):([a-z-]+):(([^:]+):)?([^:]+):(test)");
            String line;
            while ((line = br.readLine()) != null) {
                Matcher m = p.matcher(line);
                Matcher m2 = p2.matcher(line);
                String groupId;
                String artifactId;
                VersionNumber version;
                if (!m.matches() && !m2.matches()) {
                    continue;
                } else if (m.matches()) {
                    groupId = m.group(1);
                    artifactId = m.group(2);
                    try {
                        version = new VersionNumber(m.group(6));
                    } catch (IllegalArgumentException x) {
                        // OK, some other kind of dep, just ignore
                        continue;
                    }
                } else {
                    // m2.matches()
                    groupId = m2.group(1);
                    artifactId = m2.group(2);
                    try {
                        version = new VersionNumber(m2.group(6));
                    } catch (IllegalArgumentException x) {
                        // OK, some other kind of dep, just ignore
                        continue;
                    }
                }
                if (groupId.equals("org.jenkins-ci.main") && artifactId.equals("jenkins-core")) {
                    coreDep = version;
                } else if (groupId.equals("org.jenkins-ci.plugins")) {
                    if (m2.matches()) {
                        pluginDepsTest.put(artifactId, version);
                    } else {
                        pluginDeps.put(artifactId, version);
                    }
                } else if (groupId.equals("org.jenkins-ci.main") && artifactId.equals("maven-plugin")) {
                    if (m2.matches()) {
                        pluginDepsTest.put(artifactId, version);
                    } else {
                        pluginDeps.put(artifactId, version);
                    }
                } else if (groupId.equals(pluginGroupIds.get(artifactId))) {
                    if (m2.matches()) {
                        pluginDepsTest.put(artifactId, version);
                    } else {
                        pluginDeps.put(artifactId, version);
                    }
                }
            }
        } finally {
            r.close();
        }
    } finally {
        tmp.delete();
    }
    System.out.println("Analysis: coreDep=" + coreDep + " pluginDeps=" + pluginDeps + " pluginDepsTest=" + pluginDepsTest);
    if (coreDep != null) {
        Map<String, VersionNumber> toAdd = new HashMap<String, VersionNumber>();
        Map<String, VersionNumber> toReplace = new HashMap<String, VersionNumber>();
        Map<String, VersionNumber> toAddTest = new HashMap<String, VersionNumber>();
        Map<String, VersionNumber> toReplaceTest = new HashMap<String, VersionNumber>();
        for (String split : splits) {
            String[] pieces = split.split(" ");
            String plugin = pieces[0];
            if (splitCycles.contains(thisPlugin + ' ' + plugin)) {
                System.out.println("Skipping implicit dep " + thisPlugin + " → " + plugin);
                continue;
            }
            VersionNumber splitPoint = new VersionNumber(pieces[1]);
            VersionNumber declaredMinimum = new VersionNumber(pieces[2]);
            if (coreDep.compareTo(splitPoint) < 0 && new VersionNumber(coreVersion).compareTo(splitPoint) >= 0 && !pluginDeps.containsKey(plugin)) {
                Plugin bundledP = otherPlugins.get(plugin);
                if (bundledP != null) {
                    VersionNumber bundledV;
                    try {
                        bundledV = new VersionNumber(bundledP.version);
                    } catch (NumberFormatException x) {
                        // TODO apparently this does not handle `1.0-beta-1` and the like?!
                        System.out.println("Skipping unparseable dep on " + bundledP.name + ": " + bundledP.version);
                        continue;
                    }
                    if (bundledV.isNewerThan(declaredMinimum)) {
                        toAdd.put(plugin, bundledV);
                        continue;
                    }
                }
                toAdd.put(plugin, declaredMinimum);
            }
        }
        List<String> convertFromTestDep = new ArrayList<String>();
        checkDefinedDeps(pluginDeps, toAdd, toReplace, otherPlugins, new ArrayList<>(pluginDepsTest.keySet()), convertFromTestDep);
        pluginDepsTest.putAll(difference(pluginDepsTest, toAdd));
        pluginDepsTest.putAll(difference(pluginDepsTest, toReplace));
        checkDefinedDeps(pluginDepsTest, toAddTest, toReplaceTest, otherPlugins);
        // toAddTest could contain transitive dependencies that are already part of the plugin's compile dependencies or already queued to be added
        toAddTest = difference(pluginDeps, toAddTest);
        toAddTest = difference(toAdd, toAddTest);
        if (!toAdd.isEmpty() || !toReplace.isEmpty() || !toAddTest.isEmpty() || !toReplaceTest.isEmpty()) {
            System.out.println("Adding/replacing plugin dependencies for compatibility: " + toAdd + " " + toReplace + "\nFor test: " + toAddTest + " " + toReplaceTest);
            pom.addDependencies(toAdd, toReplace, toAddTest, toReplaceTest, coreDep, pluginGroupIds, convertFromTestDep);
        }
    }
}
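The two regular expressions above are what turn the raw mvn dependency:resolve log into the coreDep, pluginDeps and pluginDepsTest maps: group 1 is the groupId, group 2 the artifactId, group 6 the version and group 7 the scope. A standalone sketch of that parsing step, reusing the same patterns against a few representative log lines (the sample lines are made up, but follow Maven's groupId:artifactId:type:version:scope output format):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DependencyLogParseSketch {
    public static void main(String[] args) {
        // Same patterns as addSplitPluginDependencies: non-test scopes in the first, test scope in the second.
        Pattern compileish = Pattern.compile("\\[INFO\\] ([^:]+):([^:]+):([a-z-]+):(([^:]+):)?([^:]+):(provided|compile|runtime|system)");
        Pattern testOnly = Pattern.compile("\\[INFO\\] ([^:]+):([^:]+):([a-z-]+):(([^:]+):)?([^:]+):(test)");

        String[] sampleLines = { // hypothetical dependency:resolve output
            "[INFO] org.jenkins-ci.main:jenkins-core:jar:1.625.3:provided",
            "[INFO] org.jenkins-ci.plugins:credentials:hpi:1.24:compile",
            "[INFO] org.jenkins-ci.plugins:matrix-project:hpi:1.6:test"
        };
        for (String line : sampleLines) {
            Matcher m = compileish.matcher(line);
            Matcher m2 = testOnly.matcher(line);
            if (m.matches()) {
                System.out.println(m.group(1) + ":" + m.group(2) + " -> " + m.group(6) + " (" + m.group(7) + ")");
            } else if (m2.matches()) {
                System.out.println(m2.group(1) + ":" + m2.group(2) + " -> " + m2.group(6) + " (test)");
            }
        }
    }
}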