use of org.ops4j.pax.url.mvn.MavenResolver in project karaf by apache.
the class MavenTest method smartRetriesTest.
@Test
public void smartRetriesTest() throws Exception {
    karafTestSupport.bundleContext = bundleContext;
    final ConfigurationAdmin cm = karafTestSupport.getOsgiService(ConfigurationAdmin.class, 3000);
    updateSettings();
    awaitMavenResolver(new Runnable() {
        @Override
        public void run() {
            try {
                org.osgi.service.cm.Configuration config = cm.getConfiguration("org.ops4j.pax.url.mvn", null);
                Dictionary<String, Object> props = config.getProperties();
                props.put("org.ops4j.pax.url.mvn.globalChecksumPolicy", "ignore");
                props.put("org.ops4j.pax.url.mvn.socket.readTimeout", "2000");
                props.put("org.ops4j.pax.url.mvn.connection.retryCount", "0");
                props.put("org.ops4j.pax.url.mvn.repositories", "http://localhost:1111/repository@id=r1,"
                        + "http://localhost:2222/repository@id=r2,"
                        + "http://localhost:3333/repository@id=r3");
                config.update(props);
            } catch (Exception e) {
                fail(e.getMessage());
            }
        }
    });
    // grab modified resolver
    MavenResolver resolver = karafTestSupport.getOsgiService(MavenResolver.class, 15000);
    try {
        resolver.resolve("mvn:commons-universalis/commons-universalis/42");
        fail("Should fail at first attempt");
    } catch (IOException e) {
        File f = resolver.resolve("mvn:commons-universalis/commons-universalis/42", e);
        byte[] commonsUniversalis = FileUtils.readFileToByteArray(f);
        assertThat(commonsUniversalis.length, equalTo(1));
        assertThat(commonsUniversalis[0], equalTo((byte) 0x42));
    }
}
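
The catch block above is the interesting part: after the plain resolve(url) fails against the stub repositories, the test replays the resolution through the two-argument resolve(url, previousException) overload, letting the resolver apply its retry logic. A minimal sketch of that pattern as a reusable helper - the helper class and method are ours, not part of the pax-url-mvn API:

import java.io.File;
import java.io.IOException;
import org.ops4j.pax.url.mvn.MavenResolver;

public final class RetryingResolution {

    private RetryingResolution() {
    }

    // Hypothetical helper: try once, then replay the resolution passing the
    // previous exception so the resolver can decide what is worth retrying.
    public static File resolveWithRetry(MavenResolver resolver, String mvnUrl) throws IOException {
        try {
            return resolver.resolve(mvnUrl);
        } catch (IOException firstFailure) {
            return resolver.resolve(mvnUrl, firstFailure);
        }
    }
}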
use of org.ops4j.pax.url.mvn.MavenResolver in project karaf by apache.
the class Builder method doGenerateAssembly.
private void doGenerateAssembly() throws Exception {
    LOGGER.info("Generating Karaf assembly: " + homeDirectory);
    //
    // Create the download manager - a combination of pax-url-aether and a resolver wrapper
    // that may alter the way the pax-url-aether resolver works
    //
    MavenResolver resolver = createMavenResolver();
    manager = new CustomDownloadManager(resolver, executor, null, translatedUrls);
    this.resolver = new ResolverImpl(new Slf4jResolverLog(LOGGER));
    //
    // Unzip KARs
    //
    LOGGER.info("Unzipping kars");
    Downloader downloader = manager.createDownloader();
    for (String kar : kars.keySet()) {
        downloader.download(kar, null);
    }
    downloader.await();
    // repositories found in each KAR are registered at the same stage and with the same "add all" flag as the KAR itself
    for (String karUri : kars.keySet()) {
        LOGGER.info("   processing KAR: " + karUri);
        Kar kar = new Kar(manager.getProviders().get(karUri).getFile().toURI());
        kar.extract(systemDirectory.toFile(), homeDirectory.toFile());
        RepositoryInfo info = kars.get(karUri);
        for (URI repositoryUri : kar.getFeatureRepos()) {
            LOGGER.info("      found repository: " + repositoryUri);
            repositories.put(repositoryUri.toString(), info);
        }
    }
    //
    // Load profiles
    //
    LOGGER.info("Loading profiles from:");
    profilesUris.forEach(p -> LOGGER.info("   " + p));
    allProfiles = loadExternalProfiles(profilesUris);
    if (allProfiles.size() > 0) {
        LOGGER.info("   Found profiles: " + allProfiles.keySet().stream().collect(Collectors.joining(", ")));
    }
    // Generate an initial profile to collect overrides and blacklisting instructions
    Profile initialProfile = ProfileBuilder.Factory.create("initial")
            .setParents(new ArrayList<>(profiles.keySet()))
            .getProfile();
    Profile initialOverlay = Profiles.getOverlay(initialProfile, allProfiles, environment);
    Profile initialEffective = Profiles.getEffective(initialOverlay, false);
    //
    // Handle the blacklist - we'll use a SINGLE instance of Blacklist for all further downloads
    //
    blacklist = processBlacklist(initialEffective);
    //
    // Configure the blacklisting and overriding features processor
    //
    boolean needFeaturesProcessorFileCopy = false;
    String existingProcessorDefinitionURI = null;
    Path existingProcessorDefinition = etcDirectory.resolve("org.apache.karaf.features.xml");
    if (existingProcessorDefinition.toFile().isFile()) {
        existingProcessorDefinitionURI = existingProcessorDefinition.toFile().toURI().toString();
        LOGGER.info("Found existing features processor configuration: {}", homeDirectory.relativize(existingProcessorDefinition));
    }
    if (featuresProcessingLocation != null && featuresProcessingLocation.toFile().isFile()
            && !featuresProcessingLocation.equals(existingProcessorDefinition)) {
        if (existingProcessorDefinitionURI != null) {
            LOGGER.warn("Explicitly configured {} will be used for features processor configuration.", homeDirectory.relativize(featuresProcessingLocation));
        } else {
            LOGGER.info("Found features processor configuration: {}", homeDirectory.relativize(featuresProcessingLocation));
        }
        existingProcessorDefinitionURI = featuresProcessingLocation.toFile().toURI().toString();
        // when there are no other processing instructions (e.g., blacklisting configured via Maven),
        // we don't have to generate this file and can copy the configured one as-is
        needFeaturesProcessorFileCopy = true;
    }
    // now we can configure the blacklisting features processor, which may already carry
    // XML-defined configuration for bundle replacements or feature overrides.
    FeaturesProcessorImpl processor = new FeaturesProcessorImpl(existingProcessorDefinitionURI, null, blacklist, new HashSet<>());
    // add overrides from initialProfile
    Set<String> overrides = processOverrides(initialEffective.getOverrides());
    processor.addOverrides(overrides);
    //
    // Propagate feature installation from repositories
    //
    LOGGER.info("Loading repositories");
    Map<String, Features> karRepositories = loadRepositories(manager, repositories.keySet(), false, processor);
    for (String repo : repositories.keySet()) {
        RepositoryInfo info = repositories.get(repo);
        if (info.addAll) {
            LOGGER.info("   adding all non-blacklisted features from repository: " + repo + " (stage: " + info.stage + ")");
            for (Feature feature : karRepositories.get(repo).getFeature()) {
                if (feature.isBlacklisted()) {
                    LOGGER.info("      feature {}/{} is blacklisted - skipping.", feature.getId(), feature.getVersion());
                } else {
                    features.put(feature.getId(), info.stage);
                }
            }
        }
    }
    if (generateConsistencyReport != null) {
        File directory = new File(generateConsistencyReport);
        if (directory.isDirectory()) {
            LOGGER.info("Writing bundle report");
            generateConsistencyReport(karRepositories, new File(directory, "bundle-report-full.xml"), true);
            generateConsistencyReport(karRepositories, new File(directory, "bundle-report.xml"), false);
            Files.copy(getClass().getResourceAsStream("/bundle-report.xslt"), directory.toPath().resolve("bundle-report.xslt"), StandardCopyOption.REPLACE_EXISTING);
        }
    }
    //
    // Generate profiles. If the user has configured additional profiles, they'll be used as parents
    // of the generated ones.
    //
    Profile startupProfile = generateProfile(Stage.Startup, profiles, repositories, features, bundles);
    allProfiles.put(startupProfile.getId(), startupProfile);
    // the generated startup profile should be used (together with configured startup and boot profiles) as a parent
    // of the generated boot profile - a similar visibility rule (boot stage requires startup stage) is applied
    // for repositories and features
    profiles.put(startupProfile.getId(), Stage.Boot);
    Profile bootProfile = generateProfile(Stage.Boot, profiles, repositories, features, bundles);
    allProfiles.put(bootProfile.getId(), bootProfile);
    Profile installedProfile = generateProfile(Stage.Installed, profiles, repositories, features, bundles);
    allProfiles.put(installedProfile.getId(), installedProfile);
    //
    // Compute the "overlay" profile - a single profile with all parent profiles included (when the same
    // file exists in both profiles, the parent profile's version has lower priority)
    //
    ProfileBuilder builder = ProfileBuilder.Factory.create(UUID.randomUUID().toString())
            .setParents(Arrays.asList(startupProfile.getId(), bootProfile.getId(), installedProfile.getId()));
    config.forEach((k, v) -> builder.addConfiguration(Profile.INTERNAL_PID, Profile.CONFIG_PREFIX + k, v));
    system.forEach((k, v) -> builder.addConfiguration(Profile.INTERNAL_PID, Profile.SYSTEM_PREFIX + k, v));
    // profile with all the parents configured and stage-agnostic blacklisting configuration added
    blacklistedRepositoryURIs.forEach(builder::addBlacklistedRepository);
    blacklistedFeatureIdentifiers.forEach(builder::addBlacklistedFeature);
    blacklistedBundleURIs.forEach(builder::addBlacklistedBundle);
    // final profile
    Profile overallProfile = builder.getProfile();
    // profile with parents included and "flattened" using inheritance rules (child files overwrite parent
    // files, child PIDs are merged with parent PIDs, and identical properties are taken from child profiles)
    Profile overallOverlay = Profiles.getOverlay(overallProfile, allProfiles, environment);
    // profile with property placeholders resolved or left unchanged (if there's no property value available,
    // the placeholders are preserved - like ${karaf.base})
    Profile overallEffective = Profiles.getEffective(overallOverlay, false);
    if (writeProfiles) {
        Path profiles = etcDirectory.resolve("profiles");
        LOGGER.info("Adding profiles to {}", homeDirectory.relativize(profiles));
        allProfiles.forEach((id, profile) -> {
            try {
                Profiles.writeProfile(profiles, profile);
            } catch (IOException e) {
                LOGGER.warn("Problem writing profile {}: {}", id, e.getMessage());
            }
        });
    }
    manager = new CustomDownloadManager(resolver, executor, overallEffective, translatedUrls);
    // Hashtable<String, String> profileProps = new Hashtable<>(overallEffective.getConfiguration(ORG_OPS4J_PAX_URL_MVN_PID));
    // final Map<String, String> properties = new HashMap<>();
    // properties.put("karaf.default.repository", "system");
    // InterpolationHelper.performSubstitution(profileProps, properties::get, false, false, true);
    //
    // Write config and system properties
    //
    LOGGER.info("Configuring etc/config.properties and etc/system.properties");
    Path configPropertiesPath = etcDirectory.resolve("config.properties");
    Properties configProperties = new Properties(configPropertiesPath.toFile());
    configProperties.putAll(overallEffective.getConfig());
    configProperties.save();
    Path systemPropertiesPath = etcDirectory.resolve("system.properties");
    Properties systemProperties = new Properties(systemPropertiesPath.toFile());
    systemProperties.putAll(overallEffective.getSystem());
    systemProperties.save();
    //
    // Download libraries
    //
    // TODO: handle karaf 2.x and 3.x libraries
    downloader = manager.createDownloader();
    LOGGER.info("Downloading libraries for generated profiles");
    downloadLibraries(downloader, configProperties, overallEffective.getLibraries(), "");
    LOGGER.info("Downloading additional libraries");
    downloadLibraries(downloader, configProperties, libraries, "");
    downloader.await();
    // Reformat clauses
    reformatClauses(configProperties, Constants.FRAMEWORK_SYSTEMPACKAGES_EXTRA);
    reformatClauses(configProperties, Constants.FRAMEWORK_BOOTDELEGATION);
    configProperties.save();
    //
    // Write all configuration files
    //
    LOGGER.info("Writing configurations");
    for (Map.Entry<String, byte[]> config : overallEffective.getFileConfigurations().entrySet()) {
        Path configFile = etcDirectory.resolve(config.getKey());
        if (Files.exists(configFile)) {
            LOGGER.info("  not changing existing config file: {}", homeDirectory.relativize(configFile));
        } else {
            LOGGER.info("  adding config file: {}", homeDirectory.relativize(configFile));
            Files.createDirectories(configFile.getParent());
            Files.write(configFile, config.getValue());
        }
    }
    // 'improve' configuration files.
    if (propertyEdits != null) {
        KarafPropertiesEditor editor = new KarafPropertiesEditor();
        editor.setInputEtc(etcDirectory.toFile()).setOutputEtc(etcDirectory.toFile()).setEdits(propertyEdits);
        editor.run();
    }
    if (processor.hasInstructions()) {
        Path featuresProcessingXml = etcDirectory.resolve("org.apache.karaf.features.xml");
        if (hasOwnInstructions() || overrides.size() > 0) {
            // just generate a new etc/org.apache.karaf.features.xml file (external config + builder config)
            try (FileOutputStream fos = new FileOutputStream(featuresProcessingXml.toFile())) {
                LOGGER.info("Generating features processor configuration: {}", homeDirectory.relativize(featuresProcessingXml));
                processor.writeInstructions(fos);
            }
        } else if (needFeaturesProcessorFileCopy) {
            // we may simply copy the configured features processor XML configuration
            LOGGER.info("Copying features processor configuration: {} -> {}", homeDirectory.relativize(featuresProcessingLocation), homeDirectory.relativize(featuresProcessingXml));
            Files.copy(featuresProcessingLocation, featuresProcessingXml, StandardCopyOption.REPLACE_EXISTING);
        }
    }
    //
    // Startup stage
    //
    Profile startupEffective = startupStage(startupProfile, processor);
    //
    // Boot stage
    //
    Set<Feature> allBootFeatures = bootStage(bootProfile, startupEffective, processor);
    //
    // Installed stage
    //
    installStage(installedProfile, allBootFeatures, processor);
}
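
createMavenResolver() itself is not shown in this excerpt. Judging from the Talend examples below, a resolver can also be built outside OSGi via the MavenResolvers factory; here is a minimal sketch under that assumption, reusing the mvn: coordinates from the Karaf test above (the PID string mirrors the configuration PID used in that test):

import java.io.File;
import java.io.IOException;
import org.ops4j.pax.url.mvn.MavenResolver;
import org.ops4j.pax.url.mvn.MavenResolvers;

public final class StandaloneResolve {

    public static void main(String[] args) throws IOException {
        // null properties: rely on the resolver's defaults for repository and
        // local-repository discovery (an assumption, not verified here)
        MavenResolver resolver = MavenResolvers.createMavenResolver(null, "org.ops4j.pax.url.mvn");
        File artifact = resolver.resolve("mvn:commons-universalis/commons-universalis/42");
        System.out.println("resolved to " + artifact.getAbsolutePath());
    }
}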
use of org.ops4j.pax.url.mvn.MavenResolver in project components by Talend.
the class DefaultComponentConfiguration method unzipMvnArtifact.
private void unzipMvnArtifact(String aConfigtMvnUrlStr, String aConfigFolderPath) throws IOException {
    RuntimeUtil.registerMavenUrlHandler();
    MavenResolver mavenResolver = MavenResolvers.createMavenResolver(null, ServiceConstants.PID);
    File artifactToUnzip = mavenResolver.resolve(aConfigtMvnUrlStr);
    // try-with-resources closes the zip file, so no explicit close() is needed
    try (ZipFile zipFile = new ZipFile(artifactToUnzip)) {
        Enumeration<? extends ZipEntry> entries = zipFile.entries();
        while (entries.hasMoreElements()) {
            ZipEntry entry = entries.nextElement();
            File entryDestination = new File(aConfigFolderPath, entry.getName());
            if (entry.isDirectory()) {
                entryDestination.mkdirs();
            } else {
                entryDestination.getParentFile().mkdirs();
                // close both streams even if the copy fails
                try (InputStream in = zipFile.getInputStream(entry);
                        OutputStream out = new FileOutputStream(entryDestination)) {
                    IOUtils.copy(in, out);
                }
            }
        }
    }
}
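
A hypothetical call site for the method above; the mvn: coordinates and destination folder are invented for illustration, with the trailing /zip segment selecting the packaging type per the mvn:groupId/artifactId/version/type URL convention:

// illustrative coordinates only - any zip-packaged artifact would do
unzipMvnArtifact("mvn:org.example/example-config/1.0.0/zip", "/tmp/example-config");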
use of org.ops4j.pax.url.mvn.MavenResolver in project components by Talend.
the class DependenciesReaderTest method createAndDeployArtifact.
private static void createAndDeployArtifact() throws IOException {
    // create the jar file
    File tempFile = File.createTempFile("comps-api-tests", ".jar");
    try {
        // add a dependencies.txt entry
        LOG.debug("created temp artifact jar " + tempFile.getAbsolutePath());
        String depsEntryPath = "META-INF/maven/" + TEST_GROUPID + "/" + TEST_ARTEFACTID + "/dependencies.txt";
        try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(tempFile));
                InputStream depTxtStream = DependenciesReaderTest.class.getResourceAsStream("/" + depsEntryPath)) {
            out.putNextEntry(new ZipEntry(depsEntryPath));
            byte[] data = IOUtils.toByteArray(depTxtStream);
            out.write(data, 0, data.length);
            out.closeEntry();
        }
        // deploy it
        MavenResolver mavenResolver = MavenResolvers.createMavenResolver(null, "foo");
        mavenResolver.upload(TEST_GROUPID, TEST_ARTEFACTID, "jar", "jar", TEST_VERSION, tempFile);
        tempMavenFilePath = mavenResolver.resolve(TEST_GROUPID, TEST_ARTEFACTID, "jar", "jar", TEST_VERSION).getAbsolutePath();
        LOG.debug("artifact deployed: " + tempMavenFilePath);
    } finally {
        // remove the temporary jar
        tempFile.delete();
    }
}
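
For reference, the coordinate-based resolve(groupId, artifactId, classifier, extension, version) call above should address the same artifact as the URL-based overload used elsewhere on this page; the line below follows pax-url-mvn's mvn:groupId/artifactId/version/type/classifier layout and is our reading of the convention, not something this test asserts:

// same artifact as an mvn: URL; "jar"/"jar" mirror the extension and
// classifier passed to upload(...) above
File viaUrl = mavenResolver.resolve(
        "mvn:" + TEST_GROUPID + "/" + TEST_ARTEFACTID + "/" + TEST_VERSION + "/jar/jar");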
use of org.ops4j.pax.url.mvn.MavenResolver in project components by Talend.
the class JarRuntimeInfoTest method testGetMavenUrlDependencies.
/**
 * Test method for {@link org.talend.components.api.component.runtime.JarRuntimeInfo#getMavenUrlDependencies()}.
 *
 * @throws IOException
 */
@Ignore("we can't use components-api-full-example here because it brings a cyclic dependency. Should be fixed by using another test module")
@Test
public void testGetMavenUrlDependencies() throws IOException {
    MavenResolver mavenResolver = MavenResolvers.createMavenResolver(null, "foo");
    File jarWithDeps = mavenResolver.resolve("mvn:org.talend.components/components-api-full-example/0.1.0");
    // the artifact id used to compute the dependencies file path differs from the actual artifact id;
    // the reason is unclear, but it does not matter for this test.
    JarRuntimeInfo jarRuntimeInfo = new JarRuntimeInfo(jarWithDeps.toURI().toURL(),
            DependenciesReader.computeDependenciesFilePath("org.talend.components", "components-full-example"), null);
    List<URL> mavenUrlDependencies = jarRuntimeInfo.getMavenUrlDependencies();
    checkFullExampleDependencies(mavenUrlDependencies);
}
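
Assuming computeDependenciesFilePath(...) assembles the same META-INF path that createAndDeployArtifact builds by hand earlier on this page (an inference from that test, not from the DependenciesReader source), the call above would expand to:

// assumed expansion, mirroring the dependencies.txt entry path used in createAndDeployArtifact
String depsPath = DependenciesReader.computeDependenciesFilePath("org.talend.components", "components-full-example");
// depsPath == "META-INF/maven/org.talend.components/components-full-example/dependencies.txt"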