Use of org.jboss.shrinkwrap.resolver.api.maven.ConfigurableMavenResolverSystem in project wildfly-swarm (by wildfly-swarm): the getResolvingHelper method of class Main.
/**
 * Builds an {@link ArtifactResolvingHelper} backed by a ShrinkWrap Maven resolver.
 * The resolver is configured with Maven Central, the JBoss public repository group,
 * and every caller-supplied repository URL, all with checksum verification disabled
 * and remote update checks suppressed.
 *
 * @param repos additional repository URLs; each is registered using the URL itself as its id
 * @return a ShrinkWrap-backed artifact resolving helper
 */
private static ArtifactResolvingHelper getResolvingHelper(final List<String> repos) {
    final MavenRemoteRepository jbossPublic = MavenRemoteRepositories
            .createRemoteRepository("jboss-public-repository-group",
                                    "https://repository.jboss.org/nexus/content/groups/public/",
                                    "default")
            .setChecksumPolicy(MavenChecksumPolicy.CHECKSUM_POLICY_IGNORE)
            .setUpdatePolicy(MavenUpdatePolicy.UPDATE_POLICY_NEVER);

    final ConfigurableMavenResolverSystem resolver = Maven.configureResolver()
            .withMavenCentralRepo(true)
            .withRemoteRepo(jbossPublic);

    for (final String repoUrl : repos) {
        resolver.withRemoteRepo(MavenRemoteRepositories
                .createRemoteRepository(repoUrl, repoUrl, "default")
                .setChecksumPolicy(MavenChecksumPolicy.CHECKSUM_POLICY_IGNORE)
                .setUpdatePolicy(MavenUpdatePolicy.UPDATE_POLICY_NEVER));
    }
    return new ShrinkwrapArtifactResolvingHelper(resolver);
}
Use of org.jboss.shrinkwrap.resolver.api.maven.ConfigurableMavenResolverSystem in project component-runtime (by Talend): the resolve method of class Dependencies.
/**
 * Resolves the given Maven dependency (and whatever {@code STRATEGY} pulls in) to the
 * URLs of the downloaded artifact files, memoizing the result per dependency in
 * {@code CACHE}. Offline mode is controlled by the
 * {@code talend.component.junit.maven.offline} system property.
 *
 * @param dep the dependency to resolve
 * @return the distinct artifact URLs for the dependency
 * @throws IllegalStateException if a resolved file cannot be converted to a URL
 */
public static URL[] resolve(final MavenDependency dep) {
    return CACHE.computeIfAbsent(dep, key -> {
        final ConfigurableMavenResolverSystem system = Maven.configureResolver()
                .withClassPathResolution(true)
                .workOffline(Boolean.getBoolean("talend.component.junit.maven.offline"));
        REPOSITORIES.forEach(system::withRemoteRepo);
        system.addDependency(dep);

        final File[] resolved = system.resolve().using(STRATEGY).asFile();
        return Stream.of(resolved)
                .distinct()
                .map(file -> {
                    try {
                        return file.toURI().toURL();
                    } catch (final MalformedURLException e) {
                        throw new IllegalStateException(e);
                    }
                })
                .toArray(URL[]::new);
    });
}
Use of org.jboss.shrinkwrap.resolver.api.maven.ConfigurableMavenResolverSystem in project bookkeeper (by apache): the createClassLoader method of class MavenClassLoader.
/**
 * Builds a {@link MavenClassLoader} around a {@link URLClassLoader} containing the main
 * artifact and its transitive dependencies. The log4j binding ({@code slf4j-log4j12} and
 * {@code log4j}) is excluded; when the artifact's dependency graph contained
 * {@code slf4j-log4j12}, {@code slf4j-simple} at the same slf4j version is substituted.
 *
 * @param resolver     the configured Maven resolver to use
 * @param mainArtifact the coordinates of the artifact to load
 * @return a class loader over the resolved artifact files
 * @throws Exception if resolution fails
 */
private static MavenClassLoader createClassLoader(ConfigurableMavenResolverSystem resolver, String mainArtifact) throws Exception {
    // Detect which slf4j version (if any) the artifact would pull in via slf4j-log4j12.
    Optional<String> slf4jVersion = Arrays
            .stream(resolver.resolve(mainArtifact).withTransitivity().asResolvedArtifact())
            .filter(artifact -> "org.slf4j".equals(artifact.getCoordinate().getGroupId())
                    && "slf4j-log4j12".equals(artifact.getCoordinate().getArtifactId()))
            .map(artifact -> artifact.getCoordinate().getVersion())
            .findFirst();

    List<MavenDependency> deps = Lists.newArrayList(
            MavenDependencies.createDependency(mainArtifact, ScopeType.COMPILE, false,
                    MavenDependencies.createExclusion("org.slf4j:slf4j-log4j12"),
                    MavenDependencies.createExclusion("log4j:log4j")));
    // Replace the excluded log4j binding with slf4j-simple at the matching version.
    slf4jVersion.ifPresent(version -> deps.add(
            MavenDependencies.createDependency("org.slf4j:slf4j-simple:" + version, ScopeType.COMPILE, false)));

    File[] files = resolver.addDependencies(deps.toArray(new MavenDependency[0]))
            .resolve().withTransitivity().asFile();

    URLClassLoader cl = AccessController.doPrivileged((PrivilegedAction<URLClassLoader>) () ->
            new URLClassLoader(
                    Arrays.stream(files).map(f -> {
                        try {
                            return f.toURI().toURL();
                        } catch (Throwable t) {
                            throw new RuntimeException(t);
                        }
                    }).toArray(URL[]::new),
                    ClassLoader.getSystemClassLoader()));
    return new MavenClassLoader(cl);
}
Use of org.jboss.shrinkwrap.resolver.api.maven.ConfigurableMavenResolverSystem in project wildfly-swarm (by wildfly-swarm): the defaultInstance method of class ShrinkwrapArtifactResolvingHelper.
/**
 * Lazily builds and caches (in the {@code INSTANCE} atomic reference) the shared
 * resolving helper. The resolver uses Maven Central, the JBoss public repository
 * group, the Gradle tools repository, and any comma-separated extra repositories
 * from the {@code SwarmInternalProperties.BUILD_REPOS} system property. Offline
 * mode is controlled by the {@code swarm.resolver.offline} system property.
 *
 * @return the singleton {@link ShrinkwrapArtifactResolvingHelper}
 */
public static ShrinkwrapArtifactResolvingHelper defaultInstance() {
    return INSTANCE.updateAndGet(e -> {
        if (e != null) {
            // Already initialized by a previous call; reuse it.
            return e;
        }
        MavenRemoteRepository jbossPublic = MavenRemoteRepositories.createRemoteRepository("jboss-public-repository-group", "https://repository.jboss.org/nexus/content/groups/public/", "default");
        jbossPublic.setChecksumPolicy(MavenChecksumPolicy.CHECKSUM_POLICY_IGNORE);
        jbossPublic.setUpdatePolicy(MavenUpdatePolicy.UPDATE_POLICY_NEVER);
        // NOTE(review): plain-http repository URL — artifacts are fetched without
        // transport security (and checksums are ignored); consider https if available.
        MavenRemoteRepository gradleTools = MavenRemoteRepositories.createRemoteRepository("gradle", "http://repo.gradle.org/gradle/libs-releases-local", "default");
        gradleTools.setChecksumPolicy(MavenChecksumPolicy.CHECKSUM_POLICY_IGNORE);
        gradleTools.setUpdatePolicy(MavenUpdatePolicy.UPDATE_POLICY_NEVER);
        // Primitive parse: Boolean.valueOf(...) needlessly boxed only to be unboxed below.
        boolean offline = Boolean.parseBoolean(System.getProperty("swarm.resolver.offline", "false"));
        final ConfigurableMavenResolverSystem resolver = Maven.configureResolver().withMavenCentralRepo(true).withRemoteRepo(jbossPublic).withRemoteRepo(gradleTools).workOffline(offline);
        // Extra repositories supplied by the build, comma-separated; the URL doubles as the id.
        final String additionalRepos = System.getProperty(SwarmInternalProperties.BUILD_REPOS);
        if (additionalRepos != null) {
            Arrays.asList(additionalRepos.split(",")).forEach(r -> {
                MavenRemoteRepository repo = MavenRemoteRepositories.createRemoteRepository(r, r, "default");
                repo.setChecksumPolicy(MavenChecksumPolicy.CHECKSUM_POLICY_IGNORE);
                repo.setUpdatePolicy(MavenUpdatePolicy.UPDATE_POLICY_NEVER);
                resolver.withRemoteRepo(repo);
            });
        }
        ShrinkwrapArtifactResolvingHelper helper = new ShrinkwrapArtifactResolvingHelper(resolver);
        // Session-level policies: in-memory cache, daily update checks, no checksum enforcement.
        helper.session().setCache(new SimpleRepositoryCache());
        helper.session().setUpdatePolicy(RepositoryPolicy.UPDATE_POLICY_DAILY);
        helper.session().setChecksumPolicy(RepositoryPolicy.CHECKSUM_POLICY_IGNORE);
        return helper;
    });
}
Use of org.jboss.shrinkwrap.resolver.api.maven.ConfigurableMavenResolverSystem in project component-runtime (by Talend): the buildSparkHome method of class BaseSpark.
/**
 * Lays out a local fake SPARK_HOME for tests: creates the lib/conf folders, resolves
 * and copies the spark-core and spark-streaming jars into the lib folder, adds the
 * RELEASE marker file and a placeholder assembly jar for Spark 1.x, and on Windows
 * (when {@code installWinUtils} is set) downloads hadoop.dll and winutils.exe into bin/.
 *
 * @param version the Spark version layout to emulate
 * @return the root of the generated Spark home directory
 */
private File buildSparkHome(final Version version) {
    final File sparkHome = new File(getRoot(), "spark/");
    Stream.of(version.libFolder(), "conf").map(n -> new File(sparkHome, n)).forEach(File::mkdirs);
    // deps: resolve spark-core/spark-streaming (compile+runtime scopes) and copy into lib/
    final File libFolder = new File(sparkHome, version.libFolder());
    final ConfigurableMavenResolverSystem resolver = Maven.configureResolver();
    final MavenResolutionStrategy resolutionStrategy = new AcceptScopesStrategy(ScopeType.COMPILE, ScopeType.RUNTIME);
    Stream.of("org.apache.spark:spark-core_" + scalaVersion + ":" + sparkVersion, "org.apache.spark:spark-streaming_" + scalaVersion + ":" + sparkVersion).peek(dep -> LOGGER.info("Resolving " + dep + "...")).flatMap(dep -> Stream.of(resolver.resolve(dep).using(resolutionStrategy).asFile())).distinct().forEach(dep -> {
        try {
            LOGGER.debug("Copying " + dep.getName() + " dependency");
            Files.copy(dep.toPath(), new File(libFolder, dep.getName()).toPath(), StandardCopyOption.REPLACE_EXISTING);
        } catch (final IOException e) {
            fail(e.getMessage());
        }
    });
    if (version == Version.SPARK_1) {
        try {
            Files.write(new File(sparkHome, "RELEASE").toPath(), "fake release file cause it is tested in 1.6.3".getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE_NEW);
        } catch (final IOException e) {
            fail(e.getMessage());
        }
        // Spark 1.x locates itself via an assembly jar; a marker-only jar is enough for tests.
        try (final JarOutputStream file = new JarOutputStream(new FileOutputStream(new File(sparkHome, version.libFolder() + "/spark-assembly-" + sparkVersion + "-hadoop2.6.0.jar")))) {
            file.putNextEntry(new ZipEntry("META-INF/marker"));
            file.write("just to let spark find the jar".getBytes(StandardCharsets.UTF_8));
        } catch (final IOException e) {
            fail(e.getMessage());
        }
    }
    if (isWin() && installWinUtils) {
        LOGGER.info("Downloading Hadoop winutils");
        // ensure hadoop winutils is locatable
        final String dll = hadoopBase + "/hadoop-" + hadoopVersion + "/bin/hadoop.dll";
        final String exe = hadoopBase + "/hadoop-" + hadoopVersion + "/bin/winutils.exe";
        new File(sparkHome, "bin").mkdirs();
        Stream.of(dll, exe).forEach(from -> {
            final File target = new File(sparkHome, "bin/" + from.substring(from.lastIndexOf('/') + 1));
            try {
                final URL url = new URL(from);
                try (final InputStream stream = url.openStream();
                    final OutputStream out = new FileOutputStream(target)) {
                    final byte[] buffer = new byte[8192];
                    int read;
                    while ((read = stream.read(buffer)) >= 0) {
                        // BUGFIX: was out.write(read), which wrote the byte COUNT as a
                        // single byte instead of the buffer contents, corrupting the
                        // downloaded binaries. Write the actual bytes read.
                        out.write(buffer, 0, read);
                    }
                } catch (final IOException e) {
                    throw new IllegalStateException(e);
                }
            } catch (final MalformedURLException e) {
                throw new IllegalArgumentException(e);
            }
        });
    }
    return sparkHome;
}
Aggregations