Use of org.jboss.shrinkwrap.resolver.api.maven.strategy.MavenResolutionStrategy in project wildfly-swarm by wildfly-swarm.
From the class ShrinkwrapArtifactResolvingHelper, method resolveAll:
@Override
public Set<ArtifactSpec> resolveAll(final Collection<ArtifactSpec> specs, boolean transitive, boolean defaultExcludes) {
    if (specs.isEmpty()) {
        return Collections.emptySet();
    }
    MavenResolutionStrategy transitivityStrategy = (transitive ? TransitiveStrategy.INSTANCE : NonTransitiveStrategy.INSTANCE);
    resetListeners();
    final MavenResolvedArtifact[] artifacts = withResolver(r -> {
        specs.forEach(spec -> r.addDependency(createMavenDependency(spec)));
        return r.resolve().using(transitivityStrategy).as(MavenResolvedArtifact.class);
    });
    return Arrays.stream(artifacts)
            .map(artifact -> {
                final MavenCoordinate coord = artifact.getCoordinate();
                return new ArtifactSpec(artifact.getScope().toString(),
                        coord.getGroupId(),
                        coord.getArtifactId(),
                        coord.getVersion(),
                        coord.getPackaging().getId(),
                        coord.getClassifier(),
                        artifact.asFile());
            })
            .collect(Collectors.toSet());
}
Use of org.jboss.shrinkwrap.resolver.api.maven.strategy.MavenResolutionStrategy in project component-runtime by Talend.
From the class BaseSpark, method buildSparkHome:
private File buildSparkHome(final Version version) {
    final File sparkHome = new File(getRoot(), "spark/");
    Stream.of(version.libFolder(), "conf").map(n -> new File(sparkHome, n)).forEach(File::mkdirs);

    // deps
    final File libFolder = new File(sparkHome, version.libFolder());
    final ConfigurableMavenResolverSystem resolver = Maven.configureResolver();
    final MavenResolutionStrategy resolutionStrategy = new AcceptScopesStrategy(ScopeType.COMPILE, ScopeType.RUNTIME);
    Stream.of("org.apache.spark:spark-core_" + scalaVersion + ":" + sparkVersion,
              "org.apache.spark:spark-streaming_" + scalaVersion + ":" + sparkVersion)
          .peek(dep -> LOGGER.info("Resolving " + dep + "..."))
          .flatMap(dep -> Stream.of(resolver.resolve(dep).using(resolutionStrategy).asFile()))
          .distinct()
          .forEach(dep -> {
              try {
                  LOGGER.debug("Copying " + dep.getName() + " dependency");
                  Files.copy(dep.toPath(), new File(libFolder, dep.getName()).toPath(),
                          StandardCopyOption.REPLACE_EXISTING);
              } catch (final IOException e) {
                  fail(e.getMessage());
              }
          });

    if (version == Version.SPARK_1) {
        try {
            Files.write(new File(sparkHome, "RELEASE").toPath(),
                    "fake release file cause it is tested in 1.6.3".getBytes(StandardCharsets.UTF_8),
                    StandardOpenOption.CREATE_NEW);
        } catch (final IOException e) {
            fail(e.getMessage());
        }
        try (final JarOutputStream file = new JarOutputStream(new FileOutputStream(
                new File(sparkHome, version.libFolder() + "/spark-assembly-" + sparkVersion + "-hadoop2.6.0.jar")))) {
            file.putNextEntry(new ZipEntry("META-INF/marker"));
            file.write("just to let spark find the jar".getBytes(StandardCharsets.UTF_8));
        } catch (final IOException e) {
            fail(e.getMessage());
        }
    }

    if (isWin() && installWinUtils) {
        LOGGER.info("Downloading Hadoop winutils");
        // ensure hadoop winutils is locatable
        final String dll = hadoopBase + "/hadoop-" + hadoopVersion + "/bin/hadoop.dll";
        final String exe = hadoopBase + "/hadoop-" + hadoopVersion + "/bin/winutils.exe";
        new File(sparkHome, "bin").mkdirs();
        Stream.of(dll, exe).forEach(from -> {
            final File target = new File(sparkHome, "bin/" + from.substring(from.lastIndexOf('/') + 1));
            try {
                final URL url = new URL(from);
                try (final InputStream stream = url.openStream();
                     final OutputStream out = new FileOutputStream(target)) {
                    final byte[] buffer = new byte[8192];
                    int read;
                    while ((read = stream.read(buffer)) >= 0) {
                        out.write(buffer, 0, read);
                    }
                } catch (final IOException e) {
                    throw new IllegalStateException(e);
                }
            } catch (final MalformedURLException e) {
                throw new IllegalArgumentException(e);
            }
        });
    }
    return sparkHome;
}
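Here buildSparkHome filters what lands in the Spark lib folder with an AcceptScopesStrategy, so only compile- and runtime-scoped dependencies are copied. A minimal sketch of that strategy on its own, assuming a default Maven configuration and an illustrative Spark coordinate (not taken from the project):

import java.io.File;
import org.jboss.shrinkwrap.resolver.api.maven.ConfigurableMavenResolverSystem;
import org.jboss.shrinkwrap.resolver.api.maven.Maven;
import org.jboss.shrinkwrap.resolver.api.maven.ScopeType;
import org.jboss.shrinkwrap.resolver.api.maven.strategy.AcceptScopesStrategy;
import org.jboss.shrinkwrap.resolver.api.maven.strategy.MavenResolutionStrategy;

public class ResolveCompileAndRuntime {

    public static void main(final String[] args) {
        final ConfigurableMavenResolverSystem resolver = Maven.configureResolver();
        // Keep only compile- and runtime-scoped dependencies, as buildSparkHome does.
        final MavenResolutionStrategy strategy =
                new AcceptScopesStrategy(ScopeType.COMPILE, ScopeType.RUNTIME);
        // Illustrative coordinate only; any resolvable artifact works.
        final File[] files = resolver
                .resolve("org.apache.spark:spark-core_2.11:2.4.8")
                .using(strategy)
                .asFile();
        for (final File f : files) {
            System.out.println(f.getName());
        }
    }
}

Test- and provided-scoped artifacts in the dependency graph are simply dropped from the resolution result, which keeps the assembled Spark home small.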