Use of org.eclipse.aether.artifact.Artifact in project pinpoint by naver:
class SharedProcessManager, method addTest.
/**
 * Serializes a test id and its resolved artifacts into a single line of the
 * form {@code testId=artifact<SEP>artifact<SEP>...}.
 * <p>
 * Artifacts whose string form is blank are skipped. Note that every appended
 * artifact is followed by {@code ArtifactIdUtils.ARTIFACT_SEPARATOR}, so the
 * produced string ends with a trailing separator when any artifact is written.
 *
 * @param testId       identifier used as the key before {@code '='}
 * @param artifactList artifacts to serialize after the key
 * @return the serialized dependency-info line
 */
private String addTest(String testId, List<Artifact> artifactList) {
    final StringBuilder buffer = new StringBuilder();
    buffer.append(testId).append('=');
    for (Artifact artifact : artifactList) {
        final String serialized = ArtifactIdUtils.artifactToString(artifact);
        if (StringUtils.hasText(serialized)) {
            buffer.append(serialized).append(ArtifactIdUtils.ARTIFACT_SEPARATOR);
        }
    }
    return buffer.toString();
}
Use of org.eclipse.aether.artifact.Artifact in project pinpoint by naver:
class DependencyResolver, method resolveDependencySets.
/**
 * Resolves each requested dependency set into one artifact list per available
 * version.
 * <p>
 * The first artifact of each companion set is treated as the representative
 * whose version range is expanded via {@code getVersions}; every artifact in
 * the set is then re-created with each concrete version. The expanded lists
 * are finally flattened into a keyed map by {@code combination}.
 *
 * @param dependencies dependency coordinates to resolve
 * @return map from a generated key to the artifact list for one version
 * @throws IllegalArgumentException if the version range cannot be resolved
 *                                  (with the resolver failure as cause) or if
 *                                  no version exists in the given range
 */
public Map<String, List<Artifact>> resolveDependencySets(String... dependencies) {
    List<List<Artifact>> companions = resolve(dependencies);
    List<List<List<Artifact>>> versionedCompanions = new ArrayList<>(companions.size());
    for (List<Artifact> companion : companions) {
        // The leading artifact's version range governs the whole companion set.
        Artifact representative = companion.get(0);
        List<Version> versions;
        try {
            versions = getVersions(representative);
        } catch (VersionRangeResolutionException e) {
            // FIX: propagate the original exception as the cause instead of dropping it.
            throw new IllegalArgumentException("Fail to resolve version of: " + representative, e);
        }
        if (versions.isEmpty()) {
            throw new IllegalArgumentException("No version in the given range: " + representative);
        }
        List<List<Artifact>> companionVersions = new ArrayList<>(versions.size());
        for (Version version : versions) {
            List<Artifact> companionVersion = new ArrayList<>(companion.size());
            for (Artifact artifact : companion) {
                // Rebuild each artifact with the concrete version, keeping all other coordinates.
                Artifact verArtifact = new DefaultArtifact(artifact.getGroupId(), artifact.getArtifactId(), artifact.getClassifier(), artifact.getExtension(), version.toString());
                companionVersion.add(verArtifact);
            }
            companionVersions.add(companionVersion);
        }
        versionedCompanions.add(companionVersions);
    }
    return combination(versionedCompanions);
}
Use of org.eclipse.aether.artifact.Artifact in project druid by druid-io:
class PullDependencies, method run.
/**
 * Entry point for pull-deps: prepares (optionally cleans) the extensions and
 * hadoop-dependencies directories, then downloads every configured extension
 * coordinate and hadoop coordinate into a per-artifact subdirectory.
 * <p>
 * Any failure is wrapped in a {@link RuntimeException} to abort the command.
 */
@Override
public void run() {
    if (aether == null) {
        // Lazily create the Aether client so tests can inject their own.
        aether = getAetherClient();
    }
    final File extensionsDir = new File(extensionsConfig.getDirectory());
    final File hadoopDependenciesDir = new File(extensionsConfig.getHadoopDependenciesDir());
    try {
        if (clean) {
            // Start from a known-empty state when --clean was requested.
            FileUtils.deleteDirectory(extensionsDir);
            FileUtils.deleteDirectory(hadoopDependenciesDir);
        }
        FileUtils.mkdirp(extensionsDir);
        FileUtils.mkdirp(hadoopDependenciesDir);
    } catch (IOException e) {
        // FIX: the failed operation may concern either directory, so report both
        // instead of blaming extensionsDir unconditionally.
        log.error(e, "Unable to clear or create extension directories at [%s] and [%s]", extensionsDir, hadoopDependenciesDir);
        throw new RuntimeException(e);
    }
    log.info("Start pull-deps with local repository [%s] and remote repositories [%s]", localRepository, remoteRepositories);
    try {
        log.info("Start downloading dependencies for extension coordinates: [%s]", coordinates);
        for (String coordinate : coordinates) {
            coordinate = coordinate.trim();
            final Artifact versionedArtifact = getArtifact(coordinate);
            File currExtensionDir = new File(extensionsDir, versionedArtifact.getArtifactId());
            createExtensionDirectory(coordinate, currExtensionDir);
            downloadExtension(versionedArtifact, currExtensionDir);
        }
        log.info("Finish downloading dependencies for extension coordinates: [%s]", coordinates);
        if (!noDefaultHadoop && hadoopCoordinates.isEmpty()) {
            // Fall back to the default hadoop coordinates unless explicitly disabled.
            hadoopCoordinates.addAll(TaskConfig.DEFAULT_DEFAULT_HADOOP_COORDINATES);
        }
        log.info("Start downloading dependencies for hadoop extension coordinates: [%s]", hadoopCoordinates);
        for (final String hadoopCoordinate : hadoopCoordinates) {
            final Artifact versionedArtifact = getArtifact(hadoopCoordinate);
            File currExtensionDir = new File(hadoopDependenciesDir, versionedArtifact.getArtifactId());
            createExtensionDirectory(hadoopCoordinate, currExtensionDir);
            // add a version folder for hadoop dependency directory
            currExtensionDir = new File(currExtensionDir, versionedArtifact.getVersion());
            createExtensionDirectory(hadoopCoordinate, currExtensionDir);
            downloadExtension(versionedArtifact, currExtensionDir, hadoopExclusions);
        }
        log.info("Finish downloading dependencies for hadoop extension coordinates: [%s]", hadoopCoordinates);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Use of org.eclipse.aether.artifact.Artifact in project druid by druid-io:
class PullDependencies, method downloadExtension.
/**
 * Resolves the runtime dependency tree rooted at {@code versionedArtifact}
 * and copies every resolved, non-excluded jar file into {@code toLocation}.
 * <p>
 * The dependency filter rejects nodes with {@code provided}, {@code test} or
 * {@code system} scope (compared case-insensitively) and any node — or node
 * with a parent — matched by {@code exclusions}. Resolution or copy failures
 * are logged and rethrown as {@link RuntimeException}.
 *
 * @param versionedArtifact root artifact whose runtime dependencies to fetch
 * @param toLocation        directory the resolved jars are copied into
 * @param exclusions        artifacts (and subtrees) to leave out
 */
private void downloadExtension(Artifact versionedArtifact, File toLocation, Dependencies exclusions) {
    final CollectRequest collectRequest = new CollectRequest();
    collectRequest.setRoot(new Dependency(versionedArtifact, JavaScopes.RUNTIME));
    final DependencyRequest dependencyRequest = new DependencyRequest(
            collectRequest,
            DependencyFilterUtils.andFilter(
                    DependencyFilterUtils.classpathFilter(JavaScopes.RUNTIME),
                    (node, parents) -> {
                        final String rawScope = node.getDependency().getScope();
                        if (rawScope != null) {
                            // Non-runtime scopes never make it into an extension directory.
                            switch (StringUtils.toLowerCase(rawScope)) {
                                case "provided":
                                case "test":
                                case "system":
                                    return false;
                                default:
                                    break;
                            }
                        }
                        if (exclusions.contain(node.getArtifact())) {
                            return false;
                        }
                        // Excluding a parent excludes its whole subtree.
                        for (DependencyNode parent : parents) {
                            if (exclusions.contain(parent.getArtifact())) {
                                return false;
                            }
                        }
                        return true;
                    }));
    try {
        log.info("Start downloading extension [%s]", versionedArtifact);
        for (Artifact resolved : aether.resolveArtifacts(dependencyRequest)) {
            if (exclusions.contain(resolved)) {
                log.debug("Skipped Artifact[%s]", resolved);
                continue;
            }
            log.info("Adding file [%s] at [%s]", resolved.getFile().getName(), toLocation.getAbsolutePath());
            org.apache.commons.io.FileUtils.copyFileToDirectory(resolved.getFile(), toLocation);
        }
    } catch (Exception e) {
        log.error(e, "Unable to resolve artifacts for [%s].", dependencyRequest);
        throw new RuntimeException(e);
    }
    log.info("Finish downloading extension [%s]", versionedArtifact);
}
Use of org.eclipse.aether.artifact.Artifact in project druid by druid-io:
class PullDependenciesTest, method getExpectedJarFiles.
/**
 * Computes the jar files that should exist on disk after pulling the given
 * artifact's dependencies.
 * <p>
 * For ordinary extensions the jars live directly under
 * {@code rootExtensionsDir/<artifactId>}; for {@code hadoop-client} they are
 * nested under a version directory and jars named in
 * {@code HADOOP_CLIENT_VULNERABLE_ARTIFACTIDS} are not expected at all.
 *
 * @param artifact the extension artifact being checked
 * @return the expected jar files for this artifact
 */
private List<File> getExpectedJarFiles(Artifact artifact) {
    final String artifactId = artifact.getArtifactId();
    final List<String> dependencyNames = extensionToDependency.get(artifact);
    if (!"hadoop-client".equals(artifactId)) {
        return dependencyNames.stream()
                              .map(name -> new File(StringUtils.format("%s/%s/%s", rootExtensionsDir, artifactId, name + ".jar")))
                              .collect(Collectors.toList());
    }
    final String version = artifact.getVersion();
    return dependencyNames.stream()
                          .filter(name -> !HADOOP_CLIENT_VULNERABLE_ARTIFACTIDS.contains(name))
                          .map(name -> new File(StringUtils.format("%s/%s/%s/%s", rootHadoopDependenciesDir, artifactId, version, name + ".jar")))
                          .collect(Collectors.toList());
}
Aggregations