Use of io.cdap.cdap.api.artifact.InvalidArtifactRangeException in project cdap by caskdata:
the method getArtifactSelector in class ArtifactSelectorProvider.
/**
 * Gets the corresponding {@link ArtifactSelector} for this config.
 * Validates that any given scope, name, and version are all valid or null. The scope must be an
 * {@link ArtifactScope}, the version must be an {@link ArtifactVersion}, and the name only contains
 * alphanumeric, '-', '.', or '_' characters. Also checks that at least one field is non-null.
 *
 * @param config the selector config whose scope, name, and version are validated
 * @return an {@link ArtifactSelector} using these config settings
 * @throws IllegalArgumentException if any one of the fields are invalid
 */
private ArtifactSelector getArtifactSelector(ArtifactSelectorConfig config) {
String name = config.getName();
if (name != null && !nameMatcher.matchesAllOf(name)) {
throw new IllegalArgumentException(String.format("'%s' is an invalid artifact name. " + "Must contain only alphanumeric, '-', '.', or '_' characters.", name));
}
String version = config.getVersion();
ArtifactVersionRange range;
try {
// a null version means "any version"; otherwise it must parse as an exact version or a range
range = version == null ? null : ArtifactVersionRange.parse(version);
} catch (InvalidArtifactRangeException e) {
// note the space after the first sentence: the previous concatenation produced "version.Must";
// the cause is chained so callers see why parsing failed
throw new IllegalArgumentException(String.format("%s is an invalid artifact version. " + "Must be an exact version or a version range " + "with a lower and upper bound.", version), e);
}
String scope = config.getScope();
// valueOf throws IllegalArgumentException for unknown scopes, which matches this method's contract
ArtifactScope artifactScope = scope == null ? null : ArtifactScope.valueOf(scope.toUpperCase());
return new ArtifactSelector(artifactScope, name, range);
}
Use of io.cdap.cdap.api.artifact.InvalidArtifactRangeException in project cdap by caskdata:
the method parseExtendsHeader in class ArtifactHttpHandler.
// Parses the 'Artifact-Extends' header into the set of parent artifact ranges this artifact
// extends. Header format:
// 'Artifact-Extends: <name>[<lowerversion>,<upperversion>]/<name>[<lowerversion>,<upperversion>]:
// for example: 'Artifact-Extends: etl-batch[1.0.0,2.0.0]/etl-realtime[1.0.0:3.0.0]
private Set<ArtifactRange> parseExtendsHeader(NamespaceId namespace, String extendsHeader) throws BadRequestException {
Set<ArtifactRange> parentArtifacts = Sets.newHashSet();
// a missing or empty header simply means no parents
if (extendsHeader == null || extendsHeader.isEmpty()) {
return parentArtifacts;
}
for (String rawParent : Splitter.on('/').split(extendsHeader)) {
String parent = rawParent.trim();
ArtifactRange range;
try {
// first attempt: a namespaced range like system:etl-batch[1.0.0,2.0.0)
range = ArtifactRanges.parseArtifactRange(parent);
boolean isSystem = NamespaceId.SYSTEM.getNamespace().equals(range.getNamespace());
boolean isSameNamespace = namespace.getNamespace().equals(range.getNamespace());
// only support extending an artifact that is in the same namespace, or system namespace
if (!isSystem && !isSameNamespace) {
throw new BadRequestException(String.format("Parent artifact %s must be in the same namespace or a system artifact.", parent));
}
} catch (InvalidArtifactRangeException e) {
// fall back: a non-namespaced range like etl-batch[1.0.0,2.0.0), resolved in the request namespace
try {
range = ArtifactRanges.parseArtifactRange(namespace.getNamespace(), parent);
} catch (InvalidArtifactRangeException e1) {
throw new BadRequestException(String.format("Invalid artifact range %s: %s", parent, e1.getMessage()));
}
}
parentArtifacts.add(range);
}
return parentArtifacts;
}
Use of io.cdap.cdap.api.artifact.InvalidArtifactRangeException in project cdap by caskdata:
the method testGetPlugins in class ArtifactStoreTest.
/**
 * Verifies that the artifact store returns the correct plugin classes when queried by
 * namespace, by plugin type, and by type + name, including limit, sort order, and
 * predicate filtering. Sets up several plugin artifacts that all extend a common
 * parent range [1.0.0, 2.0.0).
 */
@Test
public void testGetPlugins() throws Exception {
// parent range that every plugin artifact below declares as its parent
ArtifactRange parentArtifacts = new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent", new ArtifactVersion("1.0.0"), new ArtifactVersion("2.0.0"));
// we have 2 plugins of type A and 2 plugins of type B
PluginClass pluginA1 = PluginClass.builder().setName("p1").setType("A").setDescription("desc").setClassName("c.p1").setConfigFieldName("cfg").setProperties(ImmutableMap.of("threshold", new PluginPropertyField("thresh", "description", "double", true, false), "retry", new PluginPropertyField("retries", "description", "int", false, false))).build();
PluginClass pluginA2 = PluginClass.builder().setName("p2").setType("A").setDescription("desc").setClassName("c.p2").setConfigFieldName("conf").setProperties(ImmutableMap.of("stream", new PluginPropertyField("stream", "description", "string", true, false))).build();
PluginClass pluginB1 = PluginClass.builder().setName("p1").setType("B").setDescription("desc").setClassName("c.p1").setConfigFieldName("cfg").setProperties(ImmutableMap.of("createIfNotExist", new PluginPropertyField("createIfNotExist", "desc", "boolean", false, false))).build();
PluginClass pluginB2 = PluginClass.builder().setName("p2").setType("B").setDescription("desc").setClassName("c.p2").setConfigFieldName("stuff").setProperties(ImmutableMap.of("numer", new PluginPropertyField("numerator", "description", "double", true, false), "denom", new PluginPropertyField("denominator", "description", "double", true, false))).build();
// add artifacts
// not interested in artifact contents for this test, using some dummy value
String contents = "0";
// write parent
Id.Artifact parentArtifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.0.0");
ArtifactMeta parentMeta = new ArtifactMeta(ArtifactClasses.builder().build());
writeArtifact(parentArtifactId, parentMeta, contents);
// artifact artifactX-1.0.0 contains plugin A1
Id.Artifact artifactXv100 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactX", "1.0.0");
ArtifactMeta metaXv100 = new ArtifactMeta(ArtifactClasses.builder().addPlugin(pluginA1).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactXv100, metaXv100, contents);
ArtifactDescriptor artifactXv100Info = artifactStore.getArtifact(artifactXv100).getDescriptor();
// artifact artifactX-1.1.0 contains plugin A1
Id.Artifact artifactXv110 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactX", "1.1.0");
ArtifactMeta metaXv110 = new ArtifactMeta(ArtifactClasses.builder().addPlugin(pluginA1).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactXv110, metaXv110, contents);
ArtifactDescriptor artifactXv110Info = artifactStore.getArtifact(artifactXv110).getDescriptor();
// artifact artifactX-2.0.0 contains plugins A1 and A2
Id.Artifact artifactXv200 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactX", "2.0.0");
ArtifactMeta metaXv200 = new ArtifactMeta(ArtifactClasses.builder().addPlugins(pluginA1, pluginA2).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactXv200, metaXv200, contents);
ArtifactDescriptor artifactXv200Info = artifactStore.getArtifact(artifactXv200).getDescriptor();
// artifact artifactY-1.0.0 contains plugin B1
Id.Artifact artifactYv100 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactY", "1.0.0");
ArtifactMeta metaYv100 = new ArtifactMeta(ArtifactClasses.builder().addPlugin(pluginB1).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactYv100, metaYv100, contents);
ArtifactDescriptor artifactYv100Info = artifactStore.getArtifact(artifactYv100).getDescriptor();
// artifact artifactY-2.0.0 contains plugin B2
Id.Artifact artifactYv200 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactY", "2.0.0");
ArtifactMeta metaYv200 = new ArtifactMeta(ArtifactClasses.builder().addPlugin(pluginB2).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactYv200, metaYv200, contents);
ArtifactDescriptor artifactYv200Info = artifactStore.getArtifact(artifactYv200).getDescriptor();
// artifact artifactZ-1.0.0 contains plugins A1 and B1
Id.Artifact artifactZv100 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactZ", "1.0.0");
ArtifactMeta metaZv100 = new ArtifactMeta(ArtifactClasses.builder().addPlugins(pluginA1, pluginB1).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactZv100, metaZv100, contents);
ArtifactDescriptor artifactZv100Info = artifactStore.getArtifact(artifactZv100).getDescriptor();
// artifact artifactZ-2.0.0 contains plugins A1, A2, B1, and B2
Id.Artifact artifactZv200 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactZ", "2.0.0");
ArtifactMeta metaZv200 = new ArtifactMeta(ArtifactClasses.builder().addPlugins(pluginA1, pluginA2, pluginB1, pluginB2).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactZv200, metaZv200, contents);
ArtifactDescriptor artifactZv200Info = artifactStore.getArtifact(artifactZv200).getDescriptor();
// test getting all plugins in the namespace
Map<ArtifactDescriptor, Set<PluginClass>> expected = Maps.newHashMap();
expected.put(artifactXv100Info, ImmutableSet.of(pluginA1));
expected.put(artifactXv110Info, ImmutableSet.of(pluginA1));
expected.put(artifactXv200Info, ImmutableSet.of(pluginA1, pluginA2));
expected.put(artifactYv100Info, ImmutableSet.of(pluginB1));
expected.put(artifactYv200Info, ImmutableSet.of(pluginB2));
expected.put(artifactZv100Info, ImmutableSet.of(pluginA1, pluginB1));
expected.put(artifactZv200Info, ImmutableSet.of(pluginA1, pluginA2, pluginB1, pluginB2));
Map<ArtifactDescriptor, Set<PluginClass>> actual = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId);
Assert.assertEquals(expected, actual);
// test getting all plugins by namespace and type
// get all of type A
expected = Maps.newHashMap();
expected.put(artifactXv100Info, ImmutableSet.of(pluginA1));
expected.put(artifactXv110Info, ImmutableSet.of(pluginA1));
expected.put(artifactXv200Info, ImmutableSet.of(pluginA1, pluginA2));
expected.put(artifactZv100Info, ImmutableSet.of(pluginA1));
expected.put(artifactZv200Info, ImmutableSet.of(pluginA1, pluginA2));
actual = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A");
Assert.assertEquals(expected, actual);
// get all of type B
expected = Maps.newHashMap();
expected.put(artifactYv100Info, ImmutableSet.of(pluginB1));
expected.put(artifactYv200Info, ImmutableSet.of(pluginB2));
expected.put(artifactZv100Info, ImmutableSet.of(pluginB1));
expected.put(artifactZv200Info, ImmutableSet.of(pluginB1, pluginB2));
actual = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "B");
Assert.assertEquals(expected, actual);
// test getting plugins by namespace, type, and name
// get all of type A and name p1
Map<ArtifactDescriptor, PluginClass> expectedMap = Maps.newHashMap();
expectedMap.put(artifactXv100Info, pluginA1);
expectedMap.put(artifactXv110Info, pluginA1);
expectedMap.put(artifactXv200Info, pluginA1);
expectedMap.put(artifactZv100Info, pluginA1);
expectedMap.put(artifactZv200Info, pluginA1);
Map<ArtifactDescriptor, PluginClass> actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p1", null, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
Assert.assertEquals(expectedMap, actualMap);
// test get limited number
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p1", null, 1, ArtifactSortOrder.UNORDERED);
Assert.assertEquals(ImmutableMap.of(artifactXv100Info, pluginA1), actualMap);
// test get DESC order
// NOTE(review): actualMap is wrapped in a TreeMap here and again in the assertion below,
// which re-sorts into natural order before taking descendingMap(); this checks key
// membership/ordering via TreeMap natural order rather than the store's returned
// iteration order — confirm that is the intent.
actualMap = new TreeMap<>(artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p1", null, Integer.MAX_VALUE, ArtifactSortOrder.DESC));
Assert.assertEquals(expectedMap, new TreeMap<>(actualMap).descendingMap());
// test Predicate
// predicate matches only artifactX versions in [1.0.0, 1.1.0), i.e. artifactX-1.0.0
Predicate<ArtifactId> predicate = input -> {
try {
return input.getParent().equals(NamespaceId.DEFAULT) && input.getArtifact().equals("artifactX") && ArtifactVersionRange.parse("[1.0.0, 1.1.0)").versionIsInRange(new ArtifactVersion(input.getVersion()));
} catch (InvalidArtifactRangeException e) {
return false;
}
};
expectedMap = Maps.newHashMap();
expectedMap.put(artifactXv100Info, pluginA1);
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p1", predicate, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
Assert.assertEquals(expectedMap, actualMap);
// test limit and order combined
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p1", null, 1, ArtifactSortOrder.DESC);
Assert.assertEquals(ImmutableMap.of(artifactZv200Info, pluginA1), actualMap);
// test limit, order, predicate combined
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p1", predicate, 1, ArtifactSortOrder.DESC);
Assert.assertEquals(ImmutableMap.of(artifactXv100Info, pluginA1), actualMap);
// get all of type A and name p2
expectedMap = Maps.newHashMap();
expectedMap.put(artifactXv200Info, pluginA2);
expectedMap.put(artifactZv200Info, pluginA2);
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p2", null, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
Assert.assertEquals(expectedMap, actualMap);
// get all of type B and name p1
expectedMap = Maps.newHashMap();
expectedMap.put(artifactYv100Info, pluginB1);
expectedMap.put(artifactZv100Info, pluginB1);
expectedMap.put(artifactZv200Info, pluginB1);
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "B", "p1", null, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
Assert.assertEquals(expectedMap, actualMap);
// get all of type B and name p2
expectedMap = Maps.newHashMap();
expectedMap.put(artifactYv200Info, pluginB2);
expectedMap.put(artifactZv200Info, pluginB2);
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "B", "p2", null, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
Assert.assertEquals(expectedMap, actualMap);
}
Use of io.cdap.cdap.api.artifact.InvalidArtifactRangeException in project cdap by caskdata:
the method parseArtifactRange in class ArtifactRanges.
/**
 * Parses the string representation of an artifact range, which is of the form:
 * {namespace}:{name}[{lower-version},{upper-version}]. This is what is returned by {@link #toString()}.
 * For example, default:my-functions[1.0.0,2.0.0) will correspond to an artifact name of my-functions with a
 * lower version of 1.0.0 and an upper version of 2.0.0 in the default namespace.
 *
 * @param artifactRangeStr the string representation to parse
 * @return the ArtifactRange corresponding to the given string
 * @throws InvalidArtifactRangeException if the namespace is missing or invalid, or nothing follows it
 */
public static ArtifactRange parseArtifactRange(String artifactRangeStr) throws InvalidArtifactRangeException {
// get the namespace
int nameStartIndex = artifactRangeStr.indexOf(':');
if (nameStartIndex < 0) {
throw new InvalidArtifactRangeException(String.format("Invalid artifact range %s. Could not find ':' separating namespace from artifact name.", artifactRangeStr));
}
String namespaceStr = artifactRangeStr.substring(0, nameStartIndex);
try {
// constructed only to validate the namespace string; the instance itself is not needed
new NamespaceId(namespaceStr);
} catch (Exception e) {
throw new InvalidArtifactRangeException(String.format("Invalid namespace %s: %s", namespaceStr, e.getMessage()));
}
// check not at the end of the string. indexOf() never returns length(), so the correct
// "':' is the last character" check is against length() - 1 (the previous == length()
// comparison was dead code).
if (nameStartIndex == artifactRangeStr.length() - 1) {
throw new InvalidArtifactRangeException(String.format("Invalid artifact range %s. Nothing found after namespace.", artifactRangeStr));
}
return parseArtifactRange(namespaceStr, artifactRangeStr.substring(nameStartIndex + 1));
}
Use of io.cdap.cdap.api.artifact.InvalidArtifactRangeException in project cdap by caskdata:
the method installPlugin in class HubPackage.
/**
 * Downloads the plugin from the given URL and installs it in the artifact repository
 *
 * @param url URL that points to the plugin directory on the hub
 * @param artifactRepository {@link ArtifactRepository} in which the plugin will be installed
 * @param tmpDir temporary directory where plugin jar is downloaded from the hub
 */
public void installPlugin(URL url, ArtifactRepository artifactRepository, File tmpDir) throws Exception {
// Deserialize spec.json
URL specURL = new URL(url.getProtocol(), url.getHost(), url.getPort(), url.getPath() + "/packages/" + name + "/" + version + "/spec.json");
Spec spec = GSON.fromJson(HttpClients.doGetAsString(specURL), Spec.class);
for (Spec.Action action : spec.getActions()) {
// See https://cdap.atlassian.net/wiki/spaces/DOCS/pages/554401840/Hub+API?src=search#one_step_deploy_plugin
if (!action.getType().equals("one_step_deploy_plugin")) {
continue;
}
String configFilename = action.getConfigFilename();
if (configFilename == null) {
LOG.warn("Ignoring plugin {} due to missing config", name);
continue;
}
URL configURL = new URL(url.getProtocol(), url.getHost(), url.getPort(), url.getPath() + Joiner.on("/").join(Arrays.asList("/packages", name, version, configFilename)));
// Download plugin json from hub
JsonObject jsonObj = GSON.fromJson(HttpClients.doGetAsString(configURL), JsonObject.class);
List<String> parents = GSON.fromJson(jsonObj.get("parents"), new TypeToken<List<String>>() {
}.getType());
String jarName = action.getJarName();
if (jarName == null) {
LOG.warn("Ignoring plugin {} due to missing jar", name);
continue;
}
// Download plugin jar from hub
File destination = File.createTempFile("artifact-", ".jar", tmpDir);
FileChannel channel = new FileOutputStream(destination, false).getChannel();
try {
URL jarURL = new URL(url.getProtocol(), url.getHost(), url.getPort(), url.getPath() + Joiner.on("/").join(Arrays.asList("/packages", name, version, jarName)));
HttpRequest request = HttpRequest.get(jarURL).withContentConsumer(new HttpContentConsumer() {
@Override
public boolean onReceived(ByteBuffer buffer) {
try {
channel.write(buffer);
} catch (IOException e) {
// include the exception so the failure cause is visible in the logs
LOG.error("Failed write to file {}", destination, e);
return false;
}
return true;
}
@Override
public void onFinished() {
Closeables.closeQuietly(channel);
}
}).build();
HttpClients.executeStreamingRequest(request);
} finally {
// onFinished() closes the channel on the normal path; this guards against the request
// failing before or during streaming, which would otherwise leak the channel and the
// underlying FileOutputStream. Closing an already-closed channel is a no-op.
Closeables.closeQuietly(channel);
}
Set<ArtifactRange> parentArtifacts = new HashSet<>();
for (String parent : parents) {
try {
// try parsing it as a namespaced range like system:cdap-data-pipeline[6.3 1.1,7.0.0)
parentArtifacts.add(ArtifactRanges.parseArtifactRange(parent));
} catch (InvalidArtifactRangeException e) {
// if this failed, try parsing as a non-namespaced range like cdap-data-pipeline[6.3 1.1,7.0.0)
parentArtifacts.add(ArtifactRanges.parseArtifactRange(NamespaceId.DEFAULT.getNamespace(), parent));
}
}
// add the artifact to the repo
io.cdap.cdap.proto.id.ArtifactId artifactId = NamespaceId.DEFAULT.artifact(name, version);
Id.Artifact artifact = Id.Artifact.fromEntityId(artifactId);
try {
artifactRepository.addArtifact(artifact, destination, parentArtifacts, ImmutableSet.of());
} catch (ArtifactAlreadyExistsException e) {
// an existing artifact is not an error; properties below are still (re)written
LOG.debug("Artifact {}-{} already exists", name, version);
}
Map<String, String> properties = GSON.fromJson(jsonObj.get("properties"), new TypeToken<Map<String, String>>() {
}.getType());
artifactRepository.writeArtifactProperties(Id.Artifact.fromEntityId(artifactId), properties);
// best-effort cleanup of the downloaded temp jar
if (!java.nio.file.Files.deleteIfExists(Paths.get(destination.getPath()))) {
LOG.warn("Failed to cleanup file {}", destination);
}
}
}
Aggregations