Usage of co.cask.cdap.proto.id.ArtifactId in the cdap project by caskdata.
From the class MetadataHttpHandler, method removeArtifactProperties.
/**
 * REST handler that deletes every metadata property attached to the given artifact version.
 * Responds with 200 OK and a confirmation message once the properties are removed.
 *
 * @throws NotFoundException if the artifact does not exist
 */
@DELETE
@Path("/namespaces/{namespace-id}/artifacts/{artifact-name}/versions/{artifact-version}/metadata/properties")
public void removeArtifactProperties(HttpRequest request, HttpResponder responder, @PathParam("namespace-id") String namespaceId, @PathParam("artifact-name") String artifactName, @PathParam("artifact-version") String artifactVersionStr) throws NotFoundException {
  ArtifactId artifact = new ArtifactId(namespaceId, artifactName, artifactVersionStr);
  metadataAdmin.removeProperties(artifact);
  String message = String.format("Metadata properties for artifact %s deleted successfully.", artifact);
  responder.sendJson(HttpResponseStatus.OK, message);
}
Usage of co.cask.cdap.proto.id.ArtifactId in the cdap project by caskdata.
From the class AbstractProgramRuntimeService, method createPluginSnapshot.
/**
 * Return the copy of the {@link ProgramOptions} including locations of plugin artifacts in it.
 * @param options the {@link ProgramOptions} in which the locations of plugin artifacts needs to be included
 * @param programId Id of the Program
 * @param tempDir Temporary Directory to create the plugin artifact snapshot
 * @param appSpec program's Application Specification
 * @return the copy of the program options with locations of plugin artifacts included in them
 * @throws IllegalArgumentException if a plugin's artifact cannot be found in the repository
 */
private ProgramOptions createPluginSnapshot(ProgramOptions options, ProgramId programId, File tempDir, @Nullable ApplicationSpecification appSpec) throws Exception {
// appSpec is null in an unit test
if (appSpec == null || appSpec.getPlugins().isEmpty()) {
return options;
}
Set<String> files = Sets.newHashSet();
ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
builder.putAll(options.getArguments().asMap());
// Iterate values directly: the map key (plugin name) is never used, only each plugin's artifact.
for (Plugin plugin : appSpec.getPlugins().values()) {
File destFile = new File(tempDir, Artifacts.getFileName(plugin.getArtifactId()));
// Skip if the file has already been copied (several plugins can come from the same artifact).
if (!files.add(destFile.getName())) {
continue;
}
try {
ArtifactId artifactId = Artifacts.toArtifactId(programId.getNamespaceId(), plugin.getArtifactId());
copyArtifact(artifactId, noAuthArtifactRepository.getArtifact(Id.Artifact.fromEntityId(artifactId)), destFile);
} catch (ArtifactNotFoundException e) {
// Preserve the cause so the missing artifact can be traced back.
throw new IllegalArgumentException(String.format("Artifact %s could not be found", plugin.getArtifactId()), e);
}
}
LOG.debug("Plugin artifacts of {} copied to {}", programId, tempDir.getAbsolutePath());
builder.put(ProgramOptionConstants.PLUGIN_DIR, tempDir.getAbsolutePath());
return new SimpleProgramOptions(options.getProgramId(), new BasicArguments(builder.build()), options.getUserArguments(), options.isDebug());
}
Usage of co.cask.cdap.proto.id.ArtifactId in the cdap project by caskdata.
From the class ArtifactStoreTest, method testConcurrentSnapshotWrite.
/**
 * Verifies that concurrent writes of the same SNAPSHOT artifact leave the store consistent:
 * writers may overwrite each other, but the surviving artifact content and plugin metadata
 * must all come from a single winning writer.
 */
@Category(SlowTests.class)
@Test
public void testConcurrentSnapshotWrite() throws Exception {
// write parent
Id.Artifact parentArtifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.0.0");
ArtifactMeta parentMeta = new ArtifactMeta(ArtifactClasses.builder().build());
writeArtifact(parentArtifactId, parentMeta, "content");
final ArtifactRange parentArtifacts = new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent", new ArtifactVersion("1.0.0"), new ArtifactVersion("2.0.0"));
// start up a bunch of threads that will try and write the same artifact at the same time
// only one of them should be able to write it
int numThreads = 20;
final Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "abc", "1.0.0-SNAPSHOT");
// use a barrier so they all try and write at the same time
final CyclicBarrier barrier = new CyclicBarrier(numThreads);
final CountDownLatch latch = new CountDownLatch(numThreads);
ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
try {
for (int i = 0; i < numThreads; i++) {
final String writer = String.valueOf(i);
executorService.execute(() -> {
try {
barrier.await();
ArtifactMeta meta = new ArtifactMeta(ArtifactClasses.builder().addPlugin(new PluginClass("plugin-type", "plugin" + writer, "", "classname", "cfg", ImmutableMap.<String, PluginPropertyField>of())).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactId, meta, writer);
} catch (InterruptedException | BrokenBarrierException | ArtifactAlreadyExistsException | IOException e) {
// something went wrong, fail the test
throw new RuntimeException(e);
} catch (WriteConflictException e) {
// these are ok though unexpected (means couldn't write after a bunch of retries too)
} finally {
latch.countDown();
}
});
}
// wait for all writers to finish
latch.await();
} finally {
// Fix: the pool was never shut down before, leaking its 20 threads for the JVM lifetime.
executorService.shutdownNow();
}
// figure out which was the last writer by reading our data. all the writers should have been able to write,
// and they should have all overwritten each other in a consistent manner
ArtifactDetail detail = artifactStore.getArtifact(artifactId);
// figure out the winning writer from the plugin name, which is 'plugin<writer>'
String pluginName = detail.getMeta().getClasses().getPlugins().iterator().next().getName();
String winnerWriter = pluginName.substring("plugin".length());
ArtifactMeta expectedMeta = new ArtifactMeta(ArtifactClasses.builder().addPlugin(new PluginClass("plugin-type", "plugin" + winnerWriter, "", "classname", "cfg", ImmutableMap.<String, PluginPropertyField>of())).build(), ImmutableSet.of(parentArtifacts));
assertEqual(artifactId, expectedMeta, winnerWriter, detail);
// check only 1 plugin remains and that its the correct one
Map<ArtifactDescriptor, Set<PluginClass>> pluginMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "plugin-type");
Map<ArtifactDescriptor, Set<PluginClass>> expected = Maps.newHashMap();
expected.put(detail.getDescriptor(), ImmutableSet.<PluginClass>of(new PluginClass("plugin-type", "plugin" + winnerWriter, "", "classname", "cfg", ImmutableMap.<String, PluginPropertyField>of())));
Assert.assertEquals(expected, pluginMap);
}
Usage of co.cask.cdap.proto.id.ArtifactId in the cdap project by caskdata.
From the class ArtifactStoreTest, method testPluginParentVersions.
// Verifies that when an artifact declares the parent version range it extends,
// only parent versions inside that range can see the artifact's plugins.
@Test
public void testPluginParentVersions() throws Exception {
// write an artifact that extends parent-[1.0.0, 2.0.0)
Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "plugins", "0.1.0");
Set<ArtifactRange> parentArtifacts = ImmutableSet.of(new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent", new ArtifactVersion("1.0.0"), new ArtifactVersion("2.0.0")));
Set<PluginClass> plugins = ImmutableSet.of(new PluginClass("atype", "plugin1", "", "c.c.c.plugin1", "cfg", ImmutableMap.<String, PluginPropertyField>of()));
ArtifactMeta meta = new ArtifactMeta(ArtifactClasses.builder().addPlugins(plugins).build(), parentArtifacts);
writeArtifact(artifactId, meta, "some contents");
ArtifactDescriptor artifactInfo = artifactStore.getArtifact(artifactId).getDescriptor();
// check ids that are out of range. They should not return anything
List<Id.Artifact> badIds = Lists.newArrayList(// ids below the range (a SNAPSHOT of the lower bound counts as below it)
Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "0.9.9"), Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.0.0-SNAPSHOT"), // ids at or above the exclusive upper bound
Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "2.0.0"));
ArtifactMeta emptyMeta = new ArtifactMeta(ArtifactClasses.builder().build());
for (Id.Artifact badId : badIds) {
// write the parent artifact to make sure we don't get ArtifactNotFound exceptions with later calls
// we're testing range filtering, not the absence of the parent artifact
writeArtifact(badId, emptyMeta, "content");
Assert.assertTrue(artifactStore.getPluginClasses(NamespaceId.DEFAULT, badId).isEmpty());
Assert.assertTrue(artifactStore.getPluginClasses(NamespaceId.DEFAULT, badId, "atype").isEmpty());
try {
artifactStore.getPluginClasses(NamespaceId.DEFAULT, badId, "atype", "plugin1", null, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
Assert.fail();
} catch (PluginNotExistsException e) {
// expected
}
}
// check ids that are in range return what we expect
List<Id.Artifact> goodIds = Lists.newArrayList(// the inclusive lower bound itself
Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.0.0"), // versions inside the range, up to just below the exclusive upper bound
Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.9.9"), Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.99.999"), Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "2.0.0-SNAPSHOT"));
Map<ArtifactDescriptor, Set<PluginClass>> expectedPluginsMapList = ImmutableMap.of(artifactInfo, plugins);
Map<ArtifactDescriptor, PluginClass> expectedPluginsMap = ImmutableMap.of(artifactInfo, plugins.iterator().next());
for (Id.Artifact goodId : goodIds) {
// make sure parent actually exists
writeArtifact(goodId, emptyMeta, "content");
Assert.assertEquals(expectedPluginsMapList, artifactStore.getPluginClasses(NamespaceId.DEFAULT, goodId));
Assert.assertEquals(expectedPluginsMapList, artifactStore.getPluginClasses(NamespaceId.DEFAULT, goodId, "atype"));
Assert.assertEquals(expectedPluginsMap, artifactStore.getPluginClasses(NamespaceId.DEFAULT, goodId, "atype", "plugin1", null, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED));
}
}
Usage of co.cask.cdap.proto.id.ArtifactId in the cdap project by caskdata.
From the class ArtifactStoreTest, method testUniversalPlugin.
/**
 * Verifies that a "universal" plugin (one deployed without any parent artifact) is visible
 * to every artifact in the namespace, alongside plugins that do declare a parent range.
 */
@Test
public void testUniversalPlugin() throws Exception {
// First, deploy an artifact in the SYSTEM scope that doesn't have any plugin inside.
ArtifactId artifactId = NamespaceId.SYSTEM.artifact("artifact", "1.0.0");
writeArtifact(Id.Artifact.fromEntityId(artifactId), new ArtifactMeta(ArtifactClasses.builder().build()), "test");
// Deploy an artifact that has a plugin in the DEFAULT scope, but without any parent artifact
PluginClass pluginClass1 = new PluginClass("type1", "plugin1", "plugin1", "plugin1", null, Collections.emptyMap());
ArtifactId pluginArtifactId1 = NamespaceId.DEFAULT.artifact("plugin-artifact1", "0.0.1");
writeArtifact(Id.Artifact.fromEntityId(pluginArtifactId1), new ArtifactMeta(ArtifactClasses.builder().addPlugin(pluginClass1).build()), "test");
// Get the available plugins for the artifact, should get the plugin1
SortedMap<ArtifactDescriptor, Set<PluginClass>> plugins = artifactStore.getPluginClasses(NamespaceId.DEFAULT, Id.Artifact.fromEntityId(artifactId));
Assert.assertEquals(1, plugins.size());
List<PluginClass> pluginsClasses = plugins.values().stream().flatMap(Set::stream).collect(Collectors.toList());
Assert.assertEquals(1, pluginsClasses.size());
Assert.assertEquals(pluginClass1, pluginsClasses.get(0));
// Get the available plugins for the plugin artifact itself, should also get the plugin1
plugins = artifactStore.getPluginClasses(NamespaceId.DEFAULT, Id.Artifact.fromEntityId(pluginArtifactId1));
Assert.assertEquals(1, plugins.size());
pluginsClasses = plugins.values().stream().flatMap(Set::stream).collect(Collectors.toList());
Assert.assertEquals(1, pluginsClasses.size());
Assert.assertEquals(pluginClass1, pluginsClasses.get(0));
// Deploy an artifact that has a plugin in the DEFAULT scope with a parent artifact
PluginClass pluginClass2 = new PluginClass("type2", "plugin2", "plugin2", "plugin2", null, Collections.emptyMap());
ArtifactId pluginArtifactId2 = NamespaceId.DEFAULT.artifact("plugin-artifact2", "0.0.1");
ArtifactRange parentArtifactRange = new ArtifactRange(artifactId.getNamespace(), artifactId.getArtifact(), ArtifactVersionRange.parse("[1.0.0,2.0.0)"));
writeArtifact(Id.Artifact.fromEntityId(pluginArtifactId2), new ArtifactMeta(ArtifactClasses.builder().addPlugin(pluginClass2).build(), Collections.singleton(parentArtifactRange)), "test");
// Get the available plugins for the artifact again, should get plugin1 and plugin2
plugins = artifactStore.getPluginClasses(NamespaceId.DEFAULT, Id.Artifact.fromEntityId(artifactId));
Assert.assertEquals(2, plugins.size());
// Get and verify the plugins.
pluginsClasses = plugins.values().stream().flatMap(Set::stream).collect(Collectors.toList());
Assert.assertEquals(2, pluginsClasses.size());
// The plugins are sorted by the ArtifactDescriptor, hence order is guaranteed
Assert.assertEquals(Arrays.asList(pluginClass1, pluginClass2), pluginsClasses);
// Get available plugin by type.
for (PluginClass pluginClass : Arrays.asList(pluginClass1, pluginClass2)) {
plugins = artifactStore.getPluginClasses(NamespaceId.DEFAULT, Id.Artifact.fromEntityId(artifactId), pluginClass.getType());
Assert.assertEquals(1, plugins.size());
pluginsClasses = plugins.values().stream().flatMap(Set::stream).collect(Collectors.toList());
Assert.assertEquals(1, pluginsClasses.size());
Assert.assertEquals(pluginClass, pluginsClasses.get(0));
}
// Get plugins by parent ArtifactRange
for (PluginClass pluginClass : Arrays.asList(pluginClass1, pluginClass2)) {
SortedMap<ArtifactDescriptor, PluginClass> result = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactRange, pluginClass.getType(), pluginClass.getName(), null, 10, ArtifactSortOrder.UNORDERED);
Assert.assertEquals(1, result.size());
// Size is asserted to be 1 above, so take the single value directly instead of an
// unchecked Optional.get() on a stream (which hides failures behind NoSuchElementException).
Assert.assertEquals(pluginClass, result.values().iterator().next());
}
}
Aggregations