use of co.cask.cdap.api.plugin.PluginPropertyField in project cdap by caskdata.
the class ArtifactInspectorTest method inspectAppsAndPlugins.
@Test
public void inspectAppsAndPlugins() throws Exception {
  Manifest manifest = new Manifest();
  manifest.getMainAttributes().put(ManifestFields.EXPORT_PACKAGE, InspectionApp.class.getPackage().getName());
  File appFile = createJar(InspectionApp.class,
                           new File(TMP_FOLDER.newFolder(), "InspectionApp-1.0.0.jar"), manifest);
  Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "InspectionApp", "1.0.0");
  Location artifactLocation = Locations.toLocation(appFile);
  try (CloseableClassLoader artifactClassLoader =
         classLoaderFactory.createClassLoader(
           ImmutableList.of(artifactLocation).iterator(),
           new EntityImpersonator(artifactId.toEntityId(),
                                  new DefaultImpersonator(CConfiguration.create(), null)))) {
    ArtifactClasses classes = artifactInspector.inspectArtifact(artifactId, appFile, artifactClassLoader);
    // check app classes
    Set<ApplicationClass> expectedApps = ImmutableSet.of(
      new ApplicationClass(InspectionApp.class.getName(), "",
                           new ReflectionSchemaGenerator(false).generate(InspectionApp.AConfig.class)));
    Assert.assertEquals(expectedApps, classes.getApps());
    // check plugin classes
    PluginClass expectedPlugin = new PluginClass(
      InspectionApp.PLUGIN_TYPE, InspectionApp.PLUGIN_NAME, InspectionApp.PLUGIN_DESCRIPTION,
      InspectionApp.AppPlugin.class.getName(), "pluginConf",
      ImmutableMap.of("y", new PluginPropertyField("y", "", "double", true, true),
                      "isSomething", new PluginPropertyField("isSomething", "", "boolean", true, false)));
    Assert.assertEquals(ImmutableSet.of(expectedPlugin), classes.getPlugins());
  }
}
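The property map passed to the expected PluginClass above is the core PluginPropertyField usage on this page. As a reading aid, the same construction is shown below with each argument labelled; the parameter names (name, description, type, required, macroSupported) and (type, name, description, className, configFieldName, properties) are assumptions about the constructor order, inferred from how the test uses the values, not stated in the snippet itself.

// Hedged sketch: argument-by-argument view of the constructors used above.
// Parameter names are assumed, not confirmed by the snippet.
PluginPropertyField y = new PluginPropertyField(
    "y",        // property name
    "",         // description
    "double",   // property type
    true,       // required
    true);      // macro-supported
PluginPropertyField isSomething = new PluginPropertyField(
    "isSomething",  // property name
    "",             // description
    "boolean",      // property type
    true,           // required
    false);         // no macro support
PluginClass expectedPlugin = new PluginClass(
    InspectionApp.PLUGIN_TYPE,                 // plugin type
    InspectionApp.PLUGIN_NAME,                 // plugin name
    InspectionApp.PLUGIN_DESCRIPTION,          // description
    InspectionApp.AppPlugin.class.getName(),   // plugin class name
    "pluginConf",                              // name of the config field in the plugin class
    ImmutableMap.of("y", y, "isSomething", isSomething));  // property name -> field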
use of co.cask.cdap.api.plugin.PluginPropertyField in project cdap by caskdata.
the class ArtifactStoreTest method testGetPluginsByParentArtifactRanges.
@Test
public void testGetPluginsByParentArtifactRanges() throws Exception {
  ArtifactRange parentArtifacts1 = new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent1",
                                                     new ArtifactVersion("1.0.0"), new ArtifactVersion("5.0.0"));
  // we have 2 plugins of type A
  PluginClass pluginA1 = new PluginClass(
    "A", "p1", "desc", "c.p1", "cfg",
    ImmutableMap.of("threshold", new PluginPropertyField("thresh", "description", "double", true, false),
                    "retry", new PluginPropertyField("retries", "description", "int", false, false)));
  PluginClass pluginA2 = new PluginClass(
    "A", "p2", "desc", "c.p2", "conf",
    ImmutableMap.of("stream", new PluginPropertyField("stream", "description", "string", true, false)));
  // add artifacts
  // the artifact contents don't matter for this test, so use a dummy value
  String contents = "0";
  // write parent artifacts
  List<String> parentArtifactsVersions = ImmutableList.of("1.0.0", "1.2.1", "2.0.0", "3.0.0", "4.0.0");
  for (String artifactVersion : parentArtifactsVersions) {
    Id.Artifact parentArtifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "parent1", artifactVersion);
    ArtifactMeta parentMeta = new ArtifactMeta(ArtifactClasses.builder().build());
    writeArtifact(parentArtifactId, parentMeta, contents);
  }
  // artifact artifactX-1.0.0 contains plugin A1
  Id.Artifact artifactXv100 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactX", "1.0.0");
  ArtifactMeta metaXv100 = new ArtifactMeta(ArtifactClasses.builder().addPlugin(pluginA1).build(),
                                            ImmutableSet.of(parentArtifacts1));
  writeArtifact(artifactXv100, metaXv100, contents);
  ArtifactDescriptor artifactXv100Info = artifactStore.getArtifact(artifactXv100).getDescriptor();
  // artifact artifactX-1.1.0 contains plugin A1
  Id.Artifact artifactXv110 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactX", "1.1.0");
  ArtifactMeta metaXv110 = new ArtifactMeta(ArtifactClasses.builder().addPlugin(pluginA1).build(),
                                            ImmutableSet.of(parentArtifacts1));
  writeArtifact(artifactXv110, metaXv110, contents);
  ArtifactDescriptor artifactXv110Info = artifactStore.getArtifact(artifactXv110).getDescriptor();
  // artifact artifactX-2.0.0 contains plugins A1 and A2
  Id.Artifact artifactXv200 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactX", "2.0.0");
  ArtifactMeta metaXv200 = new ArtifactMeta(ArtifactClasses.builder().addPlugins(pluginA1, pluginA2).build(),
                                            ImmutableSet.of(parentArtifacts1));
  writeArtifact(artifactXv200, metaXv200, contents);
  ArtifactDescriptor artifactXv200Info = artifactStore.getArtifact(artifactXv200).getDescriptor();
  ArtifactRange parentArtifactsrange1 = new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent1",
                                                          new ArtifactVersion("3.0.0"), new ArtifactVersion("5.0.0"));
  // artifact artifactZ-2.0.0 contains plugins A1 and A2
  Id.Artifact artifactZv200 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactZ", "2.0.0");
  ArtifactMeta metaZv200 = new ArtifactMeta(ArtifactClasses.builder().addPlugins(pluginA1, pluginA2).build(),
                                            ImmutableSet.of(parentArtifactsrange1));
  writeArtifact(artifactZv200, metaZv200, contents);
  ArtifactDescriptor artifactZv200Info = artifactStore.getArtifact(artifactZv200).getDescriptor();
  // an artifact written with this range should not show up in the results, since its declared parent range
  // falls outside the queried parent artifact range
  ArtifactRange parentArtifactsOutOfRange1 = new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent1",
                                                               new ArtifactVersion("5.0.0"), new ArtifactVersion("8.0.0"));
  // artifact artifactZ-3.0.0 contains plugins A1 and A2
  Id.Artifact artifactZv300 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactZ", "3.0.0");
  ArtifactMeta metaZv300 = new ArtifactMeta(ArtifactClasses.builder().addPlugins(pluginA1, pluginA2).build(),
                                            ImmutableSet.of(parentArtifactsOutOfRange1));
  writeArtifact(artifactZv300, metaZv300, contents);
  Map<ArtifactDescriptor, PluginClass> expectedMap = Maps.newHashMap();
  expectedMap.put(artifactXv100Info, pluginA1);
  expectedMap.put(artifactXv110Info, pluginA1);
  expectedMap.put(artifactXv200Info, pluginA1);
  expectedMap.put(artifactZv200Info, pluginA1);
  Assert.assertEquals(expectedMap,
                      artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifacts1, "A", "p1", null,
                                                     Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED));
  // test limited number
  Assert.assertEquals(ImmutableMap.of(artifactXv100Info, pluginA1),
                      artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifacts1, "A", "p1", null,
                                                     1, ArtifactSortOrder.UNORDERED));
  // test DESC order
  Assert.assertEquals(expectedMap,
                      new TreeMap<>(artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifacts1, "A", "p1",
                                                                    null, Integer.MAX_VALUE,
                                                                    ArtifactSortOrder.DESC)).descendingMap());
  ArtifactRange parentArtifactsSub1 = new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent1",
                                                        new ArtifactVersion("1.1.0"), new ArtifactVersion("2.0.0"));
  expectedMap = Maps.newHashMap();
  expectedMap.put(artifactXv100Info, pluginA1);
  expectedMap.put(artifactXv110Info, pluginA1);
  expectedMap.put(artifactXv200Info, pluginA1);
  // artifactZv200Info won't be here, as the parent range 3.0.0-5.0.0 declared by artifactZ-2.0.0
  // won't match the 1.2.1 parent artifact version
  Assert.assertEquals(expectedMap,
                      artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactsSub1, "A", "p1", null,
                                                     Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED));
  expectedMap = Maps.newHashMap();
  expectedMap.put(artifactXv200Info, pluginA2);
  expectedMap.put(artifactZv200Info, pluginA2);
  Assert.assertEquals(expectedMap,
                      artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifacts1, "A", "p2", null,
                                                     Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED));
  ArtifactRange parentArtifactsSub2 = new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent1",
                                                        new ArtifactVersion("5.0.0"), new ArtifactVersion("10.0.0"));
  try {
    artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactsSub2, "A", "p1", null,
                                   Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
    Assert.fail("Getting plugin classes for an invalid range should not return a result");
  } catch (ArtifactNotFoundException e) {
    // expected
  }
}
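The assertions above hinge on how ArtifactRange treats its bounds: plugins from artifactZ-3.0.0 never appear because its declared parent range [5.0.0, 8.0.0) does not overlap the queried [1.0.0, 5.0.0). Below is a minimal sketch of the bound semantics this relies on, assuming the four-argument constructor defaults to an inclusive lower bound and an exclusive upper bound, and that versionIsInRange is the standard membership check in this API version.

// Hedged sketch: [3.0.0, 5.0.0) with the assumed default bound semantics.
ArtifactRange range = new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent1",
                                        new ArtifactVersion("3.0.0"), new ArtifactVersion("5.0.0"));
range.versionIsInRange(new ArtifactVersion("3.0.0"));  // true, lower bound is inclusive
range.versionIsInRange(new ArtifactVersion("4.0.0"));  // true
range.versionIsInRange(new ArtifactVersion("5.0.0"));  // false, upper bound is exclusive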
use of co.cask.cdap.api.plugin.PluginPropertyField in project cdap by caskdata.
the class ArtifactStoreTest method testConcurrentSnapshotWrite.
@Category(SlowTests.class)
@Test
public void testConcurrentSnapshotWrite() throws Exception {
  // write parent
  Id.Artifact parentArtifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.0.0");
  ArtifactMeta parentMeta = new ArtifactMeta(ArtifactClasses.builder().build());
  writeArtifact(parentArtifactId, parentMeta, "content");
  final ArtifactRange parentArtifacts = new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent",
                                                          new ArtifactVersion("1.0.0"), new ArtifactVersion("2.0.0"));
  // start up a bunch of threads that will try to write the same artifact at the same time;
  // only one of them should be able to write it
  int numThreads = 20;
  final Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "abc", "1.0.0-SNAPSHOT");
  // use a barrier so they all try to write at the same time
  final CyclicBarrier barrier = new CyclicBarrier(numThreads);
  final CountDownLatch latch = new CountDownLatch(numThreads);
  ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
  for (int i = 0; i < numThreads; i++) {
    final String writer = String.valueOf(i);
    executorService.execute(new Runnable() {
      @Override
      public void run() {
        try {
          barrier.await();
          ArtifactMeta meta = new ArtifactMeta(
            ArtifactClasses.builder()
              .addPlugin(new PluginClass("plugin-type", "plugin" + writer, "", "classname", "cfg",
                                         ImmutableMap.<String, PluginPropertyField>of()))
              .build(),
            ImmutableSet.of(parentArtifacts));
          writeArtifact(artifactId, meta, writer);
        } catch (InterruptedException | BrokenBarrierException | ArtifactAlreadyExistsException | IOException e) {
          // something went wrong, fail the test
          throw new RuntimeException(e);
        } catch (WriteConflictException e) {
          // this is ok, though unexpected (it means the write still conflicted after a bunch of retries)
        } finally {
          latch.countDown();
        }
      }
    });
  }
  // wait for all writers to finish
  latch.await();
  // figure out which writer won by reading the data back. All the writers should have been able to write,
  // and they should have overwritten each other in a consistent manner.
  ArtifactDetail detail = artifactStore.getArtifact(artifactId);
  // figure out the winning writer from the plugin name, which is 'plugin<writer>'
  String pluginName = detail.getMeta().getClasses().getPlugins().iterator().next().getName();
  String winnerWriter = pluginName.substring("plugin".length());
  ArtifactMeta expectedMeta = new ArtifactMeta(
    ArtifactClasses.builder()
      .addPlugin(new PluginClass("plugin-type", "plugin" + winnerWriter, "", "classname", "cfg",
                                 ImmutableMap.<String, PluginPropertyField>of()))
      .build(),
    ImmutableSet.of(parentArtifacts));
  assertEqual(artifactId, expectedMeta, winnerWriter, detail);
  // check that only 1 plugin remains and that it's the correct one
  Map<ArtifactDescriptor, Set<PluginClass>> pluginMap =
    artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "plugin-type");
  Map<ArtifactDescriptor, Set<PluginClass>> expected = Maps.newHashMap();
  expected.put(detail.getDescriptor(),
               ImmutableSet.<PluginClass>of(new PluginClass("plugin-type", "plugin" + winnerWriter, "", "classname",
                                                            "cfg", ImmutableMap.<String, PluginPropertyField>of())));
  Assert.assertEquals(expected, pluginMap);
}
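The coordination in this test is a general pattern rather than anything CDAP-specific: a CyclicBarrier releases every writer at the same instant so the writes actually race, and a CountDownLatch lets the test thread wait for all of them regardless of outcome. A standalone sketch of just that pattern, with the contended operation stubbed out:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class BarrierLatchSketch {
  public static void main(String[] args) throws InterruptedException {
    int workers = 4;
    CyclicBarrier start = new CyclicBarrier(workers);   // all workers begin together
    CountDownLatch done = new CountDownLatch(workers);  // main thread waits for all of them
    ExecutorService pool = Executors.newFixedThreadPool(workers);
    for (int i = 0; i < workers; i++) {
      final int id = i;
      pool.execute(() -> {
        try {
          start.await();                      // block until every worker is ready
          System.out.println("worker " + id + " racing for the write");
          // the contended operation would go here
        } catch (Exception e) {
          throw new RuntimeException(e);
        } finally {
          done.countDown();                   // counted down even if the worker failed
        }
      });
    }
    done.await();                             // equivalent of latch.await() in the test
    pool.shutdown();
  }
}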
use of co.cask.cdap.api.plugin.PluginPropertyField in project cdap by caskdata.
the class ArtifactStoreTest method testPluginParentVersions.
// tests that when an artifact specifies a range of parent artifact versions it extends,
// that range is honored
@Test
public void testPluginParentVersions() throws Exception {
  // write an artifact that extends parent-[1.0.0, 2.0.0)
  Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "plugins", "0.1.0");
  Set<ArtifactRange> parentArtifacts = ImmutableSet.of(
    new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent",
                      new ArtifactVersion("1.0.0"), new ArtifactVersion("2.0.0")));
  Set<PluginClass> plugins = ImmutableSet.of(
    new PluginClass("atype", "plugin1", "", "c.c.c.plugin1", "cfg", ImmutableMap.<String, PluginPropertyField>of()));
  ArtifactMeta meta = new ArtifactMeta(ArtifactClasses.builder().addPlugins(plugins).build(), parentArtifacts);
  writeArtifact(artifactId, meta, "some contents");
  ArtifactDescriptor artifactInfo = artifactStore.getArtifact(artifactId).getDescriptor();
  // check ids that are out of range. They should not return anything
  List<Id.Artifact> badIds = Lists.newArrayList(
    // ids that are too low
    Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "0.9.9"),
    Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.0.0-SNAPSHOT"),
    // ids that are too high
    Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "2.0.0"));
  ArtifactMeta emptyMeta = new ArtifactMeta(ArtifactClasses.builder().build());
  for (Id.Artifact badId : badIds) {
    // write the parent artifact to make sure we don't get ArtifactNotFound exceptions with later calls;
    // we're testing range filtering, not the absence of the parent artifact
    writeArtifact(badId, emptyMeta, "content");
    Assert.assertTrue(artifactStore.getPluginClasses(NamespaceId.DEFAULT, badId).isEmpty());
    Assert.assertTrue(artifactStore.getPluginClasses(NamespaceId.DEFAULT, badId, "atype").isEmpty());
    try {
      artifactStore.getPluginClasses(NamespaceId.DEFAULT, badId, "atype", "plugin1", null,
                                     Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
      Assert.fail();
    } catch (PluginNotExistsException e) {
      // expected
    }
  }
  // check that ids in range return what we expect
  List<Id.Artifact> goodIds = Lists.newArrayList(
    // id at the inclusive lower bound
    Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.0.0"),
    // ids below the exclusive upper bound
    Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.9.9"),
    Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.99.999"),
    Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "2.0.0-SNAPSHOT"));
  Map<ArtifactDescriptor, Set<PluginClass>> expectedPluginsMapList = ImmutableMap.of(artifactInfo, plugins);
  Map<ArtifactDescriptor, PluginClass> expectedPluginsMap = ImmutableMap.of(artifactInfo, plugins.iterator().next());
  for (Id.Artifact goodId : goodIds) {
    // make sure the parent actually exists
    writeArtifact(goodId, emptyMeta, "content");
    Assert.assertEquals(expectedPluginsMapList, artifactStore.getPluginClasses(NamespaceId.DEFAULT, goodId));
    Assert.assertEquals(expectedPluginsMapList, artifactStore.getPluginClasses(NamespaceId.DEFAULT, goodId, "atype"));
    Assert.assertEquals(expectedPluginsMap,
                        artifactStore.getPluginClasses(NamespaceId.DEFAULT, goodId, "atype", "plugin1", null,
                                                       Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED));
  }
}
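One subtlety worth noting: 1.0.0-SNAPSHOT lands in the bad list while 2.0.0-SNAPSHOT lands in the good list. That is consistent with a SNAPSHOT version sorting just below its corresponding release, so it falls outside an inclusive 1.0.0 lower bound but inside an exclusive 2.0.0 upper bound. A short sketch of that ordering, assuming ArtifactVersion implements Comparable with these semantics:

// Hedged sketch: assumed version ordering that explains the bad/good split above.
// 1.0.0-SNAPSHOT < 1.0.0  -> below the inclusive lower bound, so "too low"
// 2.0.0-SNAPSHOT < 2.0.0  -> below the exclusive upper bound, so still in range
boolean belowLower = new ArtifactVersion("1.0.0-SNAPSHOT").compareTo(new ArtifactVersion("1.0.0")) < 0;  // true
boolean belowUpper = new ArtifactVersion("2.0.0-SNAPSHOT").compareTo(new ArtifactVersion("2.0.0")) < 0;  // true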
use of co.cask.cdap.api.plugin.PluginPropertyField in project cdap by caskdata.
the class ArtifactStoreTest method testDelete.
@Test
public void testDelete() throws Exception {
  // write an artifact with an app
  Id.Artifact parentId = Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.0.0");
  ApplicationClass appClass = new ApplicationClass(
    InspectionApp.class.getName(), "", new ReflectionSchemaGenerator().generate(InspectionApp.AConfig.class));
  ArtifactMeta artifactMeta = new ArtifactMeta(ArtifactClasses.builder().addApp(appClass).build());
  writeArtifact(parentId, artifactMeta, "parent contents");
  // write a child artifact that extends the parent with some plugins
  Id.Artifact childId = Id.Artifact.from(Id.Namespace.DEFAULT, "myplugins", "1.0.0");
  List<PluginClass> plugins = ImmutableList.of(
    new PluginClass("atype", "plugin1", "", "c.c.c.plugin1", "cfg", ImmutableMap.<String, PluginPropertyField>of()),
    new PluginClass("atype", "plugin2", "", "c.c.c.plugin2", "cfg", ImmutableMap.<String, PluginPropertyField>of()));
  Set<ArtifactRange> parents = ImmutableSet.of(
    new ArtifactRange(parentId.getNamespace().getId(), parentId.getName(),
                      new ArtifactVersion("0.1.0"), new ArtifactVersion("2.0.0")));
  artifactMeta = new ArtifactMeta(ArtifactClasses.builder().addPlugins(plugins).build(), parents);
  writeArtifact(childId, artifactMeta, "child contents");
  // check parent has plugins from the child
  Assert.assertFalse(artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentId).isEmpty());
  // delete the child artifact
  artifactStore.delete(childId);
  // shouldn't be able to get artifact detail
  try {
    artifactStore.getArtifact(childId);
    Assert.fail();
  } catch (ArtifactNotFoundException e) {
    // expected
  }
  // shouldn't see it in the list
  List<ArtifactDetail> artifactList = artifactStore.getArtifacts(parentId.getNamespace().toEntityId());
  Assert.assertEquals(1, artifactList.size());
  Assert.assertEquals(parentId.getName(), artifactList.get(0).getDescriptor().getArtifactId().getName());
  // shouldn't see any more plugins for parent
  Assert.assertTrue(artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentId).isEmpty());
  // delete parent
  artifactStore.delete(parentId);
  // nothing should be in the list
  Assert.assertTrue(artifactStore.getArtifacts(parentId.getNamespace().toEntityId()).isEmpty());
  // shouldn't be able to see app class either
  Assert.assertTrue(artifactStore.getApplicationClasses(NamespaceId.DEFAULT, appClass.getClassName()).isEmpty());
}
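The try/Assert.fail()/catch idiom used here (and in the earlier tests) is the classic JUnit 4 way to assert that a call throws. If the project's JUnit version is 4.13 or newer, the same check could be written with Assert.assertThrows; this is a hedged alternative, not how the test is actually written:

// Hedged sketch: equivalent expected-exception check, assuming JUnit 4.13+ is on the classpath.
Assert.assertThrows(ArtifactNotFoundException.class, () -> artifactStore.getArtifact(childId));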