Example usage of co.cask.cdap.proto.id.ArtifactId in the cdap project by caskdata.
Taken from the class AbstractProgramRuntimeServiceTest, method testScopingRuntimeArguments.
@Test
public void testScopingRuntimeArguments() throws Exception {
  // Captures the user arguments seen by each program run, keyed by program id.
  Map<ProgramId, Arguments> argumentsMap = new ConcurrentHashMap<>();
  ProgramRunnerFactory runnerFactory = createProgramRunnerFactory(argumentsMap);
  final Program program = createDummyProgram();

  // Runtime service with stubbed program/artifact creation so that no real
  // artifact repository or program jar is needed for the test.
  final ProgramRuntimeService runtimeService =
    new AbstractProgramRuntimeService(CConfiguration.create(), runnerFactory, null, new NoOpProgramStateWriter()) {
      @Override
      public ProgramLiveInfo getLiveInfo(ProgramId programId) {
        return new ProgramLiveInfo(programId, "runtime") {
        };
      }

      @Override
      protected Program createProgram(CConfiguration cConf, ProgramRunner programRunner,
                                      ProgramDescriptor programDescriptor, ArtifactDetail artifactDetail,
                                      File tempDir) throws IOException {
        return program;
      }

      @Override
      protected ArtifactDetail getArtifactDetail(ArtifactId artifactId) throws IOException, ArtifactNotFoundException {
        co.cask.cdap.api.artifact.ArtifactId id =
          new co.cask.cdap.api.artifact.ArtifactId("dummy", new ArtifactVersion("1.0"), ArtifactScope.USER);
        return new ArtifactDetail(new ArtifactDescriptor(id, Locations.toLocation(TEMP_FOLDER.newFile())),
                                  new ArtifactMeta(ArtifactClasses.builder().build()));
      }
    };

  runtimeService.startAndWait();
  // FIX: the original had two nested try blocks whose finally clauses BOTH called
  // runtimeService.stopAndWait(), stopping the service twice. A single try/finally
  // is sufficient and stops it exactly once.
  try {
    ProgramDescriptor descriptor =
      new ProgramDescriptor(program.getId(), null, NamespaceId.DEFAULT.artifact("test", "1.0"));

    // Set of scope prefixes to test; each should resolve down to the plain "size" key
    // for this program when the runtime service unscopes the arguments.
    String programScope = program.getType().getScope();
    String clusterName = "c1";
    List<String> scopes = Arrays.asList(
      "cluster.*.",
      "cluster." + clusterName + ".",
      "cluster." + clusterName + ".app.*.",
      "app.*.",
      "app." + program.getApplicationId() + ".",
      "app." + program.getApplicationId() + "." + programScope + ".*.",
      "app." + program.getApplicationId() + "." + programScope + "." + program.getName() + ".",
      programScope + ".*.",
      programScope + "." + program.getName() + ".",
      "");

    for (String scope : scopes) {
      // Each run carries one scoped argument whose value encodes the scope length,
      // so the assertion below can tell which scope produced the value.
      ProgramOptions programOptions = new SimpleProgramOptions(
        program.getId(),
        new BasicArguments(Collections.singletonMap(Constants.CLUSTER_NAME, clusterName)),
        new BasicArguments(Collections.singletonMap(scope + "size", Integer.toString(scope.length()))));
      final ProgramController controller = runtimeService.run(descriptor, programOptions).getController();
      Tasks.waitFor(ProgramController.State.COMPLETED, new Callable<ProgramController.State>() {
        @Override
        public ProgramController.State call() throws Exception {
          return controller.getState();
        }
      }, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

      // Should get an argument: the scoped key must have been unscoped to plain "size".
      Arguments args = argumentsMap.get(program.getId());
      Assert.assertNotNull(args);
      Assert.assertEquals(scope.length(), Integer.parseInt(args.getOption("size")));
    }
  } finally {
    runtimeService.stopAndWait();
  }
}
Example usage of co.cask.cdap.proto.id.ArtifactId in the cdap project by caskdata.
Taken from the class ArtifactStoreTest, method testConcurrentWrite.
@Category(SlowTests.class)
@Test
public void testConcurrentWrite() throws Exception {
  // Start up a bunch of threads that will try and write the same artifact at the
  // same time; only one of them should be able to write it. The rest must observe
  // ArtifactAlreadyExistsException or WriteConflictException.
  int numThreads = 20;
  final Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "abc", "1.0.0");
  final List<String> successfulWriters = Collections.synchronizedList(Lists.<String>newArrayList());
  // use a barrier so they all try and write at the same time
  final CyclicBarrier barrier = new CyclicBarrier(numThreads);
  final CountDownLatch latch = new CountDownLatch(numThreads);
  ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
  try {
    for (int i = 0; i < numThreads; i++) {
      final String writer = String.valueOf(i);
      executorService.execute(new Runnable() {
        @Override
        public void run() {
          try {
            barrier.await();
            // Each writer registers a distinctly-named plugin so the winning
            // writer's contents can be identified afterwards.
            ArtifactMeta meta = new ArtifactMeta(
              ArtifactClasses.builder()
                .addPlugin(new PluginClass("plugin-type", "plugin" + writer, "", "classname", "cfg",
                                           ImmutableMap.<String, PluginPropertyField>of()))
                .build());
            writeArtifact(artifactId, meta, writer);
            successfulWriters.add(writer);
          } catch (InterruptedException | BrokenBarrierException | IOException e) {
            // something went wrong, fail the test
            throw new RuntimeException(e);
          } catch (ArtifactAlreadyExistsException | WriteConflictException e) {
            // these are ok, all but one thread should see this
          } finally {
            latch.countDown();
          }
        }
      });
    }
    // wait for all writers to finish
    latch.await();
  } finally {
    // FIX: the original never shut the executor down, leaking all 20 worker
    // threads for the remainder of the test JVM's lifetime.
    executorService.shutdownNow();
  }
  // only one writer should have been able to write
  Assert.assertEquals(1, successfulWriters.size());
  String successfulWriter = successfulWriters.get(0);
  // check that the contents weren't mixed between writers
  ArtifactDetail info = artifactStore.getArtifact(artifactId);
  ArtifactMeta expectedMeta = new ArtifactMeta(
    ArtifactClasses.builder()
      .addPlugin(new PluginClass("plugin-type", "plugin" + successfulWriter, "", "classname", "cfg",
                                 ImmutableMap.<String, PluginPropertyField>of()))
      .build());
  assertEqual(artifactId, expectedMeta, successfulWriter, info);
}
Example usage of co.cask.cdap.proto.id.ArtifactId in the cdap project by caskdata.
Taken from the class ArtifactStoreTest, method testSnapshotMutability.
@Test
public void testSnapshotMutability() throws Exception {
  // Register the parent artifact that the snapshot artifact will extend.
  Id.Artifact parentId = Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.0.0");
  writeArtifact(parentId, new ArtifactMeta(ArtifactClasses.builder().build()), "content");
  ArtifactRange parentRange = new ArtifactRange(
    NamespaceId.DEFAULT.getNamespace(), "parent", new ArtifactVersion("1.0.0"), new ArtifactVersion("2.0.0"));

  // Two plugin classes; the first snapshot write carries both of them.
  PluginClass pluginOne = new PluginClass(
    "atype", "plugin1", "", "c.c.c.plugin1", "cfg", ImmutableMap.<String, PluginPropertyField>of());
  PluginClass pluginTwo = new PluginClass(
    "atype", "plugin2", "", "c.c.c.plugin2", "cfg", ImmutableMap.<String, PluginPropertyField>of());
  Id.Artifact snapshotId = Id.Artifact.from(Id.Namespace.DEFAULT, "myplugins", "1.0.0-SNAPSHOT");
  ArtifactMeta snapshotMeta = new ArtifactMeta(
    ArtifactClasses.builder().addPlugins(pluginOne, pluginTwo).build(), ImmutableSet.of(parentRange));
  writeArtifact(snapshotId, snapshotMeta, "abc123");

  // Snapshots are mutable: overwrite the same version with new metadata and jar contents.
  snapshotMeta = new ArtifactMeta(
    ArtifactClasses.builder().addPlugin(pluginTwo).build(), ImmutableSet.of(parentRange));
  writeArtifact(snapshotId, snapshotMeta, "xyz321");

  // Both the metadata and the contents must reflect the second write.
  ArtifactDetail detail = artifactStore.getArtifact(snapshotId);
  assertEqual(snapshotId, snapshotMeta, "xyz321", detail);

  // plugin2 is still visible to the parent; plugin1 must be gone.
  Assert.assertEquals(
    ImmutableMap.of(detail.getDescriptor(), pluginTwo),
    artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentId, pluginTwo.getType(), pluginTwo.getName(),
                                   null, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED));
  try {
    artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentId, pluginOne.getType(), pluginOne.getName(),
                                   null, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
    Assert.fail();
  } catch (PluginNotExistsException e) {
    // expected
  }
}
Example usage of co.cask.cdap.proto.id.ArtifactId in the cdap project by caskdata.
Taken from the class ArtifactStoreTest, method testAddGetSingleArtifact.
@Test
public void testAddGetSingleArtifact() throws Exception {
  // One artifact containing two "atype" plugins, one "btype" plugin, and one app class.
  Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "myplugins", "1.0.0");
  PluginClass pluginA1 = new PluginClass(
    "atype", "plugin1", "", "c.c.c.plugin1", "cfg", ImmutableMap.<String, PluginPropertyField>of());
  PluginClass pluginA2 = new PluginClass(
    "atype", "plugin2", "", "c.c.c.plugin2", "cfg", ImmutableMap.<String, PluginPropertyField>of());
  PluginClass pluginB3 = new PluginClass(
    "btype", "plugin3", "", "c.c.c.plugin3", "cfg", ImmutableMap.<String, PluginPropertyField>of());
  Set<PluginClass> allPlugins = ImmutableSet.of(pluginA1, pluginA2, pluginB3);
  ApplicationClass appClass = new ApplicationClass(
    InspectionApp.class.getName(), "", new ReflectionSchemaGenerator().generate(InspectionApp.AConfig.class));
  ArtifactMeta meta = new ArtifactMeta(
    ArtifactClasses.builder().addPlugins(allPlugins).addApp(appClass).build());
  String contents = "my artifact contents";
  writeArtifact(artifactId, meta, contents);

  // Reading the artifact back must return exactly what was written.
  ArtifactDetail detail = artifactStore.getArtifact(artifactId);
  assertEqual(artifactId, meta, contents, detail);

  // All three plugins show up when listing plugins for that artifact.
  Map<ArtifactDescriptor, Set<PluginClass>> byArtifact =
    artifactStore.getPluginClasses(NamespaceId.DEFAULT, artifactId);
  Assert.assertEquals(1, byArtifact.size());
  Assert.assertTrue(byArtifact.containsKey(detail.getDescriptor()));
  Assert.assertEquals(ImmutableSet.copyOf(allPlugins),
                      ImmutableSet.copyOf(byArtifact.get(detail.getDescriptor())));

  // Filtering by type returns only the "atype" plugins.
  byArtifact = artifactStore.getPluginClasses(NamespaceId.DEFAULT, artifactId, "atype");
  Assert.assertEquals(1, byArtifact.size());
  Assert.assertTrue(byArtifact.containsKey(detail.getDescriptor()));
  Assert.assertEquals(ImmutableSet.of(pluginA1, pluginA2),
                      ImmutableSet.copyOf(byArtifact.get(detail.getDescriptor())));

  // Filtering by type and name returns the single matching plugin.
  Map<ArtifactDescriptor, PluginClass> byName = artifactStore.getPluginClasses(
    NamespaceId.DEFAULT.getNamespaceId(), artifactId, "btype", "plugin3",
    null, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
  Assert.assertEquals(1, byName.size());
  Assert.assertTrue(byName.containsKey(detail.getDescriptor()));
  Assert.assertEquals(pluginB3, byName.get(detail.getDescriptor()));
}
Example usage of co.cask.cdap.proto.id.ArtifactId in the cdap project by caskdata.
Taken from the class ArtifactStoreTest, method testGetPlugins.
@Test
public void testGetPlugins() throws Exception {
// Exercises every overload of ArtifactStore.getPluginClasses: by namespace, by type,
// and by type+name with limit/order/predicate filters, across several artifacts that
// all extend the same parent range.
ArtifactRange parentArtifacts = new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent", new ArtifactVersion("1.0.0"), new ArtifactVersion("2.0.0"));
// we have 2 plugins of type A and 2 plugins of type B
PluginClass pluginA1 = new PluginClass("A", "p1", "desc", "c.p1", "cfg", ImmutableMap.of("threshold", new PluginPropertyField("thresh", "description", "double", true, false), "retry", new PluginPropertyField("retries", "description", "int", false, false)));
PluginClass pluginA2 = new PluginClass("A", "p2", "desc", "c.p2", "conf", ImmutableMap.of("stream", new PluginPropertyField("stream", "description", "string", true, false)));
PluginClass pluginB1 = new PluginClass("B", "p1", "desc", "c.p1", "cfg", ImmutableMap.of("createIfNotExist", new PluginPropertyField("createIfNotExist", "desc", "boolean", false, false)));
PluginClass pluginB2 = new PluginClass("B", "p2", "desc", "c.p2", "stuff", ImmutableMap.of("numer", new PluginPropertyField("numerator", "description", "double", true, false), "denom", new PluginPropertyField("denominator", "description", "double", true, false)));
// add artifacts
// not interested in artifact contents for this test, using some dummy value
String contents = "0";
// write parent
Id.Artifact parentArtifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.0.0");
ArtifactMeta parentMeta = new ArtifactMeta(ArtifactClasses.builder().build());
writeArtifact(parentArtifactId, parentMeta, contents);
// artifact artifactX-1.0.0 contains plugin A1
Id.Artifact artifactXv100 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactX", "1.0.0");
ArtifactMeta metaXv100 = new ArtifactMeta(ArtifactClasses.builder().addPlugin(pluginA1).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactXv100, metaXv100, contents);
ArtifactDescriptor artifactXv100Info = artifactStore.getArtifact(artifactXv100).getDescriptor();
// artifact artifactX-1.1.0 contains plugin A1
Id.Artifact artifactXv110 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactX", "1.1.0");
ArtifactMeta metaXv110 = new ArtifactMeta(ArtifactClasses.builder().addPlugin(pluginA1).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactXv110, metaXv110, contents);
ArtifactDescriptor artifactXv110Info = artifactStore.getArtifact(artifactXv110).getDescriptor();
// artifact artifactX-2.0.0 contains plugins A1 and A2
Id.Artifact artifactXv200 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactX", "2.0.0");
ArtifactMeta metaXv200 = new ArtifactMeta(ArtifactClasses.builder().addPlugins(pluginA1, pluginA2).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactXv200, metaXv200, contents);
ArtifactDescriptor artifactXv200Info = artifactStore.getArtifact(artifactXv200).getDescriptor();
// artifact artifactY-1.0.0 contains plugin B1
Id.Artifact artifactYv100 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactY", "1.0.0");
ArtifactMeta metaYv100 = new ArtifactMeta(ArtifactClasses.builder().addPlugin(pluginB1).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactYv100, metaYv100, contents);
ArtifactDescriptor artifactYv100Info = artifactStore.getArtifact(artifactYv100).getDescriptor();
// artifact artifactY-2.0.0 contains plugin B2
Id.Artifact artifactYv200 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactY", "2.0.0");
ArtifactMeta metaYv200 = new ArtifactMeta(ArtifactClasses.builder().addPlugin(pluginB2).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactYv200, metaYv200, contents);
ArtifactDescriptor artifactYv200Info = artifactStore.getArtifact(artifactYv200).getDescriptor();
// artifact artifactZ-1.0.0 contains plugins A1 and B1
Id.Artifact artifactZv100 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactZ", "1.0.0");
ArtifactMeta metaZv100 = new ArtifactMeta(ArtifactClasses.builder().addPlugins(pluginA1, pluginB1).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactZv100, metaZv100, contents);
ArtifactDescriptor artifactZv100Info = artifactStore.getArtifact(artifactZv100).getDescriptor();
// artifact artifactZ-2.0.0 contains plugins A1, A2, B1, and B2
Id.Artifact artifactZv200 = Id.Artifact.from(Id.Namespace.DEFAULT, "artifactZ", "2.0.0");
ArtifactMeta metaZv200 = new ArtifactMeta(ArtifactClasses.builder().addPlugins(pluginA1, pluginA2, pluginB1, pluginB2).build(), ImmutableSet.of(parentArtifacts));
writeArtifact(artifactZv200, metaZv200, contents);
ArtifactDescriptor artifactZv200Info = artifactStore.getArtifact(artifactZv200).getDescriptor();
// test getting all plugins in the namespace
Map<ArtifactDescriptor, Set<PluginClass>> expected = Maps.newHashMap();
expected.put(artifactXv100Info, ImmutableSet.of(pluginA1));
expected.put(artifactXv110Info, ImmutableSet.of(pluginA1));
expected.put(artifactXv200Info, ImmutableSet.of(pluginA1, pluginA2));
expected.put(artifactYv100Info, ImmutableSet.of(pluginB1));
expected.put(artifactYv200Info, ImmutableSet.of(pluginB2));
expected.put(artifactZv100Info, ImmutableSet.of(pluginA1, pluginB1));
expected.put(artifactZv200Info, ImmutableSet.of(pluginA1, pluginA2, pluginB1, pluginB2));
Map<ArtifactDescriptor, Set<PluginClass>> actual = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId);
Assert.assertEquals(expected, actual);
// test getting all plugins by namespace and type
// get all of type A
expected = Maps.newHashMap();
expected.put(artifactXv100Info, ImmutableSet.of(pluginA1));
expected.put(artifactXv110Info, ImmutableSet.of(pluginA1));
expected.put(artifactXv200Info, ImmutableSet.of(pluginA1, pluginA2));
expected.put(artifactZv100Info, ImmutableSet.of(pluginA1));
expected.put(artifactZv200Info, ImmutableSet.of(pluginA1, pluginA2));
actual = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A");
Assert.assertEquals(expected, actual);
// get all of type B
expected = Maps.newHashMap();
expected.put(artifactYv100Info, ImmutableSet.of(pluginB1));
expected.put(artifactYv200Info, ImmutableSet.of(pluginB2));
expected.put(artifactZv100Info, ImmutableSet.of(pluginB1));
expected.put(artifactZv200Info, ImmutableSet.of(pluginB1, pluginB2));
actual = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "B");
Assert.assertEquals(expected, actual);
// test getting plugins by namespace, type, and name
// get all of type A and name p1
Map<ArtifactDescriptor, PluginClass> expectedMap = Maps.newHashMap();
expectedMap.put(artifactXv100Info, pluginA1);
expectedMap.put(artifactXv110Info, pluginA1);
expectedMap.put(artifactXv200Info, pluginA1);
expectedMap.put(artifactZv100Info, pluginA1);
expectedMap.put(artifactZv200Info, pluginA1);
Map<ArtifactDescriptor, PluginClass> actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p1", null, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
Assert.assertEquals(expectedMap, actualMap);
// test get limited number
// limit=1 with UNORDERED appears to return the lowest artifact (artifactX-1.0.0)
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p1", null, 1, ArtifactSortOrder.UNORDERED);
Assert.assertEquals(ImmutableMap.of(artifactXv100Info, pluginA1), actualMap);
// test get DESC order
// NOTE(review): Map.equals is order-insensitive, so this assertion compares contents
// only and may not actually verify DESC ordering; wrapping the DESC result in a
// natural-order TreeMap also discards the returned ordering. Confirm intent.
actualMap = new TreeMap<>(artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p1", null, Integer.MAX_VALUE, ArtifactSortOrder.DESC));
Assert.assertEquals(expectedMap, new TreeMap<>(actualMap).descendingMap());
// test Predicate
// Predicate restricting results to artifactX versions in [1.0.0, 1.1.0).
Predicate<ArtifactId> predicate = input -> {
try {
return input.getParent().equals(NamespaceId.DEFAULT) && input.getArtifact().equals("artifactX") && ArtifactVersionRange.parse("[1.0.0, 1.1.0)").versionIsInRange(new ArtifactVersion(input.getVersion()));
} catch (InvalidArtifactRangeException e) {
return false;
}
};
expectedMap = Maps.newHashMap();
expectedMap.put(artifactXv100Info, pluginA1);
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p1", predicate, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
Assert.assertEquals(expectedMap, actualMap);
// test limit and order combined
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p1", null, 1, ArtifactSortOrder.DESC);
Assert.assertEquals(ImmutableMap.of(artifactZv200Info, pluginA1), actualMap);
// test limit, order, predicate combined
// the predicate admits only artifactX-1.0.0, so limit/order are moot here
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p1", predicate, 1, ArtifactSortOrder.DESC);
Assert.assertEquals(ImmutableMap.of(artifactXv100Info, pluginA1), actualMap);
// get all of type A and name p2
expectedMap = Maps.newHashMap();
expectedMap.put(artifactXv200Info, pluginA2);
expectedMap.put(artifactZv200Info, pluginA2);
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "A", "p2", null, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
Assert.assertEquals(expectedMap, actualMap);
// get all of type B and name p1
expectedMap = Maps.newHashMap();
expectedMap.put(artifactYv100Info, pluginB1);
expectedMap.put(artifactZv100Info, pluginB1);
expectedMap.put(artifactZv200Info, pluginB1);
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "B", "p1", null, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
Assert.assertEquals(expectedMap, actualMap);
// get all of type B and name p2
expectedMap = Maps.newHashMap();
expectedMap.put(artifactYv200Info, pluginB2);
expectedMap.put(artifactZv200Info, pluginB2);
actualMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "B", "p2", null, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
Assert.assertEquals(expectedMap, actualMap);
}
End of aggregated usage examples.