Use of co.cask.cdap.proto.id.ArtifactId in project cdap by caskdata.
The class ListArtifactPluginsCommand, method perform.
@Override
public void perform(Arguments arguments, PrintStream output) throws Exception {
  String artifactName = arguments.get(ArgumentName.ARTIFACT_NAME.toString());
  String artifactVersion = arguments.get(ArgumentName.ARTIFACT_VERSION.toString());
  ArtifactId artifactId = cliConfig.getCurrentNamespace().artifact(artifactName, artifactVersion);
  String pluginType = arguments.get(ArgumentName.PLUGIN_TYPE.toString());
  final List<PluginSummary> pluginSummaries;
  String scopeStr = arguments.getOptional(ArgumentName.SCOPE.toString());
  if (scopeStr == null) {
    pluginSummaries = artifactClient.getPluginSummaries(artifactId, pluginType);
  } else {
    pluginSummaries = artifactClient.getPluginSummaries(artifactId, pluginType,
                                                        ArtifactScope.valueOf(scopeStr.toUpperCase()));
  }
  Table table = Table.builder()
    .setHeader("type", "name", "classname", "description", "artifact")
    .setRows(pluginSummaries, new RowMaker<PluginSummary>() {
      @Override
      public List<?> makeRow(PluginSummary object) {
        return Lists.newArrayList(object.getType(), object.getName(), object.getClassName(),
                                  object.getDescription(), object.getArtifact().toString());
      }
    })
    .build();
  cliConfig.getTableRenderer().render(cliConfig, output, table);
}
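The ArtifactId here is produced by combining the CLI's current namespace with a name and version. A minimal sketch of that construction pattern, assuming a hypothetical artifact name "my-plugins" and version "1.0.0" in the default namespace:

// Hedged sketch: a fully qualified ArtifactId from a namespace, name, and version.
// "my-plugins" and "1.0.0" are placeholder values, not taken from the command above.
ArtifactId artifactId = NamespaceId.DEFAULT.artifact("my-plugins", "1.0.0");
// The resulting id is what ArtifactClient methods such as getPluginSummaries expect.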
Use of co.cask.cdap.proto.id.ArtifactId in project cdap by caskdata.
The class MetadataDatasetTest, method testSearchDifferentEntityScope.
@Test
public void testSearchDifferentEntityScope() throws InterruptedException, TransactionFailureException {
  final ArtifactId sysArtifact = NamespaceId.SYSTEM.artifact("artifact", "1.0");
  final ArtifactId nsArtifact = new ArtifactId("ns1", "artifact", "1.0");
  final String multiWordKey = "multiword";
  final String multiWordValue = "aV1 av2 , - , av3 - av4_av5 av6";
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      dataset.setProperty(nsArtifact, multiWordKey, multiWordValue);
      dataset.setProperty(sysArtifact, multiWordKey, multiWordValue);
    }
  });
  final MetadataEntry systemArtifactEntry = new MetadataEntry(sysArtifact, multiWordKey, multiWordValue);
  final MetadataEntry nsArtifactEntry = new MetadataEntry(nsArtifact, multiWordKey, multiWordValue);
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      List<MetadataEntry> results =
        dataset.search("ns1", "aV5", ImmutableSet.of(EntityTypeSimpleName.ALL), SortInfo.DEFAULT,
                       0, Integer.MAX_VALUE, 1, null, false, EnumSet.of(EntityScope.USER)).getResults();
      // the result should not contain system entities
      Assert.assertEquals(Sets.newHashSet(nsArtifactEntry), Sets.newHashSet(results));
      results =
        dataset.search("ns1", "aV5", ImmutableSet.of(EntityTypeSimpleName.ALL), SortInfo.DEFAULT,
                       0, Integer.MAX_VALUE, 1, null, false, EnumSet.of(EntityScope.SYSTEM)).getResults();
      // the result should not contain user entities
      Assert.assertEquals(Sets.newHashSet(systemArtifactEntry), Sets.newHashSet(results));
      results =
        dataset.search("ns1", "aV5", ImmutableSet.of(EntityTypeSimpleName.ALL), SortInfo.DEFAULT,
                       0, Integer.MAX_VALUE, 1, null, false, EnumSet.allOf(EntityScope.class)).getResults();
      // the result should contain both entity scopes
      Assert.assertEquals(Sets.newHashSet(nsArtifactEntry, systemArtifactEntry), Sets.newHashSet(results));
    }
  });
  // clean up
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      dataset.removeProperties(nsArtifact);
      dataset.removeProperties(sysArtifact);
    }
  });
}
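The test names the same artifact in two different scopes and then restricts the search by EntityScope. A minimal sketch of just those two pieces, reusing the identifiers that appear in the test:

// Hedged sketch: the two ArtifactId forms used above are equivalent ways of naming an artifact.
ArtifactId sysArtifact = NamespaceId.SYSTEM.artifact("artifact", "1.0"); // system namespace
ArtifactId nsArtifact = new ArtifactId("ns1", "artifact", "1.0");        // user namespace "ns1"
// EntityScope then controls which of the two a metadata search may return:
EnumSet<EntityScope> userOnly = EnumSet.of(EntityScope.USER);
EnumSet<EntityScope> systemOnly = EnumSet.of(EntityScope.SYSTEM);
EnumSet<EntityScope> both = EnumSet.allOf(EntityScope.class);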
Use of co.cask.cdap.proto.id.ArtifactId in project cdap by caskdata.
The class MetadataDatasetTest, method testSearchIncludesSystemEntities.
@Test
public void testSearchIncludesSystemEntities() throws InterruptedException, TransactionFailureException {
  // Use the same artifact in two different namespaces - system and ns2
  final ArtifactId sysArtifact = NamespaceId.SYSTEM.artifact("artifact", "1.0");
  final ArtifactId ns2Artifact = new ArtifactId("ns2", "artifact", "1.0");
  final String multiWordKey = "multiword";
  final String multiWordValue = "aV1 av2 , - , av3 - av4_av5 av6";
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      dataset.setProperty(flow1, multiWordKey, multiWordValue);
      dataset.setProperty(sysArtifact, multiWordKey, multiWordValue);
      dataset.setProperty(ns2Artifact, multiWordKey, multiWordValue);
    }
  });
  // perform the exact same multiword search in the 'ns1' namespace. It should return the system artifact
  // along with matched entities in the 'ns1' namespace
  final MetadataEntry flowMultiWordEntry = new MetadataEntry(flow1, multiWordKey, multiWordValue);
  final MetadataEntry systemArtifactEntry = new MetadataEntry(sysArtifact, multiWordKey, multiWordValue);
  final MetadataEntry ns2ArtifactEntry = new MetadataEntry(ns2Artifact, multiWordKey, multiWordValue);
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      List<MetadataEntry> results =
        searchByDefaultIndex("ns1", "aV5", ImmutableSet.of(EntityTypeSimpleName.ALL));
      Assert.assertEquals(Sets.newHashSet(flowMultiWordEntry, systemArtifactEntry), Sets.newHashSet(results));
      // search only programs - should only return the flow
      results = searchByDefaultIndex("ns1", multiWordKey + MetadataDataset.KEYVALUE_SEPARATOR + "aV5",
                                     ImmutableSet.of(EntityTypeSimpleName.PROGRAM));
      Assert.assertEquals(ImmutableList.of(flowMultiWordEntry), results);
      // search only artifacts - should only return the system artifact
      results = searchByDefaultIndex("ns1", multiWordKey + MetadataDataset.KEYVALUE_SEPARATOR + multiWordValue,
                                     ImmutableSet.of(EntityTypeSimpleName.ARTIFACT));
      // this query returns the system artifact 4 times, since the dataset returns a list with duplicates
      // for scoring purposes. Convert to a Set for comparison.
      Assert.assertEquals(Sets.newHashSet(systemArtifactEntry), Sets.newHashSet(results));
      // search all entities in namespace 'ns2' - should return the system artifact and the same artifact in ns2
      results = searchByDefaultIndex("ns2", multiWordKey + MetadataDataset.KEYVALUE_SEPARATOR + "aV4",
                                     ImmutableSet.of(EntityTypeSimpleName.ALL));
      Assert.assertEquals(Sets.newHashSet(systemArtifactEntry, ns2ArtifactEntry), Sets.newHashSet(results));
      // search only programs in namespace 'ns2' - should return empty
      results = searchByDefaultIndex("ns2", "aV*", ImmutableSet.of(EntityTypeSimpleName.PROGRAM));
      Assert.assertTrue(results.isEmpty());
      // search all entities in namespace 'ns3' - should return only the system artifact
      results = searchByDefaultIndex("ns3", "av*", ImmutableSet.of(EntityTypeSimpleName.ALL));
      Assert.assertEquals(Sets.newHashSet(systemArtifactEntry), Sets.newHashSet(results));
      // search the system namespace for all entities - should return only the system artifact
      results = searchByDefaultIndex(NamespaceId.SYSTEM.getEntityName(), "av*",
                                     ImmutableSet.of(EntityTypeSimpleName.ALL));
      Assert.assertEquals(Sets.newHashSet(systemArtifactEntry), Sets.newHashSet(results));
    }
  });
  // clean up
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      dataset.removeProperties(flow1);
      dataset.removeProperties(sysArtifact);
    }
  });
}
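Most of the queries above are built by joining a property key and a search term with MetadataDataset.KEYVALUE_SEPARATOR, optionally ending in a wildcard. A minimal sketch of that query construction, reusing the test's key and terms:

// Hedged sketch: search query strings as assembled in the test above.
String keyScopedQuery = "multiword" + MetadataDataset.KEYVALUE_SEPARATOR + "aV5"; // match within one key's value
String prefixQuery = "av*";                                                        // wildcard prefix match
// Either string is passed to the search along with an EntityTypeSimpleName filter, e.g.:
// searchByDefaultIndex("ns1", keyScopedQuery, ImmutableSet.of(EntityTypeSimpleName.ARTIFACT));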
Use of co.cask.cdap.proto.id.ArtifactId in project cdap by caskdata.
The class ArtifactStoreTest, method testConcurrentWrite.
@Category(SlowTests.class)
@Test
public void testConcurrentWrite() throws Exception {
  // start up a bunch of threads that will try and write the same artifact at the same time;
  // only one of them should be able to write it
  int numThreads = 20;
  final Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "abc", "1.0.0");
  final List<String> successfulWriters = Collections.synchronizedList(Lists.<String>newArrayList());
  // use a barrier so they all try and write at the same time
  final CyclicBarrier barrier = new CyclicBarrier(numThreads);
  final CountDownLatch latch = new CountDownLatch(numThreads);
  ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
  for (int i = 0; i < numThreads; i++) {
    final String writer = String.valueOf(i);
    executorService.execute(new Runnable() {
      @Override
      public void run() {
        try {
          barrier.await();
          ArtifactMeta meta = new ArtifactMeta(
            ArtifactClasses.builder()
              .addPlugin(new PluginClass("plugin-type", "plugin" + writer, "", "classname", "cfg",
                                         ImmutableMap.<String, PluginPropertyField>of()))
              .build());
          writeArtifact(artifactId, meta, writer);
          successfulWriters.add(writer);
        } catch (InterruptedException | BrokenBarrierException | IOException e) {
          // something went wrong, fail the test
          throw new RuntimeException(e);
        } catch (ArtifactAlreadyExistsException | WriteConflictException e) {
          // these are ok, all but one thread should see this
        } finally {
          latch.countDown();
        }
      }
    });
  }
  // wait for all writers to finish
  latch.await();
  // only one writer should have been able to write
  Assert.assertEquals(1, successfulWriters.size());
  String successfulWriter = successfulWriters.get(0);
  // check that the contents weren't mixed between writers
  ArtifactDetail info = artifactStore.getArtifact(artifactId);
  ArtifactMeta expectedMeta = new ArtifactMeta(
    ArtifactClasses.builder()
      .addPlugin(new PluginClass("plugin-type", "plugin" + successfulWriter, "", "classname", "cfg",
                                 ImmutableMap.<String, PluginPropertyField>of()))
      .build());
  assertEqual(artifactId, expectedMeta, successfulWriter, info);
}
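The coordination harness here is plain Java: a CyclicBarrier releases all writers at once, and a CountDownLatch lets the test wait until every writer has finished. A stripped-down sketch of that pattern, independent of CDAP, assuming it runs inside a method that declares throws Exception; the write attempt is left as a placeholder:

// Hedged sketch of the barrier-plus-latch pattern used by the test above.
int numThreads = 20;
CyclicBarrier barrier = new CyclicBarrier(numThreads);      // releases all workers simultaneously
CountDownLatch latch = new CountDownLatch(numThreads);      // counts workers as they finish
ExecutorService pool = Executors.newFixedThreadPool(numThreads);
for (int i = 0; i < numThreads; i++) {
  pool.execute(() -> {
    try {
      barrier.await();                                       // all threads start their write attempt together
      // ... attempt the conflicting write here; expect all but one attempt to fail ...
    } catch (Exception e) {
      throw new RuntimeException(e);
    } finally {
      latch.countDown();                                     // always signal completion
    }
  });
}
latch.await();                                               // wait for every worker before asserting
pool.shutdown();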
Use of co.cask.cdap.proto.id.ArtifactId in project cdap by caskdata.
The class ArtifactStoreTest, method testAddGetSingleArtifact.
@Test
public void testAddGetSingleArtifact() throws Exception {
  Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "myplugins", "1.0.0");
  PluginClass plugin1 =
    new PluginClass("atype", "plugin1", "", "c.c.c.plugin1", "cfg", ImmutableMap.<String, PluginPropertyField>of());
  PluginClass plugin2 =
    new PluginClass("atype", "plugin2", "", "c.c.c.plugin2", "cfg", ImmutableMap.<String, PluginPropertyField>of());
  PluginClass plugin3 =
    new PluginClass("btype", "plugin3", "", "c.c.c.plugin3", "cfg", ImmutableMap.<String, PluginPropertyField>of());
  Set<PluginClass> plugins = ImmutableSet.of(plugin1, plugin2, plugin3);
  ApplicationClass appClass = new ApplicationClass(
    InspectionApp.class.getName(), "", new ReflectionSchemaGenerator().generate(InspectionApp.AConfig.class));
  ArtifactMeta artifactMeta =
    new ArtifactMeta(ArtifactClasses.builder().addPlugins(plugins).addApp(appClass).build());
  String artifactContents = "my artifact contents";
  writeArtifact(artifactId, artifactMeta, artifactContents);
  ArtifactDetail artifactDetail = artifactStore.getArtifact(artifactId);
  assertEqual(artifactId, artifactMeta, artifactContents, artifactDetail);
  // test that plugins in the artifact show up when getting plugins for that artifact
  Map<ArtifactDescriptor, Set<PluginClass>> pluginsMap =
    artifactStore.getPluginClasses(NamespaceId.DEFAULT, artifactId);
  Assert.assertEquals(1, pluginsMap.size());
  Assert.assertTrue(pluginsMap.containsKey(artifactDetail.getDescriptor()));
  Set<PluginClass> expected = ImmutableSet.copyOf(plugins);
  Set<PluginClass> actual = ImmutableSet.copyOf(pluginsMap.get(artifactDetail.getDescriptor()));
  Assert.assertEquals(expected, actual);
  // test plugins for a specific type
  pluginsMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, artifactId, "atype");
  Assert.assertEquals(1, pluginsMap.size());
  Assert.assertTrue(pluginsMap.containsKey(artifactDetail.getDescriptor()));
  expected = ImmutableSet.of(plugin1, plugin2);
  actual = ImmutableSet.copyOf(pluginsMap.get(artifactDetail.getDescriptor()));
  Assert.assertEquals(expected, actual);
  // test plugins for a specific type and name
  Map<ArtifactDescriptor, PluginClass> pluginClasses = artifactStore.getPluginClasses(
    NamespaceId.DEFAULT.getNamespaceId(), artifactId, "btype", "plugin3", null, Integer.MAX_VALUE,
    ArtifactSortOrder.UNORDERED);
  Assert.assertEquals(1, pluginClasses.size());
  Assert.assertTrue(pluginClasses.containsKey(artifactDetail.getDescriptor()));
  Assert.assertEquals(plugin3, pluginClasses.get(artifactDetail.getDescriptor()));
}
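This test addresses the artifact through the older Id.Artifact form before querying the store. A minimal sketch of the lookup calls exercised above, using only identifiers and methods that appear in the test:

// Hedged sketch: addressing a stored artifact and listing its plugins by type.
Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "myplugins", "1.0.0");
ArtifactDetail detail = artifactStore.getArtifact(artifactId);                    // full artifact record
Map<ArtifactDescriptor, Set<PluginClass>> byType =
  artifactStore.getPluginClasses(NamespaceId.DEFAULT, artifactId, "atype");       // plugins of one type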