Use of io.cdap.cdap.spi.metadata.Read in project cdap by caskdata.
The class DatasetMetadataStorage, method readScope.
private MetadataDataset.Record readScope(MetadataDatasetContext context, MetadataScope scope, Read read) {
  MetadataEntity entity = read.getEntity();
  if (read.getSelection() == null && (!read.getScopes().contains(scope) || read.getKinds().isEmpty())) {
    return new MetadataDataset.Record(entity);
  }
  Set<ScopedNameOfKind> selectionForScope = null;
  if (read.getSelection() != null) {
    // noinspection ConstantConditions
    selectionForScope = Sets.filter(read.getSelection(), entry -> entry.getScope() == scope);
    if (selectionForScope.isEmpty()) {
      return new MetadataDataset.Record(entity);
    }
  }
  // now we know we must read from the dataset
  MetadataDataset dataset = context.getDataset(scope);
  if (selectionForScope != null) {
    // request is for a specific set of tags and properties
    Set<String> tagsToRead = selectionForScope.stream().filter(entry -> TAG == entry.getKind()).map(ScopedName::getName).collect(Collectors.toSet());
    Set<String> propertiesToRead = selectionForScope.stream().filter(entry -> PROPERTY == entry.getKind()).map(ScopedName::getName).collect(Collectors.toSet());
    Set<String> tags = tagsToRead.isEmpty() ? Collections.emptySet() : Sets.intersection(tagsToRead, dataset.getTags(entity));
    Map<String, String> properties = propertiesToRead.isEmpty() ? Collections.emptyMap() : Maps.filterKeys(dataset.getProperties(entity), propertiesToRead::contains);
    return new MetadataDataset.Record(entity, properties, tags);
  }
  if (MetadataKind.ALL.equals(read.getKinds())) {
    // all metadata kinds requested
    return dataset.getMetadata(entity);
  }
  // exactly one kind is requested
  MetadataKind requestKind = read.getKinds().iterator().next();
  if (requestKind == TAG) {
    return new MetadataDataset.Record(entity, Collections.emptyMap(), dataset.getTags(entity));
  }
  if (requestKind == PROPERTY) {
    return new MetadataDataset.Record(entity, dataset.getProperties(entity), Collections.emptySet());
  }
  throw new IllegalStateException("Encountered metadata read request for unknown kind " + requestKind);
}
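
readScope dispatches on how the incoming Read was built. Below is a minimal sketch of the three Read constructor forms that appear in the tests that follow and the branch each one drives; the wrapper method and the application name "SomeApp" are made up for illustration, and readScope is presumably invoked once per scope by DatasetMetadataStorage.

// A minimal usage sketch, not part of the CDAP sources. MetadataStorage.read may throw
// IOException, hence the throws clause.
private void readExamples(MetadataStorage metadataStorage) throws IOException {
  MetadataEntity entity = NamespaceId.DEFAULT.app("SomeApp").toMetadataEntity();  // hypothetical app
  // no scope or kind restriction: readScope takes the MetadataKind.ALL branch for each scope
  Metadata everything = metadataStorage.read(new Read(entity));
  // restricted to one scope: for the other scope, readScope returns an empty Record right away
  Metadata systemOnly = metadataStorage.read(new Read(entity, MetadataScope.SYSTEM));
  // restricted to one scope and one kind: readScope takes the single-kind branch
  Metadata systemProperties = metadataStorage.read(new Read(entity, MetadataScope.SYSTEM, MetadataKind.PROPERTY));
  // a Read built with an explicit selection of ScopedNameOfKind entries (constructor not shown
  // in this excerpt) would take the selectionForScope branch instead
}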
Use of io.cdap.cdap.spi.metadata.Read in project cdap by caskdata.
The class MetadataSubscriberServiceTest, method testProfileMetadata.
@Test
public void testProfileMetadata() throws Exception {
  Injector injector = getInjector();
  ApplicationSpecification appSpec = Specifications.from(new AppWithWorkflow());
  ApplicationId appId = NamespaceId.DEFAULT.app(appSpec.getName());
  ProgramId workflowId = appId.workflow("SampleWorkflow");
  ScheduleId scheduleId = appId.schedule("tsched1");
  // publish a creation event for a schedule that will never exist;
  // this tests that such a message is eventually discarded.
  // note that for this test we configure a fast retry strategy and a small number of retries,
  // so this only adds a few seconds of delay
  publishBogusCreationEvent();
  // get the mds; it should have no properties since we haven't started the MetadataSubscriberService
  MetadataStorage mds = injector.getInstance(MetadataStorage.class);
  Assert.assertEquals(Collections.emptyMap(), mds.read(new Read(workflowId.toMetadataEntity())).getProperties());
  Assert.assertEquals(Collections.emptyMap(), mds.read(new Read(scheduleId.toMetadataEntity())).getProperties());
  // add an app with a workflow to the app meta store.
  // note: since we bypass the app-fabric when adding this app, no ENTITY_CREATION message
  // will be published for the app (that happens in the app lifecycle service). Therefore the
  // app must exist before the profile is assigned for the namespace, otherwise the app's
  // programs will not receive the profile metadata.
  Store store = injector.getInstance(DefaultStore.class);
  store.addApplication(appId, appSpec);
  // set the default namespace to use the profile; since the MetadataSubscriberService is not
  // started yet, this should not affect the mds
  PreferencesService preferencesService = injector.getInstance(PreferencesService.class);
  preferencesService.setProperties(NamespaceId.DEFAULT, Collections.singletonMap(SystemArguments.PROFILE_NAME, ProfileId.NATIVE.getScopedName()));
  // add a schedule to the schedule store
  ProgramScheduleService scheduleService = injector.getInstance(ProgramScheduleService.class);
  scheduleService.add(new ProgramSchedule("tsched1", "one time schedule", workflowId, Collections.emptyMap(), new TimeTrigger("* * ? * 1"), ImmutableList.of()));
  // add a new profile in the default namespace
  ProfileService profileService = injector.getInstance(ProfileService.class);
  ProfileId myProfile = new ProfileId(NamespaceId.DEFAULT.getNamespace(), "MyProfile");
  Profile profile1 = new Profile("MyProfile", Profile.NATIVE.getLabel(), Profile.NATIVE.getDescription(), Profile.NATIVE.getScope(), Profile.NATIVE.getProvisioner());
  profileService.saveProfile(myProfile, profile1);
  // add a second profile in the default namespace
  ProfileId myProfile2 = new ProfileId(NamespaceId.DEFAULT.getNamespace(), "MyProfile2");
  Profile profile2 = new Profile("MyProfile2", Profile.NATIVE.getLabel(), Profile.NATIVE.getDescription(), Profile.NATIVE.getScope(), Profile.NATIVE.getProvisioner());
  profileService.saveProfile(myProfile2, profile2);
  try {
    // Verify the workflow profile metadata is updated to the default profile
    Tasks.waitFor(ProfileId.NATIVE.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Verify the schedule profile metadata is updated to the default profile
    Tasks.waitFor(ProfileId.NATIVE.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // set the default namespace to use MyProfile
    preferencesService.setProperties(NamespaceId.DEFAULT, Collections.singletonMap(SystemArguments.PROFILE_NAME, "USER:MyProfile"));
    // Verify the workflow profile metadata is updated to MyProfile
    Tasks.waitFor(myProfile.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Verify the schedule profile metadata is updated to MyProfile
    Tasks.waitFor(myProfile.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // set the app level to use MyProfile2
    preferencesService.setProperties(appId, Collections.singletonMap(SystemArguments.PROFILE_NAME, "USER:MyProfile2"));
    // set the instance level to the system profile
    preferencesService.setProperties(Collections.singletonMap(SystemArguments.PROFILE_NAME, ProfileId.NATIVE.getScopedName()));
    // Verify the workflow profile metadata is updated to MyProfile2, which is set at the app level
    Tasks.waitFor(myProfile2.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Verify the schedule profile metadata is updated to MyProfile2, which is set at the app level
    Tasks.waitFor(myProfile2.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // removing the preferences at the instance level should not affect the metadata
    preferencesService.deleteProperties();
    // Verify the workflow profile metadata remains MyProfile2
    Tasks.waitFor(myProfile2.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Verify the schedule profile metadata remains MyProfile2
    Tasks.waitFor(myProfile2.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // removing the app level preference should let the programs/schedules use the namespace level preference
    preferencesService.deleteProperties(appId);
    // Verify the workflow profile metadata is updated to MyProfile
    Tasks.waitFor(myProfile.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Verify the schedule profile metadata is updated to MyProfile
    Tasks.waitFor(myProfile.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // remove the namespace level preference so that no preference is left
    preferencesService.deleteProperties(NamespaceId.DEFAULT);
    // Verify the workflow profile metadata falls back to the default profile
    Tasks.waitFor(ProfileId.NATIVE.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Verify the schedule profile metadata falls back to the default profile
    Tasks.waitFor(ProfileId.NATIVE.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  } finally {
    // stop and clean up the store
    preferencesService.deleteProperties(NamespaceId.DEFAULT);
    preferencesService.deleteProperties();
    preferencesService.deleteProperties(appId);
    store.removeAll(NamespaceId.DEFAULT);
    scheduleService.delete(scheduleId);
    profileService.disableProfile(myProfile);
    profileService.disableProfile(myProfile2);
    profileService.deleteAllProfiles(myProfile.getNamespaceId());
    mds.apply(new MetadataMutation.Drop(workflowId.toMetadataEntity()), MutationOptions.DEFAULT);
    mds.apply(new MetadataMutation.Drop(scheduleId.toMetadataEntity()), MutationOptions.DEFAULT);
  }
}
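
The getProfileProperty helper used throughout this test is not shown in the excerpt. Below is a minimal sketch consistent with how it is called, assuming it simply reads the SYSTEM-scope "profile" property that the subscriber writes (the parameter type and the throws clause are assumptions):

// Sketch of the helper, not the actual test code.
private String getProfileProperty(MetadataStorage mds, EntityId entityId) throws IOException {
  // read all metadata for the entity and pull out the SYSTEM-scope "profile" property
  return mds.read(new Read(entityId.toMetadataEntity())).getProperties(MetadataScope.SYSTEM).get("profile");
}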
Use of io.cdap.cdap.spi.metadata.Read in project cdap by caskdata.
The class MetadataSubscriberServiceTest, method testAppDeletionMessage.
@Test
public void testAppDeletionMessage() throws Exception {
  Injector injector = getInjector();
  // create the admin event publisher
  CConfiguration cConf = injector.getInstance(CConfiguration.class);
  MessagingService messagingService = injector.getInstance(MessagingService.class);
  MultiThreadMessagingContext messagingContext = new MultiThreadMessagingContext(messagingService);
  AdminEventPublisher publisher = new AdminEventPublisher(cConf, messagingContext);
  // get the mds and put some workflow metadata into it; publishing the app deletion message
  // should cause that metadata to be deleted
  MetadataStorage mds = injector.getInstance(MetadataStorage.class);
  // use an app with all program types so that every specification is exercised
  ApplicationId appId = NamespaceId.DEFAULT.app(AllProgramsApp.NAME);
  ProgramId workflowId = appId.workflow(AllProgramsApp.NoOpWorkflow.NAME);
  // generate an app spec from the application
  ApplicationSpecification appSpec = Specifications.from(new AllProgramsApp());
  // need to put metadata on the workflow since we currently only set or delete workflow metadata
  mds.apply(new MetadataMutation.Update(workflowId.toMetadataEntity(), new Metadata(MetadataScope.SYSTEM, Collections.singletonMap("profile", ProfileId.NATIVE.getScopedName()))), MutationOptions.DEFAULT);
  Assert.assertEquals(ProfileId.NATIVE.getScopedName(), getProfileProperty(mds, workflowId));
  // publish the app deletion message
  publisher.publishAppDeletion(appId, appSpec);
  // Verify the workflow profile metadata is removed as a result of the published app deletion message
  Tasks.waitFor(true, () -> mds.read(new Read(workflowId.toMetadataEntity())).isEmpty(), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
}
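
For comparison, the end state the waitFor above checks for is the same observable state that a direct Drop mutation against the workflow entity would produce, as used in the cleanup of the previous test. This is only a sketch of the expected outcome, not the MetadataSubscriberService's actual code path:

// Sketch only: dropping the workflow entity's metadata directly leaves the store empty
// for that entity, which is exactly what the test waits for after the deletion message.
mds.apply(new MetadataMutation.Drop(workflowId.toMetadataEntity()), MutationOptions.DEFAULT);
Assert.assertTrue(mds.read(new Read(workflowId.toMetadataEntity())).isEmpty());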
Use of io.cdap.cdap.spi.metadata.Read in project cdap by caskdata.
The class ApplicationLifecycleServiceTest, method testCapabilityMetaDataDeletion.
@Test
public void testCapabilityMetaDataDeletion() throws Exception {
  Class<CapabilityAppWithWorkflow> appWithWorkflowClass = CapabilityAppWithWorkflow.class;
  Requirements declaredAnnotation = appWithWorkflowClass.getDeclaredAnnotation(Requirements.class);
  Set<String> expected = Arrays.stream(declaredAnnotation.capabilities()).collect(Collectors.toSet());
  Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, appWithWorkflowClass.getSimpleName(), "1.0.0-SNAPSHOT");
  Location appJar = AppJarHelper.createDeploymentJar(locationFactory, appWithWorkflowClass);
  File appJarFile = new File(tmpFolder.newFolder(), String.format("%s-%s.jar", artifactId.getName(), artifactId.getVersion().getVersion()));
  Locations.linkOrCopyOverwrite(appJar, appJarFile);
  appJar.delete();
  // deploy the app; this should fail because the required capabilities are not yet enabled
  try {
    applicationLifecycleService.deployAppAndArtifact(NamespaceId.DEFAULT, appWithWorkflowClass.getSimpleName(), artifactId, appJarFile, null, null, programId -> { }, true);
    Assert.fail("Expecting exception");
  } catch (CapabilityNotAvailableException ex) {
    // expected
  }
  // enable all required capabilities and deploy again
  for (String capability : declaredAnnotation.capabilities()) {
    CapabilityConfig capabilityConfig = new CapabilityConfig("Test", CapabilityStatus.ENABLED, capability, Collections.emptyList(), Collections.emptyList(), Collections.emptyList());
    capabilityWriter.addOrUpdateCapability(capability, CapabilityStatus.ENABLED, capabilityConfig);
  }
  applicationLifecycleService.deployAppAndArtifact(NamespaceId.DEFAULT, appWithWorkflowClass.getSimpleName(), artifactId, appJarFile, null, null, programId -> { }, true);
  // Check for the capability metadata
  ApplicationId appId = NamespaceId.DEFAULT.app(appWithWorkflowClass.getSimpleName());
  MetadataEntity appMetadataId = appId.toMetadataEntity();
  Assert.assertFalse(metadataStorage.read(new Read(appMetadataId, MetadataScope.SYSTEM, MetadataKind.PROPERTY)).isEmpty());
  Map<String, String> metadataProperties = metadataStorage.read(new Read(appMetadataId)).getProperties(MetadataScope.SYSTEM);
  String capabilityMetaData = metadataProperties.get(AppSystemMetadataWriter.CAPABILITY_TAG);
  Set<String> actual = Arrays.stream(capabilityMetaData.split(AppSystemMetadataWriter.CAPABILITY_DELIMITER)).collect(Collectors.toSet());
  Assert.assertEquals(expected, actual);
  // Remove the application and verify that all metadata is removed
  applicationLifecycleService.removeApplication(appId);
  Assert.assertTrue(metadataStorage.read(new Read(appMetadataId)).isEmpty());
}
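
The split in the assertion above implies that all capabilities are stored in a single system property, joined with AppSystemMetadataWriter.CAPABILITY_DELIMITER. The small sketch below shows that presumed round trip; the join on the writer side is an assumption, and the capability names are hypothetical. Only the constants and the split come from the test itself:

// Presumed storage format (assumption): capabilities joined into one property value on write...
Set<String> capabilities = new HashSet<>(Arrays.asList("cdc", "healthcare"));  // hypothetical names
String stored = String.join(AppSystemMetadataWriter.CAPABILITY_DELIMITER, capabilities);
// ...and split back into a set when verifying, exactly as the test does
Set<String> roundTripped = Arrays.stream(stored.split(AppSystemMetadataWriter.CAPABILITY_DELIMITER)).collect(Collectors.toSet());
Assert.assertEquals(capabilities, roundTripped);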
Use of io.cdap.cdap.spi.metadata.Read in project cdap by caskdata.
The class ApplicationLifecycleServiceTest, method testMetadataEmitInConfigure.
@Test
public void testMetadataEmitInConfigure() throws Exception {
  deploy(MetadataEmitApp.class, HttpResponseStatus.OK.code(), Constants.Gateway.API_VERSION_3_TOKEN, NamespaceId.DEFAULT.getNamespace());
  ApplicationId appId = NamespaceId.DEFAULT.app(MetadataEmitApp.NAME);
  // check that the app's user metadata gets emitted correctly
  Metadata userMetadata = metadataStorage.read(new Read(appId.toMetadataEntity(), MetadataScope.USER));
  Assert.assertEquals(MetadataEmitApp.USER_METADATA.getProperties(), userMetadata.getProperties(MetadataScope.USER));
  Assert.assertEquals(MetadataEmitApp.USER_METADATA.getTags(), userMetadata.getTags(MetadataScope.USER));
  Metadata systemMetadata = metadataStorage.read(new Read(appId.toMetadataEntity(), MetadataScope.SYSTEM));
  // the system properties contain what was emitted by the app plus what is emitted by the platform,
  // so we only compare the properties emitted by the app
  Map<String, String> sysProperties = systemMetadata.getProperties(MetadataScope.SYSTEM);
  MetadataEmitApp.SYS_METADATA.getProperties().forEach((key, val) -> Assert.assertEquals(val, sysProperties.get(key)));
  // check that the tags contain all the tags emitted by the app
  Assert.assertTrue(systemMetadata.getTags(MetadataScope.SYSTEM).containsAll(MetadataEmitApp.SYS_METADATA.getTags()));
  applicationLifecycleService.removeApplication(appId);
  Assert.assertTrue(metadataStorage.read(new Read(appId.toMetadataEntity())).isEmpty());
}