Use of io.cdap.cdap.app.store.Store in project cdap by cdapio — from class MetadataSubscriberServiceTest, method testProfileMetadata.
// Verifies that the MetadataSubscriberService keeps workflow/schedule profile metadata in
// sync with preferences set at instance, namespace, and application level, and that the
// preference precedence observed here is app > namespace > instance.
@Test
public void testProfileMetadata() throws Exception {
Injector injector = getInjector();
// build an app spec containing a workflow; its programs will carry the profile metadata
ApplicationSpecification appSpec = Specifications.from(new AppWithWorkflow());
ApplicationId appId = NamespaceId.DEFAULT.app(appSpec.getName());
ProgramId workflowId = appId.workflow("SampleWorkflow");
ScheduleId scheduleId = appId.schedule("tsched1");
// publish a creation of a schedule that will never exist
// this tests that such a message is eventually discarded
// note that for this test, we configure a fast retry strategy and a small number of retries
// therefore this will cost only a few seconds delay
publishBogusCreationEvent();
// the mds should have empty properties since we haven't started the MetadataSubscriberService
MetadataStorage mds = injector.getInstance(MetadataStorage.class);
Assert.assertEquals(Collections.emptyMap(), mds.read(new Read(workflowId.toMetadataEntity())).getProperties());
Assert.assertEquals(Collections.emptyMap(), mds.read(new Read(scheduleId.toMetadataEntity())).getProperties());
// add an app with a workflow to the app meta store
// note: since we bypass the app-fabric when adding this app, no ENTITY_CREATION message
// will be published for the app (it happens in app lifecycle service). Therefore this
// app must exist before assigning the profile for the namespace, otherwise the app's
// programs will not receive the profile metadata.
Store store = injector.getInstance(DefaultStore.class);
store.addApplication(appId, appSpec);
// set the default namespace to use the native profile; since MetadataSubscriberService is
// not started yet, this should not affect the mds
PreferencesService preferencesService = injector.getInstance(PreferencesService.class);
preferencesService.setProperties(NamespaceId.DEFAULT, Collections.singletonMap(SystemArguments.PROFILE_NAME, ProfileId.NATIVE.getScopedName()));
// add a schedule to the schedule store so schedule-level profile metadata can be tracked
ProgramScheduleService scheduleService = injector.getInstance(ProgramScheduleService.class);
scheduleService.add(new ProgramSchedule("tsched1", "one time schedule", workflowId, Collections.emptyMap(), new TimeTrigger("* * ? * 1"), ImmutableList.of()));
// add a new USER-scoped profile in the default namespace (same settings as the native one)
ProfileService profileService = injector.getInstance(ProfileService.class);
ProfileId myProfile = new ProfileId(NamespaceId.DEFAULT.getNamespace(), "MyProfile");
Profile profile1 = new Profile("MyProfile", Profile.NATIVE.getLabel(), Profile.NATIVE.getDescription(), Profile.NATIVE.getScope(), Profile.NATIVE.getProvisioner());
profileService.saveProfile(myProfile, profile1);
// add a second profile in the default namespace, used below to test app-level preferences
ProfileId myProfile2 = new ProfileId(NamespaceId.DEFAULT.getNamespace(), "MyProfile2");
Profile profile2 = new Profile("MyProfile2", Profile.NATIVE.getLabel(), Profile.NATIVE.getDescription(), Profile.NATIVE.getScope(), Profile.NATIVE.getProvisioner());
profileService.saveProfile(myProfile2, profile2);
try {
// Verify the workflow profile metadata is updated to the default (native) profile
Tasks.waitFor(ProfileId.NATIVE.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// Verify the schedule profile metadata is updated to the default (native) profile
Tasks.waitFor(ProfileId.NATIVE.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// set the default namespace to use MyProfile
preferencesService.setProperties(NamespaceId.DEFAULT, Collections.singletonMap(SystemArguments.PROFILE_NAME, "USER:MyProfile"));
// Verify the workflow profile metadata is updated to MyProfile
Tasks.waitFor(myProfile.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// Verify the schedule profile metadata is updated to MyProfile
Tasks.waitFor(myProfile.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// set app level to use MyProfile2
preferencesService.setProperties(appId, Collections.singletonMap(SystemArguments.PROFILE_NAME, "USER:MyProfile2"));
// set instance level to the system (native) profile
preferencesService.setProperties(Collections.singletonMap(SystemArguments.PROFILE_NAME, ProfileId.NATIVE.getScopedName()));
// Verify the workflow profile metadata is updated to MyProfile2, the app-level setting,
// which takes precedence over both namespace and instance levels
Tasks.waitFor(myProfile2.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// Verify the schedule profile metadata is updated to MyProfile2 which is at app level
Tasks.waitFor(myProfile2.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// remove the preferences at instance level; should not affect the metadata
// Verify the workflow profile metadata remains MyProfile2 (app level still set)
preferencesService.deleteProperties();
Tasks.waitFor(myProfile2.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// Verify the schedule profile metadata remains MyProfile2 (app level still set)
Tasks.waitFor(myProfile2.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// removing the app-level pref should let the programs/schedules fall back to the ns-level pref
preferencesService.deleteProperties(appId);
// Verify the workflow profile metadata is updated to MyProfile
Tasks.waitFor(myProfile.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// Verify the schedule profile metadata is updated to MyProfile
Tasks.waitFor(myProfile.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// remove the ns-level pref so no pref remains at any level
preferencesService.deleteProperties(NamespaceId.DEFAULT);
// Verify the workflow profile metadata falls back to the default (native) profile
Tasks.waitFor(ProfileId.NATIVE.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// Verify the schedule profile metadata falls back to the default (native) profile
Tasks.waitFor(ProfileId.NATIVE.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
} finally {
// clean up all preferences, apps, schedules, profiles, and metadata created by this test
// so subsequent tests start from a clean state
preferencesService.deleteProperties(NamespaceId.DEFAULT);
preferencesService.deleteProperties();
preferencesService.deleteProperties(appId);
store.removeAll(NamespaceId.DEFAULT);
scheduleService.delete(scheduleId);
profileService.disableProfile(myProfile);
profileService.disableProfile(myProfile2);
profileService.deleteAllProfiles(myProfile.getNamespaceId());
mds.apply(new MetadataMutation.Drop(workflowId.toMetadataEntity()), MutationOptions.DEFAULT);
mds.apply(new MetadataMutation.Drop(scheduleId.toMetadataEntity()), MutationOptions.DEFAULT);
}
}
Use of io.cdap.cdap.app.store.Store in project cdap by caskdata — from class TwillRunnableTest, method testDatasetOpExecutorTwillRunnableInjector.
// Smoke test: the injector created for the dataset op executor Twill runnable must be able
// to resolve the core bindings the runnable depends on.
@Test
public void testDatasetOpExecutorTwillRunnableInjector() {
  CConfiguration cConf = CConfiguration.create();
  Injector injector =
      DatasetOpExecutorServerTwillRunnable.createInjector(cConf, HBaseConfiguration.create(), "");
  // Resolving these bindings would throw if any module wiring were missing.
  Assert.assertNotNull(injector.getInstance(Store.class));
  Assert.assertNotNull(injector.getInstance(NamespaceQueryAdmin.class));
}
Use of io.cdap.cdap.app.store.Store in project cdap by caskdata — from class NoSqlDefaultStoreTest, method getDescOrderUnsupportedStore.
/**
 * Builds a store set up so that any attempt to do a table scan with {@link SortOrder#DESC}
 * throws an {@link UnsupportedOperationException}; the store must then fall back to
 * re-sorting the results in memory. A small batch size (20) is used so that the in-memory
 * reordering is exercised across multiple batches.
 *
 * @return a {@link Store} whose descending scans are unsupported by the underlying table
 * @throws TransactionException if obtaining the transaction runner fails
 */
private Store getDescOrderUnsupportedStore() throws TransactionException {
  return new DefaultStore(injector.getInstance(TransactionRunner.class), 20);
}
Use of io.cdap.cdap.app.store.Store in project cdap by caskdata — from class MetricsSuiteTestBase, method beforeClass.
// One-time suite setup: builds the configuration, starts the metrics service, and captures
// the shared service instances in static fields. Nested invocations (suites that embed this
// base class) only bump the counter so teardown can be performed symmetrically.
@BeforeClass
public static void beforeClass() throws Exception {
  if (nestedStartCount++ > 0) {
    return;
  }
  // Assemble the test configuration before handing it to the metrics service.
  CConfiguration cConfig = CConfiguration.create();
  cConfig.set(Constants.CFG_LOCAL_DATA_DIR, TEMP_FOLDER.newFolder().getAbsolutePath());
  cConfig.set(Constants.Metrics.ADDRESS, InetAddress.getLoopbackAddress().getHostAddress());
  cConfig.set(Constants.Metrics.CLUSTER_NAME, CLUSTER);
  cConfig.setBoolean(Constants.Dangerous.UNRECOVERABLE_RESET, true);
  cConfig.setBoolean(Constants.Metrics.CONFIG_AUTHENTICATION_REQUIRED, true);
  conf = cConfig;
  // Start the service and expose the shared instances to the rest of the suite.
  Injector injector = startMetricsService(conf);
  store = injector.getInstance(Store.class);
  locationFactory = injector.getInstance(LocationFactory.class);
  metricStore = injector.getInstance(MetricStore.class);
}
Use of io.cdap.cdap.app.store.Store in project cdap by caskdata — from class LineageAdmin, method computeWorkflowInnerPrograms.
/**
 * Computes the inner programs and program runs based on the given program relations and adds
 * them to the supplied collections.
 *
 * @param toVisitPrograms collection that the next-to-visit programs are added to
 * @param programWorkflowMap map from an inner program run id to its enclosing workflow run id
 * @param programRelations the program relations of the dataset
 */
private void computeWorkflowInnerPrograms(Set<ProgramId> toVisitPrograms,
                                          Map<ProgramRunId, ProgramRunId> programWorkflowMap,
                                          Set<Relation> programRelations) {
  // Step 1: walk the relations and keep only MapReduce and Spark runs, the program types
  // that can run inside a workflow; cache each program's parent app spec so the workflow
  // membership can be determined later.
  Map<ApplicationId, ApplicationSpecification> appSpecs = new HashMap<>();
  Set<ProgramRunId> candidateInnerRuns = new HashSet<>();
  for (Relation relation : programRelations) {
    ProgramType programType = relation.getProgram().getType();
    if (programType.equals(ProgramType.MAPREDUCE) || programType.equals(ProgramType.SPARK)) {
      candidateInnerRuns.add(relation.getProgramRunId());
      appSpecs.computeIfAbsent(relation.getProgram().getParent(), store::getApplication);
    }
  }
  // Step 2: a run launched by a workflow carries the workflow run id in its system
  // arguments; map each such inner run to its workflow run and remember the workflow runs.
  Set<ProgramRunId> workflowRunIds = new HashSet<>();
  for (Map.Entry<ProgramRunId, RunRecordDetail> entry : store.getRuns(candidateInnerRuns).entrySet()) {
    RunRecordDetail runRecord = entry.getValue();
    if (runRecord == null) {
      continue;
    }
    if (runRecord.getSystemArgs().containsKey(ProgramOptionConstants.WORKFLOW_RUN_ID)) {
      ProgramRunId workflowRunId = extractWorkflowRunId(entry.getKey(), runRecord);
      programWorkflowMap.put(entry.getKey(), workflowRunId);
      workflowRunIds.add(workflowRunId);
    }
  }
  // Step 3: fetch the run records of the workflows themselves; their properties list all
  // inner program run ids, which are compared against the app specs to resolve each
  // program's type.
  for (Map.Entry<ProgramRunId, RunRecordDetail> entry : store.getRuns(workflowRunIds).entrySet()) {
    RunRecordDetail workflowRecord = entry.getValue();
    if (workflowRecord != null) {
      extractAndAddInnerPrograms(toVisitPrograms, programWorkflowMap, appSpecs, entry.getKey(), workflowRecord);
    }
  }
}
Aggregations