Use of co.cask.cdap.api.artifact.ArtifactSummary in project cdap by caskdata.
The class ArtifactHttpHandlerTest, method testSystemArtifactIsolation:
/**
 * Tests that system artifacts can be accessed in a user namespace only if appropriately scoped.
 */
@Test
public void testSystemArtifactIsolation() throws Exception {
  // add the app in the default namespace
  ArtifactId defaultId = NamespaceId.DEFAULT.artifact("wordcount", "1.0.0");
  // add system artifact wordcount
  addSystemArtifacts();
  // test get /artifacts
  Set<ArtifactSummary> expectedArtifacts =
    Sets.newHashSet(new ArtifactSummary("wordcount", "1.0.0", ArtifactScope.SYSTEM));
  Set<ArtifactSummary> actualArtifacts = getArtifacts(NamespaceId.DEFAULT);
  Assert.assertEquals(expectedArtifacts, actualArtifacts);
  // test get /artifacts?scope=system
  expectedArtifacts = Sets.newHashSet(new ArtifactSummary("wordcount", "1.0.0", ArtifactScope.SYSTEM));
  actualArtifacts = getArtifacts(NamespaceId.DEFAULT, ArtifactScope.SYSTEM);
  Assert.assertEquals(expectedArtifacts, actualArtifacts);
  // test get /artifacts?scope=user
  expectedArtifacts = Sets.newHashSet();
  actualArtifacts = getArtifacts(NamespaceId.DEFAULT, ArtifactScope.USER);
  Assert.assertEquals(expectedArtifacts, actualArtifacts);
  // test get /artifacts/wordcount?scope=user
  actualArtifacts = getArtifacts(NamespaceId.DEFAULT, "wordcount", ArtifactScope.USER);
  Assert.assertNull(actualArtifacts);
  // test get /artifacts/wordcount?scope=system
  expectedArtifacts = Sets.newHashSet(new ArtifactSummary("wordcount", "1.0.0", ArtifactScope.SYSTEM));
  actualArtifacts = getArtifacts(NamespaceId.DEFAULT, "wordcount", ArtifactScope.SYSTEM);
  Assert.assertEquals(expectedArtifacts, actualArtifacts);
  // test get /artifacts/wordcount/versions/1.0.0?scope=user
  ArtifactInfo actualInfo = getArtifact(defaultId, ArtifactScope.USER);
  Assert.assertNull(actualInfo);
  // test get /artifacts/wordcount/versions/1.0.0?scope=system
  actualInfo = getArtifact(defaultId, ArtifactScope.SYSTEM);
  Assert.assertEquals("wordcount", actualInfo.getName());
  Assert.assertEquals("1.0.0", actualInfo.getVersion());
  // test delete /default/artifacts/wordcount/versions/1.0.0
  deleteArtifact(Id.Artifact.fromEntityId(defaultId), 404);
  cleanupSystemArtifactsDirectory();
}
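A complementary sketch, not part of the original test: if a user-scoped artifact were also added to the default namespace, the scope filters should separate it from the system artifact, while an unscoped listing should include both. The addAppArtifact helper and the MyApp application class are assumptions borrowed from the other tests on this page, not from ArtifactHttpHandlerTest itself.

// Hypothetical follow-up, assuming addAppArtifact is available here and registers a USER-scoped artifact
Id.Artifact userArtifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "myapp", "1.0.0");
addAppArtifact(userArtifactId, MyApp.class); // MyApp is a placeholder application class
// scope=user should now return only the user artifact
Set<ArtifactSummary> userArtifacts = getArtifacts(NamespaceId.DEFAULT, ArtifactScope.USER);
Assert.assertEquals(Sets.newHashSet(new ArtifactSummary("myapp", "1.0.0", ArtifactScope.USER)), userArtifacts);
// an unscoped listing should contain both the system and the user artifact
Set<ArtifactSummary> allArtifacts = getArtifacts(NamespaceId.DEFAULT);
Assert.assertTrue(allArtifacts.contains(new ArtifactSummary("wordcount", "1.0.0", ArtifactScope.SYSTEM)));
Assert.assertTrue(allArtifacts.contains(new ArtifactSummary("myapp", "1.0.0", ArtifactScope.USER)));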
Use of co.cask.cdap.api.artifact.ArtifactSummary in project cdap by caskdata.
The class CoreSchedulerServiceTest, method testRunScheduledJobs:
@Test
@Category(XSlowTests.class)
public void testRunScheduledJobs() throws Exception {
  CConfiguration cConf = getInjector().getInstance(CConfiguration.class);
  dataEventTopic = NamespaceId.SYSTEM.topic(cConf.get(Constants.Dataset.DATA_EVENT_TOPIC));
  // Deploy the app with version
  Id.Artifact appArtifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "appwithschedules", VERSION1);
  addAppArtifact(appArtifactId, AppWithFrequentScheduledWorkflows.class);
  AppRequest<? extends Config> appRequest =
    new AppRequest<>(new ArtifactSummary(appArtifactId.getName(), appArtifactId.getVersion().getVersion()));
  deploy(APP_ID, appRequest);
  // Resume the schedules because schedules are initialized as paused
  enableSchedule(AppWithFrequentScheduledWorkflows.TEN_SECOND_SCHEDULE_1);
  enableSchedule(AppWithFrequentScheduledWorkflows.TEN_SECOND_SCHEDULE_2);
  enableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_1);
  enableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_2);
  for (int i = 0; i < 5; i++) {
    testNewPartition(i + 1);
  }
  // Enable COMPOSITE_SCHEDULE before publishing events to DATASET_NAME2
  enableSchedule(AppWithFrequentScheduledWorkflows.COMPOSITE_SCHEDULE);
  // Disable the two partition schedules, then send them notifications (they should not trigger)
  int runs1 = getRuns(WORKFLOW_1, ProgramRunStatus.ALL);
  int runs2 = getRuns(WORKFLOW_2, ProgramRunStatus.ALL);
  disableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_1);
  disableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_2);
  publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME1);
  long minPublishTime = System.currentTimeMillis();
  publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME2);
  // Make sure the subscriber has processed the data event
  waitUntilProcessed(dataEventTopic, minPublishTime);
  // Both workflows must run at least once.
  // If the testNewPartition() loop took longer than expected, it may be more (quartz fired multiple times)
  Tasks.waitFor(true, new Callable<Boolean>() {
    @Override
    public Boolean call() throws Exception {
      return getRuns(SCHEDULED_WORKFLOW_1, ProgramRunStatus.COMPLETED) > 0
        && getRuns(SCHEDULED_WORKFLOW_2, ProgramRunStatus.COMPLETED) > 0;
    }
  }, 10, TimeUnit.SECONDS);
  // There shouldn't be any partition trigger in the job queue
  Assert.assertFalse(Iterables.any(getAllJobs(), new Predicate<Job>() {
    @Override
    public boolean apply(Job job) {
      return job.getSchedule().getTrigger() instanceof ProtoTrigger.PartitionTrigger;
    }
  }));
  ProgramId compositeWorkflow = APP_ID.workflow(AppWithFrequentScheduledWorkflows.COMPOSITE_WORKFLOW);
  // The workflow scheduled with the composite trigger has never been started
  Assert.assertEquals(0, getRuns(compositeWorkflow, ProgramRunStatus.ALL));
  // Publish two more new partition notifications to satisfy the partition trigger in the composite trigger,
  // and thus the whole composite trigger will be satisfied
  publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME2);
  minPublishTime = System.currentTimeMillis();
  publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME2);
  // Make sure the subscriber has processed the data event
  waitUntilProcessed(dataEventTopic, minPublishTime);
  // Wait for 1 run to complete for compositeWorkflow
  waitForCompleteRuns(1, compositeWorkflow);
  for (RunRecordMeta runRecordMeta :
    store.getRuns(SCHEDULED_WORKFLOW_1, ProgramRunStatus.ALL, 0, Long.MAX_VALUE, Integer.MAX_VALUE).values()) {
    Map<String, String> sysArgs = runRecordMeta.getSystemArgs();
    Assert.assertNotNull(sysArgs);
    TriggeringScheduleInfo scheduleInfo =
      GSON.fromJson(sysArgs.get(ProgramOptionConstants.TRIGGERING_SCHEDULE_INFO), TriggeringScheduleInfo.class);
    Assert.assertEquals(AppWithFrequentScheduledWorkflows.TEN_SECOND_SCHEDULE_1, scheduleInfo.getName());
    List<TriggerInfo> triggerInfos = scheduleInfo.getTriggerInfos();
    // Only one notification is enough to satisfy the time trigger
    Assert.assertEquals(1, triggerInfos.size());
    Assert.assertEquals(TriggerInfo.Type.TIME, triggerInfos.get(0).getType());
  }
  // Also verify that the two partition schedules did not trigger
  Assert.assertEquals(runs1, getRuns(WORKFLOW_1, ProgramRunStatus.ALL));
  Assert.assertEquals(runs2, getRuns(WORKFLOW_2, ProgramRunStatus.ALL));
  // Re-enable partition schedule 2
  enableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_2);
  testScheduleUpdate("disable");
  testScheduleUpdate("update");
  testScheduleUpdate("delete");
}
Use of co.cask.cdap.api.artifact.ArtifactSummary in project cdap by caskdata.
The class ProgramLifecycleHttpHandlerTest, method testHistory:
private void testHistory(Class<?> app, Id.Program program) throws Exception {
  String namespace = program.getNamespaceId();
  try {
    deploy(app, Constants.Gateway.API_VERSION_3_TOKEN, namespace);
    verifyProgramHistory(program.toEntityId());
  } catch (Exception e) {
    LOG.error("Got exception: ", e);
  } finally {
    deleteApp(program.getApplication(), 200);
  }
  ApplicationId appId = new ApplicationId(namespace, program.getApplicationId(), VERSION1);
  ProgramId programId = appId.program(program.getType(), program.getId());
  try {
    Id.Artifact artifactId = Id.Artifact.from(program.getNamespace(), app.getSimpleName(), "1.0.0");
    addAppArtifact(artifactId, app);
    AppRequest<Config> request =
      new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()), null);
    Assert.assertEquals(200, deploy(appId, request).getStatusLine().getStatusCode());
    verifyProgramHistory(programId);
  } catch (Exception e) {
    LOG.error("Got exception: ", e);
  } finally {
    deleteApp(appId, 200);
  }
}
Use of co.cask.cdap.api.artifact.ArtifactSummary in project cdap by caskdata.
The class ProgramLifecycleHttpHandlerTest, method testUpdateSchedulesFlag:
@Test
public void testUpdateSchedulesFlag() throws Exception {
  // deploy an app with a schedule
  AppWithSchedule.AppConfig config = new AppWithSchedule.AppConfig(true, true, true);
  Id.Artifact artifactId =
    Id.Artifact.from(Id.Namespace.fromEntityId(TEST_NAMESPACE_META2.getNamespaceId()), AppWithSchedule.NAME, VERSION1);
  addAppArtifact(artifactId, AppWithSchedule.class);
  AppRequest<? extends Config> request =
    new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()),
                     config, null, null, false);
  ApplicationId defaultAppId = TEST_NAMESPACE_META2.getNamespaceId().app(AppWithSchedule.NAME);
  Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
  List<ScheduleDetail> actualSchedules =
    listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(), defaultAppId.getApplication(),
                  defaultAppId.getVersion());
  // none of the schedules will be added as we have set update-schedules to false
  Assert.assertEquals(0, actualSchedules.size());
  request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()),
                             config, null, null, true);
  Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
  actualSchedules = listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(),
                                  defaultAppId.getApplication(), defaultAppId.getVersion());
  Assert.assertEquals(2, actualSchedules.size());
  // with workflow, without schedule
  config = new AppWithSchedule.AppConfig(true, false, false);
  request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()),
                             config, null, null, false);
  Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
  // schedules should not be updated
  actualSchedules = listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(),
                                  defaultAppId.getApplication(), defaultAppId.getVersion());
  Assert.assertEquals(2, actualSchedules.size());
  // without workflow and schedule, schedules should be deleted
  config = new AppWithSchedule.AppConfig(false, false, false);
  request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()),
                             config, null, null, false);
  Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
  actualSchedules = listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(),
                                  defaultAppId.getApplication(), defaultAppId.getVersion());
  Assert.assertEquals(0, actualSchedules.size());
  // with workflow and one schedule, the schedule should be added
  config = new AppWithSchedule.AppConfig(true, true, false);
  request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()),
                             config, null, null, true);
  Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
  actualSchedules = listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(),
                                  defaultAppId.getApplication(), defaultAppId.getVersion());
  Assert.assertEquals(1, actualSchedules.size());
  Assert.assertEquals("SampleSchedule", actualSchedules.get(0).getName());
  // with workflow and two schedules, but update-schedules is false, so the 2nd schedule should not get added
  config = new AppWithSchedule.AppConfig(true, true, true);
  request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()),
                             config, null, null, false);
  Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
  actualSchedules = listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(),
                                  defaultAppId.getApplication(), defaultAppId.getVersion());
  Assert.assertEquals(1, actualSchedules.size());
  Assert.assertEquals("SampleSchedule", actualSchedules.get(0).getName());
  // same config, but the update-schedules flag is true now, so 2 schedules should be available
  request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()),
                             config, null, null, true);
  Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
  actualSchedules = listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(),
                                  defaultAppId.getApplication(), defaultAppId.getVersion());
  Assert.assertEquals(2, actualSchedules.size());
}
Use of co.cask.cdap.api.artifact.ArtifactSummary in project cdap by caskdata.
The class ProgramLifecycleHttpHandlerTest, method testSchedules:
@Test
public void testSchedules() throws Exception {
  // deploy an app with a schedule
  Id.Artifact artifactId =
    Id.Artifact.from(Id.Namespace.fromEntityId(TEST_NAMESPACE_META1.getNamespaceId()), AppWithSchedule.NAME, VERSION1);
  addAppArtifact(artifactId, AppWithSchedule.class);
  AppRequest<? extends Config> request =
    new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()));
  ApplicationId defaultAppId = TEST_NAMESPACE_META1.getNamespaceId().app(AppWithSchedule.NAME);
  Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
  // deploy another version of the app
  ApplicationId appV2Id = TEST_NAMESPACE_META1.getNamespaceId().app(AppWithSchedule.NAME, VERSION2);
  Assert.assertEquals(200, deploy(appV2Id, request).getStatusLine().getStatusCode());
  // list schedules for the default version of the app: the workflow's schedules and the app's schedules should match
  List<ScheduleDetail> schedules =
    getSchedules(TEST_NAMESPACE1, AppWithSchedule.NAME, AppWithSchedule.WORKFLOW_NAME);
  Assert.assertEquals(1, schedules.size());
  ScheduleDetail schedule = schedules.get(0);
  Assert.assertEquals(SchedulableProgramType.WORKFLOW, schedule.getProgram().getProgramType());
  Assert.assertEquals(AppWithSchedule.WORKFLOW_NAME, schedule.getProgram().getProgramName());
  Assert.assertEquals(new TimeTrigger("0/15 * * * * ?"), schedule.getTrigger());
  // listing schedules for the app should return the same single schedule
  List<ScheduleDetail> schedulesForApp = listSchedules(TEST_NAMESPACE1, AppWithSchedule.NAME, null);
  Assert.assertEquals(1, schedulesForApp.size());
  Assert.assertEquals(schedules, schedulesForApp);
  // the second version of the app has its own copy of the same schedule
  List<ScheduleDetail> schedules2 =
    getSchedules(TEST_NAMESPACE1, AppWithSchedule.NAME, VERSION2, AppWithSchedule.WORKFLOW_NAME);
  Assert.assertEquals(1, schedules2.size());
  ScheduleDetail schedule2 = schedules2.get(0);
  Assert.assertEquals(SchedulableProgramType.WORKFLOW, schedule2.getProgram().getProgramType());
  Assert.assertEquals(AppWithSchedule.WORKFLOW_NAME, schedule2.getProgram().getProgramName());
  Assert.assertEquals(new TimeTrigger("0/15 * * * * ?"), schedule2.getTrigger());
  String newSchedule = "newTimeSchedule";
  testAddSchedule(newSchedule);
  testDeleteSchedule(appV2Id, newSchedule);
  testUpdateSchedule(appV2Id);
}