
Example 56 with ArtifactSummary

Use of co.cask.cdap.api.artifact.ArtifactSummary in project cdap by caskdata.

From the class ArtifactHttpHandlerTest, method testSystemArtifactIsolation.

/**
 * Tests that system artifacts can be accessed in a user namespace only if appropriately scoped.
 */
@Test
public void testSystemArtifactIsolation() throws Exception {
    // id of the wordcount artifact as referenced from the default (user) namespace
    ArtifactId defaultId = NamespaceId.DEFAULT.artifact("wordcount", "1.0.0");
    // add system artifact wordcount
    addSystemArtifacts();
    // test get /artifacts
    Set<ArtifactSummary> expectedArtifacts = Sets.newHashSet(new ArtifactSummary("wordcount", "1.0.0", ArtifactScope.SYSTEM));
    Set<ArtifactSummary> actualArtifacts = getArtifacts(NamespaceId.DEFAULT);
    Assert.assertEquals(expectedArtifacts, actualArtifacts);
    // test get /artifacts?scope=system
    expectedArtifacts = Sets.newHashSet(new ArtifactSummary("wordcount", "1.0.0", ArtifactScope.SYSTEM));
    actualArtifacts = getArtifacts(NamespaceId.DEFAULT, ArtifactScope.SYSTEM);
    Assert.assertEquals(expectedArtifacts, actualArtifacts);
    // test get /artifacts?scope=user
    expectedArtifacts = Sets.newHashSet();
    actualArtifacts = getArtifacts(NamespaceId.DEFAULT, ArtifactScope.USER);
    Assert.assertEquals(expectedArtifacts, actualArtifacts);
    // test get /artifacts/wordcount?scope=user
    actualArtifacts = getArtifacts(NamespaceId.DEFAULT, "wordcount", ArtifactScope.USER);
    Assert.assertNull(actualArtifacts);
    // test get /artifacts/wordcount?scope=system
    expectedArtifacts = Sets.newHashSet(new ArtifactSummary("wordcount", "1.0.0", ArtifactScope.SYSTEM));
    actualArtifacts = getArtifacts(NamespaceId.DEFAULT, "wordcount", ArtifactScope.SYSTEM);
    Assert.assertEquals(expectedArtifacts, actualArtifacts);
    // test get /artifacts/wordcount/versions/1.0.0?scope=user
    ArtifactInfo actualInfo = getArtifact(defaultId, ArtifactScope.USER);
    Assert.assertNull(actualInfo);
    // test get /artifacts/wordcount/versions/1.0.0?scope=system
    actualInfo = getArtifact(defaultId, ArtifactScope.SYSTEM);
    Assert.assertEquals("wordcount", actualInfo.getName());
    Assert.assertEquals("1.0.0", actualInfo.getVersion());
    // test delete /default/artifacts/wordcount/versions/1.0.0 - the system artifact is not visible as a user artifact, so expect 404
    deleteArtifact(Id.Artifact.fromEntityId(defaultId), 404);
    cleanupSystemArtifactsDirectory();
}
Also used : ArtifactSummary(co.cask.cdap.api.artifact.ArtifactSummary) ArtifactId(co.cask.cdap.proto.id.ArtifactId) ArtifactInfo(co.cask.cdap.api.artifact.ArtifactInfo) Test(org.junit.Test)
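
The assertions above depend on ArtifactSummary treating the scope as part of its identity: "wordcount" 1.0.0 in the SYSTEM scope is not the same artifact as the same name and version in the USER scope. A minimal sketch of that distinction, assuming (as these examples suggest) a three-argument constructor and a two-argument constructor that defaults to the USER scope:

import co.cask.cdap.api.artifact.ArtifactScope;
import co.cask.cdap.api.artifact.ArtifactSummary;

public class ArtifactScopeSketch {
    public static void main(String[] args) {
        ArtifactSummary systemScoped = new ArtifactSummary("wordcount", "1.0.0", ArtifactScope.SYSTEM);
        // Assumption: the two-argument constructor defaults the scope to USER.
        ArtifactSummary userScoped = new ArtifactSummary("wordcount", "1.0.0");
        // Scope participates in equality, so the two summaries are distinct artifacts.
        System.out.println(systemScoped.equals(userScoped)); // expected: false
        System.out.println(systemScoped.getScope());         // SYSTEM
        System.out.println(userScoped.getScope());           // USER
    }
}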

Example 57 with ArtifactSummary

Use of co.cask.cdap.api.artifact.ArtifactSummary in project cdap by caskdata.

From the class CoreSchedulerServiceTest, method testRunScheduledJobs.

@Test
@Category(XSlowTests.class)
public void testRunScheduledJobs() throws Exception {
    CConfiguration cConf = getInjector().getInstance(CConfiguration.class);
    dataEventTopic = NamespaceId.SYSTEM.topic(cConf.get(Constants.Dataset.DATA_EVENT_TOPIC));
    // Deploy the app with version
    Id.Artifact appArtifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "appwithschedules", VERSION1);
    addAppArtifact(appArtifactId, AppWithFrequentScheduledWorkflows.class);
    AppRequest<? extends Config> appRequest = new AppRequest<>(new ArtifactSummary(appArtifactId.getName(), appArtifactId.getVersion().getVersion()));
    deploy(APP_ID, appRequest);
    // Resume the schedules because they are initialized as paused
    enableSchedule(AppWithFrequentScheduledWorkflows.TEN_SECOND_SCHEDULE_1);
    enableSchedule(AppWithFrequentScheduledWorkflows.TEN_SECOND_SCHEDULE_2);
    enableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_1);
    enableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_2);
    for (int i = 0; i < 5; i++) {
        testNewPartition(i + 1);
    }
    // Enable COMPOSITE_SCHEDULE before publishing events to DATASET_NAME2
    enableSchedule(AppWithFrequentScheduledWorkflows.COMPOSITE_SCHEDULE);
    // disable the two partition schedules, send them notifications (but they should not trigger)
    int runs1 = getRuns(WORKFLOW_1, ProgramRunStatus.ALL);
    int runs2 = getRuns(WORKFLOW_2, ProgramRunStatus.ALL);
    disableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_1);
    disableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_2);
    publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME1);
    long minPublishTime = System.currentTimeMillis();
    publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME2);
    // This would make sure the subscriber has processed the data event
    waitUntilProcessed(dataEventTopic, minPublishTime);
    // Both workflows must run at least once.
    // If the testNewPartition() loop took longer than expected, they may run more than once (Quartz fired multiple times)
    Tasks.waitFor(true, new Callable<Boolean>() {

        @Override
        public Boolean call() throws Exception {
            return getRuns(SCHEDULED_WORKFLOW_1, ProgramRunStatus.COMPLETED) > 0 && getRuns(SCHEDULED_WORKFLOW_2, ProgramRunStatus.COMPLETED) > 0;
        }
    }, 10, TimeUnit.SECONDS);
    // There shouldn't be any partition trigger in the job queue
    Assert.assertFalse(Iterables.any(getAllJobs(), new Predicate<Job>() {

        @Override
        public boolean apply(Job job) {
            return job.getSchedule().getTrigger() instanceof ProtoTrigger.PartitionTrigger;
        }
    }));
    ProgramId compositeWorkflow = APP_ID.workflow(AppWithFrequentScheduledWorkflows.COMPOSITE_WORKFLOW);
    // Workflow scheduled with the composite trigger has never been started
    Assert.assertEquals(0, getRuns(compositeWorkflow, ProgramRunStatus.ALL));
    // Publish two more new partition notifications to satisfy the partition trigger in the composite trigger,
    // and thus the whole composite trigger will be satisfied
    publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME2);
    minPublishTime = System.currentTimeMillis();
    publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME2);
    // This would make sure the subscriber has processed the data event
    waitUntilProcessed(dataEventTopic, minPublishTime);
    // Wait for 1 run to complete for compositeWorkflow
    waitForCompleteRuns(1, compositeWorkflow);
    for (RunRecordMeta runRecordMeta : store.getRuns(SCHEDULED_WORKFLOW_1, ProgramRunStatus.ALL, 0, Long.MAX_VALUE, Integer.MAX_VALUE).values()) {
        Map<String, String> sysArgs = runRecordMeta.getSystemArgs();
        Assert.assertNotNull(sysArgs);
        TriggeringScheduleInfo scheduleInfo = GSON.fromJson(sysArgs.get(ProgramOptionConstants.TRIGGERING_SCHEDULE_INFO), TriggeringScheduleInfo.class);
        Assert.assertEquals(AppWithFrequentScheduledWorkflows.TEN_SECOND_SCHEDULE_1, scheduleInfo.getName());
        List<TriggerInfo> triggerInfos = scheduleInfo.getTriggerInfos();
        // Only one notification is enough to satisfy Time Trigger
        Assert.assertEquals(1, triggerInfos.size());
        Assert.assertEquals(TriggerInfo.Type.TIME, triggerInfos.get(0).getType());
    }
    // Also verify that the two partition schedules did not trigger
    Assert.assertEquals(runs1, getRuns(WORKFLOW_1, ProgramRunStatus.ALL));
    Assert.assertEquals(runs2, getRuns(WORKFLOW_2, ProgramRunStatus.ALL));
    // enable partition schedule 2
    enableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_2);
    testScheduleUpdate("disable");
    testScheduleUpdate("update");
    testScheduleUpdate("delete");
}
Also used : RunRecordMeta(co.cask.cdap.internal.app.store.RunRecordMeta) TriggerInfo(co.cask.cdap.api.schedule.TriggerInfo) TriggeringScheduleInfo(co.cask.cdap.api.schedule.TriggeringScheduleInfo) ProgramId(co.cask.cdap.proto.id.ProgramId) CConfiguration(co.cask.cdap.common.conf.CConfiguration) Constraint(co.cask.cdap.internal.schedule.constraint.Constraint) ConflictException(co.cask.cdap.common.ConflictException) AlreadyExistsException(co.cask.cdap.common.AlreadyExistsException) NotFoundException(co.cask.cdap.common.NotFoundException) AppRequest(co.cask.cdap.proto.artifact.AppRequest) Predicate(com.google.common.base.Predicate) ArtifactSummary(co.cask.cdap.api.artifact.ArtifactSummary) WorkflowId(co.cask.cdap.proto.id.WorkflowId) TopicId(co.cask.cdap.proto.id.TopicId) ProgramId(co.cask.cdap.proto.id.ProgramId) Id(co.cask.cdap.common.id.Id) NamespaceId(co.cask.cdap.proto.id.NamespaceId) MessageId(co.cask.cdap.messaging.data.MessageId) ScheduleId(co.cask.cdap.proto.id.ScheduleId) DatasetId(co.cask.cdap.proto.id.DatasetId) ApplicationId(co.cask.cdap.proto.id.ApplicationId) ProgramRunId(co.cask.cdap.proto.id.ProgramRunId) Job(co.cask.cdap.internal.app.runtime.schedule.queue.Job) PartitionTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.PartitionTrigger) Category(org.junit.experimental.categories.Category) Test(org.junit.Test)
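
The anonymous Callable and Predicate in this test are equivalent to Java 8 lambdas, since both java.util.concurrent.Callable and Guava's Predicate are single-method interfaces. A minimal sketch of the same two waits written that way, assuming the same test helpers (getRuns, getAllJobs) and the Tasks.waitFor signature used above:

// Wait until both time-scheduled workflows have completed at least once.
Tasks.waitFor(true,
    () -> getRuns(SCHEDULED_WORKFLOW_1, ProgramRunStatus.COMPLETED) > 0
        && getRuns(SCHEDULED_WORKFLOW_2, ProgramRunStatus.COMPLETED) > 0,
    10, TimeUnit.SECONDS);

// No partition trigger should remain in the job queue.
Assert.assertFalse(Iterables.any(getAllJobs(),
    job -> job.getSchedule().getTrigger() instanceof ProtoTrigger.PartitionTrigger));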

Example 58 with ArtifactSummary

Use of co.cask.cdap.api.artifact.ArtifactSummary in project cdap by caskdata.

From the class ProgramLifecycleHttpHandlerTest, method testHistory.

private void testHistory(Class<?> app, Id.Program program) throws Exception {
    String namespace = program.getNamespaceId();
    try {
        deploy(app, Constants.Gateway.API_VERSION_3_TOKEN, namespace);
        verifyProgramHistory(program.toEntityId());
    } catch (Exception e) {
        LOG.error("Got exception: ", e);
    } finally {
        deleteApp(program.getApplication(), 200);
    }
    ApplicationId appId = new ApplicationId(namespace, program.getApplicationId(), VERSION1);
    ProgramId programId = appId.program(program.getType(), program.getId());
    try {
        Id.Artifact artifactId = Id.Artifact.from(program.getNamespace(), app.getSimpleName(), "1.0.0");
        addAppArtifact(artifactId, app);
        AppRequest<Config> request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()), null);
        Assert.assertEquals(200, deploy(appId, request).getStatusLine().getStatusCode());
        verifyProgramHistory(programId);
    } catch (Exception e) {
        LOG.error("Got exception: ", e);
    } finally {
        deleteApp(appId, 200);
    }
}
Also used : ArtifactSummary(co.cask.cdap.api.artifact.ArtifactSummary) Config(co.cask.cdap.api.Config) ConsumerConfig(co.cask.cdap.data2.queue.ConsumerConfig) ProgramId(co.cask.cdap.proto.id.ProgramId) Id(co.cask.cdap.common.id.Id) NamespaceId(co.cask.cdap.proto.id.NamespaceId) ApplicationId(co.cask.cdap.proto.id.ApplicationId) ApplicationId(co.cask.cdap.proto.id.ApplicationId) ProgramId(co.cask.cdap.proto.id.ProgramId) IOException(java.io.IOException) NotFoundException(co.cask.cdap.common.NotFoundException) AppRequest(co.cask.cdap.proto.artifact.AppRequest)
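
Examples 58 through 60 repeat the same deploy-from-artifact pattern: register the artifact, wrap an ArtifactSummary in an AppRequest, deploy, and assert a 200 response. A hedged helper sketch of that pattern, reusing the addAppArtifact and deploy test helpers shown above (the helper name and null-tolerant config parameter are hypothetical):

// Hypothetical convenience wrapper around the deploy-from-artifact pattern used in these tests.
private void deployFromArtifact(ApplicationId appId, Id.Artifact artifactId,
                                Class<?> appClass, Config config) throws Exception {
    // Register the application artifact in the target namespace.
    addAppArtifact(artifactId, appClass);
    // Describe the deployment by artifact coordinates instead of uploading a jar.
    AppRequest<Config> request = new AppRequest<>(
        new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()), config);
    Assert.assertEquals(200, deploy(appId, request).getStatusLine().getStatusCode());
}

With such a wrapper, the second try block in testHistory reduces to a single call plus verifyProgramHistory.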

Example 59 with ArtifactSummary

Use of co.cask.cdap.api.artifact.ArtifactSummary in project cdap by caskdata.

From the class ProgramLifecycleHttpHandlerTest, method testUpdateSchedulesFlag.

@Test
public void testUpdateSchedulesFlag() throws Exception {
    // deploy an app with schedule
    AppWithSchedule.AppConfig config = new AppWithSchedule.AppConfig(true, true, true);
    Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.fromEntityId(TEST_NAMESPACE_META2.getNamespaceId()), AppWithSchedule.NAME, VERSION1);
    addAppArtifact(artifactId, AppWithSchedule.class);
    AppRequest<? extends Config> request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()), config, null, null, false);
    ApplicationId defaultAppId = TEST_NAMESPACE_META2.getNamespaceId().app(AppWithSchedule.NAME);
    Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
    List<ScheduleDetail> actualSchedules = listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(), defaultAppId.getApplication(), defaultAppId.getVersion());
    // none of the schedules will be added as we have set update schedules to be false
    Assert.assertEquals(0, actualSchedules.size());
    request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()), config, null, null, true);
    Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
    actualSchedules = listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(), defaultAppId.getApplication(), defaultAppId.getVersion());
    Assert.assertEquals(2, actualSchedules.size());
    // with workflow, without schedule
    config = new AppWithSchedule.AppConfig(true, false, false);
    request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()), config, null, null, false);
    Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
    // schedule should not be updated
    actualSchedules = listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(), defaultAppId.getApplication(), defaultAppId.getVersion());
    Assert.assertEquals(2, actualSchedules.size());
    // without workflow and schedule, schedule should be deleted
    config = new AppWithSchedule.AppConfig(false, false, false);
    request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()), config, null, null, false);
    Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
    actualSchedules = listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(), defaultAppId.getApplication(), defaultAppId.getVersion());
    Assert.assertEquals(0, actualSchedules.size());
    // with workflow and one schedule, the schedule should be added
    config = new AppWithSchedule.AppConfig(true, true, false);
    request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()), config, null, null, true);
    Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
    actualSchedules = listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(), defaultAppId.getApplication(), defaultAppId.getVersion());
    Assert.assertEquals(1, actualSchedules.size());
    Assert.assertEquals("SampleSchedule", actualSchedules.get(0).getName());
    // with workflow and two schedules, but update-schedules is false, so 2nd schedule should not get added
    config = new AppWithSchedule.AppConfig(true, true, true);
    request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()), config, null, null, false);
    Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
    actualSchedules = listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(), defaultAppId.getApplication(), defaultAppId.getVersion());
    Assert.assertEquals(1, actualSchedules.size());
    Assert.assertEquals("SampleSchedule", actualSchedules.get(0).getName());
    // same config, but the update-schedules flag is now true, so both schedules should be available
    request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()), config, null, null, true);
    Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
    actualSchedules = listSchedules(TEST_NAMESPACE_META2.getNamespaceId().getNamespace(), defaultAppId.getApplication(), defaultAppId.getVersion());
    Assert.assertEquals(2, actualSchedules.size());
}
Also used : ArtifactSummary(co.cask.cdap.api.artifact.ArtifactSummary) ProgramId(co.cask.cdap.proto.id.ProgramId) Id(co.cask.cdap.common.id.Id) NamespaceId(co.cask.cdap.proto.id.NamespaceId) ApplicationId(co.cask.cdap.proto.id.ApplicationId) ScheduleDetail(co.cask.cdap.proto.ScheduleDetail) AppWithSchedule(co.cask.cdap.AppWithSchedule) ApplicationId(co.cask.cdap.proto.id.ApplicationId) AppRequest(co.cask.cdap.proto.artifact.AppRequest) Test(org.junit.Test)
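
The five-argument AppRequest constructor used throughout this test carries the update-schedules flag as its last argument; the two nulls are assumed (not stated in the example) to be the preview configuration and owner principal, which the test never sets. A hypothetical wrapper that names the flag at the call site:

// Hypothetical helper to make the boolean flag at the deploy call sites readable.
// Assumption: the last constructor argument is the update-schedules flag and the
// two nulls are the preview config and owner principal.
private AppRequest<AppWithSchedule.AppConfig> requestFor(Id.Artifact artifactId,
                                                         AppWithSchedule.AppConfig config,
                                                         boolean updateSchedules) {
    return new AppRequest<>(
        new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()),
        config, null, null, updateSchedules);
}

Deploying then reads as deploy(defaultAppId, requestFor(artifactId, config, false)), matching the intent spelled out in the comments above.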

Example 60 with ArtifactSummary

Use of co.cask.cdap.api.artifact.ArtifactSummary in project cdap by caskdata.

From the class ProgramLifecycleHttpHandlerTest, method testSchedules.

@Test
public void testSchedules() throws Exception {
    // deploy an app with schedule
    Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.fromEntityId(TEST_NAMESPACE_META1.getNamespaceId()), AppWithSchedule.NAME, VERSION1);
    addAppArtifact(artifactId, AppWithSchedule.class);
    AppRequest<? extends Config> request = new AppRequest<>(new ArtifactSummary(artifactId.getName(), artifactId.getVersion().getVersion()));
    ApplicationId defaultAppId = TEST_NAMESPACE_META1.getNamespaceId().app(AppWithSchedule.NAME);
    Assert.assertEquals(200, deploy(defaultAppId, request).getStatusLine().getStatusCode());
    // deploy another version of the app
    ApplicationId appV2Id = TEST_NAMESPACE_META1.getNamespaceId().app(AppWithSchedule.NAME, VERSION2);
    Assert.assertEquals(200, deploy(appV2Id, request).getStatusLine().getStatusCode());
    // list schedules for the default version of the app; the listings for the workflow and for the app should be the same
    List<ScheduleDetail> schedules = getSchedules(TEST_NAMESPACE1, AppWithSchedule.NAME, AppWithSchedule.WORKFLOW_NAME);
    Assert.assertEquals(1, schedules.size());
    ScheduleDetail schedule = schedules.get(0);
    Assert.assertEquals(SchedulableProgramType.WORKFLOW, schedule.getProgram().getProgramType());
    Assert.assertEquals(AppWithSchedule.WORKFLOW_NAME, schedule.getProgram().getProgramName());
    Assert.assertEquals(new TimeTrigger("0/15 * * * * ?"), schedule.getTrigger());
    // the app-level listing for the default version should contain the same single schedule
    List<ScheduleDetail> schedulesForApp = listSchedules(TEST_NAMESPACE1, AppWithSchedule.NAME, null);
    Assert.assertEquals(1, schedulesForApp.size());
    Assert.assertEquals(schedules, schedulesForApp);
    List<ScheduleDetail> schedules2 = getSchedules(TEST_NAMESPACE1, AppWithSchedule.NAME, VERSION2, AppWithSchedule.WORKFLOW_NAME);
    Assert.assertEquals(1, schedules2.size());
    ScheduleDetail schedule2 = schedules2.get(0);
    Assert.assertEquals(SchedulableProgramType.WORKFLOW, schedule2.getProgram().getProgramType());
    Assert.assertEquals(AppWithSchedule.WORKFLOW_NAME, schedule2.getProgram().getProgramName());
    Assert.assertEquals(new TimeTrigger("0/15 * * * * ?"), schedule2.getTrigger());
    String newSchedule = "newTimeSchedule";
    testAddSchedule(newSchedule);
    testDeleteSchedule(appV2Id, newSchedule);
    testUpdateSchedule(appV2Id);
}
Also used : ArtifactSummary(co.cask.cdap.api.artifact.ArtifactSummary) TimeTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.TimeTrigger) ProgramId(co.cask.cdap.proto.id.ProgramId) Id(co.cask.cdap.common.id.Id) NamespaceId(co.cask.cdap.proto.id.NamespaceId) ApplicationId(co.cask.cdap.proto.id.ApplicationId) ScheduleDetail(co.cask.cdap.proto.ScheduleDetail) ApplicationId(co.cask.cdap.proto.id.ApplicationId) AppRequest(co.cask.cdap.proto.artifact.AppRequest) Test(org.junit.Test)
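
The trigger asserted in this test is built from a Quartz-style cron expression that includes a seconds field. A short annotated sketch of the same constructor call (the field breakdown follows the general Quartz convention and is not stated in the example):

import co.cask.cdap.internal.app.runtime.schedule.trigger.TimeTrigger;

// Quartz-style cron fields: seconds minutes hours day-of-month month day-of-week
//                           0/15    *       *     *            *     ?
// i.e. fire every 15 seconds, starting at second 0 of each minute.
TimeTrigger everyFifteenSeconds = new TimeTrigger("0/15 * * * * ?");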

Aggregations

ArtifactSummary (co.cask.cdap.api.artifact.ArtifactSummary): 64 usages
Test (org.junit.Test): 33 usages
ApplicationId (co.cask.cdap.proto.id.ApplicationId): 32 usages
AppRequest (co.cask.cdap.proto.artifact.AppRequest): 31 usages
ArtifactId (co.cask.cdap.proto.id.ArtifactId): 25 usages
NamespaceId (co.cask.cdap.proto.id.NamespaceId): 17 usages
Id (co.cask.cdap.common.id.Id): 13 usages
IOException (java.io.IOException): 13 usages
ProgramId (co.cask.cdap.proto.id.ProgramId): 12 usages
HashMap (java.util.HashMap): 10 usages
ArtifactVersion (co.cask.cdap.api.artifact.ArtifactVersion): 9 usages
NotFoundException (co.cask.cdap.common.NotFoundException): 8 usages
HashSet (java.util.HashSet): 8 usages
PluginInfo (co.cask.cdap.proto.artifact.PluginInfo): 7 usages
Map (java.util.Map): 7 usages
Set (java.util.Set): 7 usages
JsonObject (com.google.gson.JsonObject): 6 usages
URL (java.net.URL): 6 usages
ConfigTestApp (co.cask.cdap.ConfigTestApp): 5 usages
ArtifactRange (co.cask.cdap.api.artifact.ArtifactRange): 5 usages