Example 1 with ProgramStatusTrigger

Use of co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.

From the class DataPipelineTest, method deployPipelineWithSchedule:

private WorkflowManager deployPipelineWithSchedule(String pipelineName, Engine engine, String triggeringPipelineName,
                                                   ArgumentMapping key1Mapping, String expectedKey1Value,
                                                   PluginPropertyMapping key2Mapping, String expectedKey2Value) throws Exception {
    String tableName = "actionScheduleTable" + pipelineName + engine;
    String sourceName = "macroActionWithScheduleInput-" + pipelineName + engine;
    String sinkName = "macroActionWithScheduleOutput-" + pipelineName + engine;
    String key1 = key1Mapping.getTarget();
    String key2 = key2Mapping.getTarget();
    ETLBatchConfig etlConfig = ETLBatchConfig.builder("* * * * *")
        .addStage(new ETLStage("action1", MockAction.getPlugin(tableName, "row1", "column1", String.format("${%s}", key1))))
        .addStage(new ETLStage("action2", MockAction.getPlugin(tableName, "row2", "column2", String.format("${%s}", key2))))
        .addStage(new ETLStage("source", MockSource.getPlugin(sourceName)))
        .addStage(new ETLStage("filter1", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key1))))
        .addStage(new ETLStage("filter2", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key2))))
        .addStage(new ETLStage("sink", MockSink.getPlugin(sinkName)))
        .addConnection("action1", "action2")
        .addConnection("action2", "source")
        .addConnection("source", "filter1")
        .addConnection("filter1", "filter2")
        .addConnection("filter2", "sink")
        .setEngine(engine)
        .build();
    AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
    ApplicationId appId = NamespaceId.DEFAULT.app(pipelineName);
    ApplicationManager appManager = deployApplication(appId, appRequest);
    // there should be only two programs - one workflow and one mapreduce/spark
    Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
    // Use the expectedKey1Value and expectedKey2Value as values for two records, so that only the record "samuel" passes both filters and reaches the sink
    StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
    StructuredRecord recordKey1Value = StructuredRecord.builder(schema).set("name", expectedKey1Value).build();
    StructuredRecord recordKey2Value = StructuredRecord.builder(schema).set("name", expectedKey2Value).build();
    // write all three records to the source
    DataSetManager<Table> inputManager = getDataset(sourceName);
    MockSource.writeInput(inputManager, ImmutableList.of(recordSamuel, recordKey1Value, recordKey2Value));
    String defaultNamespace = NamespaceId.DEFAULT.getNamespace();
    // Use properties from the triggering pipeline as values for runtime argument key1, key2
    TriggeringPropertyMapping propertyMapping = new TriggeringPropertyMapping(ImmutableList.of(key1Mapping), ImmutableList.of(key2Mapping));
    ProgramStatusTrigger completeTrigger = new ProgramStatusTrigger(new WorkflowId(defaultNamespace, triggeringPipelineName, SmartWorkflow.NAME), ImmutableSet.of(ProgramStatus.COMPLETED));
    ScheduleId scheduleId = appId.schedule("completeSchedule");
    appManager.addSchedule(new ScheduleDetail(
        scheduleId.getNamespace(), scheduleId.getApplication(), scheduleId.getVersion(), scheduleId.getSchedule(), "",
        new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, SmartWorkflow.NAME),
        ImmutableMap.of(SmartWorkflow.TRIGGERING_PROPERTIES_MAPPING, GSON.toJson(propertyMapping)),
        completeTrigger, ImmutableList.<Constraint>of(), Schedulers.JOB_QUEUE_TIMEOUT_MILLIS, null));
    appManager.enableSchedule(scheduleId);
    return appManager.getWorkflowManager(SmartWorkflow.NAME);
}
Also used : ApplicationManager(co.cask.cdap.test.ApplicationManager) KeyValueTable(co.cask.cdap.api.dataset.lib.KeyValueTable) Table(co.cask.cdap.api.dataset.table.Table) Constraint(co.cask.cdap.internal.schedule.constraint.Constraint) Schema(co.cask.cdap.api.data.schema.Schema) WorkflowId(co.cask.cdap.proto.id.WorkflowId) ScheduleId(co.cask.cdap.proto.id.ScheduleId) StructuredRecord(co.cask.cdap.api.data.format.StructuredRecord) AppRequest(co.cask.cdap.proto.artifact.AppRequest) ETLBatchConfig(co.cask.cdap.etl.proto.v2.ETLBatchConfig) ETLStage(co.cask.cdap.etl.proto.v2.ETLStage) TriggeringPropertyMapping(co.cask.cdap.etl.proto.v2.TriggeringPropertyMapping) ProgramStatusTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger) ScheduleDetail(co.cask.cdap.proto.ScheduleDetail) ApplicationId(co.cask.cdap.proto.id.ApplicationId) ScheduleProgramInfo(co.cask.cdap.api.workflow.ScheduleProgramInfo)
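
To exercise the schedule that this helper sets up, a test deploys the triggering pipeline, runs it to completion, and then waits for the schedule to launch the triggered workflow. A minimal sketch under stated assumptions: the triggering pipeline's AppRequest (triggeringAppRequest below) is built separately, and the test framework's waitForRun helper is available; the pipeline names and mappings here are illustrative, not taken from DataPipelineTest:

// Sketch only: pipeline names, mappings, and triggeringAppRequest are hypothetical.
WorkflowManager triggeredWorkflow = deployPipelineWithSchedule(
    "triggeredPipeline", Engine.MAPREDUCE, "triggeringPipeline",
    key1Mapping, "expectedValue1", key2Mapping, "expectedValue2");

// Run the triggering pipeline to completion; its COMPLETED status satisfies the
// ProgramStatusTrigger, and the enabled schedule starts the triggered workflow.
WorkflowManager triggeringWorkflow =
    deployApplication(NamespaceId.DEFAULT.app("triggeringPipeline"), triggeringAppRequest)
        .getWorkflowManager(SmartWorkflow.NAME);
triggeringWorkflow.start();
triggeringWorkflow.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);

// The schedule should now fire and run the triggered pipeline once.
triggeredWorkflow.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);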

Example 2 with ProgramStatusTrigger

Use of co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.

From the class ProgramScheduleStoreDatasetTest, method testDeleteScheduleByTriggeringProgram:

@Test
public void testDeleteScheduleByTriggeringProgram() throws Exception {
    DatasetFramework dsFramework = getInjector().getInstance(DatasetFramework.class);
    TransactionSystemClient txClient = getInjector().getInstance(TransactionSystemClient.class);
    TransactionExecutorFactory txExecutorFactory = new DynamicTransactionExecutorFactory(txClient);
    dsFramework.truncateInstance(Schedulers.STORE_DATASET_ID);
    final ProgramScheduleStoreDataset store = dsFramework.getDataset(Schedulers.STORE_DATASET_ID, new HashMap<String, String>(), null);
    Assert.assertNotNull(store);
    TransactionExecutor txExecutor = txExecutorFactory.createExecutor(Collections.singleton((TransactionAware) store));
    SatisfiableTrigger prog1Trigger = new ProgramStatusTrigger(PROG1_ID, ProgramStatus.COMPLETED, ProgramStatus.FAILED, ProgramStatus.KILLED);
    SatisfiableTrigger prog2Trigger = new ProgramStatusTrigger(PROG2_ID, ProgramStatus.COMPLETED, ProgramStatus.FAILED, ProgramStatus.KILLED);
    final ProgramSchedule sched1 = new ProgramSchedule("sched1", "a program status trigger", PROG3_ID,
        ImmutableMap.of("propper", "popper"), prog1Trigger, ImmutableList.<Constraint>of());
    final ProgramSchedule sched2 = new ProgramSchedule("sched2", "a program status trigger", PROG3_ID,
        ImmutableMap.of("propper", "popper"), prog2Trigger, ImmutableList.<Constraint>of());
    final ProgramSchedule schedOr = new ProgramSchedule("schedOr", "an OR trigger", PROG3_ID,
        ImmutableMap.of("propper", "popper"),
        new OrTrigger(new PartitionTrigger(DS1_ID, 1),
                      prog1Trigger,
                      new AndTrigger(new OrTrigger(prog1Trigger, prog2Trigger), new PartitionTrigger(DS2_ID, 1)),
                      new OrTrigger(prog2Trigger)),
        ImmutableList.<Constraint>of());
    final ProgramSchedule schedAnd = new ProgramSchedule("schedAnd", "an AND trigger", PROG3_ID,
        ImmutableMap.of("propper", "popper"),
        new AndTrigger(new PartitionTrigger(DS1_ID, 1),
                       prog2Trigger,
                       new AndTrigger(prog1Trigger, new PartitionTrigger(DS2_ID, 1))),
        ImmutableList.<Constraint>of());
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            store.addSchedules(ImmutableList.of(sched1, sched2, schedOr, schedAnd));
        }
    });
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            // ProgramStatus event for PROG1_ID should trigger only sched1, schedOr, schedAnd
            Assert.assertEquals(ImmutableSet.of(sched1, schedOr, schedAnd), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED))));
            // ProgramStatus event for PROG2_ID should trigger only sched2, schedOr, schedAnd
            Assert.assertEquals(ImmutableSet.of(sched2, schedOr, schedAnd), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.FAILED))));
        }
    });
    // update or delete all schedules triggered by PROG1_ID
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            store.modifySchedulesTriggeredByDeletedProgram(PROG1_ID);
        }
    });
    final ProgramSchedule schedOrNew = new ProgramSchedule("schedOr", "an OR trigger", PROG3_ID,
        ImmutableMap.of("propper", "popper"),
        new OrTrigger(new PartitionTrigger(DS1_ID, 1),
                      new AndTrigger(prog2Trigger, new PartitionTrigger(DS2_ID, 1)),
                      prog2Trigger),
        ImmutableList.of());
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            // ProgramStatus event for PROG1_ID should trigger no schedules after modifying schedules triggered by it
            Assert.assertEquals(Collections.emptySet(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED))));
            Assert.assertEquals(Collections.emptySet(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.FAILED))));
            Assert.assertEquals(Collections.emptySet(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.KILLED))));
            // ProgramStatus event for PROG2_ID should trigger only sched2 and schedOrNew
            Assert.assertEquals(ImmutableSet.of(sched2, schedOrNew), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.FAILED))));
        }
    });
    // update or delete all schedules triggered by PROG2_ID
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            store.modifySchedulesTriggeredByDeletedProgram(PROG2_ID);
        }
    });
    final ProgramSchedule schedOrNew1 = new ProgramSchedule("schedOr", "an OR trigger", PROG3_ID, ImmutableMap.of("propper", "popper"), new PartitionTrigger(DS1_ID, 1), ImmutableList.of());
    final Set<ProgramSchedule> ds1Schedules = new HashSet<>();
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            // ProgramStatus event for PROG2_ID should trigger no schedules after modifying schedules triggered by it
            Assert.assertEquals(Collections.emptySet(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.COMPLETED))));
            Assert.assertEquals(Collections.emptySet(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.FAILED))));
            Assert.assertEquals(Collections.emptySet(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.KILLED))));
            // event for DS1 should trigger only schedOrNew1 since all other schedules are deleted
            ds1Schedules.addAll(toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID))));
        }
    });
    Assert.assertEquals(ImmutableSet.of(schedOrNew1), ds1Schedules);
}
Also used : OrTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.OrTrigger) DynamicTransactionExecutorFactory(co.cask.cdap.data.runtime.DynamicTransactionExecutorFactory) TransactionExecutor(org.apache.tephra.TransactionExecutor) DatasetManagementException(co.cask.cdap.api.dataset.DatasetManagementException) TransactionExecutorFactory(co.cask.cdap.data2.transaction.TransactionExecutorFactory) DynamicTransactionExecutorFactory(co.cask.cdap.data.runtime.DynamicTransactionExecutorFactory) AndTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.AndTrigger) DatasetFramework(co.cask.cdap.data2.dataset2.DatasetFramework) TransactionSystemClient(org.apache.tephra.TransactionSystemClient) SatisfiableTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.SatisfiableTrigger) ProgramSchedule(co.cask.cdap.internal.app.runtime.schedule.ProgramSchedule) TransactionAware(org.apache.tephra.TransactionAware) ProgramStatusTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger) PartitionTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.PartitionTrigger) HashSet(java.util.HashSet) Test(org.junit.Test)
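
The before-and-after pairs in this test (schedOr becoming schedOrNew, and later schedOrNew1) follow a recursive pruning rule: a ProgramStatusTrigger on the deleted program is removed; an OR survives if at least one child survives; an AND is deleted if any child is deleted; and a composite left with a single child collapses to that child. Below is a minimal, self-contained toy model of that rule in plain Java. It is not CDAP API: every class name in it is illustrative, and partition triggers are modeled as leaves whose name never matches a deleted program.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class PruneDemo {

    abstract static class ToyTrigger {
        // Returns the pruned trigger, or null if it can never be satisfied again.
        abstract ToyTrigger prune(String deletedProgram);
    }

    static class Leaf extends ToyTrigger {
        final String name;
        Leaf(String name) { this.name = name; }
        ToyTrigger prune(String deleted) { return name.equals(deleted) ? null : this; }
        public String toString() { return name; }
    }

    static class Or extends ToyTrigger {
        final List<ToyTrigger> children;
        Or(ToyTrigger... children) { this.children = Arrays.asList(children); }
        ToyTrigger prune(String deleted) {
            List<ToyTrigger> kept = new ArrayList<>();
            for (ToyTrigger child : children) {
                ToyTrigger pruned = child.prune(deleted);
                if (pruned != null) { kept.add(pruned); }
            }
            if (kept.isEmpty()) { return null; }            // nothing left to fire the OR
            return kept.size() == 1 ? kept.get(0) : new Or(kept.toArray(new ToyTrigger[0]));
        }
        public String toString() { return "OR" + children; }
    }

    static class And extends ToyTrigger {
        final List<ToyTrigger> children;
        And(ToyTrigger... children) { this.children = Arrays.asList(children); }
        ToyTrigger prune(String deleted) {
            List<ToyTrigger> kept = new ArrayList<>();
            for (ToyTrigger child : children) {
                ToyTrigger pruned = child.prune(deleted);
                if (pruned == null) { return null; }        // one dead child kills the AND
                kept.add(pruned);
            }
            return kept.size() == 1 ? kept.get(0) : new And(kept.toArray(new ToyTrigger[0]));
        }
        public String toString() { return "AND" + children; }
    }

    public static void main(String[] args) {
        // Mirrors schedOr's trigger: OR(DS1, PROG1, AND(OR(PROG1, PROG2), DS2), OR(PROG2))
        ToyTrigger schedOr = new Or(
            new Leaf("DS1"),
            new Leaf("PROG1"),
            new And(new Or(new Leaf("PROG1"), new Leaf("PROG2")), new Leaf("DS2")),
            new Or(new Leaf("PROG2")));
        // Prints OR[DS1, AND[PROG2, DS2], PROG2], the shape of schedOrNew above.
        System.out.println(schedOr.prune("PROG1"));
        // Pruning PROG2 as well leaves just DS1, the shape of schedOrNew1.
        System.out.println(schedOr.prune("PROG1").prune("PROG2"));
    }
}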

Example 3 with ProgramStatusTrigger

Use of co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.

From the class ProgramScheduleStoreDataset, method modifySchedulesTriggeredByDeletedProgram:

/**
 * Update all schedules that can be triggered by the given deleted program. A schedule will be removed if
 * the only {@link ProgramStatusTrigger} in it is triggered by the deleted program. Schedules with composite triggers
 * will be updated if the composite trigger can still be satisfied after the program is deleted, otherwise the
 * schedules will be deleted.
 *
 * @param programId the id of the deleted program; schedules triggered solely by it will be deleted
 * @return the IDs of the schedules that were deleted
 */
public List<ScheduleId> modifySchedulesTriggeredByDeletedProgram(ProgramId programId) {
    List<ScheduleId> deleted = new ArrayList<>();
    Set<ProgramScheduleRecord> scheduleRecords = new HashSet<>();
    for (ProgramStatus status : ProgramStatus.values()) {
        scheduleRecords.addAll(findSchedules(Schedulers.triggerKeyForProgramStatus(programId, status)));
    }
    for (ProgramScheduleRecord scheduleRecord : scheduleRecords) {
        ProgramSchedule schedule = scheduleRecord.getSchedule();
        try {
            deleteSchedule(schedule.getScheduleId());
        } catch (NotFoundException e) {
            // this should never happen
            LOG.warn("Failed to delete the schedule '{}' triggered by '{}', skip this schedule.", schedule.getScheduleId(), programId, e);
            continue;
        }
        if (schedule.getTrigger() instanceof AbstractSatisfiableCompositeTrigger) {
            // get the updated composite trigger by removing the program status trigger of the given program
            Trigger updatedTrigger = ((AbstractSatisfiableCompositeTrigger) schedule.getTrigger()).getTriggerWithDeletedProgram(programId);
            if (updatedTrigger == null) {
                deleted.add(schedule.getScheduleId());
                continue;
            }
            // if the updated composite trigger is not null, add the schedule back with updated composite trigger
            try {
                addScheduleWithStatus(new ProgramSchedule(schedule.getName(), schedule.getDescription(), schedule.getProgramId(), schedule.getProperties(), updatedTrigger, schedule.getConstraints(), schedule.getTimeoutMillis()), scheduleRecord.getMeta().getStatus(), System.currentTimeMillis());
            } catch (AlreadyExistsException e) {
                // this should never happen
                LOG.warn("Failed to add the schedule '{}' triggered by '{}' with updated trigger '{}', " + "skip adding this schedule.", schedule.getScheduleId(), programId, updatedTrigger, e);
            }
        } else {
            deleted.add(schedule.getScheduleId());
        }
    }
    return deleted;
}
Also used : SatisfiableTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.SatisfiableTrigger) Trigger(co.cask.cdap.api.schedule.Trigger) AbstractSatisfiableCompositeTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.AbstractSatisfiableCompositeTrigger) ProgramStatusTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger) AlreadyExistsException(co.cask.cdap.common.AlreadyExistsException) ProgramSchedule(co.cask.cdap.internal.app.runtime.schedule.ProgramSchedule) ArrayList(java.util.ArrayList) AbstractSatisfiableCompositeTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.AbstractSatisfiableCompositeTrigger) NotFoundException(co.cask.cdap.common.NotFoundException) ProgramScheduleRecord(co.cask.cdap.internal.app.runtime.schedule.ProgramScheduleRecord) ScheduleId(co.cask.cdap.proto.id.ScheduleId) HashSet(java.util.HashSet) ProgramStatus(co.cask.cdap.api.ProgramStatus)
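
As the tests in Examples 2 and 5 show, this method is meant to be invoked within a transaction on the store. A minimal sketch of a caller, reusing the transactional pattern from Example 2 (store, txExecutor, and deletedProgramId are assumed to be set up as in that example):

// Sketch: collect the schedules removed as a consequence of deleting a program.
final List<ScheduleId> deletedSchedules = new ArrayList<>();
txExecutor.execute(new TransactionExecutor.Subroutine() {

    @Override
    public void apply() throws Exception {
        deletedSchedules.addAll(store.modifySchedulesTriggeredByDeletedProgram(deletedProgramId));
    }
});
// deletedSchedules now holds the IDs of the schedules whose triggers could not be salvaged.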

Example 4 with ProgramStatusTrigger

Use of co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.

From the class ConfiguratorTest, method testAppWithConfig:

@Test
public void testAppWithConfig() throws Exception {
    LocationFactory locationFactory = new LocalLocationFactory(TMP_FOLDER.newFolder());
    Location appJar = AppJarHelper.createDeploymentJar(locationFactory, ConfigTestApp.class);
    Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, ConfigTestApp.class.getSimpleName(), "1.0.0");
    CConfiguration cConf = CConfiguration.create();
    ArtifactRepository baseArtifactRepo = new DefaultArtifactRepository(cConf, null, null, new DummyProgramRunnerFactory(), new DefaultImpersonator(cConf, null));
    ArtifactRepository artifactRepo = new AuthorizationArtifactRepository(baseArtifactRepo, authEnforcer, authenticationContext);
    ConfigTestApp.ConfigClass config = new ConfigTestApp.ConfigClass("myStream", "myTable");
    // Create a configurator that is testable. Provide it an application.
    try (CloseableClassLoader artifactClassLoader = artifactRepo.createArtifactClassLoader(appJar, new EntityImpersonator(artifactId.getNamespace().toEntityId(), new DefaultImpersonator(cConf, null)))) {
        Configurator configuratorWithConfig = new InMemoryConfigurator(cConf, Id.Namespace.DEFAULT, artifactId, ConfigTestApp.class.getName(), artifactRepo, artifactClassLoader, null, null, new Gson().toJson(config));
        ListenableFuture<ConfigResponse> result = configuratorWithConfig.config();
        ConfigResponse response = result.get(10, TimeUnit.SECONDS);
        Assert.assertNotNull(response);
        ApplicationSpecificationAdapter adapter = ApplicationSpecificationAdapter.create(new ReflectionSchemaGenerator());
        ApplicationSpecification specification = adapter.fromJson(response.get());
        Assert.assertNotNull(specification);
        Assert.assertEquals(1, specification.getStreams().size());
        Assert.assertTrue(specification.getStreams().containsKey("myStream"));
        Assert.assertEquals(1, specification.getDatasets().size());
        Assert.assertTrue(specification.getDatasets().containsKey("myTable"));
        Configurator configuratorWithoutConfig = new InMemoryConfigurator(cConf, Id.Namespace.DEFAULT, artifactId, ConfigTestApp.class.getName(), artifactRepo, artifactClassLoader, null, null, null);
        result = configuratorWithoutConfig.config();
        response = result.get(10, TimeUnit.SECONDS);
        Assert.assertNotNull(response);
        specification = adapter.fromJson(response.get());
        Assert.assertNotNull(specification);
        Assert.assertEquals(1, specification.getStreams().size());
        Assert.assertTrue(specification.getStreams().containsKey(ConfigTestApp.DEFAULT_STREAM));
        Assert.assertEquals(1, specification.getDatasets().size());
        Assert.assertTrue(specification.getDatasets().containsKey(ConfigTestApp.DEFAULT_TABLE));
        Assert.assertNotNull(specification.getProgramSchedules().get(ConfigTestApp.SCHEDULE_NAME));
        ProgramStatusTrigger trigger = (ProgramStatusTrigger) specification.getProgramSchedules().get(ConfigTestApp.SCHEDULE_NAME).getTrigger();
        Assert.assertEquals(trigger.getProgramId().getProgram(), ConfigTestApp.WORKFLOW_NAME);
    }
}
Also used : ApplicationSpecification(co.cask.cdap.api.app.ApplicationSpecification) Configurator(co.cask.cdap.app.deploy.Configurator) DefaultArtifactRepository(co.cask.cdap.internal.app.runtime.artifact.DefaultArtifactRepository) Gson(com.google.gson.Gson) ConfigResponse(co.cask.cdap.app.deploy.ConfigResponse) CloseableClassLoader(co.cask.cdap.api.artifact.CloseableClassLoader) ProgramStatusTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger) LocalLocationFactory(org.apache.twill.filesystem.LocalLocationFactory) ConfigTestApp(co.cask.cdap.ConfigTestApp) AuthorizationArtifactRepository(co.cask.cdap.internal.app.runtime.artifact.AuthorizationArtifactRepository) DummyProgramRunnerFactory(co.cask.cdap.app.runtime.DummyProgramRunnerFactory) EntityImpersonator(co.cask.cdap.security.impersonation.EntityImpersonator) AuthorizationArtifactRepository(co.cask.cdap.internal.app.runtime.artifact.AuthorizationArtifactRepository) ArtifactRepository(co.cask.cdap.internal.app.runtime.artifact.ArtifactRepository) DefaultArtifactRepository(co.cask.cdap.internal.app.runtime.artifact.DefaultArtifactRepository) ReflectionSchemaGenerator(co.cask.cdap.internal.io.ReflectionSchemaGenerator) CConfiguration(co.cask.cdap.common.conf.CConfiguration) DefaultImpersonator(co.cask.cdap.security.impersonation.DefaultImpersonator) LocalLocationFactory(org.apache.twill.filesystem.LocalLocationFactory) LocationFactory(org.apache.twill.filesystem.LocationFactory) ApplicationSpecificationAdapter(co.cask.cdap.internal.app.ApplicationSpecificationAdapter) Id(co.cask.cdap.common.id.Id) Location(org.apache.twill.filesystem.Location) Test(org.junit.Test)
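
For reference, an application specification ends up containing a ProgramStatusTrigger when the app declares a program-status-triggered schedule in configure(). A sketch of such a declaration, assuming the ScheduleBuilder API introduced around CDAP 4.3 (buildSchedule and triggerOnProgramStatus); the app, workflow, and schedule names below are hypothetical and not taken from ConfigTestApp:

import co.cask.cdap.api.ProgramStatus;
import co.cask.cdap.api.app.AbstractApplication;
import co.cask.cdap.api.app.ProgramType;

public class TriggeredApp extends AbstractApplication {

    @Override
    public void configure() {
        // TriggeredWorkflow and UpstreamWorkflow are hypothetical workflows in this app.
        addWorkflow(new TriggeredWorkflow());
        // Run TriggeredWorkflow whenever UpstreamWorkflow completes successfully.
        schedule(buildSchedule("onUpstreamComplete", ProgramType.WORKFLOW, "TriggeredWorkflow")
            .triggerOnProgramStatus(ProgramType.WORKFLOW, "UpstreamWorkflow", ProgramStatus.COMPLETED));
    }
}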

Example 5 with ProgramStatusTrigger

Use of co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.

From the class ProgramScheduleStoreDatasetTest, method testFindSchedulesByEventAndUpdateSchedule:

@Test
public void testFindSchedulesByEventAndUpdateSchedule() throws Exception {
    DatasetFramework dsFramework = getInjector().getInstance(DatasetFramework.class);
    TransactionSystemClient txClient = getInjector().getInstance(TransactionSystemClient.class);
    TransactionExecutorFactory txExecutorFactory = new DynamicTransactionExecutorFactory(txClient);
    dsFramework.truncateInstance(Schedulers.STORE_DATASET_ID);
    final ProgramScheduleStoreDataset store = dsFramework.getDataset(Schedulers.STORE_DATASET_ID, new HashMap<String, String>(), null);
    Assert.assertNotNull(store);
    TransactionExecutor txExecutor = txExecutorFactory.createExecutor(Collections.singleton((TransactionAware) store));
    final ProgramSchedule sched11 = new ProgramSchedule("sched11", "one partition schedule", PROG1_ID,
        ImmutableMap.of("prop3", "abc"), new PartitionTrigger(DS1_ID, 1), ImmutableList.<Constraint>of());
    final ProgramSchedule sched12 = new ProgramSchedule("sched12", "two partition schedule", PROG1_ID,
        ImmutableMap.of("propper", "popper"), new PartitionTrigger(DS2_ID, 2), ImmutableList.<Constraint>of());
    final ProgramSchedule sched22 = new ProgramSchedule("sched22", "twentytwo partition schedule", PROG2_ID,
        ImmutableMap.of("nn", "4"), new PartitionTrigger(DS2_ID, 22), ImmutableList.<Constraint>of());
    final ProgramSchedule sched31 = new ProgramSchedule("sched31", "a program status trigger", PROG3_ID,
        ImmutableMap.of("propper", "popper"),
        new ProgramStatusTrigger(PROG1_ID, ProgramStatus.COMPLETED, ProgramStatus.FAILED, ProgramStatus.KILLED),
        ImmutableList.<Constraint>of());
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            // event for DS1 or DS2 should trigger nothing. validate it returns an empty collection
            Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID)).isEmpty());
            Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForPartition(DS2_ID)).isEmpty());
            // event for PROG1 should trigger nothing. it should also return an empty collection
            Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED)).isEmpty());
            Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.FAILED)).isEmpty());
            Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.KILLED)).isEmpty());
        }
    });
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            store.addSchedules(ImmutableList.of(sched11, sched12, sched22, sched31));
        }
    });
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            // event for ProgramStatus should trigger only sched31
            Assert.assertEquals(ImmutableSet.of(sched31), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED))));
            Assert.assertEquals(ImmutableSet.of(sched31), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.FAILED))));
            Assert.assertEquals(ImmutableSet.of(sched31), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.KILLED))));
            // event for DS1 should trigger only sched11
            Assert.assertEquals(ImmutableSet.of(sched11), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID))));
            // event for DS2 triggers only sched12 and sched22
            Assert.assertEquals(ImmutableSet.of(sched12, sched22), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS2_ID))));
        }
    });
    final ProgramSchedule sched11New = new ProgramSchedule(sched11.getName(), "time schedule", PROG1_ID,
        ImmutableMap.of("timeprop", "time"), new TimeTrigger("* * * * *"), ImmutableList.<Constraint>of());
    final ProgramSchedule sched12New = new ProgramSchedule(sched12.getName(), "one partition schedule", PROG1_ID,
        ImmutableMap.of("pp", "p"), new PartitionTrigger(DS1_ID, 2), ImmutableList.<Constraint>of());
    final ProgramSchedule sched22New = new ProgramSchedule(sched22.getName(), "program3 failed schedule", PROG2_ID,
        ImmutableMap.of("ss", "s"), new ProgramStatusTrigger(PROG3_ID, ProgramStatus.FAILED), ImmutableList.<Constraint>of());
    final ProgramSchedule sched31New = new ProgramSchedule(sched31.getName(), "program1 failed schedule", PROG3_ID,
        ImmutableMap.of("abcd", "efgh"), new ProgramStatusTrigger(PROG1_ID, ProgramStatus.FAILED), ImmutableList.<Constraint>of());
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            store.updateSchedule(sched11New);
            store.updateSchedule(sched12New);
            store.updateSchedule(sched22New);
            store.updateSchedule(sched31New);
        }
    });
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            // event for DS1 should trigger only sched12New after update
            Assert.assertEquals(ImmutableSet.of(sched12New), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID))));
            // event for DS2 triggers no schedule after update
            Assert.assertEquals(ImmutableSet.<ProgramSchedule>of(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS2_ID))));
            // event for PROG1 now triggers sched31New only on FAILED status, not on COMPLETED or KILLED
            Assert.assertEquals(ImmutableSet.of(sched31New), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.FAILED))));
            Assert.assertEquals(ImmutableSet.of(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED))));
            Assert.assertEquals(ImmutableSet.of(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.KILLED))));
        }
    });
}
Also used : TimeTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.TimeTrigger) DynamicTransactionExecutorFactory(co.cask.cdap.data.runtime.DynamicTransactionExecutorFactory) TransactionExecutor(org.apache.tephra.TransactionExecutor) DatasetManagementException(co.cask.cdap.api.dataset.DatasetManagementException) TransactionExecutorFactory(co.cask.cdap.data2.transaction.TransactionExecutorFactory) DynamicTransactionExecutorFactory(co.cask.cdap.data.runtime.DynamicTransactionExecutorFactory) DatasetFramework(co.cask.cdap.data2.dataset2.DatasetFramework) TransactionSystemClient(org.apache.tephra.TransactionSystemClient) ProgramSchedule(co.cask.cdap.internal.app.runtime.schedule.ProgramSchedule) TransactionAware(org.apache.tephra.TransactionAware) ProgramStatusTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger) PartitionTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.PartitionTrigger) Test(org.junit.Test)
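
Both store tests call a toScheduleSet helper that the snippets do not show. A plausible implementation, inferred from how ProgramScheduleRecord.getSchedule() is used in Example 3 (treat it as a reconstruction, not the verbatim test code):

// Flatten the records returned by findSchedules(..) into the schedules themselves,
// so they can be compared against expected sets with Assert.assertEquals.
private Set<ProgramSchedule> toScheduleSet(Collection<ProgramScheduleRecord> records) {
    Set<ProgramSchedule> schedules = new HashSet<>();
    for (ProgramScheduleRecord record : records) {
        schedules.add(record.getSchedule());
    }
    return schedules;
}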

Aggregations

ProgramStatusTrigger (co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger): 5 usages
ProgramSchedule (co.cask.cdap.internal.app.runtime.schedule.ProgramSchedule): 3 usages
Test (org.junit.Test): 3 usages
DatasetManagementException (co.cask.cdap.api.dataset.DatasetManagementException): 2 usages
DynamicTransactionExecutorFactory (co.cask.cdap.data.runtime.DynamicTransactionExecutorFactory): 2 usages
DatasetFramework (co.cask.cdap.data2.dataset2.DatasetFramework): 2 usages
TransactionExecutorFactory (co.cask.cdap.data2.transaction.TransactionExecutorFactory): 2 usages
PartitionTrigger (co.cask.cdap.internal.app.runtime.schedule.trigger.PartitionTrigger): 2 usages
SatisfiableTrigger (co.cask.cdap.internal.app.runtime.schedule.trigger.SatisfiableTrigger): 2 usages
ScheduleId (co.cask.cdap.proto.id.ScheduleId): 2 usages
HashSet (java.util.HashSet): 2 usages
TransactionAware (org.apache.tephra.TransactionAware): 2 usages
TransactionExecutor (org.apache.tephra.TransactionExecutor): 2 usages
TransactionSystemClient (org.apache.tephra.TransactionSystemClient): 2 usages
ConfigTestApp (co.cask.cdap.ConfigTestApp): 1 usage
ProgramStatus (co.cask.cdap.api.ProgramStatus): 1 usage
ApplicationSpecification (co.cask.cdap.api.app.ApplicationSpecification): 1 usage
CloseableClassLoader (co.cask.cdap.api.artifact.CloseableClassLoader): 1 usage
StructuredRecord (co.cask.cdap.api.data.format.StructuredRecord): 1 usage
Schema (co.cask.cdap.api.data.schema.Schema): 1 usage