
Example 1 with ProgramStatusTrigger

Use of io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.

From class DataPipelineTest, method deployPipelineWithSchedule.

private WorkflowManager deployPipelineWithSchedule(String pipelineName, Engine engine, String triggeringPipelineName, ArgumentMapping key1Mapping, String expectedKey1Value, PluginPropertyMapping key2Mapping, String expectedKey2Value) throws Exception {
    String tableName = "actionScheduleTable" + pipelineName + engine;
    String sourceName = "macroActionWithScheduleInput-" + pipelineName + engine;
    String sinkName = "macroActionWithScheduleOutput-" + pipelineName + engine;
    String key1 = key1Mapping.getTarget();
    String key2 = key2Mapping.getTarget();
    ETLBatchConfig etlConfig = ETLBatchConfig.builder()
        .addStage(new ETLStage("action1", MockAction.getPlugin(tableName, "row1", "column1", String.format("${%s}", key1))))
        .addStage(new ETLStage("action2", MockAction.getPlugin(tableName, "row2", "column2", String.format("${%s}", key2))))
        .addStage(new ETLStage("source", MockSource.getPlugin(sourceName)))
        .addStage(new ETLStage("filter1", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key1))))
        .addStage(new ETLStage("filter2", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key2))))
        .addStage(new ETLStage("sink", MockSink.getPlugin(sinkName)))
        .addConnection("action1", "action2")
        .addConnection("action2", "source")
        .addConnection("source", "filter1")
        .addConnection("filter1", "filter2")
        .addConnection("filter2", "sink")
        .setEngine(engine)
        .build();
    AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
    ApplicationId appId = NamespaceId.DEFAULT.app(pipelineName);
    ApplicationManager appManager = deployApplication(appId, appRequest);
    // there should be only two programs - one workflow and one mapreduce/spark
    Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
    // Use expectedKey1Value and expectedKey2Value as names for two records, so that only the record "samuel"
    // passes both filters once the runtime arguments are resolved from the triggering pipeline
    StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
    StructuredRecord recordKey1Value = StructuredRecord.builder(schema).set("name", expectedKey1Value).build();
    StructuredRecord recordKey2Value = StructuredRecord.builder(schema).set("name", expectedKey2Value).build();
    // write the three records to the source dataset
    DataSetManager<Table> inputManager = getDataset(sourceName);
    MockSource.writeInput(inputManager, ImmutableList.of(recordSamuel, recordKey1Value, recordKey2Value));
    String defaultNamespace = NamespaceId.DEFAULT.getNamespace();
    // Use properties from the triggering pipeline as values for runtime argument key1, key2
    TriggeringPropertyMapping propertyMapping = new TriggeringPropertyMapping(ImmutableList.of(key1Mapping), ImmutableList.of(key2Mapping));
    ProgramStatusTrigger completeTrigger = new ProgramStatusTrigger(new WorkflowId(defaultNamespace, triggeringPipelineName, SmartWorkflow.NAME), ImmutableSet.of(ProgramStatus.COMPLETED));
    ScheduleId scheduleId = appId.schedule("completeSchedule");
    appManager.addSchedule(new ScheduleDetail(
        scheduleId.getNamespace(), scheduleId.getApplication(), scheduleId.getVersion(), scheduleId.getSchedule(), "",
        new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, SmartWorkflow.NAME),
        ImmutableMap.of(SmartWorkflow.TRIGGERING_PROPERTIES_MAPPING, GSON.toJson(propertyMapping)),
        completeTrigger, ImmutableList.of(), Schedulers.JOB_QUEUE_TIMEOUT_MILLIS, null, null));
    appManager.enableSchedule(scheduleId);
    return appManager.getWorkflowManager(SmartWorkflow.NAME);
}
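This helper only deploys the triggered pipeline and enables its program-status schedule; a caller still has to run the triggering pipeline. The following is a minimal, hypothetical usage sketch rather than the exact DataPipelineTest flow: it assumes the triggering pipeline named by triggeringPipelineName has already been deployed and its ApplicationManager is available as triggeringAppManager, and that key1Mapping/key2Mapping are the mappings passed to the helper; the pipeline names, expected values, and timeouts are illustrative.

// Hypothetical caller (illustrative names, not from DataPipelineTest):
// deploy the triggered pipeline and enable its program-status schedule.
WorkflowManager triggeredWorkflow = deployPipelineWithSchedule(
    "triggeredPipeline", Engine.SPARK, "triggeringPipeline",
    key1Mapping, "expectedValue1", key2Mapping, "expectedValue2");

// Run the triggering pipeline's workflow to completion; its COMPLETED status
// satisfies the ProgramStatusTrigger created above and fires the schedule.
WorkflowManager triggeringWorkflow = triggeringAppManager.getWorkflowManager(SmartWorkflow.NAME);
triggeringWorkflow.start();
triggeringWorkflow.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);

// The schedule then launches the triggered workflow with key1/key2 resolved from
// the triggering pipeline via the TriggeringPropertyMapping, so only the "samuel"
// record should survive both filters into the sink.
triggeredWorkflow.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);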
Also used: io.cdap.cdap.test.ApplicationManager, io.cdap.cdap.api.dataset.table.Table, io.cdap.cdap.api.dataset.lib.KeyValueTable, io.cdap.cdap.api.data.schema.Schema, io.cdap.cdap.proto.id.WorkflowId, io.cdap.cdap.proto.id.ScheduleId, io.cdap.cdap.api.data.format.StructuredRecord, io.cdap.cdap.proto.artifact.AppRequest, io.cdap.cdap.etl.proto.v2.ETLBatchConfig, io.cdap.cdap.etl.proto.v2.ETLStage, io.cdap.cdap.etl.proto.v2.TriggeringPropertyMapping, io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger, io.cdap.cdap.proto.ScheduleDetail, io.cdap.cdap.proto.id.ApplicationId, io.cdap.cdap.api.workflow.ScheduleProgramInfo

Example 2 with ProgramStatusTrigger

Use of io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.

From class ScheduleTaskRunnerTest, method testRuntimeArgumentResolution.

@Test
public void testRuntimeArgumentResolution() throws Exception {
    Injector injector = Guice.createInjector(new ConfigModule(), binder -> {
        binder.bind(OwnerAdmin.class).to(NoOpOwnerAdmin.class);
        binder.bind(NamespaceAdmin.class).to(InMemoryNamespaceAdmin.class);
        binder.bind(NamespaceQueryAdmin.class).to(InMemoryNamespaceAdmin.class);
        binder.bind(PreferencesFetcher.class).toInstance(new FakePreferencesFetcher(Collections.singletonMap("key", "should-be-overridden")));
    });
    PropertiesResolver propertiesResolver = injector.getInstance(PropertiesResolver.class);
    ApplicationId appId = NamespaceId.DEFAULT.app("app");
    ProgramSchedule programSchedule = new ProgramSchedule("schedule", "desc", appId.workflow("wf2"),
                                                          Collections.singletonMap("key", "val"),
                                                          new ProgramStatusTrigger(appId.workflow("wf1")),
                                                          Collections.emptyList());
    Map<String, String> userArgs = ScheduleTaskRunner.getUserArgs(programSchedule, propertiesResolver);
    Assert.assertEquals("val", userArgs.get("key"));
}
Also used: io.cdap.cdap.internal.app.services.PropertiesResolver, io.cdap.cdap.common.guice.ConfigModule, io.cdap.cdap.security.impersonation.NoOpOwnerAdmin, io.cdap.cdap.security.impersonation.OwnerAdmin, io.cdap.cdap.common.namespace.InMemoryNamespaceAdmin, io.cdap.cdap.common.namespace.NamespaceAdmin, com.google.inject.Injector, io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger, io.cdap.cdap.common.namespace.NamespaceQueryAdmin, io.cdap.cdap.metadata.FakePreferencesFetcher, io.cdap.cdap.metadata.PreferencesFetcher, io.cdap.cdap.proto.id.ApplicationId, org.junit.Test

Example 3 with ProgramStatusTrigger

Use of io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.

From class ProgramScheduleStoreDatasetTest, method testDeleteScheduleByTriggeringProgram.

@Test
public void testDeleteScheduleByTriggeringProgram() {
    TransactionRunner transactionRunner = getTransactionRunner();
    SatisfiableTrigger prog1Trigger = new ProgramStatusTrigger(PROG1_ID, ProgramStatus.COMPLETED, ProgramStatus.FAILED, ProgramStatus.KILLED);
    SatisfiableTrigger prog2Trigger = new ProgramStatusTrigger(PROG2_ID, ProgramStatus.COMPLETED, ProgramStatus.FAILED, ProgramStatus.KILLED);
    final ProgramSchedule sched1 = new ProgramSchedule("sched1", "a program status trigger", PROG3_ID, ImmutableMap.of("propper", "popper"), prog1Trigger, ImmutableList.<Constraint>of());
    final ProgramSchedule sched2 = new ProgramSchedule("sched2", "a program status trigger", PROG3_ID, ImmutableMap.of("propper", "popper"), prog2Trigger, ImmutableList.<Constraint>of());
    final ProgramSchedule schedOr = new ProgramSchedule("schedOr", "an OR trigger", PROG3_ID, ImmutableMap.of("propper", "popper"),
        new OrTrigger(new PartitionTrigger(DS1_ID, 1),
                      prog1Trigger,
                      new AndTrigger(new OrTrigger(prog1Trigger, prog2Trigger), new PartitionTrigger(DS2_ID, 1)),
                      new OrTrigger(prog2Trigger)),
        ImmutableList.<Constraint>of());
    final ProgramSchedule schedAnd = new ProgramSchedule("schedAnd", "an AND trigger", PROG3_ID, ImmutableMap.of("propper", "popper"),
        new AndTrigger(new PartitionTrigger(DS1_ID, 1),
                       prog2Trigger,
                       new AndTrigger(prog1Trigger, new PartitionTrigger(DS2_ID, 1))),
        ImmutableList.<Constraint>of());
    TransactionRunners.run(transactionRunner, context -> {
        ProgramScheduleStoreDataset store = Schedulers.getScheduleStore(context);
        store.addSchedules(ImmutableList.of(sched1, sched2, schedOr, schedAnd));
    });
    TransactionRunners.run(transactionRunner, context -> {
        ProgramScheduleStoreDataset store = Schedulers.getScheduleStore(context);
        // ProgramStatus event for PROG1_ID should trigger only sched1, schedOr, schedAnd
        Assert.assertEquals(ImmutableSet.of(sched1, schedOr, schedAnd), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED))));
        // ProgramStatus event for PROG2_ID should trigger only sched2, schedOr, schedAnd
        Assert.assertEquals(ImmutableSet.of(sched2, schedOr, schedAnd), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.FAILED))));
    });
    // update or delete all schedules triggered by PROG1_ID
    TransactionRunners.run(transactionRunner, context -> {
        ProgramScheduleStoreDataset store = Schedulers.getScheduleStore(context);
        store.modifySchedulesTriggeredByDeletedProgram(PROG1_ID);
    });
    final ProgramSchedule schedOrNew = new ProgramSchedule("schedOr", "an OR trigger", PROG3_ID, ImmutableMap.of("propper", "popper"),
        new OrTrigger(new PartitionTrigger(DS1_ID, 1),
                      new AndTrigger(prog2Trigger, new PartitionTrigger(DS2_ID, 1)),
                      prog2Trigger),
        ImmutableList.of());
    TransactionRunners.run(transactionRunner, context -> {
        ProgramScheduleStoreDataset store = Schedulers.getScheduleStore(context);
        // ProgramStatus event for PROG1_ID should trigger no schedules after modifying schedules triggered by it
        Assert.assertEquals(Collections.emptySet(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED))));
        Assert.assertEquals(Collections.emptySet(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.FAILED))));
        Assert.assertEquals(Collections.emptySet(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.KILLED))));
        // ProgramStatus event for PROG2_ID should trigger only sched2 and schedOrNew
        Assert.assertEquals(ImmutableSet.of(sched2, schedOrNew), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.FAILED))));
    });
    // update or delete all schedules triggered by PROG2_ID
    TransactionRunners.run(transactionRunner, context -> {
        ProgramScheduleStoreDataset store = Schedulers.getScheduleStore(context);
        store.modifySchedulesTriggeredByDeletedProgram(PROG2_ID);
    });
    final ProgramSchedule schedOrNew1 = new ProgramSchedule("schedOr", "an OR trigger", PROG3_ID, ImmutableMap.of("propper", "popper"), new PartitionTrigger(DS1_ID, 1), ImmutableList.of());
    final Set<ProgramSchedule> ds1Schedules = new HashSet<>();
    TransactionRunners.run(transactionRunner, context -> {
        ProgramScheduleStoreDataset store = Schedulers.getScheduleStore(context);
        // ProgramStatus event for PROG2_ID should trigger no schedules after modifying schedules triggered by it
        Assert.assertEquals(Collections.emptySet(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.COMPLETED))));
        Assert.assertEquals(Collections.emptySet(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.FAILED))));
        Assert.assertEquals(Collections.emptySet(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.KILLED))));
        // event for DS1 should trigger only schedOrNew1 since all other schedules are deleted
        ds1Schedules.addAll(toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID))));
    });
    Assert.assertEquals(ImmutableSet.of(schedOrNew1), ds1Schedules);
}
Also used: io.cdap.cdap.internal.app.runtime.schedule.trigger.SatisfiableTrigger, io.cdap.cdap.internal.app.runtime.schedule.ProgramSchedule, io.cdap.cdap.internal.app.runtime.schedule.trigger.OrTrigger, io.cdap.cdap.spi.data.transaction.TransactionRunner, io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger, io.cdap.cdap.internal.app.runtime.schedule.trigger.PartitionTrigger, io.cdap.cdap.internal.app.runtime.schedule.trigger.AndTrigger, java.util.HashSet, org.junit.Test

Example 4 with ProgramStatusTrigger

Use of io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.

From class ConfiguratorTest, method testAppWithConfig.

@Test
public void testAppWithConfig() throws Exception {
    LocationFactory locationFactory = new LocalLocationFactory(TMP_FOLDER.newFolder());
    Location appJar = AppJarHelper.createDeploymentJar(locationFactory, ConfigTestApp.class);
    Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, ConfigTestApp.class.getSimpleName(), "1.0.0");
    CConfiguration cConf = CConfiguration.create();
    ArtifactRepository baseArtifactRepo = new DefaultArtifactRepository(cConf, null, null, null, new DummyProgramRunnerFactory(), new DefaultImpersonator(cConf, null));
    ArtifactRepository artifactRepo = new AuthorizationArtifactRepository(baseArtifactRepo, authEnforcer, authenticationContext);
    PluginFinder pluginFinder = new LocalPluginFinder(artifactRepo);
    ConfigTestApp.ConfigClass config = new ConfigTestApp.ConfigClass("myTable");
    AppDeploymentInfo appDeploymentInfo = new AppDeploymentInfo(artifactId.toEntityId(), appJar, NamespaceId.DEFAULT, new ApplicationClass(ConfigTestApp.class.getName(), "", null), null, null, new Gson().toJson(config));
    // Create a configurator that is testable. Provide it an application.
    Configurator configurator = new InMemoryConfigurator(cConf, pluginFinder, new DefaultImpersonator(cConf, null), artifactRepo, null, appDeploymentInfo);
    ListenableFuture<ConfigResponse> result = configurator.config();
    ConfigResponse response = result.get(10, TimeUnit.SECONDS);
    Assert.assertNotNull(response);
    AppSpecInfo appSpecInfo = response.getAppSpecInfo();
    if (appSpecInfo == null) {
        throw new IllegalStateException("Failed to deploy application");
    }
    ApplicationSpecification specification = appSpecInfo.getAppSpec();
    Assert.assertNotNull(specification);
    Assert.assertEquals(1, specification.getDatasets().size());
    Assert.assertTrue(specification.getDatasets().containsKey("myTable"));
    // Create a deployment info without the app configuration
    appDeploymentInfo = new AppDeploymentInfo(artifactId.toEntityId(), appJar, NamespaceId.DEFAULT, new ApplicationClass(ConfigTestApp.class.getName(), "", null), null, null, null);
    Configurator configuratorWithoutConfig = new InMemoryConfigurator(cConf, pluginFinder, new DefaultImpersonator(cConf, null), artifactRepo, null, appDeploymentInfo);
    result = configuratorWithoutConfig.config();
    response = result.get(10, TimeUnit.SECONDS);
    Assert.assertNotNull(response);
    appSpecInfo = response.getAppSpecInfo();
    if (appSpecInfo == null) {
        throw new IllegalStateException("Failed to deploy application");
    }
    specification = appSpecInfo.getAppSpec();
    Assert.assertNotNull(specification);
    Assert.assertEquals(1, specification.getDatasets().size());
    Assert.assertTrue(specification.getDatasets().containsKey(ConfigTestApp.DEFAULT_TABLE));
    Assert.assertNotNull(specification.getProgramSchedules().get(ConfigTestApp.SCHEDULE_NAME));
    ProgramStatusTrigger trigger = (ProgramStatusTrigger) specification.getProgramSchedules().get(ConfigTestApp.SCHEDULE_NAME).getTrigger();
    Assert.assertEquals(trigger.getProgramId().getProgram(), ConfigTestApp.WORKFLOW_NAME);
}
Also used: io.cdap.cdap.api.app.ApplicationSpecification, io.cdap.cdap.internal.app.runtime.artifact.LocalPluginFinder, io.cdap.cdap.app.deploy.Configurator, io.cdap.cdap.internal.app.runtime.artifact.DefaultArtifactRepository, com.google.gson.Gson, io.cdap.cdap.app.deploy.ConfigResponse, io.cdap.cdap.internal.app.deploy.pipeline.AppDeploymentInfo, io.cdap.cdap.internal.app.runtime.artifact.PluginFinder, io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger, org.apache.twill.filesystem.LocalLocationFactory, io.cdap.cdap.ConfigTestApp, io.cdap.cdap.internal.app.runtime.artifact.AuthorizationArtifactRepository, io.cdap.cdap.app.runtime.DummyProgramRunnerFactory, io.cdap.cdap.internal.app.deploy.pipeline.AppSpecInfo, io.cdap.cdap.api.artifact.ApplicationClass, io.cdap.cdap.internal.app.runtime.artifact.ArtifactRepository, io.cdap.cdap.common.conf.CConfiguration, io.cdap.cdap.security.impersonation.DefaultImpersonator, org.apache.twill.filesystem.LocationFactory, io.cdap.cdap.proto.id.NamespaceId, io.cdap.cdap.common.id.Id, org.apache.twill.filesystem.Location, org.junit.Test

Example 5 with ProgramStatusTrigger

Use of io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.

From class ProgramScheduleStoreDataset, method modifySchedulesTriggeredByDeletedProgram.

/**
 * Update all schedules that can be triggered by the given deleted program. A schedule is removed if its only
 * trigger is a {@link ProgramStatusTrigger} on the deleted program. A schedule with a composite trigger is
 * updated if the composite trigger can still be satisfied after the program is deleted; otherwise the schedule
 * is deleted as well.
 *
 * @param programId the id of the deleted program
 * @return the schedules that were deleted
 */
public List<ProgramSchedule> modifySchedulesTriggeredByDeletedProgram(ProgramId programId) throws IOException {
    long deleteTime = System.currentTimeMillis();
    List<ProgramSchedule> deleted = new ArrayList<>();
    Set<ProgramScheduleRecord> scheduleRecords = new HashSet<>();
    for (ProgramStatus status : ProgramStatus.values()) {
        scheduleRecords.addAll(findSchedules(Schedulers.triggerKeyForProgramStatus(programId, status)));
    }
    for (ProgramScheduleRecord scheduleRecord : scheduleRecords) {
        ProgramSchedule schedule = scheduleRecord.getSchedule();
        markScheduleAsDeleted(schedule.getScheduleId(), deleteTime);
        triggerStore.deleteAll(Range.singleton(getScheduleKeys(schedule.getScheduleId())));
        if (schedule.getTrigger() instanceof AbstractSatisfiableCompositeTrigger) {
            // get the updated composite trigger by removing the program status trigger of the given program
            Trigger updatedTrigger = ((AbstractSatisfiableCompositeTrigger) schedule.getTrigger()).getTriggerWithDeletedProgram(programId);
            if (updatedTrigger == null) {
                deleted.add(schedule);
                continue;
            }
            // if the updated composite trigger is not null, add the schedule back with updated composite trigger
            try {
                addScheduleWithStatus(new ProgramSchedule(schedule.getName(), schedule.getDescription(), schedule.getProgramId(),
                                                          schedule.getProperties(), updatedTrigger, schedule.getConstraints(),
                                                          schedule.getTimeoutMillis()),
                                      scheduleRecord.getMeta().getStatus(), System.currentTimeMillis());
            } catch (AlreadyExistsException e) {
                // this should never happen
                LOG.warn("Failed to add the schedule '{}' triggered by '{}' with updated trigger '{}', " + "skip adding this schedule.", schedule.getScheduleId(), programId, updatedTrigger, e);
            }
        } else {
            deleted.add(schedule);
        }
    }
    return deleted;
}
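A small illustrative sketch of the composite-trigger pruning this method relies on, reusing the trigger classes from the examples above. PROG1_ID and DS1_ID stand in for any valid program and dataset ids, and the expected results are inferred from the ProgramScheduleStoreDatasetTest example above rather than taken from the pruning implementation itself.

// (PROG1 completed) OR (new partition in DS1): after PROG1 is deleted the trigger
// can still be satisfied, so the schedule is kept with the pruned trigger.
SatisfiableTrigger orTrigger = new OrTrigger(
    new ProgramStatusTrigger(PROG1_ID, ProgramStatus.COMPLETED),
    new PartitionTrigger(DS1_ID, 1));
Trigger prunedOr = ((AbstractSatisfiableCompositeTrigger) orTrigger)
    .getTriggerWithDeletedProgram(PROG1_ID);   // expected: non-null, partition-based only

// (PROG1 completed) AND (new partition in DS1): without PROG1 the conjunction can
// never be satisfied, so pruning is expected to yield null and the schedule is deleted.
SatisfiableTrigger andTrigger = new AndTrigger(
    new ProgramStatusTrigger(PROG1_ID, ProgramStatus.COMPLETED),
    new PartitionTrigger(DS1_ID, 1));
Trigger prunedAnd = ((AbstractSatisfiableCompositeTrigger) andTrigger)
    .getTriggerWithDeletedProgram(PROG1_ID);   // expected: null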
Also used: io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger, io.cdap.cdap.internal.app.runtime.schedule.trigger.SatisfiableTrigger, io.cdap.cdap.api.schedule.Trigger, io.cdap.cdap.internal.app.runtime.schedule.trigger.AbstractSatisfiableCompositeTrigger, io.cdap.cdap.common.AlreadyExistsException, io.cdap.cdap.internal.app.runtime.schedule.ProgramSchedule, java.util.ArrayList, io.cdap.cdap.internal.app.runtime.schedule.ProgramScheduleRecord, java.util.HashSet, io.cdap.cdap.api.ProgramStatus
