
Example 1 with TriggeringScheduleInfo

Use of io.cdap.cdap.api.schedule.TriggeringScheduleInfo in project cdap by caskdata.

From class CoreSchedulerServiceTest, method testRunScheduledJobs.

@Test
@Category(XSlowTests.class)
public void testRunScheduledJobs() throws Exception {
    CConfiguration cConf = getInjector().getInstance(CConfiguration.class);
    dataEventTopic = NamespaceId.SYSTEM.topic(cConf.get(Constants.Dataset.DATA_EVENT_TOPIC));
    // Deploy the app with version
    Id.Artifact appArtifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "appwithschedules", VERSION1);
    addAppArtifact(appArtifactId, AppWithFrequentScheduledWorkflows.class);
    AppRequest<? extends Config> appRequest = new AppRequest<>(new ArtifactSummary(appArtifactId.getName(), appArtifactId.getVersion().getVersion()));
    deploy(APP_ID, appRequest);
    // Resume the schedule because schedules are initialized as paused
    enableSchedule(AppWithFrequentScheduledWorkflows.TEN_SECOND_SCHEDULE_1);
    enableSchedule(AppWithFrequentScheduledWorkflows.TEN_SECOND_SCHEDULE_2);
    enableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_1);
    enableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_2);
    for (int i = 0; i < 5; i++) {
        testNewPartition(i + 1);
    }
    // Enable COMPOSITE_SCHEDULE before publishing events to DATASET_NAME2
    enableSchedule(AppWithFrequentScheduledWorkflows.COMPOSITE_SCHEDULE);
    // disable the two partition schedules, send them notifications (but they should not trigger)
    int runs1 = getRuns(WORKFLOW_1, ProgramRunStatus.ALL);
    int runs2 = getRuns(WORKFLOW_2, ProgramRunStatus.ALL);
    disableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_1);
    // ensure schedule 2 is disabled after schedule 1
    Thread.sleep(BUFFER);
    long disableBeforeTime = System.currentTimeMillis();
    disableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_2);
    long disableAfterTime = System.currentTimeMillis() + 1;
    publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME1);
    long minPublishTime = System.currentTimeMillis();
    publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME2);
    // This would make sure the subscriber has processed the data event
    waitUntilProcessed(dataEventTopic, minPublishTime);
    // Both workflows must run at least once.
    // If the testNewPartition() loop took longer than expected, it may be more (quartz fired multiple times)
    Tasks.waitFor(true, () -> getRuns(SCHEDULED_WORKFLOW_1, ProgramRunStatus.COMPLETED) > 0 && getRuns(SCHEDULED_WORKFLOW_2, ProgramRunStatus.COMPLETED) > 0, 10, TimeUnit.SECONDS);
    // There shouldn't be any partition trigger in the job queue
    Assert.assertFalse(Iterables.any(getAllJobs(), job -> job.getSchedule().getTrigger() instanceof ProtoTrigger.PartitionTrigger));
    ProgramId compositeWorkflow = APP_ID.workflow(AppWithFrequentScheduledWorkflows.COMPOSITE_WORKFLOW);
    // Workflow scheduled with the composite trigger has never been started
    Assert.assertEquals(0, getRuns(compositeWorkflow, ProgramRunStatus.ALL));
    // Publish two more new partition notifications to satisfy the partition trigger in the composite trigger,
    // and thus the whole composite trigger will be satisfied
    publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME2);
    minPublishTime = System.currentTimeMillis();
    publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME2);
    // This would make sure the subscriber has processed the data event
    waitUntilProcessed(dataEventTopic, minPublishTime);
    // Wait for 1 run to complete for compositeWorkflow
    waitForCompleteRuns(1, compositeWorkflow);
    for (RunRecordDetail runRecordMeta : store.getRuns(SCHEDULED_WORKFLOW_1, ProgramRunStatus.ALL, 0, Long.MAX_VALUE, Integer.MAX_VALUE).values()) {
        Map<String, String> sysArgs = runRecordMeta.getSystemArgs();
        Assert.assertNotNull(sysArgs);
        TriggeringScheduleInfo scheduleInfo = GSON.fromJson(sysArgs.get(ProgramOptionConstants.TRIGGERING_SCHEDULE_INFO), TriggeringScheduleInfo.class);
        Assert.assertEquals(AppWithFrequentScheduledWorkflows.TEN_SECOND_SCHEDULE_1, scheduleInfo.getName());
        List<TriggerInfo> triggerInfos = scheduleInfo.getTriggerInfos();
        // Only one notification is enough to satisfy Time Trigger
        Assert.assertEquals(1, triggerInfos.size());
        Assert.assertEquals(TriggerInfo.Type.TIME, triggerInfos.get(0).getType());
    }
    // Also verify that the two partition schedules did not trigger
    Assert.assertEquals(runs1, getRuns(WORKFLOW_1, ProgramRunStatus.ALL));
    Assert.assertEquals(runs2, getRuns(WORKFLOW_2, ProgramRunStatus.ALL));
    // enable partition schedule 2 and test reEnableSchedules
    scheduler.reEnableSchedules(NamespaceId.DEFAULT, disableBeforeTime, disableAfterTime);
    Assert.assertEquals(ProgramScheduleStatus.SCHEDULED, scheduler.getScheduleStatus(APP_ID.schedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_2)));
    Assert.assertEquals(ProgramScheduleStatus.SUSPENDED, scheduler.getScheduleStatus(APP_ID.schedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_1)));
    testScheduleUpdate("disable");
    testScheduleUpdate("update");
    testScheduleUpdate("delete");
}
Also used : RunRecordDetail(io.cdap.cdap.internal.app.store.RunRecordDetail) WorkflowTokenDetailCodec(io.cdap.cdap.proto.codec.WorkflowTokenDetailCodec) TypeToken(com.google.gson.reflect.TypeToken) StoreRequestBuilder(io.cdap.cdap.messaging.client.StoreRequestBuilder) TransactionRunners(io.cdap.cdap.spi.data.transaction.TransactionRunners) NamespaceId(io.cdap.cdap.proto.id.NamespaceId) Notification(io.cdap.cdap.proto.Notification) HttpResponse(io.cdap.common.http.HttpResponse) XSlowTests(io.cdap.cdap.test.XSlowTests) Bytes(io.cdap.cdap.api.common.Bytes) AlreadyExistsException(io.cdap.cdap.common.AlreadyExistsException) GsonBuilder(com.google.gson.GsonBuilder) ProgramScheduleStatus(io.cdap.cdap.internal.app.runtime.schedule.ProgramScheduleStatus) AppWithFrequentScheduledWorkflows(io.cdap.cdap.AppWithFrequentScheduledWorkflows) ProgramStateWriter(io.cdap.cdap.app.runtime.ProgramStateWriter) SimpleProgramOptions(io.cdap.cdap.internal.app.runtime.SimpleProgramOptions) DatasetId(io.cdap.cdap.proto.id.DatasetId) ScheduleId(io.cdap.cdap.proto.id.ScheduleId) Gson(com.google.gson.Gson) ArtifactSummary(io.cdap.cdap.api.artifact.ArtifactSummary) WorkflowTokenDetail(io.cdap.cdap.proto.WorkflowTokenDetail) Map(java.util.Map) AppWithMultipleSchedules(io.cdap.cdap.AppWithMultipleSchedules) ClassRule(org.junit.ClassRule) Tasks(io.cdap.cdap.common.utils.Tasks) AfterClass(org.junit.AfterClass) ImmutableMap(com.google.common.collect.ImmutableMap) ApplicationSpecification(io.cdap.cdap.api.app.ApplicationSpecification) MessagingService(io.cdap.cdap.messaging.MessagingService) PartitionKey(io.cdap.cdap.api.dataset.lib.PartitionKey) Constraint(io.cdap.cdap.internal.schedule.constraint.Constraint) ProgramRunStatus(io.cdap.cdap.proto.ProgramRunStatus) Category(org.junit.experimental.categories.Category) Id(io.cdap.cdap.common.id.Id) MessageId(io.cdap.cdap.messaging.data.MessageId) List(java.util.List) TransactionRunner(io.cdap.cdap.spi.data.transaction.TransactionRunner) Constants(io.cdap.cdap.common.conf.Constants) ProfileId(io.cdap.cdap.proto.id.ProfileId) ProgramOptionConstants(io.cdap.cdap.internal.app.runtime.ProgramOptionConstants) TimeTrigger(io.cdap.cdap.internal.app.runtime.schedule.trigger.TimeTrigger) ApplicationId(io.cdap.cdap.proto.id.ApplicationId) NotFoundException(io.cdap.cdap.common.NotFoundException) RunRecord(io.cdap.cdap.proto.RunRecord) TriggerInfo(io.cdap.cdap.api.schedule.TriggerInfo) ProfileConflictException(io.cdap.cdap.common.ProfileConflictException) Iterables(com.google.common.collect.Iterables) WorkflowId(io.cdap.cdap.proto.id.WorkflowId) BeforeClass(org.junit.BeforeClass) MessagingProgramStateWriter(io.cdap.cdap.internal.app.program.MessagingProgramStateWriter) AppFabricTestBase(io.cdap.cdap.internal.app.services.http.AppFabricTestBase) JobQueueTable(io.cdap.cdap.internal.app.runtime.schedule.queue.JobQueueTable) TopicId(io.cdap.cdap.proto.id.TopicId) ProgramType(io.cdap.cdap.proto.ProgramType) WorkflowToken(io.cdap.cdap.api.workflow.WorkflowToken) PartitionTrigger(io.cdap.cdap.internal.app.runtime.schedule.trigger.PartitionTrigger) ProgramRunId(io.cdap.cdap.proto.id.ProgramRunId) Lists(com.google.common.collect.Lists) ImmutableList(com.google.common.collect.ImmutableList) ProgramOptions(io.cdap.cdap.app.runtime.ProgramOptions) TriggeringScheduleInfoAdapter(io.cdap.cdap.internal.app.runtime.schedule.TriggeringScheduleInfoAdapter) Profile(io.cdap.cdap.proto.profile.Profile) SystemArguments(io.cdap.cdap.internal.app.runtime.SystemArguments) 
TriggeringScheduleInfo(io.cdap.cdap.api.schedule.TriggeringScheduleInfo) Nullable(javax.annotation.Nullable) ProtoTrigger(io.cdap.cdap.proto.ProtoTrigger) DefaultApplicationSpecification(io.cdap.cdap.internal.app.DefaultApplicationSpecification) RunIds(io.cdap.cdap.common.app.RunIds) ProgramId(io.cdap.cdap.proto.id.ProgramId) Config(io.cdap.cdap.api.Config) ProgramDescriptor(io.cdap.cdap.app.program.ProgramDescriptor) Test(org.junit.Test) ConflictException(io.cdap.cdap.common.ConflictException) ProjectInfo(io.cdap.cdap.common.utils.ProjectInfo) CloseableIterator(io.cdap.cdap.api.dataset.lib.CloseableIterator) Service(com.google.common.util.concurrent.Service) ProgramSchedule(io.cdap.cdap.internal.app.runtime.schedule.ProgramSchedule) Store(io.cdap.cdap.app.store.Store) Job(io.cdap.cdap.internal.app.runtime.schedule.queue.Job) TimeUnit(java.util.concurrent.TimeUnit) CConfiguration(io.cdap.cdap.common.conf.CConfiguration) AppRequest(io.cdap.cdap.proto.artifact.AppRequest) Assert(org.junit.Assert) Collections(java.util.Collections) ArtifactId(io.cdap.cdap.api.artifact.ArtifactId) TemporaryFolder(org.junit.rules.TemporaryFolder) BasicArguments(io.cdap.cdap.internal.app.runtime.BasicArguments)
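
The per-run assertions in the loop above can be distilled into a small helper. The sketch below is a hypothetical refactoring (the class and method names are invented for illustration) that uses only the TriggeringScheduleInfo and TriggerInfo calls already exercised in this test: the schedule name must match, and the run must have been satisfied by exactly one time trigger.

// Hypothetical test helper condensing the assertions from the loop above.
// It assumes a deserialized TriggeringScheduleInfo is already available.
import java.util.List;

import org.junit.Assert;

import io.cdap.cdap.api.schedule.TriggerInfo;
import io.cdap.cdap.api.schedule.TriggeringScheduleInfo;

final class ScheduleInfoAssertions {

    static void assertSingleTimeTrigger(TriggeringScheduleInfo scheduleInfo, String expectedScheduleName) {
        Assert.assertNotNull(scheduleInfo);
        // The run must have been launched by the expected schedule.
        Assert.assertEquals(expectedScheduleName, scheduleInfo.getName());
        List<TriggerInfo> triggerInfos = scheduleInfo.getTriggerInfos();
        // A time trigger is satisfied by a single notification, so exactly one trigger info is expected.
        Assert.assertEquals(1, triggerInfos.size());
        Assert.assertEquals(TriggerInfo.Type.TIME, triggerInfos.get(0).getType());
    }

    private ScheduleInfoAssertions() {
    }
}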

Example 2 with TriggeringScheduleInfo

Use of io.cdap.cdap.api.schedule.TriggeringScheduleInfo in project cdap by caskdata.

From class OperationsDashboardHttpHandler, method runRecordToDashboardRecord.

/**
 * Converts a {@link RunRecordDetail} to a {@link DashboardProgramRunRecord}
 */
@VisibleForTesting
static DashboardProgramRunRecord runRecordToDashboardRecord(RunRecordDetail meta) throws IOException {
    ProgramRunId runId = meta.getProgramRunId();
    String startMethod = MANUAL;
    String scheduleInfoJson = meta.getSystemArgs().get(ProgramOptionConstants.TRIGGERING_SCHEDULE_INFO);
    if (scheduleInfoJson != null) {
        TriggeringScheduleInfo scheduleInfo = GSON.fromJson(scheduleInfoJson, TriggeringScheduleInfo.class);
        // assume there's no composite trigger, since composite is not supported in the UI yet
        startMethod = scheduleInfo.getTriggerInfos().stream().findFirst().map(trigger -> TriggerInfo.Type.TIME.equals(trigger.getType()) ? SCHEDULED : TRIGGERED).orElse(MANUAL);
    }
    String user = meta.getPrincipal();
    if (user != null) {
        user = new KerberosName(user).getShortName();
    }
    return new DashboardProgramRunRecord(runId, meta, meta.getArtifactId(), user, startMethod);
}
Also used : DashboardProgramRunRecord(io.cdap.cdap.proto.ops.DashboardProgramRunRecord) TriggeringScheduleInfo(io.cdap.cdap.api.schedule.TriggeringScheduleInfo) ProgramRunId(io.cdap.cdap.proto.id.ProgramRunId) KerberosName(org.apache.hadoop.security.authentication.util.KerberosName) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
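
The start-method decision above can also be read as a standalone rule: no TRIGGERING_SCHEDULE_INFO argument means a manual run, a first trigger of type TIME means a scheduled run, and any other trigger type means a triggered run. The sketch below is a hypothetical helper illustrating that rule; the plain strings stand in for the handler's MANUAL, SCHEDULED, and TRIGGERED constants, whose values are not shown in this snippet.

// Hypothetical helper mirroring the classification in runRecordToDashboardRecord.
// The string literals stand in for the handler's MANUAL / SCHEDULED / TRIGGERED constants.
import io.cdap.cdap.api.schedule.TriggerInfo;
import io.cdap.cdap.api.schedule.TriggeringScheduleInfo;

final class StartMethods {

    static String classify(TriggeringScheduleInfo scheduleInfo) {
        if (scheduleInfo == null) {
            // No triggering schedule info was recorded: the run was started manually.
            return "manual";
        }
        // Same rule as above: only the first trigger is inspected; composite triggers are not handled.
        return scheduleInfo.getTriggerInfos().stream()
            .findFirst()
            .map(trigger -> TriggerInfo.Type.TIME.equals(trigger.getType()) ? "scheduled" : "triggered")
            .orElse("manual");
    }

    private StartMethods() {
    }
}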

Example 3 with TriggeringScheduleInfo

Use of io.cdap.cdap.api.schedule.TriggeringScheduleInfo in project cdap by caskdata.

From class OperationsDashboardHttpHandlerTest, method testDashboardReadWithScheduledRuns.

@Test
public void testDashboardReadWithScheduledRuns() throws Exception {
    long startTime1 = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
    RunId runId1 = RunIds.generate();
    NamespaceId ns3 = new NamespaceId("ns3");
    List<TriggerInfo> triggerInfos = new ArrayList<>();
    triggerInfos.add(new DefaultTimeTriggerInfo("*/5 * * * *", startTime1));
    TriggeringScheduleInfo triggeringScheduleInfo = new DefaultTriggeringScheduleInfo("test", "test", triggerInfos, new HashMap<>());
    RunRecordDetail.Builder metaBuilder = getMockRunRecordMeta(ns3, runId1);
    metaBuilder.setRunTime(startTime1);
    metaBuilder.setStatus(ProgramRunStatus.RUNNING);
    Map<String, String> systemArgs = ImmutableMap.of(ProgramOptionConstants.TRIGGERING_SCHEDULE_INFO, GSON.toJson(triggeringScheduleInfo));
    metaBuilder.setSystemArgs(systemArgs);
    RunRecordDetail meta = metaBuilder.build();
    writeRunRecordMeta(meta, startTime1);
    // write heartbeat messages for 5 minutes (every minute) for this program run.
    long endTime = startTime1 + TimeUnit.MINUTES.toSeconds(5);
    long interval = TimeUnit.MINUTES.toSeconds(1);
    setUpProgramHeartBeats(meta, startTime1, endTime, interval);
    String opsDashboardQueryPath = String.format("%s/dashboard?start=%s&duration=%s&namespace=%s", BASE_PATH, String.valueOf(startTime1), String.valueOf(endTime), ns3.getNamespace());
    // get ops dashboard query results
    HttpResponse response = doGet(opsDashboardQueryPath);
    Assert.assertEquals(200, response.getResponseCode());
    String content = response.getResponseBodyAsString();
    List<DashboardProgramRunRecord> dashboardDetail = GSON.fromJson(content, DASHBOARD_DETAIL_TYPE);
    // assert the result contains 1 entry
    Assert.assertEquals(1, dashboardDetail.size());
    Assert.assertEquals(OperationsDashboardHttpHandler.runRecordToDashboardRecord(meta), dashboardDetail.iterator().next());
}
Also used : DefaultTriggeringScheduleInfo(io.cdap.cdap.internal.app.runtime.schedule.DefaultTriggeringScheduleInfo) DefaultTimeTriggerInfo(io.cdap.cdap.internal.app.runtime.schedule.trigger.DefaultTimeTriggerInfo) RunRecordDetail(io.cdap.cdap.internal.app.store.RunRecordDetail) DefaultTimeTriggerInfo(io.cdap.cdap.internal.app.runtime.schedule.trigger.DefaultTimeTriggerInfo) TriggerInfo(io.cdap.cdap.api.schedule.TriggerInfo) ArrayList(java.util.ArrayList) DashboardProgramRunRecord(io.cdap.cdap.proto.ops.DashboardProgramRunRecord) DefaultTriggeringScheduleInfo(io.cdap.cdap.internal.app.runtime.schedule.DefaultTriggeringScheduleInfo) TriggeringScheduleInfo(io.cdap.cdap.api.schedule.TriggeringScheduleInfo) HttpResponse(io.cdap.common.http.HttpResponse) NamespaceId(io.cdap.cdap.proto.id.NamespaceId) RunId(org.apache.twill.api.RunId) Test(org.junit.Test)
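
The test parses the response body with a DASHBOARD_DETAIL_TYPE constant that is not shown in this snippet. A plausible definition is sketched below, assuming it is a Gson Type for a list of DashboardProgramRunRecord; whether a bare Gson is enough to deserialize the record depends on its field types, and the test's GSON may register additional adapters.

// Sketch of a plausible DASHBOARD_DETAIL_TYPE: a Gson Type describing
// List<DashboardProgramRunRecord>, used to parse the dashboard response body.
import java.lang.reflect.Type;
import java.util.List;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

import io.cdap.cdap.proto.ops.DashboardProgramRunRecord;

final class DashboardResponses {

    static final Type DASHBOARD_DETAIL_TYPE =
        new TypeToken<List<DashboardProgramRunRecord>>() { }.getType();

    static List<DashboardProgramRunRecord> parse(String responseBody) {
        // A bare Gson is assumed sufficient here; swap in a configured instance if needed.
        return new Gson().fromJson(responseBody, DASHBOARD_DETAIL_TYPE);
    }

    private DashboardResponses() {
    }
}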

Example 4 with TriggeringScheduleInfo

Use of io.cdap.cdap.api.schedule.TriggeringScheduleInfo in project cdap by caskdata.

From class ScheduleTaskRunner, method launch.

public void launch(Job job) throws Exception {
    ProgramSchedule schedule = job.getSchedule();
    ProgramId programId = schedule.getProgramId();
    Map<String, String> userArgs = getUserArgs(schedule, propertiesResolver);
    Map<String, String> systemArgs = new HashMap<>(propertiesResolver.getSystemProperties(programId));
    // Let the triggers update the arguments first before setting the triggering schedule info
    ((SatisfiableTrigger) job.getSchedule().getTrigger()).updateLaunchArguments(job.getSchedule(), job.getNotifications(), userArgs, systemArgs);
    TriggeringScheduleInfo triggeringScheduleInfo = getTriggeringScheduleInfo(job);
    systemArgs.put(ProgramOptionConstants.TRIGGERING_SCHEDULE_INFO, GSON.toJson(triggeringScheduleInfo));
    try {
        execute(programId, systemArgs, userArgs);
        LOG.info("Successfully started program {} in schedule {}.", schedule.getProgramId(), schedule.getName());
    } catch (CapabilityNotAvailableException ex) {
        LOG.debug("Ignoring program {} in schedule {}.", schedule.getProgramId(), schedule.getName(), ex);
    }
}
Also used : SatisfiableTrigger(io.cdap.cdap.internal.app.runtime.schedule.trigger.SatisfiableTrigger) CapabilityNotAvailableException(io.cdap.cdap.internal.capability.CapabilityNotAvailableException) HashMap(java.util.HashMap) TriggeringScheduleInfo(io.cdap.cdap.api.schedule.TriggeringScheduleInfo) ProgramId(io.cdap.cdap.proto.id.ProgramId)
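
The essential flow in launch() is: let the satisfied trigger inject its own launch arguments first, then attach the serialized TriggeringScheduleInfo under the TRIGGERING_SCHEDULE_INFO key. The sketch below isolates that flow as a hypothetical helper, using only the calls visible in the snippet; the Gson parameter stands in for the class's own GSON instance.

// Hypothetical sketch of the argument-preparation flow in ScheduleTaskRunner.launch().
import java.util.HashMap;
import java.util.Map;

import com.google.gson.Gson;

import io.cdap.cdap.api.schedule.TriggeringScheduleInfo;
import io.cdap.cdap.internal.app.runtime.ProgramOptionConstants;
import io.cdap.cdap.internal.app.runtime.schedule.queue.Job;
import io.cdap.cdap.internal.app.runtime.schedule.trigger.SatisfiableTrigger;

final class LaunchArgsSketch {

    static Map<String, String> buildSystemArgs(Job job, TriggeringScheduleInfo scheduleInfo, Gson gson) {
        Map<String, String> userArgs = new HashMap<>();
        Map<String, String> systemArgs = new HashMap<>();
        // The trigger updates the argument maps first, e.g. with partition or program-status details.
        ((SatisfiableTrigger) job.getSchedule().getTrigger())
            .updateLaunchArguments(job.getSchedule(), job.getNotifications(), userArgs, systemArgs);
        // The schedule info is then attached under the well-known system-argument key.
        systemArgs.put(ProgramOptionConstants.TRIGGERING_SCHEDULE_INFO, gson.toJson(scheduleInfo));
        // The real method also passes userArgs to execute(); only systemArgs is returned here for brevity.
        return systemArgs;
    }

    private LaunchArgsSketch() {
    }
}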

Example 5 with TriggeringScheduleInfo

Use of io.cdap.cdap.api.schedule.TriggeringScheduleInfo in project cdap by caskdata.

From class SmartWorkflow, method initialize.

@Override
public void initialize(WorkflowContext context) throws Exception {
    super.initialize(context);
    context.enableFieldLineageConsolidation();
    TriggeringScheduleInfo scheduleInfo = context.getTriggeringScheduleInfo();
    if (scheduleInfo != null) {
        String propertiesMappingString = scheduleInfo.getProperties().get(TRIGGERING_PROPERTIES_MAPPING);
        if (propertiesMappingString != null) {
            TriggeringPropertyMapping propertiesMapping = GSON.fromJson(propertiesMappingString, TriggeringPropertyMapping.class);
            updateTokenWithTriggeringProperties(scheduleInfo, propertiesMapping, context.getToken());
        }
    }
    PipelineRuntime pipelineRuntime = new PipelineRuntime(context, workflowMetrics);
    WRAPPERLOGGER.info("Pipeline '{}' is started by user '{}' with arguments {}", context.getApplicationSpecification().getName(), UserGroupInformation.getCurrentUser().getShortUserName(), pipelineRuntime.getArguments().asMap());
    alertPublishers = new HashMap<>();
    postActions = new LinkedHashMap<>();
    spec = GSON.fromJson(context.getWorkflowSpecification().getProperty(Constants.PIPELINE_SPEC_KEY), BatchPipelineSpec.class);
    stageSpecs = new HashMap<>();
    MacroEvaluator macroEvaluator = new DefaultMacroEvaluator(pipelineRuntime.getArguments(), context.getLogicalStartTime(), context, context, context.getNamespace());
    PluginContext pluginContext = new PipelinePluginContext(context, workflowMetrics, spec.isStageLoggingEnabled(), spec.isProcessTimingEnabled());
    for (ActionSpec actionSpec : spec.getEndingActions()) {
        String stageName = actionSpec.getName();
        postActions.put(stageName, pluginContext.newPluginInstance(stageName, macroEvaluator));
        stageSpecs.put(stageName, StageSpec.builder(stageName, actionSpec.getPluginSpec()).setStageLoggingEnabled(spec.isStageLoggingEnabled()).setProcessTimingEnabled(spec.isProcessTimingEnabled()).setMaxPreviewRecords(spec.getNumOfRecordsPreview()).build());
    }
    for (StageSpec stageSpec : spec.getStages()) {
        String stageName = stageSpec.getName();
        stageSpecs.put(stageName, stageSpec);
        if (AlertPublisher.PLUGIN_TYPE.equals(stageSpec.getPluginType())) {
            AlertPublisher alertPublisher = context.newPluginInstance(stageName, macroEvaluator);
            alertPublishers.put(stageName, alertPublisher);
        }
    }
    WRAPPERLOGGER.info("Pipeline '{}' running", context.getApplicationSpecification().getName());
}
Also used : PipelineRuntime(io.cdap.cdap.etl.common.PipelineRuntime) DefaultMacroEvaluator(io.cdap.cdap.etl.common.DefaultMacroEvaluator) MacroEvaluator(io.cdap.cdap.api.macro.MacroEvaluator) ActionSpec(io.cdap.cdap.etl.batch.ActionSpec) AlertPublisher(io.cdap.cdap.etl.api.AlertPublisher) PipelinePluginContext(io.cdap.cdap.etl.common.plugin.PipelinePluginContext) PluginContext(io.cdap.cdap.api.plugin.PluginContext) TriggeringScheduleInfo(io.cdap.cdap.api.schedule.TriggeringScheduleInfo) BatchPipelineSpec(io.cdap.cdap.etl.batch.BatchPipelineSpec) TriggeringPropertyMapping(io.cdap.cdap.etl.proto.v2.TriggeringPropertyMapping) StageSpec(io.cdap.cdap.etl.proto.v2.spec.StageSpec) DefaultMacroEvaluator(io.cdap.cdap.etl.common.DefaultMacroEvaluator) PipelinePluginContext(io.cdap.cdap.etl.common.plugin.PipelinePluginContext)
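
The null check at the top of initialize() is the general pattern for any program that may be started either manually or by a schedule. The sketch below is a hypothetical helper (not part of SmartWorkflow) that shows the same read-only use of the context's triggering schedule info: its name, properties, and the list of trigger infos.

// Hypothetical helper showing the null-check-then-read pattern from initialize() in isolation.
import java.util.Map;

import io.cdap.cdap.api.schedule.TriggerInfo;
import io.cdap.cdap.api.schedule.TriggeringScheduleInfo;
import io.cdap.cdap.api.workflow.WorkflowContext;

final class TriggerInfoInspector {

    /** Returns a short, human-readable description of how this workflow run was started. */
    static String describeStart(WorkflowContext context) {
        TriggeringScheduleInfo scheduleInfo = context.getTriggeringScheduleInfo();
        if (scheduleInfo == null) {
            // The run was started manually, not by a schedule.
            return "manual run";
        }
        Map<String, String> properties = scheduleInfo.getProperties();
        StringBuilder description = new StringBuilder("started by schedule ").append(scheduleInfo.getName())
            .append(" with ").append(properties.size()).append(" properties");
        for (TriggerInfo trigger : scheduleInfo.getTriggerInfos()) {
            description.append(", trigger type ").append(trigger.getType());
        }
        return description.toString();
    }

    private TriggerInfoInspector() {
    }
}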

Aggregations

TriggeringScheduleInfo (io.cdap.cdap.api.schedule.TriggeringScheduleInfo)6 TriggerInfo (io.cdap.cdap.api.schedule.TriggerInfo)3 Test (org.junit.Test)3 RunRecordDetail (io.cdap.cdap.internal.app.store.RunRecordDetail)2 NamespaceId (io.cdap.cdap.proto.id.NamespaceId)2 ProgramRunId (io.cdap.cdap.proto.id.ProgramRunId)2 DashboardProgramRunRecord (io.cdap.cdap.proto.ops.DashboardProgramRunRecord)2 HttpResponse (io.cdap.common.http.HttpResponse)2 VisibleForTesting (com.google.common.annotations.VisibleForTesting)1 ImmutableList (com.google.common.collect.ImmutableList)1 ImmutableMap (com.google.common.collect.ImmutableMap)1 Iterables (com.google.common.collect.Iterables)1 Lists (com.google.common.collect.Lists)1 Service (com.google.common.util.concurrent.Service)1 Gson (com.google.gson.Gson)1 GsonBuilder (com.google.gson.GsonBuilder)1 TypeToken (com.google.gson.reflect.TypeToken)1 AppWithFrequentScheduledWorkflows (io.cdap.cdap.AppWithFrequentScheduledWorkflows)1 AppWithMultipleSchedules (io.cdap.cdap.AppWithMultipleSchedules)1 WorkflowAppWithFork (io.cdap.cdap.WorkflowAppWithFork)1