Use of io.cdap.cdap.api.schedule.TriggerInfo in project cdap by caskdata.
The class TriggerInfoDeserializer, method deserialize:
@Override
public TriggerInfo deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
  throws JsonParseException {
  if (json == null) {
    return null;
  }
  if (!(json instanceof JsonObject)) {
    throw new JsonParseException("Expected a JsonObject but found a " + json.getClass().getName());
  }
  JsonObject object = (JsonObject) json;
  JsonElement typeJson = object.get("type");
  TriggerInfo.Type triggerType = context.deserialize(typeJson, TriggerInfo.Type.class);
  return new SimpleTriggerInfo(triggerType);
}
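A deserializer like this only takes effect once it is registered with Gson. Below is a minimal sketch of the wiring; the helper class and method names are illustrative, not from the CDAP codebase, and SimpleTriggerInfo is assumed to be on the classpath alongside the deserializer above.

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import io.cdap.cdap.api.schedule.TriggerInfo;

public final class TriggerInfoGsonSetup {
  // Route every TriggerInfo encountered during deserialization through the custom
  // deserializer, which dispatches on the JSON "type" field.
  private static final Gson GSON = new GsonBuilder()
    .registerTypeAdapter(TriggerInfo.class, new TriggerInfoDeserializer())
    .create();

  private TriggerInfoGsonSetup() { }

  public static TriggerInfo parseTriggerInfo(String json) {
    return GSON.fromJson(json, TriggerInfo.class);
  }
}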
Use of io.cdap.cdap.api.schedule.TriggerInfo in project cdap by caskdata.
The class TimeTrigger, method getTriggerInfos:
@Override
public List<TriggerInfo> getTriggerInfos(TriggerInfoContext context) {
  for (Notification notification : context.getNotifications()) {
    if (!isSatisfied(context.getSchedule(), notification)) {
      continue;
    }
    Long logicalStartTime = getLogicalStartTime(notification);
    if (logicalStartTime == null) {
      LOG.warn("The notification '{}' in the job of schedule '{}' does not contain logical start time",
               notification, context.getSchedule());
      continue;
    }
    TriggerInfo triggerInfo = new DefaultTimeTriggerInfo(getCronExpression(), logicalStartTime);
    return Collections.singletonList(triggerInfo);
  }
  return Collections.emptyList();
}
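getLogicalStartTime is not part of the snippet above. A plausible sketch of it follows, assuming the notification carries the logical start time as a string property; the property key used here is an assumption, and @Nullable is javax.annotation.Nullable.

@Nullable
private Long getLogicalStartTime(Notification notification) {
  // Assumed property key; the real constant is defined elsewhere in the CDAP codebase.
  String value = notification.getProperties().get("logical.start.time");
  if (value == null) {
    return null;
  }
  try {
    return Long.parseLong(value);
  } catch (NumberFormatException e) {
    // Treat an unparsable value like a missing one; the caller logs a warning and skips.
    return null;
  }
}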
Use of io.cdap.cdap.api.schedule.TriggerInfo in project cdap by caskdata.
The class SmartWorkflow, method updateTokenWithTriggeringProperties:
private void updateTokenWithTriggeringProperties(TriggeringScheduleInfo scheduleInfo,
                                                 TriggeringPropertyMapping propertiesMapping,
                                                 WorkflowToken token) {
  List<ProgramStatusTriggerInfo> programStatusTriggerInfos = new ArrayList<>();
  for (TriggerInfo info : scheduleInfo.getTriggerInfos()) {
    if (info instanceof ProgramStatusTriggerInfo) {
      programStatusTriggerInfos.add((ProgramStatusTriggerInfo) info);
    }
  }
  // If there is no ProgramStatusTriggerInfo, there is no need to override the existing runtime arguments
  if (programStatusTriggerInfos.isEmpty()) {
    return;
  }
  // Currently we only expect one trigger in a schedule
  ProgramStatusTriggerInfo triggerInfo = programStatusTriggerInfos.get(0);
  BasicArguments triggeringArguments =
    new BasicArguments(triggerInfo.getWorkflowToken(), triggerInfo.getRuntimeArguments());
  // Get the value of every triggering pipeline argument specified in the propertiesMapping
  // and write it to the workflow token
  List<ArgumentMapping> argumentMappings = propertiesMapping.getArguments();
  for (ArgumentMapping mapping : argumentMappings) {
    String sourceKey = mapping.getSource();
    if (sourceKey == null) {
      LOG.warn("The name of the argument from the triggering pipeline cannot be null, "
               + "skipping this argument mapping: '{}'.", mapping);
      continue;
    }
    String value = triggeringArguments.get(sourceKey);
    if (value == null) {
      LOG.warn("Runtime argument '{}' is not found in run '{}' of the triggering pipeline '{}' "
               + "in namespace '{}'", sourceKey, triggerInfo.getRunId(), triggerInfo.getApplicationName(),
               triggerInfo.getNamespace());
      continue;
    }
    // Use the argument name from the triggering pipeline if no target is specified
    String targetKey = mapping.getTarget() == null ? sourceKey : mapping.getTarget();
    token.put(targetKey, value);
  }
  // Get the resolved plugin properties map from the triggering pipeline's workflow token in triggeringArguments
  Map<String, Map<String, String>> resolvedProperties =
    GSON.fromJson(triggeringArguments.get(RESOLVED_PLUGIN_PROPERTIES_MAP), STAGE_PROPERTIES_MAP);
  for (PluginPropertyMapping mapping : propertiesMapping.getPluginProperties()) {
    String stageName = mapping.getStageName();
    if (stageName == null) {
      LOG.warn("The name of the stage cannot be null in a plugin property mapping, "
               + "skipping this mapping: '{}'.", mapping);
      continue;
    }
    Map<String, String> pluginProperties = resolvedProperties.get(stageName);
    if (pluginProperties == null) {
      LOG.warn("No plugin properties can be found with stage name '{}' in triggering pipeline '{}' "
               + "in namespace '{}'", mapping.getStageName(), triggerInfo.getApplicationName(),
               triggerInfo.getNamespace());
      continue;
    }
    String sourceKey = mapping.getSource();
    if (sourceKey == null) {
      LOG.warn("The name of the argument from the triggering pipeline cannot be null, "
               + "skipping this argument mapping: '{}'.", mapping);
      continue;
    }
    String value = pluginProperties.get(sourceKey);
    if (value == null) {
      LOG.warn("No property with name '{}' can be found in plugin '{}' of the triggering pipeline '{}' "
               + "in namespace '{}'", sourceKey, stageName, triggerInfo.getApplicationName(),
               triggerInfo.getNamespace());
      continue;
    }
    // Use the argument name from the triggering pipeline if no target is specified
    String targetKey = mapping.getTarget() == null ? sourceKey : mapping.getTarget();
    token.put(targetKey, value);
  }
}
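For orientation, the mapping this method consumes might look like the following when carried as JSON. The field names are inferred from the getters used above (getArguments, getPluginProperties, getSource, getTarget, getStageName), the example values are made up, and the exact wire format of TriggeringPropertyMapping is an assumption.

// A sketch only: maps triggering-pipeline runtime argument "input.path" to "source.path",
// and the resolved "path" property of stage "File" to argument "copy.path".
String mappingJson =
  "{"
  + "\"arguments\": [{\"source\": \"input.path\", \"target\": \"source.path\"}],"
  + "\"pluginProperties\": [{\"stageName\": \"File\", \"source\": \"path\", \"target\": \"copy.path\"}]"
  + "}";
TriggeringPropertyMapping propertiesMapping = GSON.fromJson(mappingJson, TriggeringPropertyMapping.class);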
Use of io.cdap.cdap.api.schedule.TriggerInfo in project cdap by caskdata.
The class CoreSchedulerServiceTest, method testRunScheduledJobs:
@Test
@Category(XSlowTests.class)
public void testRunScheduledJobs() throws Exception {
  CConfiguration cConf = getInjector().getInstance(CConfiguration.class);
  dataEventTopic = NamespaceId.SYSTEM.topic(cConf.get(Constants.Dataset.DATA_EVENT_TOPIC));
  // Deploy the app with a version
  Id.Artifact appArtifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "appwithschedules", VERSION1);
  addAppArtifact(appArtifactId, AppWithFrequentScheduledWorkflows.class);
  AppRequest<? extends Config> appRequest =
    new AppRequest<>(new ArtifactSummary(appArtifactId.getName(), appArtifactId.getVersion().getVersion()));
  deploy(APP_ID, appRequest);
  // Resume the schedules because schedules are initialized as paused
  enableSchedule(AppWithFrequentScheduledWorkflows.TEN_SECOND_SCHEDULE_1);
  enableSchedule(AppWithFrequentScheduledWorkflows.TEN_SECOND_SCHEDULE_2);
  enableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_1);
  enableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_2);
  for (int i = 0; i < 5; i++) {
    testNewPartition(i + 1);
  }
  // Enable COMPOSITE_SCHEDULE before publishing events to DATASET_NAME2
  enableSchedule(AppWithFrequentScheduledWorkflows.COMPOSITE_SCHEDULE);
  // Disable the two partition schedules, then send them notifications (which should not trigger them)
  int runs1 = getRuns(WORKFLOW_1, ProgramRunStatus.ALL);
  int runs2 = getRuns(WORKFLOW_2, ProgramRunStatus.ALL);
  disableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_1);
  // Ensure schedule 2 is disabled after schedule 1
  Thread.sleep(BUFFER);
  long disableBeforeTime = System.currentTimeMillis();
  disableSchedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_2);
  long disableAfterTime = System.currentTimeMillis() + 1;
  publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME1);
  long minPublishTime = System.currentTimeMillis();
  publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME2);
  // Make sure the subscriber has processed the data event
  waitUntilProcessed(dataEventTopic, minPublishTime);
  // Both workflows must run at least once.
  // If the testNewPartition() loop took longer than expected, there may be more runs (Quartz fired multiple times).
  Tasks.waitFor(true, () -> getRuns(SCHEDULED_WORKFLOW_1, ProgramRunStatus.COMPLETED) > 0
    && getRuns(SCHEDULED_WORKFLOW_2, ProgramRunStatus.COMPLETED) > 0, 10, TimeUnit.SECONDS);
  // There shouldn't be any partition trigger in the job queue
  Assert.assertFalse(Iterables.any(getAllJobs(),
    job -> job.getSchedule().getTrigger() instanceof ProtoTrigger.PartitionTrigger));
  ProgramId compositeWorkflow = APP_ID.workflow(AppWithFrequentScheduledWorkflows.COMPOSITE_WORKFLOW);
  // The workflow scheduled with the composite trigger has never been started
  Assert.assertEquals(0, getRuns(compositeWorkflow, ProgramRunStatus.ALL));
  // Publish two more new-partition notifications to satisfy the partition trigger in the composite trigger,
  // so that the whole composite trigger is satisfied
  publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME2);
  minPublishTime = System.currentTimeMillis();
  publishNotification(dataEventTopic, NamespaceId.DEFAULT, AppWithFrequentScheduledWorkflows.DATASET_NAME2);
  // Make sure the subscriber has processed the data event
  waitUntilProcessed(dataEventTopic, minPublishTime);
  // Wait for 1 run of compositeWorkflow to complete
  waitForCompleteRuns(1, compositeWorkflow);
  for (RunRecordDetail runRecordMeta :
       store.getRuns(SCHEDULED_WORKFLOW_1, ProgramRunStatus.ALL, 0, Long.MAX_VALUE, Integer.MAX_VALUE).values()) {
    Map<String, String> sysArgs = runRecordMeta.getSystemArgs();
    Assert.assertNotNull(sysArgs);
    TriggeringScheduleInfo scheduleInfo =
      GSON.fromJson(sysArgs.get(ProgramOptionConstants.TRIGGERING_SCHEDULE_INFO), TriggeringScheduleInfo.class);
    Assert.assertEquals(AppWithFrequentScheduledWorkflows.TEN_SECOND_SCHEDULE_1, scheduleInfo.getName());
    List<TriggerInfo> triggerInfos = scheduleInfo.getTriggerInfos();
    // A single notification is enough to satisfy the time trigger
    Assert.assertEquals(1, triggerInfos.size());
    Assert.assertEquals(TriggerInfo.Type.TIME, triggerInfos.get(0).getType());
  }
  // Also verify that the two partition schedules did not trigger
  Assert.assertEquals(runs1, getRuns(WORKFLOW_1, ProgramRunStatus.ALL));
  Assert.assertEquals(runs2, getRuns(WORKFLOW_2, ProgramRunStatus.ALL));
  // Re-enable partition schedule 2 and test reEnableSchedules
  scheduler.reEnableSchedules(NamespaceId.DEFAULT, disableBeforeTime, disableAfterTime);
  Assert.assertEquals(ProgramScheduleStatus.SCHEDULED,
    scheduler.getScheduleStatus(APP_ID.schedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_2)));
  Assert.assertEquals(ProgramScheduleStatus.SUSPENDED,
    scheduler.getScheduleStatus(APP_ID.schedule(AppWithFrequentScheduledWorkflows.DATASET_PARTITION_SCHEDULE_1)));
  testScheduleUpdate("disable");
  testScheduleUpdate("update");
  testScheduleUpdate("delete");
}
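The system-argument round trip this test verifies is also what makes TriggeringScheduleInfo visible to the triggered program itself. Below is a minimal sketch of consuming it at runtime, assuming CDAP's SchedulableProgramContext, whose getTriggeringScheduleInfo() returns null for runs that were not started by a schedule; the helper class is illustrative.

import java.util.List;
import io.cdap.cdap.api.schedule.SchedulableProgramContext;
import io.cdap.cdap.api.schedule.TriggerInfo;
import io.cdap.cdap.api.schedule.TriggeringScheduleInfo;

public final class TriggerInfoInspector {
  private TriggerInfoInspector() { }

  // Returns true if this run was started by a schedule whose triggers include a time trigger.
  public static boolean startedByTimeTrigger(SchedulableProgramContext context) {
    TriggeringScheduleInfo scheduleInfo = context.getTriggeringScheduleInfo();
    if (scheduleInfo == null) {
      return false; // manual run, not started by a schedule
    }
    List<TriggerInfo> triggerInfos = scheduleInfo.getTriggerInfos();
    for (TriggerInfo info : triggerInfos) {
      if (info.getType() == TriggerInfo.Type.TIME) {
        return true;
      }
    }
    return false;
  }
}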
Use of io.cdap.cdap.api.schedule.TriggerInfo in project cdap by caskdata.
The class OperationsDashboardHttpHandlerTest, method testDashboardReadWithScheduledRuns:
@Test
public void testDashboardReadWithScheduledRuns() throws Exception {
  long startTime1 = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
  RunId runId1 = RunIds.generate();
  NamespaceId ns3 = new NamespaceId("ns3");
  List<TriggerInfo> triggerInfos = new ArrayList<>();
  triggerInfos.add(new DefaultTimeTriggerInfo("*/5 * * * *", startTime1));
  TriggeringScheduleInfo triggeringScheduleInfo =
    new DefaultTriggeringScheduleInfo("test", "test", triggerInfos, new HashMap<>());
  RunRecordDetail.Builder metaBuilder = getMockRunRecordMeta(ns3, runId1);
  metaBuilder.setRunTime(startTime1);
  metaBuilder.setStatus(ProgramRunStatus.RUNNING);
  Map<String, String> systemArgs =
    ImmutableMap.of(ProgramOptionConstants.TRIGGERING_SCHEDULE_INFO, GSON.toJson(triggeringScheduleInfo));
  metaBuilder.setSystemArgs(systemArgs);
  RunRecordDetail meta = metaBuilder.build();
  writeRunRecordMeta(meta, startTime1);
  // Write heartbeat messages every minute for 5 minutes for this program run
  long endTime = startTime1 + TimeUnit.MINUTES.toSeconds(5);
  long interval = TimeUnit.MINUTES.toSeconds(1);
  setUpProgramHeartBeats(meta, startTime1, endTime, interval);
  String opsDashboardQueryPath = String.format("%s/dashboard?start=%s&duration=%s&namespace=%s",
    BASE_PATH, String.valueOf(startTime1), String.valueOf(endTime), ns3.getNamespace());
  // Get the ops dashboard query results
  HttpResponse response = doGet(opsDashboardQueryPath);
  Assert.assertEquals(200, response.getResponseCode());
  String content = response.getResponseBodyAsString();
  List<DashboardProgramRunRecord> dashboardDetail = GSON.fromJson(content, DASHBOARD_DETAIL_TYPE);
  // Assert that the result contains exactly one entry
  Assert.assertEquals(1, dashboardDetail.size());
  Assert.assertEquals(OperationsDashboardHttpHandler.runRecordToDashboardRecord(meta),
    dashboardDetail.iterator().next());
}
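A DefaultTimeTriggerInfo like the one written into the system arguments above can be read back through the TimeTriggerInfo interface. A short sketch follows, assuming io.cdap.cdap.api.schedule.TimeTriggerInfo exposes getCronExpression(); the helper class is illustrative.

import io.cdap.cdap.api.schedule.TimeTriggerInfo;
import io.cdap.cdap.api.schedule.TriggerInfo;
import io.cdap.cdap.api.schedule.TriggeringScheduleInfo;

public final class TimeTriggerInfos {
  private TimeTriggerInfos() { }

  // Returns the cron expression of the first time trigger in the schedule info, or null if none.
  public static String firstCronExpression(TriggeringScheduleInfo scheduleInfo) {
    for (TriggerInfo info : scheduleInfo.getTriggerInfos()) {
      if (info instanceof TimeTriggerInfo) {
        return ((TimeTriggerInfo) info).getCronExpression();
      }
    }
    return null;
  }
}

Applied to the triggeringScheduleInfo built in the test above, this would return "*/5 * * * *".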