Use of co.cask.cdap.api.schedule.Schedule in project cdap by caskdata.
The class AddTimeScheduleCommand, method perform:
@Override
public void perform(Arguments arguments, PrintStream printStream) throws Exception {
  String scheduleName = arguments.get(ArgumentName.SCHEDULE_NAME.toString());
  String[] programIdParts = arguments.get(ArgumentName.PROGRAM.toString()).split("\\.");
  String version = arguments.getOptional(ArgumentName.APP_VERSION.toString());
  String scheduleDescription = arguments.getOptional(ArgumentName.DESCRIPTION.toString(), "");
  String cronExpression = arguments.get(ArgumentName.CRON_EXPRESSION.toString());
  String schedulePropertiesString = arguments.getOptional(ArgumentName.SCHEDULE_PROPERTIES.toString(), "");
  String scheduleRunConcurrencyString = arguments.getOptional(ArgumentName.CONCURRENCY.toString(), null);
  if (programIdParts.length < 2) {
    throw new CommandInputError(this);
  }
  String appId = programIdParts[0];
  NamespaceId namespaceId = cliConfig.getCurrentNamespace();
  ApplicationId applicationId = (version == null) ? namespaceId.app(appId) : namespaceId.app(appId, version);
  ScheduleId scheduleId = applicationId.schedule(scheduleName);
  Schedules.Builder builder = Schedules.builder(scheduleName);
  if (scheduleRunConcurrencyString != null) {
    builder.setMaxConcurrentRuns(Integer.valueOf(scheduleRunConcurrencyString));
  }
  if (scheduleDescription != null) {
    builder.setDescription(scheduleDescription);
  }
  Schedule schedule = builder.createTimeSchedule(cronExpression);
  Map<String, String> programMap = ImmutableMap.of("programName", programIdParts[1],
                                                   "programType", ElementType.WORKFLOW.name().toUpperCase());
  Map<String, String> propertiesMap = ArgumentParser.parseMap(schedulePropertiesString,
                                                              ArgumentName.SCHEDULE_PROPERTIES.toString());
  ScheduleInstanceConfiguration configuration =
    new ScheduleInstanceConfiguration("TIME", schedule, programMap, propertiesMap);
  scheduleClient.add(scheduleId, configuration);
  printStream.printf("Successfully added schedule '%s' in app '%s'\n", scheduleName, appId);
}
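
The command builds the time schedule through the deprecated Schedules.Builder API shown above. A minimal standalone sketch of those builder calls follows; the schedule name, description, concurrency value, and cron entry are assumed examples, and the import paths follow the co.cask.cdap.api.schedule package named in the heading:

import co.cask.cdap.api.schedule.Schedule;
import co.cask.cdap.api.schedule.Schedules;

public class TimeScheduleSketch {
  public static Schedule buildDailySchedule() {
    // Builder calls mirror the command above; the literal values are assumed examples.
    Schedules.Builder builder = Schedules.builder("DailyReport")
      .setDescription("Runs the report workflow once a day");
    // Allow at most one concurrent run of the scheduled workflow (assumed value).
    builder.setMaxConcurrentRuns(1);
    // Cron entry (assumed example): every day at 04:00.
    return builder.createTimeSchedule("0 4 * * *");
  }
}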
Use of co.cask.cdap.api.schedule.Schedule in project cdap by caskdata.
The class UpdateTimeScheduleCommand, method perform:
@Override
public void perform(Arguments arguments, PrintStream printStream) throws Exception {
  String scheduleName = arguments.get(ArgumentName.SCHEDULE_NAME.toString());
  String[] programIdParts = arguments.get(ArgumentName.PROGRAM.toString()).split("\\.");
  String version = arguments.getOptional(ArgumentName.APP_VERSION.toString());
  String scheduleDescription = arguments.getOptional(ArgumentName.DESCRIPTION.toString(), "");
  String cronExpression = arguments.get(ArgumentName.CRON_EXPRESSION.toString());
  String schedulePropertiesString = arguments.getOptional(ArgumentName.SCHEDULE_PROPERTIES.toString(), "");
  String scheduleRunConcurrencyString = arguments.getOptional(ArgumentName.CONCURRENCY.toString(), null);
  if (programIdParts.length < 2) {
    throw new CommandInputError(this);
  }
  String appId = programIdParts[0];
  NamespaceId namespaceId = cliConfig.getCurrentNamespace();
  ApplicationId applicationId = (version == null) ? namespaceId.app(appId) : namespaceId.app(appId, version);
  ScheduleId scheduleId = applicationId.schedule(scheduleName);
  Schedules.Builder builder = Schedules.builder(scheduleName);
  if (scheduleRunConcurrencyString != null) {
    builder.setMaxConcurrentRuns(Integer.valueOf(scheduleRunConcurrencyString));
  }
  if (scheduleDescription != null) {
    builder.setDescription(scheduleDescription);
  }
  Schedule schedule = builder.createTimeSchedule(cronExpression);
  Map<String, String> programMap = ImmutableMap.of("programName", programIdParts[1],
                                                   "programType", ElementType.WORKFLOW.name().toUpperCase());
  Map<String, String> propertiesMap = ArgumentParser.parseMap(schedulePropertiesString,
                                                              ArgumentName.SCHEDULE_PROPERTIES.toString());
  ScheduleInstanceConfiguration configuration =
    new ScheduleInstanceConfiguration("TIME", schedule, programMap, propertiesMap);
  scheduleClient.update(scheduleId, configuration);
  printStream.printf("Successfully updated schedule '%s' in app '%s'\n", scheduleName, appId);
}
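
Apart from the final scheduleClient.update call and the success message, this method is identical to AddTimeScheduleCommand above: both wrap the built Schedule in a ScheduleInstanceConfiguration together with a program map and optional schedule properties. A sketch of that payload follows, reusing the schedule, scheduleId, and scheduleClient variables from the method above; the workflow name and property value are assumed examples:

// Only workflows are scheduled by these commands, so the program type is fixed to WORKFLOW.
Map<String, String> programMap = ImmutableMap.of(
  "programName", "PurchaseHistoryWorkflow",   // assumed workflow name
  "programType", "WORKFLOW");
Map<String, String> propertiesMap = ImmutableMap.of("report.format", "csv");  // assumed property
ScheduleInstanceConfiguration configuration =
  new ScheduleInstanceConfiguration("TIME", schedule, programMap, propertiesMap);
// add(...) creates the schedule; update(...) modifies an existing one, as in the command above.
scheduleClient.add(scheduleId, configuration);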
Use of co.cask.cdap.api.schedule.Schedule in project cdap by caskdata.
The class Schedulers, method toProgramSchedule:
public static ProgramSchedule toProgramSchedule(ApplicationId appId, ScheduleSpecification spec) {
  Schedule schedule = spec.getSchedule();
  ProgramType programType = ProgramType.valueOfSchedulableType(spec.getProgram().getProgramType());
  ProgramId programId = appId.program(programType, spec.getProgram().getProgramName());
  Trigger trigger;
  if (schedule instanceof TimeSchedule) {
    TimeSchedule timeSchedule = (TimeSchedule) schedule;
    trigger = new TimeTrigger(timeSchedule.getCronEntry());
  } else {
    StreamSizeSchedule streamSchedule = (StreamSizeSchedule) schedule;
    StreamId streamId = programId.getNamespaceId().stream(streamSchedule.getStreamName());
    trigger = new StreamSizeTrigger(streamId, streamSchedule.getDataTriggerMB());
  }
  Integer maxConcurrentRuns = schedule.getRunConstraints().getMaxConcurrentRuns();
  List<Constraint> constraints = maxConcurrentRuns == null
    ? ImmutableList.<Constraint>of()
    : ImmutableList.<Constraint>of(new ConcurrencyConstraint(maxConcurrentRuns));
  return new ProgramSchedule(schedule.getName(), schedule.getDescription(), programId,
                             spec.getProperties(), trigger, constraints);
}
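
Schedulers.toProgramSchedule bridges the deprecated Schedule model to the newer ProgramSchedule model: a TimeSchedule maps to a TimeTrigger, a StreamSizeSchedule maps to a StreamSizeTrigger, and a max-concurrent-runs run constraint is carried over as a ConcurrencyConstraint. A sketch of a possible call site follows; the NamespaceId and ScheduleProgramInfo constructors and the SchedulableProgramType.WORKFLOW value are assumptions not taken from the snippet:

// Assumed example values; only the builder calls and the ScheduleSpecification
// constructor shape are taken from the snippets above.
ApplicationId appId = new NamespaceId("default").app("PurchaseApp");
Schedule schedule = Schedules.builder("DailyHistory")
  .setDescription("Builds the purchase history once a day")
  .createTimeSchedule("0 4 * * *");
ScheduleProgramInfo programInfo =
  new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, "PurchaseHistoryWorkflow");
ScheduleSpecification spec =
  new ScheduleSpecification(schedule, programInfo, Collections.<String, String>emptyMap());
ProgramSchedule programSchedule = Schedulers.toProgramSchedule(appId, spec);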
Use of co.cask.cdap.api.schedule.Schedule in project cdap by caskdata.
The class ScheduleDetail, method toScheduleSpec:
/**
 * Return an equivalent schedule specification, or null if there is no equivalent one.
 */
@Deprecated
@Nullable
public ScheduleSpecification toScheduleSpec() {
  RunConstraints constraints = RunConstraints.NONE;
  if (getConstraints() != null) {
    for (Constraint runConstraint : getConstraints()) {
      if (runConstraint instanceof ProtoConstraint.ConcurrencyConstraint) {
        constraints =
          new RunConstraints(((ProtoConstraint.ConcurrencyConstraint) runConstraint).getMaxConcurrency());
        break;
      }
    }
  }
  Schedule schedule;
  if (getTrigger() instanceof ProtoTrigger.TimeTrigger) {
    ProtoTrigger.TimeTrigger trigger = (ProtoTrigger.TimeTrigger) getTrigger();
    schedule = new TimeSchedule(getName(), getDescription(), trigger.getCronExpression(), constraints);
  } else if (getTrigger() instanceof ProtoTrigger.StreamSizeTrigger) {
    ProtoTrigger.StreamSizeTrigger trigger = (ProtoTrigger.StreamSizeTrigger) getTrigger();
    schedule = new StreamSizeSchedule(getName(), getDescription(), trigger.getStreamId().getStream(),
                                      trigger.getTriggerMB(), constraints);
  } else {
    return null;
  }
  return new ScheduleSpecification(schedule, getProgram(), getProperties());
}
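
This is the inverse mapping, from the newer ScheduleDetail back to the deprecated ScheduleSpecification, and it returns null for any trigger other than a time or stream-size trigger. Callers therefore need a null check; a minimal sketch, assuming a scheduleDetail instance obtained elsewhere:

ScheduleSpecification spec = scheduleDetail.toScheduleSpec();
if (spec == null) {
  // The trigger has no legacy equivalent (anything other than time or stream size).
  throw new IllegalArgumentException("Schedule cannot be represented as a ScheduleSpecification");
}
Schedule schedule = spec.getSchedule();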
Use of co.cask.cdap.api.schedule.Schedule in project cdap by caskdata.
The class ScheduleSpecificationCodec, method deserialize:
@Override
public ScheduleSpecification deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
  throws JsonParseException {
  JsonObject jsonObj = json.getAsJsonObject();
  JsonElement scheduleTypeJson = jsonObj.get("scheduleType");
  ScheduleType scheduleType;
  if (scheduleTypeJson == null) {
    // For backward compatibility with specs persisted by versions older than 2.8, which had no scheduleType
    scheduleType = null;
  } else {
    scheduleType = context.deserialize(scheduleTypeJson, ScheduleType.class);
  }
  Schedule schedule = null;
  if (scheduleType == null) {
    // Old format: rebuild a time schedule from the raw name, description, and cronEntry fields
    JsonObject scheduleObj = jsonObj.get("schedule").getAsJsonObject();
    String name = context.deserialize(scheduleObj.get("name"), String.class);
    String description = context.deserialize(scheduleObj.get("description"), String.class);
    String cronEntry = context.deserialize(scheduleObj.get("cronEntry"), String.class);
    schedule = Schedules.builder(name).setDescription(description).createTimeSchedule(cronEntry);
  } else {
    switch (scheduleType) {
      case TIME:
        schedule = context.deserialize(jsonObj.get("schedule"), TimeSchedule.class);
        break;
      case STREAM:
        schedule = context.deserialize(jsonObj.get("schedule"), StreamSizeSchedule.class);
        break;
    }
  }
  ScheduleProgramInfo program = context.deserialize(jsonObj.get("program"), ScheduleProgramInfo.class);
  Map<String, String> properties = deserializeMap(jsonObj.get("properties"), context, String.class);
  return new ScheduleSpecification(schedule, program, properties);
}
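
The null scheduleType branch keeps specifications persisted before CDAP 2.8 readable: the deserializer rebuilds a time schedule from the raw fields instead of delegating to a typed adapter. A sketch of wiring the codec into Gson follows; the codec's no-argument constructor and the scheduleSpecJson string are assumptions not taken from the snippet:

Gson gson = new GsonBuilder()
  .registerTypeAdapter(ScheduleSpecification.class, new ScheduleSpecificationCodec())
  .create();
// scheduleSpecJson is an assumed JSON string containing a serialized ScheduleSpecification.
ScheduleSpecification spec = gson.fromJson(scheduleSpecJson, ScheduleSpecification.class);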