
Example 1 with Constraint

Use of co.cask.cdap.internal.schedule.constraint.Constraint in project cdap by caskdata.

From class ProgramLifecycleHttpHandler, method toScheduleDetail:

private ScheduleDetail toScheduleDetail(ScheduleUpdateDetail updateDetail, ProgramSchedule existing) {
    ScheduleUpdateDetail.Schedule scheduleUpdate = updateDetail.getSchedule();
    if (scheduleUpdate == null) {
        return new ScheduleDetail(null, null, null, updateDetail.getProperties(), null, null, null);
    }
    Trigger trigger = null;
    if (scheduleUpdate.getCronExpression() != null
        && (scheduleUpdate.getStreamName() != null || scheduleUpdate.getDataTriggerMB() != null)) {
        throw new IllegalArgumentException(String.format(
            "Cannot define time trigger with cron expression and define stream size trigger with"
            + " stream name and data trigger configuration in the same schedule update details %s. "
            + "Schedule update detail must contain only one trigger.", updateDetail));
    }
    NamespaceId namespaceId = existing.getProgramId().getNamespaceId();
    if (scheduleUpdate.getCronExpression() != null) {
        trigger = new TimeTrigger(updateDetail.getSchedule().getCronExpression());
    } else if (existing.getTrigger() instanceof StreamSizeTrigger) {
        // if the existing trigger is StreamSizeTrigger, use the field in the existing trigger if the corresponding field
        // in schedule update detail is null
        StreamSizeTrigger existingTrigger = (StreamSizeTrigger) existing.getTrigger();
        String streamName = Objects.firstNonNull(scheduleUpdate.getStreamName(), existingTrigger.getStreamId().getStream());
        int dataTriggerMB = Objects.firstNonNull(scheduleUpdate.getDataTriggerMB(), existingTrigger.getTriggerMB());
        trigger = new StreamSizeTrigger(namespaceId.stream(streamName), dataTriggerMB);
    } else if (scheduleUpdate.getStreamName() != null && scheduleUpdate.getDataTriggerMB() != null) {
        trigger = new StreamSizeTrigger(namespaceId.stream(scheduleUpdate.getStreamName()), scheduleUpdate.getDataTriggerMB());
    } else if (scheduleUpdate.getStreamName() != null || scheduleUpdate.getDataTriggerMB() != null) {
        throw new IllegalArgumentException(String.format("Only one of stream name and data trigger MB is defined in schedule update details %s. " + "Must provide both stream name and data trigger MB to update the existing schedule with " + "trigger of type %s to a schedule with stream size trigger.", updateDetail, existing.getTrigger().getClass()));
    }
    List<Constraint> constraints = toConstraints(scheduleUpdate.getRunConstraints());
    return new ScheduleDetail(null, scheduleUpdate.getDescription(), null, updateDetail.getProperties(), trigger, constraints, null);
}
Also used : StreamSizeTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.StreamSizeTrigger) TimeTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.TimeTrigger) Trigger(co.cask.cdap.internal.schedule.trigger.Trigger) Constraint(co.cask.cdap.internal.schedule.constraint.Constraint) ProtoConstraint(co.cask.cdap.proto.ProtoConstraint) ScheduleDetail(co.cask.cdap.proto.ScheduleDetail) NamespaceId(co.cask.cdap.proto.id.NamespaceId) ScheduleUpdateDetail(co.cask.cdap.proto.ScheduleUpdateDetail)
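The interesting part of this handler is the fallback merge: when the update omits a field, the value from the existing StreamSizeTrigger is kept. Below is a minimal, self-contained sketch of that firstNonNull pattern, using Guava's MoreObjects (the StreamSettings class is hypothetical, standing in for the trigger's fields):

import com.google.common.base.MoreObjects;

public class TriggerMergeSketch {

    // Hypothetical holder standing in for the existing StreamSizeTrigger's fields.
    static class StreamSettings {
        final String streamName;
        final int dataTriggerMB;

        StreamSettings(String streamName, int dataTriggerMB) {
            this.streamName = streamName;
            this.dataTriggerMB = dataTriggerMB;
        }
    }

    // Null fields in the update fall back to the existing values, mirroring the handler above.
    static StreamSettings merge(StreamSettings existing, String newStreamName, Integer newDataTriggerMB) {
        String streamName = MoreObjects.firstNonNull(newStreamName, existing.streamName);
        int dataTriggerMB = MoreObjects.firstNonNull(newDataTriggerMB, existing.dataTriggerMB);
        return new StreamSettings(streamName, dataTriggerMB);
    }

    public static void main(String[] args) {
        StreamSettings existing = new StreamSettings("purchases", 1024);
        // Update only the threshold; the stream name carries over from the existing trigger.
        StreamSettings merged = merge(existing, null, 2048);
        // prints: purchases / 2048 MB
        System.out.println(merged.streamName + " / " + merged.dataTriggerMB + " MB");
    }
}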

Example 2 with Constraint

Use of co.cask.cdap.internal.schedule.constraint.Constraint in project cdap by caskdata.

From class DataPipelineTest, method deployPipelineWithSchedule:

private WorkflowManager deployPipelineWithSchedule(String pipelineName, Engine engine, String triggeringPipelineName, ArgumentMapping key1Mapping, String expectedKey1Value, PluginPropertyMapping key2Mapping, String expectedKey2Value) throws Exception {
    String tableName = "actionScheduleTable" + pipelineName + engine;
    String sourceName = "macroActionWithScheduleInput-" + pipelineName + engine;
    String sinkName = "macroActionWithScheduleOutput-" + pipelineName + engine;
    String key1 = key1Mapping.getTarget();
    String key2 = key2Mapping.getTarget();
    ETLBatchConfig etlConfig = ETLBatchConfig.builder("* * * * *")
        .addStage(new ETLStage("action1", MockAction.getPlugin(tableName, "row1", "column1", String.format("${%s}", key1))))
        .addStage(new ETLStage("action2", MockAction.getPlugin(tableName, "row2", "column2", String.format("${%s}", key2))))
        .addStage(new ETLStage("source", MockSource.getPlugin(sourceName)))
        .addStage(new ETLStage("filter1", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key1))))
        .addStage(new ETLStage("filter2", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key2))))
        .addStage(new ETLStage("sink", MockSink.getPlugin(sinkName)))
        .addConnection("action1", "action2")
        .addConnection("action2", "source")
        .addConnection("source", "filter1")
        .addConnection("filter1", "filter2")
        .addConnection("filter2", "sink")
        .setEngine(engine)
        .build();
    AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
    ApplicationId appId = NamespaceId.DEFAULT.app(pipelineName);
    ApplicationManager appManager = deployApplication(appId, appRequest);
    // there should be only two programs - one workflow and one mapreduce/spark
    Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
    // Use expectedKey1Value and expectedKey2Value as values for two records, so that only the record
    // "samuel" passes both string-value filters and reaches the sink
    StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
    StructuredRecord recordKey1Value = StructuredRecord.builder(schema).set("name", expectedKey1Value).build();
    StructuredRecord recordKey2Value = StructuredRecord.builder(schema).set("name", expectedKey2Value).build();
    // write one record to each source
    DataSetManager<Table> inputManager = getDataset(sourceName);
    MockSource.writeInput(inputManager, ImmutableList.of(recordSamuel, recordKey1Value, recordKey2Value));
    String defaultNamespace = NamespaceId.DEFAULT.getNamespace();
    // Use properties from the triggering pipeline as values for runtime argument key1, key2
    TriggeringPropertyMapping propertyMapping = new TriggeringPropertyMapping(ImmutableList.of(key1Mapping), ImmutableList.of(key2Mapping));
    ProgramStatusTrigger completeTrigger = new ProgramStatusTrigger(new WorkflowId(defaultNamespace, triggeringPipelineName, SmartWorkflow.NAME), ImmutableSet.of(ProgramStatus.COMPLETED));
    ScheduleId scheduleId = appId.schedule("completeSchedule");
    appManager.addSchedule(new ScheduleDetail(
        scheduleId.getNamespace(), scheduleId.getApplication(), scheduleId.getVersion(), scheduleId.getSchedule(), "",
        new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, SmartWorkflow.NAME),
        ImmutableMap.of(SmartWorkflow.TRIGGERING_PROPERTIES_MAPPING, GSON.toJson(propertyMapping)),
        completeTrigger, ImmutableList.<Constraint>of(), Schedulers.JOB_QUEUE_TIMEOUT_MILLIS, null));
    appManager.enableSchedule(scheduleId);
    return appManager.getWorkflowManager(SmartWorkflow.NAME);
}
Also used : ApplicationManager(co.cask.cdap.test.ApplicationManager) KeyValueTable(co.cask.cdap.api.dataset.lib.KeyValueTable) Table(co.cask.cdap.api.dataset.table.Table) Constraint(co.cask.cdap.internal.schedule.constraint.Constraint) Schema(co.cask.cdap.api.data.schema.Schema) WorkflowId(co.cask.cdap.proto.id.WorkflowId) ScheduleId(co.cask.cdap.proto.id.ScheduleId) StructuredRecord(co.cask.cdap.api.data.format.StructuredRecord) AppRequest(co.cask.cdap.proto.artifact.AppRequest) ETLBatchConfig(co.cask.cdap.etl.proto.v2.ETLBatchConfig) ETLStage(co.cask.cdap.etl.proto.v2.ETLStage) TriggeringPropertyMapping(co.cask.cdap.etl.proto.v2.TriggeringPropertyMapping) ProgramStatusTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger) ScheduleDetail(co.cask.cdap.proto.ScheduleDetail) ApplicationId(co.cask.cdap.proto.id.ApplicationId) ScheduleProgramInfo(co.cask.cdap.api.workflow.ScheduleProgramInfo)
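Note that the test deliberately passes an empty constraint list. A sketch of a variation on the addSchedule call above, limiting the triggered workflow to a single concurrent run; it reuses the ids, trigger, and property mapping built in the test method, plus the ConcurrencyConstraint(int) constructor shown in the later examples:

    // Sketch: same schedule, but with at most one concurrent run of the triggered workflow.
    List<Constraint> constraints = ImmutableList.<Constraint>of(new ConcurrencyConstraint(1));
    appManager.addSchedule(new ScheduleDetail(
        scheduleId.getNamespace(), scheduleId.getApplication(), scheduleId.getVersion(), scheduleId.getSchedule(),
        "run on completion, at most one run at a time",
        new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, SmartWorkflow.NAME),
        ImmutableMap.of(SmartWorkflow.TRIGGERING_PROPERTIES_MAPPING, GSON.toJson(propertyMapping)),
        completeTrigger, constraints, Schedulers.JOB_QUEUE_TIMEOUT_MILLIS, null));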

Example 3 with Constraint

Use of co.cask.cdap.internal.schedule.constraint.Constraint in project cdap by caskdata.

From class ConstraintCodecTest, method testSerDeser:

private void testSerDeser(ProtoConstraint proto, Constraint constraint) {
    // Serialize both objects with the standard codec (GSON) and the proto codec (GSON_PROTO),
    // each time both with the runtime type and with Constraint as the declared type.
    String jsonOfConstraint = GSON.toJson(constraint);
    String jsonOfConstraintAsConstraint = GSON.toJson(constraint, Constraint.class);
    String jsonOfProto = GSON.toJson(proto);
    String jsonOfProtoAsConstraint = GSON.toJson(proto, Constraint.class);
    String jsonOfConstraintByProto = GSON_PROTO.toJson(constraint);
    String jsonOfConstraintAsConstraintByProto = GSON_PROTO.toJson(constraint, Constraint.class);
    String jsonOfProtoByProto = GSON_PROTO.toJson(proto);
    String jsonOfProtoAsConstraintByProto = GSON_PROTO.toJson(proto, Constraint.class);
    // All eight serializations must produce identical JSON.
    Assert.assertEquals(jsonOfConstraint, jsonOfConstraintAsConstraint);
    Assert.assertEquals(jsonOfConstraint, jsonOfProto);
    Assert.assertEquals(jsonOfConstraint, jsonOfProtoAsConstraint);
    Assert.assertEquals(jsonOfConstraint, jsonOfConstraintByProto);
    Assert.assertEquals(jsonOfConstraint, jsonOfConstraintAsConstraintByProto);
    Assert.assertEquals(jsonOfConstraint, jsonOfProtoByProto);
    Assert.assertEquals(jsonOfConstraint, jsonOfProtoAsConstraintByProto);
    // Both codecs must round-trip the JSON back to the original objects.
    Constraint deserialized = GSON.fromJson(jsonOfConstraint, Constraint.class);
    Constraint deserializedAsProto = GSON_PROTO.fromJson(jsonOfConstraint, Constraint.class);
    Assert.assertEquals(constraint, deserialized);
    Assert.assertEquals(proto, deserializedAsProto);
}
Also used : Constraint(co.cask.cdap.internal.schedule.constraint.Constraint) ProtoConstraint(co.cask.cdap.proto.ProtoConstraint)
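A plausible invocation of this helper (a sketch only; it assumes ProtoConstraint exposes a nested ConcurrencyConstraint mirroring the runtime ConcurrencyConstraint(int) used in the other examples):

    @Test
    public void testConcurrencyConstraintSerDeser() {
        // Hypothetical pairing: runtime constraint and its proto counterpart must serialize identically.
        testSerDeser(new ProtoConstraint.ConcurrencyConstraint(3), new ConcurrencyConstraint(3));
    }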

Example 4 with Constraint

Use of co.cask.cdap.internal.schedule.constraint.Constraint in project cdap by caskdata.

From class Schedulers, method toScheduleCreationSpec:

public static ScheduleCreationSpec toScheduleCreationSpec(NamespaceId deployNamespace, Schedule schedule, String programName, Map<String, String> properties) {
    Trigger trigger;
    if (schedule instanceof TimeSchedule) {
        trigger = new TimeTrigger(((TimeSchedule) schedule).getCronEntry());
    } else {
        StreamSizeSchedule streamSizeSchedule = (StreamSizeSchedule) schedule;
        trigger = new StreamSizeTrigger(deployNamespace.stream(streamSizeSchedule.getStreamName()),
                                        streamSizeSchedule.getDataTriggerMB());
    }
    // Translate the max-concurrent-runs setting from the old RunConstraints, if present.
    Integer maxConcurrentRuns = schedule.getRunConstraints().getMaxConcurrentRuns();
    List<Constraint> constraints = maxConcurrentRuns == null
        ? ImmutableList.<Constraint>of()
        : ImmutableList.<Constraint>of(new ConcurrencyConstraint(maxConcurrentRuns));
    return new ScheduleCreationSpec(schedule.getName(), schedule.getDescription(), programName, properties,
                                    trigger, constraints, Schedulers.JOB_QUEUE_TIMEOUT_MILLIS);
}
Also used : ConcurrencyConstraint(co.cask.cdap.internal.app.runtime.schedule.constraint.ConcurrencyConstraint) StreamSizeTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.StreamSizeTrigger) TimeTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.TimeTrigger) Trigger(co.cask.cdap.internal.schedule.trigger.Trigger) Constraint(co.cask.cdap.internal.schedule.constraint.Constraint) TimeSchedule(co.cask.cdap.internal.schedule.TimeSchedule) StreamSizeSchedule(co.cask.cdap.internal.schedule.StreamSizeSchedule) ScheduleCreationSpec(co.cask.cdap.internal.schedule.ScheduleCreationSpec)
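A sketch of a call site, assuming a TimeSchedule instance named timeSchedule obtained elsewhere (the program name and property below are illustrative, not from the source):

    // A TimeSchedule yields a TimeTrigger built from its cron entry; a StreamSizeSchedule
    // would instead yield a StreamSizeTrigger resolved against the deploy namespace.
    ScheduleCreationSpec spec = Schedulers.toScheduleCreationSpec(
        NamespaceId.DEFAULT, timeSchedule, "MyWorkflow",
        ImmutableMap.of("notification.email", "ops@example.com"));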

Example 5 with Constraint

Use of co.cask.cdap.internal.schedule.constraint.Constraint in project cdap by caskdata.

From class Schedulers, method toProgramSchedule:

public static ProgramSchedule toProgramSchedule(ApplicationId appId, ScheduleSpecification spec) {
    Schedule schedule = spec.getSchedule();
    ProgramType programType = ProgramType.valueOfSchedulableType(spec.getProgram().getProgramType());
    ProgramId programId = appId.program(programType, spec.getProgram().getProgramName());
    Trigger trigger;
    if (schedule instanceof TimeSchedule) {
        TimeSchedule timeSchedule = (TimeSchedule) schedule;
        trigger = new TimeTrigger(timeSchedule.getCronEntry());
    } else {
        StreamSizeSchedule streamSchedule = (StreamSizeSchedule) schedule;
        StreamId streamId = programId.getNamespaceId().stream(streamSchedule.getStreamName());
        trigger = new StreamSizeTrigger(streamId, streamSchedule.getDataTriggerMB());
    }
    // Translate the max-concurrent-runs setting from the old RunConstraints, if present.
    Integer maxConcurrentRuns = schedule.getRunConstraints().getMaxConcurrentRuns();
    List<Constraint> constraints = maxConcurrentRuns == null
        ? ImmutableList.<Constraint>of()
        : ImmutableList.<Constraint>of(new ConcurrencyConstraint(maxConcurrentRuns));
    return new ProgramSchedule(schedule.getName(), schedule.getDescription(), programId, spec.getProperties(),
                               trigger, constraints);
}
Also used : StreamId(co.cask.cdap.proto.id.StreamId) ConcurrencyConstraint(co.cask.cdap.internal.app.runtime.schedule.constraint.ConcurrencyConstraint) TimeTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.TimeTrigger) Constraint(co.cask.cdap.internal.schedule.constraint.Constraint) ProgramId(co.cask.cdap.proto.id.ProgramId) StreamSizeTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.StreamSizeTrigger) Trigger(co.cask.cdap.internal.schedule.trigger.Trigger) ProgramSchedule(co.cask.cdap.internal.app.runtime.schedule.ProgramSchedule) StreamSizeSchedule(co.cask.cdap.internal.schedule.StreamSizeSchedule) Schedule(co.cask.cdap.api.schedule.Schedule) TimeSchedule(co.cask.cdap.internal.schedule.TimeSchedule) ProgramType(co.cask.cdap.proto.ProgramType)
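And a sketch of how this conversion might be driven, assuming spec is a ScheduleSpecification read from an existing application specification (its construction is omitted; the application name is illustrative):

    ApplicationId appId = NamespaceId.DEFAULT.app("PurchaseApp");
    ProgramSchedule converted = Schedulers.toProgramSchedule(appId, spec);
    // The result targets the program id derived from the spec, reuses the spec's properties,
    // and reduces the old RunConstraints to at most one ConcurrencyConstraint.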

Aggregations

Constraint (co.cask.cdap.internal.schedule.constraint.Constraint): 10
ProtoConstraint (co.cask.cdap.proto.ProtoConstraint): 5
ScheduleDetail (co.cask.cdap.proto.ScheduleDetail): 5
StreamSizeTrigger (co.cask.cdap.internal.app.runtime.schedule.trigger.StreamSizeTrigger): 4
TimeTrigger (co.cask.cdap.internal.app.runtime.schedule.trigger.TimeTrigger): 4
StreamSizeSchedule (co.cask.cdap.internal.schedule.StreamSizeSchedule): 4
TimeSchedule (co.cask.cdap.internal.schedule.TimeSchedule): 4
Trigger (co.cask.cdap.internal.schedule.trigger.Trigger): 4
ScheduleProgramInfo (co.cask.cdap.api.workflow.ScheduleProgramInfo): 3
ApplicationId (co.cask.cdap.proto.id.ApplicationId): 3
NamespaceId (co.cask.cdap.proto.id.NamespaceId): 3
ScheduleId (co.cask.cdap.proto.id.ScheduleId): 3
Schedule (co.cask.cdap.api.schedule.Schedule): 2
CommandInputError (co.cask.cdap.cli.exception.CommandInputError): 2
ConcurrencyConstraint (co.cask.cdap.internal.app.runtime.schedule.constraint.ConcurrencyConstraint): 2
ProtoTrigger (co.cask.cdap.proto.ProtoTrigger): 2
StreamId (co.cask.cdap.proto.id.StreamId): 2
StructuredRecord (co.cask.cdap.api.data.format.StructuredRecord): 1
Schema (co.cask.cdap.api.data.schema.Schema): 1
KeyValueTable (co.cask.cdap.api.dataset.lib.KeyValueTable): 1