Use of co.cask.cdap.api.workflow.ScheduleProgramInfo in project cdap by caskdata.
The class DefaultAppConfigurer, method addSchedule.
@Override
public void addSchedule(Schedule schedule, SchedulableProgramType programType, String programName,
                        Map<String, String> properties) {
  Preconditions.checkNotNull(schedule, "Schedule cannot be null.");
  Preconditions.checkNotNull(schedule.getName(), "Schedule name cannot be null.");
  Preconditions.checkArgument(!schedule.getName().isEmpty(), "Schedule name cannot be empty.");
  Preconditions.checkNotNull(programName, "Program name cannot be null.");
  Preconditions.checkArgument(!programName.isEmpty(), "Program name cannot be empty.");
  Preconditions.checkArgument(!schedules.containsKey(schedule.getName()),
                              "Schedule with the name '" + schedule.getName() + "' already exists.");
  if (schedule instanceof StreamSizeSchedule) {
    Preconditions.checkArgument(((StreamSizeSchedule) schedule).getDataTriggerMB() > 0,
                                "Schedule data trigger must be greater than 0.");
  }
  // TODO: [CDAP-11575] Temporary solution before REST API is merged. ScheduleSpecification will be removed and
  // the block of code below will be refactored
  ScheduleSpecification spec =
    new ScheduleSpecification(schedule, new ScheduleProgramInfo(programType, programName), properties);
  schedules.put(schedule.getName(), spec);
  ScheduleCreationSpec creationSpec =
    Schedulers.toScheduleCreationSpec(deployNamespace.toEntityId(), schedule, programName, properties);
  doAddSchedule(creationSpec);
}
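A minimal sketch of how a caller might exercise this method, assuming an in-scope DefaultAppConfigurer named configurer; the schedule name, workflow name, and properties are hypothetical, while the builder and the method signature come from the snippets on this page:

  // Hypothetical caller; "PurchaseWorkflow" and the properties map are made up for illustration.
  Schedule everyFiveMinutes = Schedules.builder("every5min")
    .setDescription("Runs PurchaseWorkflow every five minutes")
    .createTimeSchedule("*/5 * * * *");
  configurer.addSchedule(everyFiveMinutes, SchedulableProgramType.WORKFLOW,
                         "PurchaseWorkflow", ImmutableMap.of("max.retries", "3"));

Note that each precondition above fires on bad input before anything is persisted; a duplicate schedule name, for example, fails the containsKey check.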
Use of co.cask.cdap.api.workflow.ScheduleProgramInfo in project cdap by caskdata.
The class DistributedWorkflowProgramRunner, method setupLaunchConfig.
@Override
protected void setupLaunchConfig(LaunchConfig launchConfig, Program program, ProgramOptions options,
                                 CConfiguration cConf, Configuration hConf, File tempDir) throws IOException {
  WorkflowSpecification spec = program.getApplicationSpecification().getWorkflows().get(program.getName());
  List<ClassAcceptor> acceptors = new ArrayList<>();
  // Only interested in MapReduce and Spark nodes
  Set<SchedulableProgramType> runnerTypes = EnumSet.of(SchedulableProgramType.MAPREDUCE,
                                                       SchedulableProgramType.SPARK);
  for (WorkflowActionNode node : Iterables.filter(spec.getNodeIdMap().values(), WorkflowActionNode.class)) {
    // For each type, we only need one node to setup the launch context
    ScheduleProgramInfo programInfo = node.getProgram();
    if (!runnerTypes.remove(programInfo.getProgramType())) {
      continue;
    }
    // Find the ProgramRunner of the given type and setup the launch context
    ProgramType programType = ProgramType.valueOfSchedulableType(programInfo.getProgramType());
    ProgramRunner runner = programRunnerFactory.create(programType);
    try {
      if (runner instanceof DistributedProgramRunner) {
        // Call setupLaunchConfig with the corresponding program
        ProgramId programId = program.getId().getParent().program(programType, programInfo.getProgramName());
        ((DistributedProgramRunner) runner).setupLaunchConfig(launchConfig,
                                                              Programs.create(cConf, program, programId, runner),
                                                              options, cConf, hConf, tempDir);
        acceptors.add(launchConfig.getClassAcceptor());
      }
    } finally {
      if (runner instanceof Closeable) {
        Closeables.closeQuietly((Closeable) runner);
      }
    }
  }
  // Set the class acceptor
  launchConfig.setClassAcceptor(new AndClassAcceptor(acceptors));
  // Clear and set the runnable for the workflow driver
  launchConfig.clearRunnables();
  Resources resources = findDriverResources(program.getApplicationSpecification().getSpark(),
                                            program.getApplicationSpecification().getMapReduce(), spec);
  resources = SystemArguments.getResources(options.getUserArguments(), resources);
  launchConfig.addRunnable(spec.getName(), new WorkflowTwillRunnable(spec.getName()), resources, 1, 0);
}
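The runnerTypes.remove(...) test is a compact "first node of each type wins" filter: Set.remove returns true only the first time a given type is encountered, so the launch context is set up at most once per program type. A self-contained illustration of the idiom (the enum and node list here are hypothetical):

  import java.util.Arrays;
  import java.util.EnumSet;
  import java.util.List;
  import java.util.Set;

  public class FirstOfEachType {
    enum Kind { MAPREDUCE, SPARK, CUSTOM_ACTION }

    public static void main(String[] args) {
      List<Kind> nodes = Arrays.asList(Kind.MAPREDUCE, Kind.SPARK, Kind.MAPREDUCE, Kind.CUSTOM_ACTION);
      // Only interested in these two types, mirroring the EnumSet above
      Set<Kind> pending = EnumSet.of(Kind.MAPREDUCE, Kind.SPARK);
      for (Kind kind : nodes) {
        // remove(...) returns false for already-seen or unwanted types, so they are skipped
        if (!pending.remove(kind)) {
          continue;
        }
        System.out.println("setting up launch context once for " + kind);
      }
    }
  }

Running this prints one line for MAPREDUCE and one for SPARK; the second MAPREDUCE node and the CUSTOM_ACTION node are skipped.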
Use of co.cask.cdap.api.workflow.ScheduleProgramInfo in project cdap by caskdata.
The class WorkflowVerificationTest, method verifyWorkflowWithLocalDatasetSpecification.
private void verifyWorkflowWithLocalDatasetSpecification(ApplicationSpecification appSpec) {
  WorkflowSpecification spec = appSpec.getWorkflows().get("WorkflowWithLocalDatasets");
  List<WorkflowNode> nodes = spec.getNodes();
  Assert.assertEquals(2, nodes.size());

  WorkflowNode node = nodes.get(0);
  Assert.assertEquals(WorkflowNodeType.ACTION, node.getType());
  WorkflowActionNode actionNode = (WorkflowActionNode) node;
  Assert.assertEquals(new ScheduleProgramInfo(SchedulableProgramType.MAPREDUCE, "MR1"), actionNode.getProgram());

  node = nodes.get(1);
  Assert.assertEquals(WorkflowNodeType.ACTION, node.getType());
  actionNode = (WorkflowActionNode) node;
  Assert.assertEquals(new ScheduleProgramInfo(SchedulableProgramType.SPARK, "SP1"), actionNode.getProgram());

  Map<String, DatasetCreationSpec> localDatasetSpecs = spec.getLocalDatasetSpecs();
  Assert.assertEquals(5, localDatasetSpecs.size());

  DatasetCreationSpec datasetCreationSpec = localDatasetSpecs.get("mytable");
  Assert.assertEquals(Table.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals(0, datasetCreationSpec.getProperties().getProperties().size());

  datasetCreationSpec = localDatasetSpecs.get("myfile");
  Assert.assertEquals(FileSet.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals(0, datasetCreationSpec.getProperties().getProperties().size());

  datasetCreationSpec = localDatasetSpecs.get("myfile_with_properties");
  Assert.assertEquals(FileSet.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals("prop_value", datasetCreationSpec.getProperties().getProperties().get("prop_key"));

  datasetCreationSpec = localDatasetSpecs.get("mytablefromtype");
  Assert.assertEquals(Table.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals(0, datasetCreationSpec.getProperties().getProperties().size());

  datasetCreationSpec = localDatasetSpecs.get("myfilefromtype");
  Assert.assertEquals(FileSet.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals("another_prop_value",
                      datasetCreationSpec.getProperties().getProperties().get("another_prop_key"));

  // Check if the application specification has correct modules
  Map<String, String> datasetModules = appSpec.getDatasetModules();
  Assert.assertEquals(2, datasetModules.size());
  Assert.assertTrue(datasetModules.containsKey(FileSet.class.getName()));
  Assert.assertTrue(datasetModules.containsKey(Table.class.getName()));
}
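For context, a workflow producing the specification these assertions check might be configured roughly as follows. This is a hypothetical reconstruction (the real test workflow lives in the CDAP test sources), assuming AbstractWorkflow's createLocalDataset, addMapReduce, and addSpark methods:

  // Hypothetical reconstruction; dataset names and properties mirror the assertions above.
  public class WorkflowWithLocalDatasets extends AbstractWorkflow {
    @Override
    protected void configure() {
      setName("WorkflowWithLocalDatasets");
      createLocalDataset("mytable", Table.class, DatasetProperties.EMPTY);
      createLocalDataset("myfile", FileSet.class, DatasetProperties.EMPTY);
      createLocalDataset("myfile_with_properties", FileSet.class,
                         DatasetProperties.builder().add("prop_key", "prop_value").build());
      // The "...fromtype" datasets are declared by type name rather than by class
      createLocalDataset("mytablefromtype", Table.class.getName(), DatasetProperties.EMPTY);
      createLocalDataset("myfilefromtype", FileSet.class.getName(),
                         DatasetProperties.builder().add("another_prop_key", "another_prop_value").build());
      addMapReduce("MR1");
      addSpark("SP1");
    }
  }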
Use of co.cask.cdap.api.workflow.ScheduleProgramInfo in project cdap by caskdata.
The class ScheduleSpecificationCodecTest, method testBackwardsCompatibility.
@Test
public void testBackwardsCompatibility() throws Exception {
  // Before 2.8, the ScheduleSpecificationCodec used to have the same behavior as what Gson would do by
  // default, and only Schedules existed. We make sure that ScheduleSpecification persisted before
  // 2.8 can still be deserialized using the new codec.
  String cronEntry = "* * * * *";
  Schedule schedule = new OldSchedule("foo", "bar", cronEntry);
  ScheduleProgramInfo programInfo = new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, "testWorkflow");
  ImmutableMap<String, String> properties = ImmutableMap.of("a", "b", "c", "d");
  ScheduleSpecification specification = new ScheduleSpecification(schedule, programInfo, properties);

  // Use default Gson to serialize
  String jsonStr = new Gson().toJson(specification);
  ScheduleSpecification deserialized = GSON.fromJson(jsonStr, ScheduleSpecification.class);
  ScheduleSpecification expectedSpec = new ScheduleSpecification(
    Schedules.builder(schedule.getName()).setDescription(schedule.getDescription()).createTimeSchedule(cronEntry),
    programInfo, properties);
  Assert.assertEquals(expectedSpec, deserialized);
}
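The GSON constant referenced here (as opposed to the plain new Gson() used for serializing) is presumably a Gson instance with the custom codec registered. A minimal sketch of such a setup; the exact registration in the test class is an assumption, not copied from the source:

  // Assumed setup: the codec handles both the pre-2.8 and current JSON shapes of ScheduleSpecification.
  private static final Gson GSON = new GsonBuilder()
    .registerTypeAdapter(ScheduleSpecification.class, new ScheduleSpecificationCodec())
    .create();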
Use of co.cask.cdap.api.workflow.ScheduleProgramInfo in project cdap by caskdata.
The class ScheduleSpecificationCodecTest, method testTimeSchedule.
@Test
public void testTimeSchedule() throws Exception {
  TimeSchedule timeSchedule =
    (TimeSchedule) Schedules.builder("foo").setDescription("bar").createTimeSchedule("cronEntry");
  ScheduleProgramInfo programInfo = new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, "testWorkflow");
  ImmutableMap<String, String> properties = ImmutableMap.of("a", "b", "c", "d");
  ScheduleSpecification specification = new ScheduleSpecification(timeSchedule, programInfo, properties);

  String jsonStr = GSON.toJson(specification);
  ScheduleSpecification deserialized = GSON.fromJson(jsonStr, ScheduleSpecification.class);
  Assert.assertEquals(specification, deserialized);
}
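The same round-trip check should hold for the stream-size flavor guarded in addSchedule above. A hedged sketch, assuming the Schedules builder exposes a createDataSchedule factory for stream-size schedules (an API this page does not itself show):

  @Test
  public void testStreamSizeSchedule() throws Exception {
    // Assumed factory method: builds the StreamSizeSchedule variant checked in addSchedule above.
    Schedule dataSchedule = Schedules.builder("foo").setDescription("bar")
      .createDataSchedule(Schedules.Source.STREAM, "purchases", 10);
    ScheduleProgramInfo programInfo = new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, "testWorkflow");
    ScheduleSpecification specification =
      new ScheduleSpecification(dataSchedule, programInfo, ImmutableMap.of("a", "b"));

    ScheduleSpecification deserialized = GSON.fromJson(GSON.toJson(specification), ScheduleSpecification.class);
    Assert.assertEquals(specification, deserialized);
  }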