Use of io.cdap.cdap.proto.id.ScheduleId in project cdap by cdapio.
The class ScheduleFetcherTest, method testGetScheduleNotFound.
@Test(expected = ScheduleNotFoundException.class)
public void testGetScheduleNotFound() throws Exception {
  ScheduleFetcher fetcher = getScheduleFetcher(fetcherType);
  String namespace = TEST_NAMESPACE1;
  String appName = AllProgramsApp.NAME;
  // Deploy the application.
  deploy(AllProgramsApp.class, 200, Constants.Gateway.API_VERSION_3_TOKEN, namespace);
  // Fetching a schedule that does not exist should throw ScheduleNotFoundException
  ScheduleId scheduleId = new ScheduleId(namespace, appName, "InvalidSchedule");
  try {
    fetcher.get(scheduleId);
  } finally {
    // Delete the application
    Assert.assertEquals(200,
        doDelete(getVersionedAPIPath("apps/", Constants.Gateway.API_VERSION_3_TOKEN, namespace)).getResponseCode());
  }
}
Use of io.cdap.cdap.proto.id.ScheduleId in project cdap by cdapio.
The class ScheduleFetcherTest, method testGetSchedule.
@Test
public void testGetSchedule() throws Exception {
  ScheduleFetcher fetcher = getScheduleFetcher(fetcherType);
  String namespace = TEST_NAMESPACE1;
  String appName = AppWithSchedule.NAME;
  String schedule = AppWithSchedule.SCHEDULE;
  // Deploy the application with just 1 schedule on the workflow.
  Config appConfig = new AppWithSchedule.AppConfig(true, true, false);
  deploy(AppWithSchedule.class, 200, Constants.Gateway.API_VERSION_3_TOKEN, namespace, appConfig);
  // Get and validate the schedule
  ScheduleId scheduleId = new ScheduleId(namespace, appName, schedule);
  ScheduleDetail scheduleDetail = fetcher.get(scheduleId);
  Assert.assertEquals(schedule, scheduleDetail.getName());
  // Delete the application
  Assert.assertEquals(200,
      doDelete(getVersionedAPIPath("apps/", Constants.Gateway.API_VERSION_3_TOKEN, namespace)).getResponseCode());
}
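Both fetcher tests build the ScheduleId directly from its parts, while other snippets on this page derive it from an ApplicationId. A minimal sketch of the two equivalent constructions and the accessors used later (the namespace, application, and schedule names here are placeholders):

ScheduleId fromParts = new ScheduleId("default", "AppWithSchedule", "SampleSchedule");
ScheduleId fromApp = new ApplicationId("default", "AppWithSchedule").schedule("SampleSchedule");
// Both refer to the same schedule of the default application version.
String namespace = fromParts.getNamespace();     // "default"
String application = fromParts.getApplication(); // "AppWithSchedule"
String version = fromParts.getVersion();         // the application's default version, since none was given
String schedule = fromParts.getSchedule();       // "SampleSchedule"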
Use of io.cdap.cdap.proto.id.ScheduleId in project cdap by cdapio.
The class MetadataKeyTest, method testVersionedEntitiesKey.
@Test
public void testVersionedEntitiesKey() {
  // CDAP-13597: Metadata for a versioned entity is version independent, i.e. if there are two
  // application versions v1 and v2 and a tag 'tag1' is added to either one, it is reflected in
  // both, because we don't store the application/schedule/program with its version. The
  // following assertions verify that for such versioned entities the keys are the same,
  // i.e. the default version.
  // Key for versioned application/schedule/program should be the same
  // application
  // default version
  ApplicationId applicationId1 = new ApplicationId("ns", "app");
  // custom version
  ApplicationId applicationId2 = new ApplicationId("ns", "app", "2");
  // non-versioned Application metadata entity
  MDSKey mdsValueKey = MetadataKey.createValueRowKey(applicationId1.toMetadataEntity(), "key1");
  MetadataEntity actual = MetadataKey.extractMetadataEntityFromKey(mdsValueKey.getKey());
  Assert.assertEquals(applicationId1.toMetadataEntity(), actual);
  mdsValueKey = MetadataKey.createValueRowKey(applicationId2.toMetadataEntity(), "key1");
  actual = MetadataKey.extractMetadataEntityFromKey(mdsValueKey.getKey());
  Assert.assertEquals(applicationId1.toMetadataEntity(), actual);
  // program
  // default version
  ProgramId programId1 = new ApplicationId("ns", "app").program(ProgramType.SERVICE, "s");
  // custom version
  ProgramId programId2 = new ApplicationId("ns", "app", "2").program(ProgramType.SERVICE, "s");
  mdsValueKey = MetadataKey.createValueRowKey(programId1.toMetadataEntity(), "key1");
  actual = MetadataKey.extractMetadataEntityFromKey(mdsValueKey.getKey());
  Assert.assertEquals(programId1.toMetadataEntity(), actual);
  mdsValueKey = MetadataKey.createValueRowKey(programId2.toMetadataEntity(), "key1");
  actual = MetadataKey.extractMetadataEntityFromKey(mdsValueKey.getKey());
  Assert.assertEquals(programId1.toMetadataEntity(), actual);
  // schedule
  // default version
  ScheduleId scheduleId1 = new ApplicationId("ns", "app").schedule("s");
  // custom version
  ScheduleId scheduleId2 = new ApplicationId("ns", "app", "2").schedule("s");
  mdsValueKey = MetadataKey.createValueRowKey(scheduleId1.toMetadataEntity(), "key1");
  actual = MetadataKey.extractMetadataEntityFromKey(mdsValueKey.getKey());
  Assert.assertEquals(scheduleId1.toMetadataEntity(), actual);
  mdsValueKey = MetadataKey.createValueRowKey(scheduleId2.toMetadataEntity(), "key1");
  actual = MetadataKey.extractMetadataEntityFromKey(mdsValueKey.getKey());
  Assert.assertEquals(scheduleId1.toMetadataEntity(), actual);
}
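For the ScheduleId case specifically, the property being tested condenses to the following sketch, using the same classes and calls as the test above: a metadata value row key written for a versioned schedule extracts back to the default-version entity.

ScheduleId versioned = new ApplicationId("ns", "app", "2").schedule("s");
ScheduleId defaultVersion = new ApplicationId("ns", "app").schedule("s");
// The row key drops the version, so extraction yields the default-version entity.
MDSKey rowKey = MetadataKey.createValueRowKey(versioned.toMetadataEntity(), "key1");
Assert.assertEquals(defaultVersion.toMetadataEntity(),
                    MetadataKey.extractMetadataEntityFromKey(rowKey.getKey()));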
Use of io.cdap.cdap.proto.id.ScheduleId in project cdap by cdapio.
The class DataPipelineTest, method deployPipelineWithSchedule.
private WorkflowManager deployPipelineWithSchedule(String pipelineName, Engine engine,
                                                   String triggeringPipelineName,
                                                   ArgumentMapping key1Mapping, String expectedKey1Value,
                                                   PluginPropertyMapping key2Mapping, String expectedKey2Value)
    throws Exception {
  String tableName = "actionScheduleTable" + pipelineName + engine;
  String sourceName = "macroActionWithScheduleInput-" + pipelineName + engine;
  String sinkName = "macroActionWithScheduleOutput-" + pipelineName + engine;
  String key1 = key1Mapping.getTarget();
  String key2 = key2Mapping.getTarget();
  ETLBatchConfig etlConfig = ETLBatchConfig.builder()
    .addStage(new ETLStage("action1", MockAction.getPlugin(tableName, "row1", "column1", String.format("${%s}", key1))))
    .addStage(new ETLStage("action2", MockAction.getPlugin(tableName, "row2", "column2", String.format("${%s}", key2))))
    .addStage(new ETLStage("source", MockSource.getPlugin(sourceName)))
    .addStage(new ETLStage("filter1", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key1))))
    .addStage(new ETLStage("filter2", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key2))))
    .addStage(new ETLStage("sink", MockSink.getPlugin(sinkName)))
    .addConnection("action1", "action2")
    .addConnection("action2", "source")
    .addConnection("source", "filter1")
    .addConnection("filter1", "filter2")
    .addConnection("filter2", "sink")
    .setEngine(engine)
    .build();
  AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
  ApplicationId appId = NamespaceId.DEFAULT.app(pipelineName);
  ApplicationManager appManager = deployApplication(appId, appRequest);
  // there should be only two programs - one workflow and one mapreduce/spark
  Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
  // Use expectedKey1Value and expectedKey2Value as values for two records, so that only the
  // record "samuel" is expected to pass both filters once key1 and key2 are resolved at runtime
  StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
  StructuredRecord recordKey1Value = StructuredRecord.builder(schema).set("name", expectedKey1Value).build();
  StructuredRecord recordKey2Value = StructuredRecord.builder(schema).set("name", expectedKey2Value).build();
  // write the input records to the source
  DataSetManager<Table> inputManager = getDataset(sourceName);
  MockSource.writeInput(inputManager, ImmutableList.of(recordSamuel, recordKey1Value, recordKey2Value));
  String defaultNamespace = NamespaceId.DEFAULT.getNamespace();
  // Use properties from the triggering pipeline as values for runtime arguments key1 and key2
  TriggeringPropertyMapping propertyMapping =
    new TriggeringPropertyMapping(ImmutableList.of(key1Mapping), ImmutableList.of(key2Mapping));
  ProgramStatusTrigger completeTrigger =
    new ProgramStatusTrigger(new WorkflowId(defaultNamespace, triggeringPipelineName, SmartWorkflow.NAME),
                             ImmutableSet.of(ProgramStatus.COMPLETED));
  ScheduleId scheduleId = appId.schedule("completeSchedule");
  appManager.addSchedule(
    new ScheduleDetail(scheduleId.getNamespace(), scheduleId.getApplication(), scheduleId.getVersion(),
                       scheduleId.getSchedule(), "",
                       new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, SmartWorkflow.NAME),
                       ImmutableMap.of(SmartWorkflow.TRIGGERING_PROPERTIES_MAPPING, GSON.toJson(propertyMapping)),
                       completeTrigger, ImmutableList.of(), Schedulers.JOB_QUEUE_TIMEOUT_MILLIS, null, null));
  appManager.enableSchedule(scheduleId);
  return appManager.getWorkflowManager(SmartWorkflow.NAME);
}
Use of io.cdap.cdap.proto.id.ScheduleId in project cdap by cdapio.
The class UpdateTimeScheduleCommand, method perform.
@Override
public void perform(Arguments arguments, PrintStream printStream) throws Exception {
  String scheduleName = arguments.get(ArgumentName.SCHEDULE_NAME.toString());
  String[] programIdParts = arguments.get(ArgumentName.PROGRAM.toString()).split("\\.");
  String version = arguments.getOptional(ArgumentName.APP_VERSION.toString());
  String scheduleDescription = arguments.getOptional(ArgumentName.DESCRIPTION.toString(), "");
  String cronExpression = arguments.get(ArgumentName.CRON_EXPRESSION.toString());
  String schedulePropertiesString = arguments.getOptional(ArgumentName.SCHEDULE_PROPERTIES.toString(), "");
  String scheduleRunConcurrencyString = arguments.getOptional(ArgumentName.CONCURRENCY.toString(), null);
  if (programIdParts.length < 2) {
    throw new CommandInputError(this);
  }
  String appId = programIdParts[0];
  NamespaceId namespaceId = cliConfig.getCurrentNamespace();
  ApplicationId applicationId = (version == null) ? namespaceId.app(appId) : namespaceId.app(appId, version);
  ScheduleId scheduleId = applicationId.schedule(scheduleName);
  String description = scheduleDescription == null ? null : scheduleDescription;
  ScheduleProgramInfo programInfo = new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, programIdParts[1]);
  List<Constraint> constraints = scheduleRunConcurrencyString == null
    ? ImmutableList.of()
    : ImmutableList.of(new ProtoConstraint.ConcurrencyConstraint(Integer.valueOf(scheduleRunConcurrencyString)));
  Map<String, String> propertiesMap =
    ArgumentParser.parseMap(schedulePropertiesString, ArgumentName.SCHEDULE_PROPERTIES.toString());
  ScheduleDetail scheduleDetail = new ScheduleDetail(scheduleName, description, programInfo, propertiesMap,
                                                     new ProtoTrigger.TimeTrigger(cronExpression), constraints, null);
  scheduleClient.update(scheduleId, scheduleDetail);
  printStream.printf("Successfully updated schedule '%s' in app '%s'\n", scheduleName, appId);
}
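The essence of the command is the final ScheduleClient call. A minimal sketch of the same update done programmatically, assuming an existing ScheduleClient and using placeholder application, workflow, and schedule names plus an illustrative cron expression:

ApplicationId appId = new NamespaceId("default").app("PurchaseApp");
ScheduleId scheduleId = appId.schedule("DailySchedule");
ScheduleDetail detail = new ScheduleDetail(
    "DailySchedule", "runs every day at 4am",
    new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, "PurchaseWorkflow"),
    ImmutableMap.of(),                         // no schedule properties
    new ProtoTrigger.TimeTrigger("0 4 * * *"), // cron expression for the time trigger
    ImmutableList.of(),                        // no run constraints
    null);                                     // trailing argument left null, as in the command above
scheduleClient.update(scheduleId, detail);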