
Example 1 with PluginPropertyMapping

Use of co.cask.cdap.etl.proto.v2.PluginPropertyMapping in project cdap by caskdata.

In class DataPipelineTest, the method testScheduledPipelines:

@Test
public void testScheduledPipelines() throws Exception {
    // Deploy middle pipeline scheduled to be triggered by the completion of head pipeline
    String expectedValue1 = "headArgValue";
    String expectedValue2 = "headPluginValue";
    WorkflowManager middleWorkflowManagerMR = deployPipelineWithSchedule(
        "middle", Engine.SPARK, "head",
        new ArgumentMapping("head-arg", "middle-arg"), expectedValue1,
        new PluginPropertyMapping("action1", "value", "middle-plugin"), expectedValue2);
    // Deploy tail pipeline scheduled to be triggered by the completion of middle pipeline
    WorkflowManager tailWorkflowManagerMR = deployPipelineWithSchedule(
        "tail", Engine.MAPREDUCE, "middle",
        new ArgumentMapping("middle-arg", "tail-arg"), expectedValue1,
        new PluginPropertyMapping("action2", "value", "tail-plugin"), expectedValue2);
    // Run the head pipeline and wait for its completion
    runHeadTriggeringPipeline(Engine.MAPREDUCE, expectedValue1, expectedValue2);
    // After the completion of the head pipeline, verify the results of middle pipeline
    assertTriggeredPipelinesResult(middleWorkflowManagerMR, "middle", Engine.SPARK, expectedValue1, expectedValue2);
    // After the completion of the middle pipeline, verify the results of tail pipeline
    assertTriggeredPipelinesResult(tailWorkflowManagerMR, "tail", Engine.MAPREDUCE, expectedValue1, expectedValue2);
}
Also used: ArgumentMapping (co.cask.cdap.etl.proto.v2.ArgumentMapping) WorkflowManager (co.cask.cdap.test.WorkflowManager) PluginPropertyMapping (co.cask.cdap.etl.proto.v2.PluginPropertyMapping) Test (org.junit.Test)
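
For orientation, the two mapping types used above each pair a value from the triggering pipeline with a runtime argument of the triggered one: ArgumentMapping(source, target) maps a runtime argument, and PluginPropertyMapping(stageName, source, target) maps a resolved plugin property of a stage. A minimal standalone sketch, with the argument-order reading inferred from this test (the getters are the ones the SmartWorkflow example below actually calls):

import co.cask.cdap.etl.proto.v2.ArgumentMapping;
import co.cask.cdap.etl.proto.v2.PluginPropertyMapping;

public class MappingSketch {
    public static void main(String[] args) {
        // Runtime argument "head-arg" of the head pipeline feeds
        // runtime argument "middle-arg" of the middle pipeline.
        ArgumentMapping argMapping = new ArgumentMapping("head-arg", "middle-arg");
        // Resolved property "value" of stage "action1" in the head pipeline
        // feeds runtime argument "middle-plugin" of the middle pipeline.
        PluginPropertyMapping pluginMapping = new PluginPropertyMapping("action1", "value", "middle-plugin");
        System.out.println(argMapping.getSource() + " -> " + argMapping.getTarget());
        System.out.println(pluginMapping.getStageName() + "." + pluginMapping.getSource()
            + " -> " + pluginMapping.getTarget());
    }
}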

Example 2 with PluginPropertyMapping

Use of co.cask.cdap.etl.proto.v2.PluginPropertyMapping in project cdap by caskdata.

In class DataPipelineTest, the method deployPipelineWithSchedule:

private WorkflowManager deployPipelineWithSchedule(
    String pipelineName, Engine engine, String triggeringPipelineName,
    ArgumentMapping key1Mapping, String expectedKey1Value,
    PluginPropertyMapping key2Mapping, String expectedKey2Value) throws Exception {
    String tableName = "actionScheduleTable" + pipelineName + engine;
    String sourceName = "macroActionWithScheduleInput-" + pipelineName + engine;
    String sinkName = "macroActionWithScheduleOutput-" + pipelineName + engine;
    String key1 = key1Mapping.getTarget();
    String key2 = key2Mapping.getTarget();
    ETLBatchConfig etlConfig = ETLBatchConfig.builder("* * * * *")
        .addStage(new ETLStage("action1", MockAction.getPlugin(tableName, "row1", "column1", String.format("${%s}", key1))))
        .addStage(new ETLStage("action2", MockAction.getPlugin(tableName, "row2", "column2", String.format("${%s}", key2))))
        .addStage(new ETLStage("source", MockSource.getPlugin(sourceName)))
        .addStage(new ETLStage("filter1", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key1))))
        .addStage(new ETLStage("filter2", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key2))))
        .addStage(new ETLStage("sink", MockSink.getPlugin(sinkName)))
        .addConnection("action1", "action2")
        .addConnection("action2", "source")
        .addConnection("source", "filter1")
        .addConnection("filter1", "filter2")
        .addConnection("filter2", "sink")
        .setEngine(engine)
        .build();
    AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
    ApplicationId appId = NamespaceId.DEFAULT.app(pipelineName);
    ApplicationManager appManager = deployApplication(appId, appRequest);
    // there should be only two programs - one workflow and one mapreduce/spark
    Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
    // Use expectedKey1Value and expectedKey2Value as the names of two records,
    // so that only the record "samuel" passes both string-value filters
    StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
    StructuredRecord recordKey1Value = StructuredRecord.builder(schema).set("name", expectedKey1Value).build();
    StructuredRecord recordKey2Value = StructuredRecord.builder(schema).set("name", expectedKey2Value).build();
    // write all three records to the source
    DataSetManager<Table> inputManager = getDataset(sourceName);
    MockSource.writeInput(inputManager, ImmutableList.of(recordSamuel, recordKey1Value, recordKey2Value));
    String defaultNamespace = NamespaceId.DEFAULT.getNamespace();
    // Use properties from the triggering pipeline as values for runtime argument key1, key2
    TriggeringPropertyMapping propertyMapping = new TriggeringPropertyMapping(ImmutableList.of(key1Mapping), ImmutableList.of(key2Mapping));
    ProgramStatusTrigger completeTrigger = new ProgramStatusTrigger(new WorkflowId(defaultNamespace, triggeringPipelineName, SmartWorkflow.NAME), ImmutableSet.of(ProgramStatus.COMPLETED));
    ScheduleId scheduleId = appId.schedule("completeSchedule");
    appManager.addSchedule(new ScheduleDetail(
        scheduleId.getNamespace(), scheduleId.getApplication(), scheduleId.getVersion(),
        scheduleId.getSchedule(), "",
        new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, SmartWorkflow.NAME),
        ImmutableMap.of(SmartWorkflow.TRIGGERING_PROPERTIES_MAPPING, GSON.toJson(propertyMapping)),
        completeTrigger, ImmutableList.<Constraint>of(), Schedulers.JOB_QUEUE_TIMEOUT_MILLIS, null));
    appManager.enableSchedule(scheduleId);
    return appManager.getWorkflowManager(SmartWorkflow.NAME);
}
Also used: ApplicationManager (co.cask.cdap.test.ApplicationManager) KeyValueTable (co.cask.cdap.api.dataset.lib.KeyValueTable) Table (co.cask.cdap.api.dataset.table.Table) Constraint (co.cask.cdap.internal.schedule.constraint.Constraint) Schema (co.cask.cdap.api.data.schema.Schema) WorkflowId (co.cask.cdap.proto.id.WorkflowId) ScheduleId (co.cask.cdap.proto.id.ScheduleId) StructuredRecord (co.cask.cdap.api.data.format.StructuredRecord) AppRequest (co.cask.cdap.proto.artifact.AppRequest) ETLBatchConfig (co.cask.cdap.etl.proto.v2.ETLBatchConfig) ETLStage (co.cask.cdap.etl.proto.v2.ETLStage) TriggeringPropertyMapping (co.cask.cdap.etl.proto.v2.TriggeringPropertyMapping) ProgramStatusTrigger (co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger) ScheduleDetail (co.cask.cdap.proto.ScheduleDetail) ApplicationId (co.cask.cdap.proto.id.ApplicationId) ScheduleProgramInfo (co.cask.cdap.api.workflow.ScheduleProgramInfo)
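
The TriggeringPropertyMapping built in this method is what ends up in the schedule: it is serialized with Gson and stored in the schedule's properties under SmartWorkflow.TRIGGERING_PROPERTIES_MAPPING, the same mapping type Example 3 consumes. A minimal sketch of that serialization step in isolation (the constructor and GSON.toJson call mirror the example above; treat the printed JSON shape as illustrative):

import co.cask.cdap.etl.proto.v2.ArgumentMapping;
import co.cask.cdap.etl.proto.v2.PluginPropertyMapping;
import co.cask.cdap.etl.proto.v2.TriggeringPropertyMapping;
import com.google.common.collect.ImmutableList;
import com.google.gson.Gson;

public class TriggeringMappingJsonSketch {
    public static void main(String[] args) {
        TriggeringPropertyMapping propertyMapping = new TriggeringPropertyMapping(
            ImmutableList.of(new ArgumentMapping("head-arg", "middle-arg")),
            ImmutableList.of(new PluginPropertyMapping("action1", "value", "middle-plugin")));
        // This JSON string is the value stored in the schedule's properties map,
        // mirroring GSON.toJson(propertyMapping) in the example above.
        System.out.println(new Gson().toJson(propertyMapping));
    }
}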

Example 3 with PluginPropertyMapping

Use of co.cask.cdap.etl.proto.v2.PluginPropertyMapping in project cdap by caskdata.

In class SmartWorkflow, the method updateTokenWithTriggeringProperties:

private void updateTokenWithTriggeringProperties(TriggeringScheduleInfo scheduleInfo, TriggeringPropertyMapping propertiesMapping, WorkflowToken token) {
    List<ProgramStatusTriggerInfo> programStatusTriggerInfos = new ArrayList<>();
    for (TriggerInfo info : scheduleInfo.getTriggerInfos()) {
        if (info instanceof ProgramStatusTriggerInfo) {
            programStatusTriggerInfos.add((ProgramStatusTriggerInfo) info);
        }
    }
    // If there is no ProgramStatusTriggerInfo, there is no need to override the existing runtime arguments
    if (programStatusTriggerInfos.isEmpty()) {
        return;
    }
    // Currently only expecting one trigger in a schedule
    ProgramStatusTriggerInfo triggerInfo = programStatusTriggerInfos.get(0);
    BasicArguments triggeringArguments = new BasicArguments(triggerInfo.getWorkflowToken(), triggerInfo.getRuntimeArguments());
    // Get the value of each triggering pipeline argument specified in the propertiesMapping and write it into the workflow token
    List<ArgumentMapping> argumentMappings = propertiesMapping.getArguments();
    for (ArgumentMapping mapping : argumentMappings) {
        String sourceKey = mapping.getSource();
        if (sourceKey == null) {
            LOG.warn("The name of argument from the triggering pipeline cannot be null, " + "skip this argument mapping: '{}'.", mapping);
            continue;
        }
        String value = triggeringArguments.get(sourceKey);
        if (value == null) {
            LOG.warn("Runtime argument '{}' is not found in run '{}' of the triggering pipeline '{}' " + "in namespace '{}' ", sourceKey, triggerInfo.getRunId(), triggerInfo.getApplicationSpecification().getName(), triggerInfo.getNamespace());
            continue;
        }
        // Use the argument name in the triggering pipeline if target is not specified
        String targetKey = mapping.getTarget() == null ? sourceKey : mapping.getTarget();
        token.put(targetKey, value);
    }
    // Get the resolved plugin properties map from the triggering pipeline's workflow token in triggeringArguments
    Map<String, Map<String, String>> resolvedProperties =
        GSON.fromJson(triggeringArguments.get(RESOLVED_PLUGIN_PROPERTIES_MAP), STAGE_PROPERTIES_MAP);
    for (PluginPropertyMapping mapping : propertiesMapping.getPluginProperties()) {
        String stageName = mapping.getStageName();
        if (stageName == null) {
            LOG.warn("The name of the stage cannot be null in plugin property mapping, skip this mapping: '{}'.", mapping);
            continue;
        }
        Map<String, String> pluginProperties = resolvedProperties.get(stageName);
        if (pluginProperties == null) {
            LOG.warn("No plugin properties can be found with stage name '{}' in triggering pipeline '{}' " + "in namespace '{}' ", mapping.getStageName(), triggerInfo.getApplicationSpecification().getName(), triggerInfo.getNamespace());
            continue;
        }
        String sourceKey = mapping.getSource();
        if (sourceKey == null) {
            LOG.warn("The name of argument from the triggering pipeline cannot be null, " + "skip this argument mapping: '{}'.", mapping);
            continue;
        }
        String value = pluginProperties.get(sourceKey);
        if (value == null) {
            LOG.warn("No property with name '{}' can be found in plugin '{}' of the triggering pipeline '{}' " + "in namespace '{}' ", sourceKey, stageName, triggerInfo.getApplicationSpecification().getName(), triggerInfo.getNamespace());
            continue;
        }
        // Use the argument name in the triggering pipeline if target is not specified
        String targetKey = mapping.getTarget() == null ? sourceKey : mapping.getTarget();
        token.put(targetKey, value);
    }
}
Also used: ArgumentMapping (co.cask.cdap.etl.proto.v2.ArgumentMapping) ProgramStatusTriggerInfo (co.cask.cdap.api.schedule.ProgramStatusTriggerInfo) ArrayList (java.util.ArrayList) TriggerInfo (co.cask.cdap.api.schedule.TriggerInfo) PluginPropertyMapping (co.cask.cdap.etl.proto.v2.PluginPropertyMapping) BasicArguments (co.cask.cdap.etl.common.BasicArguments) Map (java.util.Map) HashMap (java.util.HashMap) LinkedHashMap (java.util.LinkedHashMap)
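
The deserialization at the top of the plugin-properties loop turns a JSON string from the triggering pipeline's workflow token back into a nested map. A small self-contained sketch, assuming STAGE_PROPERTIES_MAP is a Gson type token for Map<String, Map<String, String>> (which matches the assignment in the method above); the JSON literal here is made up for illustration:

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.util.Map;

public class StagePropertiesSketch {
    // Assumed equivalent of STAGE_PROPERTIES_MAP: resolved plugin properties
    // keyed by stage name, then by property name.
    private static final Type STAGE_PROPERTIES_MAP =
        new TypeToken<Map<String, Map<String, String>>>() { }.getType();

    public static void main(String[] args) {
        // Hypothetical payload: stage "action1" resolved its "value" property.
        String json = "{\"action1\":{\"value\":\"headPluginValue\"}}";
        Map<String, Map<String, String>> resolvedProperties = new Gson().fromJson(json, STAGE_PROPERTIES_MAP);
        // Same two-step lookup as the loop above: stage name, then property name.
        System.out.println(resolvedProperties.get("action1").get("value"));
    }
}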

Aggregations

ArgumentMapping (co.cask.cdap.etl.proto.v2.ArgumentMapping): 2 usages
PluginPropertyMapping (co.cask.cdap.etl.proto.v2.PluginPropertyMapping): 2 usages
StructuredRecord (co.cask.cdap.api.data.format.StructuredRecord): 1 usage
Schema (co.cask.cdap.api.data.schema.Schema): 1 usage
KeyValueTable (co.cask.cdap.api.dataset.lib.KeyValueTable): 1 usage
Table (co.cask.cdap.api.dataset.table.Table): 1 usage
ProgramStatusTriggerInfo (co.cask.cdap.api.schedule.ProgramStatusTriggerInfo): 1 usage
TriggerInfo (co.cask.cdap.api.schedule.TriggerInfo): 1 usage
ScheduleProgramInfo (co.cask.cdap.api.workflow.ScheduleProgramInfo): 1 usage
BasicArguments (co.cask.cdap.etl.common.BasicArguments): 1 usage
ETLBatchConfig (co.cask.cdap.etl.proto.v2.ETLBatchConfig): 1 usage
ETLStage (co.cask.cdap.etl.proto.v2.ETLStage): 1 usage
TriggeringPropertyMapping (co.cask.cdap.etl.proto.v2.TriggeringPropertyMapping): 1 usage
ProgramStatusTrigger (co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger): 1 usage
Constraint (co.cask.cdap.internal.schedule.constraint.Constraint): 1 usage
ScheduleDetail (co.cask.cdap.proto.ScheduleDetail): 1 usage
AppRequest (co.cask.cdap.proto.artifact.AppRequest): 1 usage
ApplicationId (co.cask.cdap.proto.id.ApplicationId): 1 usage
ScheduleId (co.cask.cdap.proto.id.ScheduleId): 1 usage
WorkflowId (co.cask.cdap.proto.id.WorkflowId): 1 usage