
Example 1 with WorkflowId

Use of co.cask.cdap.proto.id.WorkflowId in project cdap by caskdata.

The class ProgramLifecycleHttpHandler, method doGetSchedules.

protected void doGetSchedules(HttpResponder responder, String namespace, String app, String version,
                              @Nullable String workflow, @Nullable String format)
    throws NotFoundException, BadRequestException {
    boolean asScheduleSpec = returnScheduleAsSpec(format);
    ApplicationId applicationId = new ApplicationId(namespace, app, version);
    ApplicationSpecification appSpec = store.getApplication(applicationId);
    if (appSpec == null) {
        throw new NotFoundException(applicationId);
    }
    List<ProgramSchedule> schedules;
    if (workflow != null) {
        // A workflow was named: verify it exists in the app spec, then list only its schedules.
        WorkflowId workflowId = applicationId.workflow(workflow);
        if (appSpec.getWorkflows().get(workflow) == null) {
            throw new NotFoundException(workflowId);
        }
        schedules = programScheduler.listSchedules(workflowId);
    } else {
        // No workflow given: list all schedules of the application.
        schedules = programScheduler.listSchedules(applicationId);
    }
    List<ScheduleDetail> details = Schedulers.toScheduleDetails(schedules);
    if (asScheduleSpec) {
        // Return the older ScheduleSpecification format when requested via the format parameter.
        List<ScheduleSpecification> specs = ScheduleDetail.toScheduleSpecs(details);
        responder.sendJson(HttpResponseStatus.OK, specs, Schedulers.SCHEDULE_SPECS_TYPE, GSON_FOR_SCHEDULES);
    } else {
        responder.sendJson(HttpResponseStatus.OK, details, Schedulers.SCHEDULE_DETAILS_TYPE, GSON_FOR_SCHEDULES);
    }
}
Also used : ApplicationSpecification(co.cask.cdap.api.app.ApplicationSpecification) ProgramSchedule(co.cask.cdap.internal.app.runtime.schedule.ProgramSchedule) NamespaceNotFoundException(co.cask.cdap.common.NamespaceNotFoundException) NotFoundException(co.cask.cdap.common.NotFoundException) ScheduleDetail(co.cask.cdap.proto.ScheduleDetail) ApplicationId(co.cask.cdap.proto.id.ApplicationId) WorkflowId(co.cask.cdap.proto.id.WorkflowId) ScheduleSpecification(co.cask.cdap.api.schedule.ScheduleSpecification)
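
Note that the WorkflowId in this handler is derived from its parent ApplicationId rather than built from raw strings. A minimal sketch of that derivation, using the two constructors shown above (the namespace, app, version, and workflow names are illustrative):

import co.cask.cdap.proto.id.ApplicationId;
import co.cask.cdap.proto.id.WorkflowId;

// Build the application id from namespace, app name, and version, then
// derive the workflow id as a child entity, as doGetSchedules does above.
ApplicationId applicationId = new ApplicationId("default", "PurchaseApp", "1.0.0");
WorkflowId workflowId = applicationId.workflow("PurchaseWorkflow");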

Example 2 with WorkflowId

Use of co.cask.cdap.proto.id.WorkflowId in project cdap by caskdata.

The class DefaultStore, method setStop.

@Override
public void setStop(final ProgramId id, final String pid, final long endTime, final ProgramRunStatus runStatus,
                    final BasicThrowable failureCause) {
    Preconditions.checkArgument(runStatus != null, "Run state of program run should be defined");
    Transactions.executeUnchecked(transactional, new TxRunnable() {

        @Override
        public void run(DatasetContext context) throws Exception {
            AppMetadataStore metaStore = getAppMetadataStore(context);
            metaStore.recordProgramStop(id, pid, endTime, runStatus, failureCause);
            // Log completed workflow runs to the workflow dataset.
            if (id.getType() == ProgramType.WORKFLOW && runStatus == ProgramRunStatus.COMPLETED) {
                WorkflowId workflowId = new WorkflowId(id.getParent(), id.getProgram());
                recordCompletedWorkflow(metaStore, getWorkflowDataset(context), workflowId, pid);
            }
            // todo: delete old history data
        }
    });
}
Also used : TxRunnable(co.cask.cdap.api.TxRunnable) DatasetContext(co.cask.cdap.api.data.DatasetContext) WorkflowId(co.cask.cdap.proto.id.WorkflowId) TransactionFailureException(org.apache.tephra.TransactionFailureException) ProgramNotFoundException(co.cask.cdap.common.ProgramNotFoundException) ApplicationNotFoundException(co.cask.cdap.common.ApplicationNotFoundException) TransactionNotInProgressException(org.apache.tephra.TransactionNotInProgressException) TransactionConflictException(org.apache.tephra.TransactionConflictException) DatasetManagementException(co.cask.cdap.api.dataset.DatasetManagementException) NoSuchElementException(java.util.NoSuchElementException) IOException(java.io.IOException)
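
Here the WorkflowId is reconstructed from a generic ProgramId via its parent application. A hedged sketch of that conversion as a hypothetical helper (asWorkflowId is not a CDAP method; the type guard mirrors the check in setStop above):

import javax.annotation.Nullable;
import co.cask.cdap.proto.ProgramType;
import co.cask.cdap.proto.id.ProgramId;
import co.cask.cdap.proto.id.WorkflowId;

// Hypothetical helper: convert a generic ProgramId to a WorkflowId, guarded
// by the same type check as setStop above; returns null for non-workflows.
@Nullable
static WorkflowId asWorkflowId(ProgramId programId) {
    if (programId.getType() != ProgramType.WORKFLOW) {
        return null;
    }
    // getParent() yields the enclosing ApplicationId; getProgram() the program name.
    return new WorkflowId(programId.getParent(), programId.getProgram());
}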

Example 3 with WorkflowId

Use of co.cask.cdap.proto.id.WorkflowId in project cdap by caskdata.

The class DataPipelineTest, method deployPipelineWithSchedule.

private WorkflowManager deployPipelineWithSchedule(String pipelineName, Engine engine, String triggeringPipelineName,
                                                   ArgumentMapping key1Mapping, String expectedKey1Value,
                                                   PluginPropertyMapping key2Mapping, String expectedKey2Value) throws Exception {
    String tableName = "actionScheduleTable" + pipelineName + engine;
    String sourceName = "macroActionWithScheduleInput-" + pipelineName + engine;
    String sinkName = "macroActionWithScheduleOutput-" + pipelineName + engine;
    String key1 = key1Mapping.getTarget();
    String key2 = key2Mapping.getTarget();
    ETLBatchConfig etlConfig = ETLBatchConfig.builder("* * * * *")
        .addStage(new ETLStage("action1", MockAction.getPlugin(tableName, "row1", "column1", String.format("${%s}", key1))))
        .addStage(new ETLStage("action2", MockAction.getPlugin(tableName, "row2", "column2", String.format("${%s}", key2))))
        .addStage(new ETLStage("source", MockSource.getPlugin(sourceName)))
        .addStage(new ETLStage("filter1", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key1))))
        .addStage(new ETLStage("filter2", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key2))))
        .addStage(new ETLStage("sink", MockSink.getPlugin(sinkName)))
        .addConnection("action1", "action2")
        .addConnection("action2", "source")
        .addConnection("source", "filter1")
        .addConnection("filter1", "filter2")
        .addConnection("filter2", "sink")
        .setEngine(engine)
        .build();
    AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
    ApplicationId appId = NamespaceId.DEFAULT.app(pipelineName);
    // There should be only two programs: one workflow and one mapreduce/spark.
    ApplicationManager appManager = deployApplication(appId, appRequest);
    Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
    // Use expectedKey1Value and expectedKey2Value as the names of two records, so that the
    // filters drop them and only the record "samuel" reaches the sink.
    StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
    StructuredRecord recordKey1Value = StructuredRecord.builder(schema).set("name", expectedKey1Value).build();
    StructuredRecord recordKey2Value = StructuredRecord.builder(schema).set("name", expectedKey2Value).build();
    // Write the three records to the source.
    DataSetManager<Table> inputManager = getDataset(sourceName);
    MockSource.writeInput(inputManager, ImmutableList.of(recordSamuel, recordKey1Value, recordKey2Value));
    String defaultNamespace = NamespaceId.DEFAULT.getNamespace();
    // Use properties from the triggering pipeline as values for the runtime arguments key1 and key2.
    TriggeringPropertyMapping propertyMapping =
        new TriggeringPropertyMapping(ImmutableList.of(key1Mapping), ImmutableList.of(key2Mapping));
    // Fire the schedule when the triggering pipeline's workflow completes.
    ProgramStatusTrigger completeTrigger = new ProgramStatusTrigger(
        new WorkflowId(defaultNamespace, triggeringPipelineName, SmartWorkflow.NAME),
        ImmutableSet.of(ProgramStatus.COMPLETED));
    ScheduleId scheduleId = appId.schedule("completeSchedule");
    appManager.addSchedule(new ScheduleDetail(
        scheduleId.getNamespace(), scheduleId.getApplication(), scheduleId.getVersion(), scheduleId.getSchedule(), "",
        new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, SmartWorkflow.NAME),
        ImmutableMap.of(SmartWorkflow.TRIGGERING_PROPERTIES_MAPPING, GSON.toJson(propertyMapping)),
        completeTrigger, ImmutableList.<Constraint>of(), Schedulers.JOB_QUEUE_TIMEOUT_MILLIS, null));
    appManager.enableSchedule(scheduleId);
    return appManager.getWorkflowManager(SmartWorkflow.NAME);
}
Also used : ApplicationManager(co.cask.cdap.test.ApplicationManager) KeyValueTable(co.cask.cdap.api.dataset.lib.KeyValueTable) Table(co.cask.cdap.api.dataset.table.Table) Constraint(co.cask.cdap.internal.schedule.constraint.Constraint) Schema(co.cask.cdap.api.data.schema.Schema) WorkflowId(co.cask.cdap.proto.id.WorkflowId) ScheduleId(co.cask.cdap.proto.id.ScheduleId) StructuredRecord(co.cask.cdap.api.data.format.StructuredRecord) AppRequest(co.cask.cdap.proto.artifact.AppRequest) ETLBatchConfig(co.cask.cdap.etl.proto.v2.ETLBatchConfig) ETLStage(co.cask.cdap.etl.proto.v2.ETLStage) TriggeringPropertyMapping(co.cask.cdap.etl.proto.v2.TriggeringPropertyMapping) ProgramStatusTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger) ScheduleDetail(co.cask.cdap.proto.ScheduleDetail) ApplicationId(co.cask.cdap.proto.id.ApplicationId) ScheduleProgramInfo(co.cask.cdap.api.workflow.ScheduleProgramInfo)
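
In this test the WorkflowId names the triggering pipeline's workflow inside a ProgramStatusTrigger. A minimal sketch of just that trigger construction, under the same assumptions as the test ("upstreamPipeline" is an illustrative pipeline name, and the ProgramStatus import is assumed from the usage above):

import co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger;
import co.cask.cdap.proto.id.WorkflowId;
import com.google.common.collect.ImmutableSet;

// Fire a schedule when the named pipeline's SmartWorkflow completes successfully.
WorkflowId triggeringWorkflow = new WorkflowId("default", "upstreamPipeline", SmartWorkflow.NAME);
ProgramStatusTrigger onComplete =
    new ProgramStatusTrigger(triggeringWorkflow, ImmutableSet.of(ProgramStatus.COMPLETED));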

Example 4 with WorkflowId

Use of co.cask.cdap.proto.id.WorkflowId in project cdap by caskdata.

The class WorkflowStatsSLAHttpHandlerTest, method testCompare.

@Test
public void testCompare() throws Exception {
    deploy(WorkflowApp.class);
    String workflowName = "FunWorkflow";
    String mapreduceName = "ClassicWordCount";
    String sparkName = "SparkWorkflowTest";
    WorkflowId workflowProgram = WORKFLOW_APP.workflow(workflowName);
    ProgramId mapreduceProgram = WORKFLOW_APP.mr(mapreduceName);
    ProgramId sparkProgram = WORKFLOW_APP.spark(sparkName);
    List<RunId> workflowRunIdList = setupRuns(workflowProgram, mapreduceProgram, sparkProgram, store, 2);
    RunId workflowRun1 = workflowRunIdList.get(0);
    RunId workflowRun2 = workflowRunIdList.get(1);
    String request = String.format("%s/namespaces/%s/apps/%s/workflows/%s/runs/%s/compare?other-run-id=%s",
        Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(), WorkflowApp.class.getSimpleName(),
        workflowProgram.getProgram(), workflowRun1.getId(), workflowRun2.getId());
    HttpResponse response = doGet(request);
    Collection<WorkflowStatsComparison.ProgramNodes> workflowStatistics =
        readResponse(response, new TypeToken<Collection<WorkflowStatsComparison.ProgramNodes>>() { }.getType());
    Assert.assertNotNull(workflowStatistics.iterator().next());
    Assert.assertEquals(2, workflowStatistics.size());
    for (WorkflowStatsComparison.ProgramNodes node : workflowStatistics) {
        if (node.getProgramType() == ProgramType.MAPREDUCE) {
            Assert.assertEquals(38L, (long) node.getWorkflowProgramDetailsList().get(0)
                .getMetrics().get(TaskCounter.MAP_INPUT_RECORDS.name()));
        }
    }
}
Also used : WorkflowApp(co.cask.cdap.WorkflowApp) WorkflowStatsComparison(co.cask.cdap.proto.WorkflowStatsComparison) TypeToken(com.google.gson.reflect.TypeToken) HttpResponse(org.apache.http.HttpResponse) WorkflowId(co.cask.cdap.proto.id.WorkflowId) ProgramId(co.cask.cdap.proto.id.ProgramId) RunId(org.apache.twill.api.RunId) Test(org.junit.Test)

Example 5 with WorkflowId

Use of co.cask.cdap.proto.id.WorkflowId in project cdap by caskdata.

The class WorkflowHttpHandlerTest, method testKillSuspendedWorkflow.

@Category(XSlowTests.class)
@Test
public void testKillSuspendedWorkflow() throws Exception {
    HttpResponse response = deploy(SleepingWorkflowApp.class, Constants.Gateway.API_VERSION_3_TOKEN, TEST_NAMESPACE2);
    Assert.assertEquals(200, response.getStatusLine().getStatusCode());
    WorkflowId workflow = new WorkflowId(TEST_NAMESPACE2, "SleepWorkflowApp", "SleepWorkflow");
    // Start the workflow with Long.MAX_VALUE as the sleep time, so it never completes on its own.
    startProgram(workflow, Collections.singletonMap("sleep.ms", Long.toString(Long.MAX_VALUE)), 200);
    waitState(workflow, ProgramStatus.RUNNING.name());
    String runId = getRunIdOfRunningProgram(Id.Program.fromEntityId(workflow));
    suspendWorkflow(Id.Program.fromEntityId(workflow), runId, 200);
    // The workflow is now SUSPENDED, which the program-level status reports as STOPPED.
    waitState(workflow, ProgramStatus.STOPPED.name());
    stopProgram(Id.Program.fromEntityId(workflow), runId, 200);
    waitState(workflow, ProgramStatus.STOPPED.name());
    verifyProgramRuns(Id.Program.fromEntityId(workflow), ProgramRunStatus.KILLED, 0);
}
Also used : HttpResponse(org.apache.http.HttpResponse) WorkflowId(co.cask.cdap.proto.id.WorkflowId) Category(org.junit.experimental.categories.Category) Test(org.junit.Test)
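
Across these tests, a WorkflowId's fields map directly onto the v3 REST path segments used by the handlers. A hypothetical helper showing that mapping (workflowPath is not a CDAP method; the "/v3" literal is assumed from the request built in Example 4):

// Render the REST path for a workflow, mirroring the v3 URL format used above.
static String workflowPath(WorkflowId id) {
    return String.format("/v3/namespaces/%s/apps/%s/workflows/%s",
        id.getNamespace(), id.getApplication(), id.getProgram());
}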

Aggregations

WorkflowId (co.cask.cdap.proto.id.WorkflowId): 18 usages
ApplicationId (co.cask.cdap.proto.id.ApplicationId): 8 usages
ProgramId (co.cask.cdap.proto.id.ProgramId): 7 usages
Test (org.junit.Test): 7 usages
NotFoundException (co.cask.cdap.common.NotFoundException): 5 usages
RunRecord (co.cask.cdap.proto.RunRecord): 5 usages
ApplicationSpecification (co.cask.cdap.api.app.ApplicationSpecification): 4 usages
Id (co.cask.cdap.common.id.Id): 4 usages
ScheduleDetail (co.cask.cdap.proto.ScheduleDetail): 4 usages
GET (javax.ws.rs.GET): 4 usages
Path (javax.ws.rs.Path): 4 usages
BadRequestException (co.cask.cdap.common.BadRequestException): 3 usages
WorkflowTokenDetail (co.cask.cdap.proto.WorkflowTokenDetail): 3 usages
TypeToken (com.google.gson.reflect.TypeToken): 3 usages
File (java.io.File): 3 usages
IOException (java.io.IOException): 3 usages
ArrayList (java.util.ArrayList): 3 usages
HttpResponse (org.apache.http.HttpResponse): 3 usages
WorkflowApp (co.cask.cdap.WorkflowApp): 2 usages
ApplicationNotFoundException (co.cask.cdap.common.ApplicationNotFoundException): 2 usages