Usage example of co.cask.cdap.internal.app.runtime.workflow.BasicWorkflowToken in the project cdap by caskdata.
From the class TriggeringScheduleInfoAdapterTest, the method testSerDeserScheduleInfo:
/**
 * Verifies that a {@link TriggeringScheduleInfo} carrying one trigger of each kind
 * (program status, partition, time) survives a GSON round trip, and that the workflow
 * token and application specification inside the program status trigger are preserved.
 */
@Test
public void testSerDeserScheduleInfo() {
  BasicWorkflowToken workflowToken = new BasicWorkflowToken(1);
  workflowToken.setCurrentNode("node");
  workflowToken.put("tokenKey", "tokenVal");
  // One trigger of each supported kind.
  TriggerInfo programStatusTrigger = new DefaultProgramStatusTriggerInfo(
    "ns", Specifications.from(new WebCrawlApp()), ProgramType.WORKFLOW, "workflow",
    RunIds.generate(), ProgramStatus.COMPLETED, workflowToken,
    Collections.<String, String>emptyMap());
  TriggerInfo partitionTrigger = new DefaultPartitionTriggerInfo("ns", "ds", 10, 11);
  TriggerInfo timeTrigger = new DefaultTimeTriggerInfo("1 * * * *", 0L);
  List<TriggerInfo> triggerInfos =
    ImmutableList.of(programStatusTrigger, partitionTrigger, timeTrigger);
  TriggeringScheduleInfo scheduleInfo = new DefaultTriggeringScheduleInfo(
    "schedule", "description", triggerInfos, ImmutableMap.of("key", "value"));
  // Round-trip through JSON; re-serializing the deserialized object must reproduce
  // the same JSON string.
  String serialized = GSON.toJson(scheduleInfo);
  TriggeringScheduleInfo roundTripped = GSON.fromJson(serialized, TriggeringScheduleInfo.class);
  Assert.assertEquals(serialized, GSON.toJson(roundTripped));
  // Spot-check fields of the program status trigger that need custom (de)serialization.
  DefaultProgramStatusTriggerInfo original =
    (DefaultProgramStatusTriggerInfo) triggerInfos.get(0);
  DefaultProgramStatusTriggerInfo deserialized =
    (DefaultProgramStatusTriggerInfo) roundTripped.getTriggerInfos().get(0);
  Assert.assertEquals(original.getApplicationSpecification().getName(),
                      deserialized.getApplicationSpecification().getName());
  Assert.assertEquals(original.getWorkflowToken().getAll(),
                      deserialized.getWorkflowToken().getAll());
}
Usage example of co.cask.cdap.internal.app.runtime.workflow.BasicWorkflowToken in the project cdap by caskdata.
From the class AppMetadataStore, the method getWorkflowToken:
/**
 * Retrieves the persisted workflow token for the given workflow run.
 * <p>
 * Looks up the versioned key first; if the store upgrade has not completed and the
 * workflow is the default version, falls back to the pre-upgrade, version-less key.
 *
 * @param workflowId the workflow program id; must be of type {@link ProgramType#WORKFLOW}
 * @param workflowRunId the run id of the workflow
 * @return the stored token, or an empty zero-capacity token (which rejects updates)
 *         when no token has been recorded for the run
 */
public WorkflowToken getWorkflowToken(ProgramId workflowId, String workflowRunId) {
  Preconditions.checkArgument(ProgramType.WORKFLOW == workflowId.getType());
  // Token rows are keyed as:
  // [wft][namespace][app][version][WORKFLOW][workflowName][workflowRun]
  MDSKey versionedKey = getProgramKeyBuilder(TYPE_WORKFLOW_TOKEN,
                                             workflowId.run(workflowRunId)).build();
  BasicWorkflowToken token = get(versionedKey, BasicWorkflowToken.class);
  if (!upgradeComplete.get() && token == null
      && workflowId.getVersion().equals(ApplicationId.DEFAULT_VERSION)) {
    // Rows written before the upgrade lack the version component; retry without it,
    // but only for the default version.
    MDSKey versionlessKey =
      getVersionLessProgramKeyBuilder(TYPE_WORKFLOW_TOKEN, workflowId).add(workflowRunId).build();
    token = get(versionlessKey, BasicWorkflowToken.class);
  }
  if (token != null) {
    return token;
  }
  LOG.debug("No workflow token available for workflow: {}, runId: {}", workflowId, workflowRunId);
  // A zero-size token is safe to hand out: it permits no updates.
  return new BasicWorkflowToken(0);
}
Usage example of co.cask.cdap.internal.app.runtime.workflow.BasicWorkflowToken in the project cdap by caskdata.
From the class SparkExecutionServiceTest, the method testWorkflowToken:
/**
 * Verifies that workflow token updates sent by a Spark client — via heartbeat calls
 * while running and a final completed call — are propagated into the token held by
 * the {@link SparkExecutionService}.
 */
@Test
public void testWorkflowToken() throws Exception {
  ProgramRunId programRunId =
    new ProgramRunId("ns", "app", ProgramType.SPARK, "test", RunIds.generate().getId());
  // The service starts with an empty token positioned at the "spark" node.
  BasicWorkflowToken serviceToken = new BasicWorkflowToken(10);
  serviceToken.setCurrentNode("spark");
  SparkExecutionService service = new SparkExecutionService(
    locationFactory, InetAddress.getLoopbackAddress().getCanonicalHostName(),
    programRunId, serviceToken);
  service.startAndWait();
  try {
    SparkExecutionClient client = new SparkExecutionClient(service.getBaseURI(), programRunId);
    // Each heartbeat carries the client-side token; after every call the service-side
    // token should reflect the latest value.
    BasicWorkflowToken clientToken = new BasicWorkflowToken(10);
    clientToken.setCurrentNode("spark");
    for (int i = 0; i < 5; i++) {
      clientToken.put("key", "value" + i);
      client.heartbeat(clientToken);
      Assert.assertEquals(Value.of("value" + i), serviceToken.get("key", "spark"));
    }
    // The completed call delivers the final token state to the service.
    clientToken.put("completed", "true");
    client.completed(clientToken);
  } finally {
    service.stopAndWait();
  }
  // After completion the service-side token must contain the last values observed.
  Map<String, Value> actual = serviceToken.getAllFromNode("spark");
  Map<String, Value> expected =
    ImmutableMap.of("key", Value.of("value4"), "completed", Value.of("true"));
  Assert.assertEquals(expected, actual);
}
Aggregations