use of io.cdap.cdap.proto.id.ProgramRunId in project cdap by caskdata.
the class SystemProgramManagementService method reconcilePrograms.
private void reconcilePrograms() {
  Map<ProgramId, Arguments> enabledProgramsMap = new HashMap<>(this.programsEnabled.get());
  Set<ProgramRunId> programRunsToStop = new HashSet<>();
  // Get all current runs
  List<ProgramRuntimeService.RuntimeInfo> runtimeInfos = programRuntimeService.listAll(ProgramType.values());
  // Sort by descending order of run start time
  runtimeInfos.sort((runtimeInfo1, runtimeInfo2) -> Long.compare(
    RunIds.getTime(runtimeInfo2.getController().getProgramRunId().getRun(), TimeUnit.MILLISECONDS),
    RunIds.getTime(runtimeInfo1.getController().getProgramRunId().getRun(), TimeUnit.MILLISECONDS)));
  // Find programs to run and stop
  for (ProgramRuntimeService.RuntimeInfo runtimeInfo : runtimeInfos) {
    ProgramId programId = runtimeInfo.getProgramId();
    if (!programId.getNamespaceId().equals(NamespaceId.SYSTEM)) {
      // We care only about system programs
      continue;
    }
    // If the map doesn't have an entry, the program is disabled or this is an additional run.
    if (enabledProgramsMap.remove(programId) == null) {
      programRunsToStop.add(runtimeInfo.getController().getProgramRunId());
    }
  }
  // Start programs
  startPrograms(enabledProgramsMap);
  // Stop programs
  programRunsToStop.forEach(this::stopProgram);
}
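The comparator above works because a ProgramRunId's run string is a time-based RunId, so RunIds.getTime can recover the start timestamp from it. Below is a minimal, self-contained sketch of the same newest-first ordering over a list of ProgramRunIds; the class name, the sleep, and the import paths (in particular the package of RunIds) are illustrative assumptions rather than part of the CDAP source.
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.TimeUnit;
import io.cdap.cdap.common.app.RunIds;
import io.cdap.cdap.proto.ProgramType;
import io.cdap.cdap.proto.id.ProgramRunId;

public class RunOrderingSketch {
  public static void main(String[] args) throws InterruptedException {
    List<ProgramRunId> runs = new ArrayList<>();
    // Two runs of the same program; each run id embeds its creation time.
    runs.add(new ProgramRunId("system", "app", ProgramType.SPARK, "program", RunIds.generate().getId()));
    // Sleep a few milliseconds so the second run id carries a later timestamp.
    Thread.sleep(5);
    runs.add(new ProgramRunId("system", "app", ProgramType.SPARK, "program", RunIds.generate().getId()));
    // Newest run first, mirroring the comparator in reconcilePrograms().
    runs.sort(Comparator.comparingLong(
      (ProgramRunId run) -> RunIds.getTime(run.getRun(), TimeUnit.MILLISECONDS)).reversed());
    runs.forEach(run -> System.out.println(run.getRun()));
  }
}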
use of io.cdap.cdap.proto.id.ProgramRunId in project cdap by caskdata.
the class DelayedProgramControllerTest method testListener.
@Test
public void testListener() throws InterruptedException {
  ProgramRunId programRunId = new ProgramRunId("ns", "app", ProgramType.SPARK, "program", RunIds.generate().getId());
  DelayedProgramController controller = new DelayedProgramController(programRunId);
  CountDownLatch aliveLatch = new CountDownLatch(1);
  CountDownLatch killedLatch = new CountDownLatch(1);
  controller.addListener(new AbstractListener() {
    @Override
    public void init(ProgramController.State currentState, @Nullable Throwable cause) {
      if (currentState == ProgramController.State.ALIVE) {
        alive();
      } else if (currentState == ProgramController.State.KILLED) {
        killed();
      }
    }
    @Override
    public void alive() {
      aliveLatch.countDown();
    }
    @Override
    public void killed() {
      killedLatch.countDown();
    }
  }, Threads.SAME_THREAD_EXECUTOR);
  // Perform state change on the delegate
  ProgramController delegate = new AbstractProgramController(programRunId) {
    {
      started();
    }
    @Override
    protected void doSuspend() {
      // no-op
    }
    @Override
    protected void doResume() {
      // no-op
    }
    @Override
    protected void doStop() {
      // no-op
    }
    @Override
    protected void doCommand(String name, Object value) {
      // no-op
    }
  };
  controller.setProgramController(delegate);
  Assert.assertTrue(aliveLatch.await(5, TimeUnit.SECONDS));
  controller.stop();
  Assert.assertTrue(killedLatch.await(5, TimeUnit.SECONDS));
}
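The test builds its ProgramRunId with the five-argument constructor; elsewhere on this page the same kind of id is produced through the fluent entity-id chain. A small hedged sketch showing that the two forms are interchangeable; the class name and the assumption that CDAP entity ids use value-based equality are mine.
import io.cdap.cdap.common.app.RunIds;
import io.cdap.cdap.proto.ProgramType;
import io.cdap.cdap.proto.id.NamespaceId;
import io.cdap.cdap.proto.id.ProgramRunId;

public class ProgramRunIdSketch {
  public static void main(String[] args) {
    String run = RunIds.generate().getId();
    // Direct constructor, as used in the test above.
    ProgramRunId direct = new ProgramRunId("ns", "app", ProgramType.WORKFLOW, "wf", run);
    // Fluent entity-id chain, as used in the workflow examples on this page.
    ProgramRunId fluent = new NamespaceId("ns").app("app").workflow("wf").run(run);
    // Assumed: entity ids compare by their components, so the two are equal.
    System.out.println(direct.equals(fluent));
  }
}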
use of io.cdap.cdap.proto.id.ProgramRunId in project cdap by caskdata.
the class GetWorkflowTokenCommand method perform.
@Override
public void perform(Arguments arguments, PrintStream output) throws Exception {
  String[] programIdParts = arguments.get(elementType.getArgumentName().toString()).split("\\.");
  ApplicationId appId = cliConfig.getCurrentNamespace().app(programIdParts[0]);
  if (programIdParts.length < 2) {
    throw new CommandInputError(this);
  }
  ProgramId workflowId = appId.workflow(programIdParts[1]);
  ProgramRunId runId = workflowId.run(arguments.get(ArgumentName.RUN_ID.toString()));
  WorkflowToken.Scope workflowTokenScope = null;
  if (arguments.hasArgument(ArgumentName.WORKFLOW_TOKEN_SCOPE.toString())) {
    String scope = arguments.get(ArgumentName.WORKFLOW_TOKEN_SCOPE.toString()).toUpperCase();
    workflowTokenScope = WorkflowToken.Scope.valueOf(scope);
  }
  String key = null;
  if (arguments.hasArgument(ArgumentName.WORKFLOW_TOKEN_KEY.toString())) {
    key = arguments.get(ArgumentName.WORKFLOW_TOKEN_KEY.toString());
  }
  Table table;
  if (arguments.hasArgument(ArgumentName.WORKFLOW_NODE.toString())) {
    table = getWorkflowToken(runId, workflowTokenScope, key, arguments.get(ArgumentName.WORKFLOW_NODE.toString()));
  } else {
    table = getWorkflowToken(runId, workflowTokenScope, key);
  }
  cliConfig.getTableRenderer().render(cliConfig, output, table);
}
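The command's parsing of the <app>.<workflow> argument into a ProgramRunId can be isolated into a tiny helper. A hedged sketch using only the entity-id calls shown above; the class, method, and sample names are illustrative, not part of the CDAP CLI.
import io.cdap.cdap.proto.id.ApplicationId;
import io.cdap.cdap.proto.id.NamespaceId;
import io.cdap.cdap.proto.id.ProgramId;
import io.cdap.cdap.proto.id.ProgramRunId;

public class WorkflowRunIdParser {
  // Turns an "app.workflow" token plus a run id into a ProgramRunId,
  // mirroring the parsing done in GetWorkflowTokenCommand.perform above.
  static ProgramRunId parse(NamespaceId namespace, String appDotWorkflow, String runId) {
    String[] parts = appDotWorkflow.split("\\.");
    if (parts.length < 2) {
      throw new IllegalArgumentException("Expected <app>.<workflow>, got: " + appDotWorkflow);
    }
    ApplicationId appId = namespace.app(parts[0]);
    ProgramId workflowId = appId.workflow(parts[1]);
    return workflowId.run(runId);
  }

  public static void main(String[] args) {
    // Hypothetical app, workflow, and run id, used only to exercise the helper.
    ProgramRunId runId = parse(NamespaceId.DEFAULT, "PurchaseApp.PurchaseWorkflow", "some-run-id");
    System.out.println(runId);
  }
}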
use of io.cdap.cdap.proto.id.ProgramRunId in project cdap by caskdata.
the class WorkflowClientTestRun method testWorkflowClient.
@Test
public void testWorkflowClient() throws Exception {
  String keyValueTableType = "io.cdap.cdap.api.dataset.lib.KeyValueTable";
  String filesetType = "io.cdap.cdap.api.dataset.lib.FileSet";
  String outputPath = new File(TMP_FOLDER.newFolder(), "output").getAbsolutePath();
  Map<String, String> runtimeArgs = ImmutableMap.of("inputPath", createInput("input"), "outputPath", outputPath, "dataset.*.keep.local", "true");
  final WorkflowId workflowId = NamespaceId.DEFAULT.app(AppWithWorkflow.NAME).workflow(AppWithWorkflow.SampleWorkflow.NAME);
  programClient.start(workflowId, false, runtimeArgs);
  assertProgramRuns(programClient, workflowId, ProgramRunStatus.COMPLETED, 1, 120);
  List<RunRecord> workflowRuns = programClient.getProgramRuns(workflowId, ProgramRunStatus.COMPLETED.name(), 0, Long.MAX_VALUE, 10);
  Assert.assertEquals(1, workflowRuns.size());
  String runId = workflowRuns.get(0).getPid();
  ProgramRunId workflowRunId = workflowId.run(runId);
  // Invalid test scenarios
  try {
    ProgramId nonExistentWorkflowId = new ProgramId(NamespaceId.DEFAULT.getNamespace(), AppWithWorkflow.NAME, ProgramType.WORKFLOW, "NonExistentWorkflow");
    ProgramRunId nonExistentWorkflowRun = nonExistentWorkflowId.run(runId);
    workflowClient.getWorkflowToken(nonExistentWorkflowRun);
    Assert.fail("Should not find a workflow token for a non-existing workflow");
  } catch (NotFoundException expected) {
    // expected
  }
  try {
    ProgramRunId invalidRunId = workflowId.run(RunIds.generate().getId());
    workflowClient.getWorkflowToken(invalidRunId);
    Assert.fail("Should not find a workflow token for a random run id");
  } catch (NotFoundException expected) {
    // expected
  }
  // Valid test scenarios
  WorkflowTokenDetail workflowToken = workflowClient.getWorkflowToken(workflowRunId);
  Assert.assertEquals(5, workflowToken.getTokenData().size());
  workflowToken = workflowClient.getWorkflowToken(workflowRunId, WorkflowToken.Scope.SYSTEM);
  Assert.assertTrue(workflowToken.getTokenData().size() > 0);
  workflowToken = workflowClient.getWorkflowToken(workflowRunId, "start_time");
  Map<String, List<WorkflowTokenDetail.NodeValueDetail>> tokenData = workflowToken.getTokenData();
  Assert.assertEquals(AppWithWorkflow.WordCountMapReduce.NAME, tokenData.get("start_time").get(0).getNode());
  Assert.assertTrue(Long.parseLong(tokenData.get("start_time").get(0).getValue()) < System.currentTimeMillis());
  workflowToken = workflowClient.getWorkflowToken(workflowRunId, WorkflowToken.Scope.USER, "action_type");
  tokenData = workflowToken.getTokenData();
  Assert.assertEquals(AppWithWorkflow.WordCountMapReduce.NAME, tokenData.get("action_type").get(0).getNode());
  Assert.assertEquals("MapReduce", tokenData.get("action_type").get(0).getValue());
  String nodeName = AppWithWorkflow.SampleWorkflow.FIRST_ACTION;
  WorkflowTokenNodeDetail workflowTokenAtNode = workflowClient.getWorkflowTokenAtNode(workflowRunId, nodeName);
  Assert.assertEquals(AppWithWorkflow.DummyAction.TOKEN_VALUE, workflowTokenAtNode.getTokenDataAtNode().get(AppWithWorkflow.DummyAction.TOKEN_KEY));
  workflowTokenAtNode = workflowClient.getWorkflowTokenAtNode(workflowRunId, nodeName, WorkflowToken.Scope.SYSTEM);
  Assert.assertEquals(0, workflowTokenAtNode.getTokenDataAtNode().size());
  workflowTokenAtNode = workflowClient.getWorkflowTokenAtNode(workflowRunId, nodeName, AppWithWorkflow.DummyAction.TOKEN_KEY);
  Assert.assertEquals(AppWithWorkflow.DummyAction.TOKEN_VALUE, workflowTokenAtNode.getTokenDataAtNode().get(AppWithWorkflow.DummyAction.TOKEN_KEY));
  String reduceOutputRecordsCounter = "org.apache.hadoop.mapreduce.TaskCounter.REDUCE_OUTPUT_RECORDS";
  workflowTokenAtNode = workflowClient.getWorkflowTokenAtNode(workflowRunId, AppWithWorkflow.WordCountMapReduce.NAME, WorkflowToken.Scope.SYSTEM, reduceOutputRecordsCounter);
  Assert.assertEquals(6, Integer.parseInt(workflowTokenAtNode.getTokenDataAtNode().get(reduceOutputRecordsCounter)));
  Map<String, DatasetSpecificationSummary> localDatasetSummaries = workflowClient.getWorkflowLocalDatasets(workflowRunId);
  Assert.assertEquals(2, localDatasetSummaries.size());
  DatasetSpecificationSummary keyValueTableSummary = new DatasetSpecificationSummary("MyTable." + runId, keyValueTableType, ImmutableMap.of("foo", "bar"));
  Assert.assertEquals(keyValueTableSummary, localDatasetSummaries.get("MyTable"));
  DatasetSpecificationSummary filesetSummary = new DatasetSpecificationSummary("MyFile." + runId, filesetType, ImmutableMap.of("anotherFoo", "anotherBar"));
  Assert.assertEquals(filesetSummary, localDatasetSummaries.get("MyFile"));
  workflowClient.deleteWorkflowLocalDatasets(workflowRunId);
  localDatasetSummaries = workflowClient.getWorkflowLocalDatasets(workflowRunId);
  Assert.assertEquals(0, localDatasetSummaries.size());
  Map<String, WorkflowNodeStateDetail> nodeStates = workflowClient.getWorkflowNodeStates(workflowRunId);
  Assert.assertEquals(3, nodeStates.size());
  WorkflowNodeStateDetail nodeState = nodeStates.get(AppWithWorkflow.SampleWorkflow.FIRST_ACTION);
  Assert.assertTrue(AppWithWorkflow.SampleWorkflow.FIRST_ACTION.equals(nodeState.getNodeId()));
  Assert.assertTrue(NodeStatus.COMPLETED == nodeState.getNodeStatus());
  nodeState = nodeStates.get(AppWithWorkflow.SampleWorkflow.SECOND_ACTION);
  Assert.assertTrue(AppWithWorkflow.SampleWorkflow.SECOND_ACTION.equals(nodeState.getNodeId()));
  Assert.assertTrue(NodeStatus.COMPLETED == nodeState.getNodeStatus());
  nodeState = nodeStates.get(AppWithWorkflow.SampleWorkflow.WORD_COUNT_MR);
  Assert.assertTrue(AppWithWorkflow.SampleWorkflow.WORD_COUNT_MR.equals(nodeState.getNodeId()));
  Assert.assertTrue(NodeStatus.COMPLETED == nodeState.getNodeStatus());
}
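For reference when reading the token assertions above, here is a hedged sketch that walks a WorkflowTokenDetail and prints each key together with the node that recorded it. The helper class and the import paths are my assumptions; the accessor calls mirror the ones used in the test.
import java.util.List;
import java.util.Map;
import io.cdap.cdap.api.workflow.WorkflowToken;
import io.cdap.cdap.client.WorkflowClient;
import io.cdap.cdap.proto.WorkflowTokenDetail;
import io.cdap.cdap.proto.id.ProgramRunId;

public class WorkflowTokenDump {
  // Prints every user-scope token key with the node that recorded it,
  // given a client and a workflow run id like the ones in the test above.
  static void dumpUserTokens(WorkflowClient workflowClient, ProgramRunId workflowRunId) throws Exception {
    WorkflowTokenDetail token = workflowClient.getWorkflowToken(workflowRunId, WorkflowToken.Scope.USER);
    for (Map.Entry<String, List<WorkflowTokenDetail.NodeValueDetail>> entry : token.getTokenData().entrySet()) {
      for (WorkflowTokenDetail.NodeValueDetail nodeValue : entry.getValue()) {
        System.out.println(entry.getKey() + " = " + nodeValue.getValue()
                           + " (set by node " + nodeValue.getNode() + ")");
      }
    }
  }
}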
use of io.cdap.cdap.proto.id.ProgramRunId in project cdap by caskdata.
the class LogHttpHandler method runIdPrev.
@GET
@Path("/namespaces/{namespace-id}/apps/{app-id}/{program-type}/{program-id}/runs/{run-id}/logs/prev")
public void runIdPrev(HttpRequest request, HttpResponder responder,
                      @PathParam("namespace-id") String namespaceId,
                      @PathParam("app-id") String appId,
                      @PathParam("program-type") String programType,
                      @PathParam("program-id") String programId,
                      @PathParam("run-id") String runId,
                      @QueryParam("max") @DefaultValue("50") int maxEvents,
                      @QueryParam("fromOffset") @DefaultValue("") String fromOffsetStr,
                      @QueryParam("escape") @DefaultValue("true") boolean escape,
                      @QueryParam("filter") @DefaultValue("") String filterStr,
                      @QueryParam("format") @DefaultValue("text") String format,
                      @QueryParam("suppress") List<String> suppress) throws Exception {
  ensureVisibilityOnProgram(namespaceId, appId, programType, programId);
  ProgramType type = ProgramType.valueOfCategoryName(programType);
  ProgramRunId programRunId = new ProgramRunId(namespaceId, appId, type, programId, runId);
  RunRecordDetail runRecord = getRunRecordMeta(programRunId);
  LoggingContext loggingContext = LoggingContextHelper.getLoggingContextWithRunId(programRunId, runRecord.getSystemArgs());
  doPrev(logReader, responder, loggingContext, maxEvents, fromOffsetStr, escape, filterStr, runRecord, format, suppress);
}
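The handler turns the REST path's program-type segment into a ProgramType via valueOfCategoryName and then assembles a ProgramRunId from the remaining path segments. A standalone sketch of that translation; the sample path values, including the assumption that "workflows" is the category name for workflows, are illustrative.
import io.cdap.cdap.proto.ProgramType;
import io.cdap.cdap.proto.id.ProgramRunId;

public class LogPathSketch {
  public static void main(String[] args) {
    // Path segments as they might appear in
    // .../namespaces/default/apps/PurchaseApp/workflows/PurchaseWorkflow/runs/<run-id>/logs/prev
    String namespaceId = "default";
    String appId = "PurchaseApp";
    String programCategory = "workflows"; // assumed category name; other program types follow the same pattern
    String programId = "PurchaseWorkflow";
    String runId = "some-run-id";
    // valueOfCategoryName maps the REST path category onto the ProgramType enum,
    // which is then combined with the other segments into a ProgramRunId.
    ProgramType type = ProgramType.valueOfCategoryName(programCategory);
    ProgramRunId programRunId = new ProgramRunId(namespaceId, appId, type, programId, runId);
    System.out.println(programRunId);
  }
}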