Use of co.cask.cdap.proto.id.ProgramRunId in project cdap by caskdata.
Example from the class AppMetadataStore, method getProgramRunIdMap.
/**
 * Re-keys the given map so that each {@link MDSKey} key is replaced by the
 * corresponding {@link ProgramRunId}.
 *
 * @param keymap map keyed by MDS keys
 * @return a map with the same values, keyed by program run ids; iteration order
 *         of the input map is preserved
 */
private Map<ProgramRunId, RunRecordMeta> getProgramRunIdMap(Map<MDSKey, RunRecordMeta> keymap) {
  Map<ProgramRunId, RunRecordMeta> result = new LinkedHashMap<>();
  for (Map.Entry<MDSKey, RunRecordMeta> entry : keymap.entrySet()) {
    RunRecordMeta meta = entry.getValue();
    ProgramId programId = getProgramID(entry.getKey());
    result.put(programId.run(meta.getPid()), meta);
  }
  return result;
}
Use of co.cask.cdap.proto.id.ProgramRunId in project cdap by caskdata.
Example from the class LineageAdmin, method getWorkflowProgramRunid.
/**
 * Resolves the workflow run that spawned the program run referenced by the given relation.
 *
 * @param relation relation whose program run should be looked up
 * @param runRecordMap run records keyed by program run id
 * @param workflowIdMap workflow run id to workflow {@link ProgramRunId} mapping
 * @return the workflow's {@link ProgramRunId}, or {@code null} if the relation's run has no
 *         record or carries no "workflowrunid" property
 */
@Nullable
private ProgramRunId getWorkflowProgramRunid(Relation relation, Map<ProgramRunId, RunRecordMeta> runRecordMap, Map<String, ProgramRunId> workflowIdMap) {
  // Key under which this relation's run is stored in runRecordMap
  ProgramRunId relationRunId = new ProgramRunId(relation.getProgram().getNamespace(),
                                                relation.getProgram().getApplication(),
                                                relation.getProgram().getType(),
                                                relation.getProgram().getProgram(),
                                                relation.getRun().getId());
  RunRecordMeta runRecord = runRecordMap.get(relationRunId);
  if (runRecord == null) {
    return null;
  }
  Map<String, String> properties = runRecord.getProperties();
  if (!properties.containsKey("workflowrunid")) {
    return null;
  }
  return workflowIdMap.get(properties.get("workflowrunid"));
}
Use of co.cask.cdap.proto.id.ProgramRunId in project cdap by caskdata.
Example from the class LineageAdmin, method doComputeRollupLineage.
/**
 * Rolls up lineage relations so that program runs spawned by a workflow are reported
 * under the workflow's run instead of their own.
 *
 * @param relations lineage relations keyed by {@link RelationKey}
 * @return relations with program run ids replaced by their parent workflow run ids where applicable
 * @throws NotFoundException if a referenced entity cannot be resolved
 */
private Multimap<RelationKey, Relation> doComputeRollupLineage(Multimap<RelationKey, Relation> relations) throws NotFoundException {
  // Gather the run id of every program that appears anywhere in the relations
  Set<ProgramRunId> allProgramRuns = new HashSet<>();
  for (Relation relation : Iterables.concat(relations.values())) {
    allProgramRuns.add(new ProgramRunId(relation.getProgram().getNamespace(),
                                        relation.getProgram().getApplication(),
                                        relation.getProgram().getType(),
                                        relation.getProgram().getProgram(),
                                        relation.getRun().getId()));
  }

  // Fetch run metadata for all of the collected runs
  final Map<ProgramRunId, RunRecordMeta> runRecordMap = store.getRuns(allProgramRuns);

  // Workflow run ids that spawned any of these program runs
  final Set<String> workflowIDs = getWorkflowIds(relations, runRecordMap);

  // Resolve those workflow run ids back to program run ids.
  // TODO: These scans could be expensive. CDAP-7571.
  Map<ProgramRunId, RunRecordMeta> workflowRuns = store.getRuns(ProgramRunStatus.ALL, new Predicate<RunRecordMeta>() {
    @Override
    public boolean apply(RunRecordMeta input) {
      return workflowIDs.contains(input.getPid());
    }
  });

  // Index the workflows by their run id for quick lookup during rollup
  Map<String, ProgramRunId> workflowIdMap = new HashMap<>();
  for (Map.Entry<ProgramRunId, RunRecordMeta> workflowEntry : workflowRuns.entrySet()) {
    workflowIdMap.put(workflowEntry.getValue().getPid(), workflowEntry.getKey());
  }

  // Rewrite the relations, substituting workflow run ids for spawned program run ids
  return getRollupRelations(relations, runRecordMap, workflowIdMap);
}
Use of co.cask.cdap.proto.id.ProgramRunId in project cdap by caskdata.
Example from the class LogHandler, method getRunRecordMeta.
/**
 * Fetches the run record for the given program run coordinates.
 *
 * @param namespace namespace of the program
 * @param app application name
 * @param programType type of the program
 * @param programName name of the program
 * @param run run id of the program
 * @return the run record for the run
 * @throws NotFoundException if no run record exists for the given run
 */
private RunRecordMeta getRunRecordMeta(String namespace, String app, ProgramType programType, String programName, String run) throws NotFoundException {
  ProgramRunId programRunId = new ProgramRunId(namespace, app, programType, programName, run);
  RunRecordMeta runRecord = programStore.getRun(programRunId.getParent(), programRunId.getRun());
  if (runRecord != null) {
    return runRecord;
  }
  throw new NotFoundException(programRunId);
}
Use of co.cask.cdap.proto.id.ProgramRunId in project cdap by caskdata.
Example from the class SparkExecutionServiceTest, method testWorkflowToken.
@Test
public void testWorkflowToken() throws Exception {
  ProgramRunId sparkRunId = new ProgramRunId("ns", "app", ProgramType.SPARK, "test", RunIds.generate().getId());

  // Server-side token, initially holding no values
  BasicWorkflowToken serverToken = new BasicWorkflowToken(10);
  serverToken.setCurrentNode("spark");
  SparkExecutionService service = new SparkExecutionService(
    locationFactory, InetAddress.getLoopbackAddress().getCanonicalHostName(), sparkRunId, serverToken);
  service.startAndWait();
  try {
    SparkExecutionClient client = new SparkExecutionClient(service.getBaseURI(), sparkRunId);

    // Each heartbeat should propagate the latest client-side value to the server token
    BasicWorkflowToken clientToken = new BasicWorkflowToken(10);
    clientToken.setCurrentNode("spark");
    for (int round = 0; round < 5; round++) {
      clientToken.put("key", "value" + round);
      client.heartbeat(clientToken);
      Assert.assertEquals(Value.of("value" + round), serverToken.get("key", "spark"));
    }

    clientToken.put("completed", "true");
    client.completed(clientToken);
  } finally {
    service.stopAndWait();
  }

  // After the completed call, the final token state should be visible on the server side
  Map<String, Value> expected = ImmutableMap.of("key", Value.of("value4"), "completed", Value.of("true"));
  Assert.assertEquals(expected, serverToken.getAllFromNode("spark"));
}
Aggregations