Usage example of org.opencastproject.assetmanager.util.Workflows in the Opencast project (opencast/opencast):
the createNewTask method of the TasksEndpoint class.
@POST
@Path("/new")
@RestQuery(name = "createNewTask", description = "Creates a new task by the given metadata as JSON", returnDescription = "The task identifiers", restParameters = { @RestParameter(name = "metadata", isRequired = true, description = "The metadata as JSON", type = RestParameter.Type.TEXT) }, reponses = { @RestResponse(responseCode = HttpServletResponse.SC_CREATED, description = "Task successfully added"), @RestResponse(responseCode = SC_NOT_FOUND, description = "If the workflow definition is not found"), @RestResponse(responseCode = SC_BAD_REQUEST, description = "If the metadata is not set or couldn't be parsed") })
public Response createNewTask(@FormParam("metadata") String metadata) throws NotFoundException {
  // Creates workflow "tasks" on the latest archived version of each listed event.
  // Expected metadata JSON: { "workflow": <id>, "eventIds": [...], "configuration": {...} }
  if (StringUtils.isBlank(metadata)) {
    logger.warn("No metadata set");
    return RestUtil.R.badRequest("No metadata set");
  }
  Gson gson = new Gson();
  Map metadataJson;
  try {
    metadataJson = gson.fromJson(metadata, Map.class);
  } catch (Exception e) {
    // Pass the exception as the last argument so SLF4J logs the stack trace itself.
    logger.warn("Unable to parse metadata {}", metadata, e);
    return RestUtil.R.badRequest("Unable to parse metadata");
  }
  String workflowId = (String) metadataJson.get("workflow");
  if (StringUtils.isBlank(workflowId)) {
    return RestUtil.R.badRequest("No workflow set");
  }
  List eventIds = (List) metadataJson.get("eventIds");
  if (eventIds == null) {
    return RestUtil.R.badRequest("No eventIds set");
  }
  Map<String, String> configuration = (Map<String, String>) metadataJson.get("configuration");
  if (configuration == null) {
    configuration = new HashMap<>();
  } else {
    // The event ids are passed separately; they must not leak into the workflow configuration.
    configuration.keySet().removeIf(confKey -> StringUtils.equalsIgnoreCase("eventIds", confKey));
  }
  WorkflowDefinition wfd;
  try {
    wfd = workflowService.getWorkflowDefinitionById(workflowId);
  } catch (WorkflowDatabaseException e) {
    logger.error("Unable to get workflow definition {}", workflowId, e);
    return RestUtil.R.serverError();
  }
  final Workflows workflows = new Workflows(assetManager, workspace, workflowService);
  final List<WorkflowInstance> instances = workflows
          .applyWorkflowToLatestVersion(eventIds, workflow(wfd, configuration)).toList();
  // One workflow instance per event is expected; a mismatch means at least one task did not start.
  if (eventIds.size() != instances.size()) {
    logger.debug("Can't start one or more tasks.");
    return Response.status(Status.BAD_REQUEST).build();
  }
  return Response.status(Status.CREATED).entity(gson.toJson($(instances).map(getWorkflowIds).toList())).build();
}
Usage example of org.opencastproject.assetmanager.util.Workflows in the Opencast project (opencast/opencast):
the editVideo method of the ToolsEndpoint class.
@POST
@Path("{mediapackageid}/editor.json")
@Consumes(MediaType.APPLICATION_JSON)
@RestQuery(name = "editVideo", description = "Takes editing information from the client side and processes it", returnDescription = "", pathParameters = { @RestParameter(name = "mediapackageid", description = "The id of the media package", isRequired = true, type = RestParameter.Type.STRING) }, reponses = { @RestResponse(description = "Editing information saved and processed", responseCode = HttpServletResponse.SC_OK), @RestResponse(description = "Media package not found", responseCode = HttpServletResponse.SC_NOT_FOUND), @RestResponse(description = "The editing information cannot be parsed", responseCode = HttpServletResponse.SC_BAD_REQUEST) })
public Response editVideo(@PathParam("mediapackageid") final String mediaPackageId, @Context HttpServletRequest request) throws IndexServiceException, NotFoundException {
  // Persists the client's cutting information as a SMIL catalog in the archive and
  // optionally starts a post-processing workflow on the archived media package.
  String details;
  try (InputStream is = request.getInputStream()) {
    // Read the JSON body with an explicit charset; the charset-less overload
    // falls back to the platform default encoding.
    details = IOUtils.toString(is, "UTF-8");
  } catch (IOException e) {
    logger.error("Error reading request body", e);
    return R.serverError();
  }
  JSONParser parser = new JSONParser();
  EditingInfo editingInfo;
  try {
    JSONObject detailsJSON = (JSONObject) parser.parse(details);
    editingInfo = EditingInfo.parse(detailsJSON);
  } catch (Exception e) {
    // Last-argument throwable lets SLF4J render the stack trace.
    logger.warn("Unable to parse concat information ({})", details, e);
    return R.badRequest("Unable to parse details");
  }
  final Opt<Event> optEvent = getEvent(mediaPackageId);
  if (optEvent.isNone()) {
    return R.notFound();
  }
  MediaPackage mediaPackage = index.getEventMediapackage(optEvent.get());
  Smil smil;
  try {
    smil = createSmilCuttingCatalog(editingInfo, mediaPackage);
  } catch (Exception e) {
    logger.warn("Unable to create a SMIL cutting catalog ({})", details, e);
    return R.badRequest("Unable to create SMIL cutting catalog");
  }
  try {
    addSmilToArchive(mediaPackage, smil);
  } catch (IOException e) {
    logger.warn("Unable to add SMIL cutting catalog to archive", e);
    return R.serverError();
  }
  if (editingInfo.getPostProcessingWorkflow().isSome()) {
    final String workflowId = editingInfo.getPostProcessingWorkflow().get();
    try {
      final Workflows workflows = new Workflows(assetManager, workspace, workflowService);
      workflows.applyWorkflowToLatestVersion($(mediaPackage.getIdentifier().toString()),
              ConfiguredWorkflow.workflow(workflowService.getWorkflowDefinitionById(workflowId))).run();
    } catch (AssetManagerException e) {
      logger.warn("Unable to start workflow '{}' on archived media package '{}'", workflowId, mediaPackage, e);
      return R.serverError();
    } catch (WorkflowDatabaseException e) {
      logger.warn("Unable to load workflow '{}' from workflow service", workflowId, e);
      return R.serverError();
    } catch (NotFoundException e) {
      logger.warn("Workflow '{}' not found", workflowId);
      return R.badRequest("Workflow not found");
    }
  }
  return R.ok();
}
Usage example of org.opencastproject.assetmanager.util.Workflows in the Opencast project (opencast/opencast):
the testWorkflowDispatcherRunTranscriptionCompletedState method of the IBMWatsonTranscriptionServiceTest class.
@Test
public void testWorkflowDispatcherRunTranscriptionCompletedState() throws Exception {
// Seed two jobs in "Progress", then flip the first (MP_ID/JOB_ID) to
// "TranscriptionComplete" so only that one is eligible for workflow dispatch.
database.storeJobControl(MP_ID, TRACK_ID, JOB_ID, TranscriptionJobControl.Status.Progress.name(), TRACK_DURATION);
database.storeJobControl("mpId2", "audioTrack2", "jobId2", TranscriptionJobControl.Status.Progress.name(), TRACK_DURATION);
database.updateJobControl(JOB_ID, TranscriptionJobControl.Status.TranscriptionComplete.name());
// Mocks for query, result, etc
// The chain below emulates the asset manager query
// createQuery().select(snapshot()).where(mediaPackageId(..).and(version().isLatest())).run()
// returning a single record whose snapshot belongs to the test organization.
Snapshot snapshot = EasyMock.createNiceMock(Snapshot.class);
EasyMock.expect(snapshot.getOrganizationId()).andReturn(org.getId());
ARecord aRec = EasyMock.createNiceMock(ARecord.class);
EasyMock.expect(aRec.getSnapshot()).andReturn(Opt.some(snapshot));
Stream<ARecord> recStream = Stream.mk(aRec);
Predicate p = EasyMock.createNiceMock(Predicate.class);
EasyMock.expect(p.and(p)).andReturn(p);
AResult r = EasyMock.createNiceMock(AResult.class);
EasyMock.expect(r.getSize()).andReturn(1L);
EasyMock.expect(r.getRecords()).andReturn(recStream);
Target t = EasyMock.createNiceMock(Target.class);
ASelectQuery selectQuery = EasyMock.createNiceMock(ASelectQuery.class);
EasyMock.expect(selectQuery.where(EasyMock.anyObject(Predicate.class))).andReturn(selectQuery);
EasyMock.expect(selectQuery.run()).andReturn(r);
AQueryBuilder query = EasyMock.createNiceMock(AQueryBuilder.class);
EasyMock.expect(query.snapshot()).andReturn(t);
EasyMock.expect(query.mediaPackageId(EasyMock.anyObject(String.class))).andReturn(p);
EasyMock.expect(query.select(EasyMock.anyObject(Target.class))).andReturn(selectQuery);
VersionField v = EasyMock.createNiceMock(VersionField.class);
EasyMock.expect(v.isLatest()).andReturn(p);
EasyMock.expect(query.version()).andReturn(v);
EasyMock.expect(assetManager.createQuery()).andReturn(query);
EasyMock.replay(snapshot, aRec, p, r, t, selectQuery, query, v, assetManager);
// Capture the set of media package ids the dispatcher hands to the Workflows utility.
Capture<Set<String>> capturedMpIds = Capture.newInstance();
WorkflowDefinition wfDef = new WorkflowDefinitionImpl();
EasyMock.expect(wfService.getWorkflowDefinitionById(IBMWatsonTranscriptionService.DEFAULT_WF_DEF)).andReturn(wfDef);
List<WorkflowInstance> wfList = new ArrayList<WorkflowInstance>();
wfList.add(new WorkflowInstanceImpl());
Stream<WorkflowInstance> wfListStream = Stream.mk(wfList);
Workflows wfs = EasyMock.createNiceMock(Workflows.class);
EasyMock.expect(wfs.applyWorkflowToLatestVersion(EasyMock.capture(capturedMpIds), EasyMock.anyObject(ConfiguredWorkflow.class))).andReturn(wfListStream);
service.setWfUtil(wfs);
EasyMock.replay(wfService, wfs);
// Exercise the dispatcher (inner class of the service under test).
WorkflowDispatcher dispatcher = service.new WorkflowDispatcher();
dispatcher.run();
// Check if only one mp has a workflow created for it
Assert.assertEquals(1, capturedMpIds.getValue().size());
// And if it was the correct one
Assert.assertEquals(MP_ID, capturedMpIds.getValue().iterator().next());
// Check if status in db was updated
TranscriptionJobControl job = database.findByJob(JOB_ID);
Assert.assertNotNull(job);
Assert.assertEquals(TranscriptionJobControl.Status.Closed.name(), job.getStatus());
}
Usage example of org.opencastproject.assetmanager.util.Workflows in the Opencast project (opencast/opencast):
the startAddAssetWorkflow method of the IndexServiceImpl class.
/**
 * Parses the processing information, including the workflowDefinitionId, from the metadataJson and starts the
 * workflow with the passed mediapackage.
 *
 * TODO NOTE: This checks for running workflows, then takes a snapshot prior to starting a new workflow. This causes a
 * potential race condition:
 *
 * 1. An existing workflow is running, the add asset workflow cannot start.
 *
 * 2. The snapshot(4x) archive(3x) is saved and the new workflow is started.
 *
 * 3. Possible race condition: No running workflow, a snapshot is saved but the workflow cannot start because another
 * workflow has started between the time of checking and starting running.
 *
 * 4. If race condition: the Admin UI shows error that the workflow could not start.
 *
 * 5. If race condition: The interim snapshot(4x) archive(3x) is updated(4x-3x) by the running workflow's snapshots
 * and resolves the inconsistency, eventually.
 *
 * Example of processing json:
 *
 * ...., "processing": { "workflow": "full", "configuration": { "videoPreview": "false", "trimHold": "false",
 * "captionHold": "false", "archiveOp": "true", "publishEngage": "true", "publishHarvesting": "true" } }, ....
 *
 * @param metadataJson
 *          the metadata containing the "processing" section (workflow id and configuration)
 * @param mediaPackage
 *          the media package to snapshot and start the workflow on
 * @return the created workflow instance id, or "Unknown"/null if it could not be determined
 * @throws IndexServiceException
 *           if the asset manager refuses to start the workflow
 */
private String startAddAssetWorkflow(JSONObject metadataJson, MediaPackage mediaPackage) throws IndexServiceException {
  String wfId = null;
  String mpId = mediaPackage.getIdentifier().toString();
  JSONObject processing = (JSONObject) metadataJson.get("processing");
  if (processing == null) {
    throw new IllegalArgumentException("No processing field in metadata");
  }
  String workflowDefId = (String) processing.get("workflow");
  if (workflowDefId == null) {
    throw new IllegalArgumentException("No workflow definition field in processing metadata");
  }
  JSONObject configJson = (JSONObject) processing.get("configuration");
  try {
    // 1. Check if any active workflows are running for this mediapackage id
    WorkflowSet workflowSet = workflowService.getWorkflowInstances(new WorkflowQuery().withMediaPackage(mpId));
    for (WorkflowInstance wf : Arrays.asList(workflowSet.getItems())) {
      if (wf.isActive()) {
        logger.warn("Unable to start new workflow '{}' on archived media package '{}', existing workfow {} is running", workflowDefId, mediaPackage, wf.getId());
        throw new IllegalArgumentException("A workflow is already active for mp " + mpId + ", cannot start this workflow.");
      }
    }
    // 2. Save the snapshot
    assetManager.takeSnapshot(DEFAULT_OWNER, mediaPackage);
    // 3. start the new workflow on the snapshot
    // Workflow params are assumed to be String (not mixed with Number)
    Map<String, String> params = new HashMap<String, String>();
    if (configJson != null) {
      for (Object key : configJson.keySet()) {
        params.put((String) key, (String) configJson.get(key));
      }
    }
    Set<String> mpIds = new HashSet<String>();
    mpIds.add(mpId);
    final Workflows workflows = new Workflows(assetManager, workspace, workflowService);
    List<WorkflowInstance> wfList = workflows.applyWorkflowToLatestVersion(mpIds,
            ConfiguredWorkflow.workflow(workflowService.getWorkflowDefinitionById(workflowDefId), params)).toList();
    wfId = !wfList.isEmpty() ? Long.toString(wfList.get(0).getId()) : "Unknown";
    logger.info("Asset update and publish workflow {} scheduled for mp {}", wfId, mpId);
  } catch (AssetManagerException e) {
    // Last-argument throwable lets SLF4J render the stack trace.
    logger.warn("Unable to start workflow '{}' on archived media package '{}'", workflowDefId, mediaPackage, e);
    throw new IndexServiceException("Unable to start workflow " + workflowDefId + " on " + mpId);
  } catch (WorkflowDatabaseException e) {
    // Log the workflow definition id, not wfId, which is still null at this point.
    logger.warn("Unable to load workflow '{}' from workflow service", workflowDefId, e);
  } catch (NotFoundException e) {
    logger.warn("Workflow '{}' not found", workflowDefId);
  }
  return wfId;
}
Usage example of org.opencastproject.assetmanager.util.Workflows in the Opencast project (opencast/opencast):
the testWorkflowDispatcherRunProgressState method of the IBMWatsonTranscriptionServiceTest class.
@Test
public void testWorkflowDispatcherRunProgressState() throws Exception {
// Job for MP_ID is stored with duration 0 so the dispatcher polls the transcription
// service, finds it finished, and then dispatches a workflow for it.
InputStream stream = IBMWatsonTranscriptionServiceTest.class.getResourceAsStream("/" + PULLED_TRANSCRIPTION_FILE);
database.storeJobControl(MP_ID, TRACK_ID, JOB_ID, TranscriptionJobControl.Status.Progress.name(), 0);
database.storeJobControl("mpId2", "audioTrack2", "jobId2", TranscriptionJobControl.Status.Progress.name(), TRACK_DURATION);
EasyMock.expect(workspace.putInCollection(EasyMock.anyObject(String.class), EasyMock.anyObject(String.class), EasyMock.anyObject(InputStream.class))).andReturn(new URI("http://anything"));
EasyMock.replay(workspace);
// HTTP mocks: the Watson API replies 200 OK with the canned transcription file.
HttpEntity httpEntity = EasyMock.createNiceMock(HttpEntity.class);
EasyMock.expect(httpEntity.getContent()).andReturn(stream);
CloseableHttpResponse response = EasyMock.createNiceMock(CloseableHttpResponse.class);
StatusLine status = EasyMock.createNiceMock(StatusLine.class);
EasyMock.expect(response.getStatusLine()).andReturn(status).anyTimes();
EasyMock.expect(response.getEntity()).andReturn(httpEntity).anyTimes();
EasyMock.expect(status.getStatusCode()).andReturn(HttpStatus.SC_OK).anyTimes();
EasyMock.replay(httpEntity, response, status);
// Capture the GET request so the polled URL can be asserted below.
Capture<HttpGet> capturedGet = Capture.newInstance();
EasyMock.expect(httpClient.execute(EasyMock.capture(capturedGet))).andReturn(response);
EasyMock.replay(httpClient);
// enrich(q.select(q.snapshot()).where(q.mediaPackageId(mpId).and(q.version().isLatest())).run()).getSnapshots();
// Mocks for query, result, etc
Snapshot snapshot = EasyMock.createNiceMock(Snapshot.class);
EasyMock.expect(snapshot.getOrganizationId()).andReturn(org.getId());
ARecord aRec = EasyMock.createNiceMock(ARecord.class);
EasyMock.expect(aRec.getSnapshot()).andReturn(Opt.some(snapshot));
Stream<ARecord> recStream = Stream.mk(aRec);
Predicate p = EasyMock.createNiceMock(Predicate.class);
EasyMock.expect(p.and(p)).andReturn(p);
AResult r = EasyMock.createNiceMock(AResult.class);
EasyMock.expect(r.getSize()).andReturn(1L);
EasyMock.expect(r.getRecords()).andReturn(recStream);
Target t = EasyMock.createNiceMock(Target.class);
ASelectQuery selectQuery = EasyMock.createNiceMock(ASelectQuery.class);
EasyMock.expect(selectQuery.where(EasyMock.anyObject(Predicate.class))).andReturn(selectQuery);
EasyMock.expect(selectQuery.run()).andReturn(r);
AQueryBuilder query = EasyMock.createNiceMock(AQueryBuilder.class);
EasyMock.expect(query.snapshot()).andReturn(t);
EasyMock.expect(query.mediaPackageId(EasyMock.anyObject(String.class))).andReturn(p);
EasyMock.expect(query.select(EasyMock.anyObject(Target.class))).andReturn(selectQuery);
VersionField v = EasyMock.createNiceMock(VersionField.class);
EasyMock.expect(v.isLatest()).andReturn(p);
EasyMock.expect(query.version()).andReturn(v);
EasyMock.expect(assetManager.createQuery()).andReturn(query);
EasyMock.replay(snapshot, aRec, p, r, t, selectQuery, query, v, assetManager);
// Capture the set of media package ids the dispatcher hands to the Workflows utility.
Capture<Set<String>> capturedMpIds = Capture.newInstance();
WorkflowDefinition wfDef = new WorkflowDefinitionImpl();
EasyMock.expect(wfService.getWorkflowDefinitionById(IBMWatsonTranscriptionService.DEFAULT_WF_DEF)).andReturn(wfDef);
List<WorkflowInstance> wfList = new ArrayList<WorkflowInstance>();
wfList.add(new WorkflowInstanceImpl());
Stream<WorkflowInstance> wfListStream = Stream.mk(wfList);
Workflows wfs = EasyMock.createNiceMock(Workflows.class);
EasyMock.expect(wfs.applyWorkflowToLatestVersion(EasyMock.capture(capturedMpIds), EasyMock.anyObject(ConfiguredWorkflow.class))).andReturn(wfListStream);
service.setWfUtil(wfs);
EasyMock.replay(wfService, wfs);
// Exercise the dispatcher (inner class of the service under test).
WorkflowDispatcher dispatcher = service.new WorkflowDispatcher();
dispatcher.run();
// Check if it called the external service to get the results
Assert.assertEquals("https://stream.watsonplatform.net/speech-to-text/api/v1/recognitions/" + JOB_ID, capturedGet.getValue().getURI().toString());
// Check if only one mp has a workflow created for it
Assert.assertEquals(1, capturedMpIds.getValue().size());
// And if it was the correct one
Assert.assertEquals(MP_ID, capturedMpIds.getValue().iterator().next());
// Check if status in db was updated
TranscriptionJobControl job = database.findByJob(JOB_ID);
Assert.assertNotNull(job);
Assert.assertEquals(TranscriptionJobControl.Status.Closed.name(), job.getStatus());
}
End of aggregated usage examples for org.opencastproject.assetmanager.util.Workflows.