use of org.opencastproject.workflow.api.WorkflowDatabaseException in project opencast by opencast.
the class IngestServiceImpl method addZippedMediaPackage.
/**
 * {@inheritDoc}
 *
 * Ingests a zipped mediapackage: streams each zip entry into the working file repository,
 * rebuilds the mediapackage from the enclosed manifest (manifest.xml / index.xml), rewrites
 * element URIs to their repository locations, and starts the ingest workflow.
 *
 * @see org.opencastproject.ingest.api.IngestService#addZippedMediaPackage(java.io.InputStream, java.lang.String,
 * java.util.Map, java.lang.Long)
 */
@Override
public WorkflowInstance addZippedMediaPackage(InputStream zipStream, String workflowDefinitionId, Map<String, String> workflowConfig, Long workflowInstanceId) throws MediaPackageException, IOException, IngestException, NotFoundException, UnauthorizedException {
// Start a job synchronously. We can't keep the open input stream waiting around.
Job job = null;
// Verify the requested workflow definition exists up front; fall back to the default
// definition if it is unknown, but fail the ingest on database errors.
if (StringUtils.isNotBlank(workflowDefinitionId)) {
try {
workflowService.getWorkflowDefinitionById(workflowDefinitionId);
} catch (WorkflowDatabaseException e) {
throw new IngestException(e);
} catch (NotFoundException nfe) {
logger.warn("Workflow definition {} not found, using default workflow {} instead", workflowDefinitionId, defaultWorkflowDefinionId);
workflowDefinitionId = defaultWorkflowDefinionId;
}
}
if (workflowInstanceId != null) {
logger.warn("Deprecated method! Ingesting zipped mediapackage with workflow {}", workflowInstanceId);
} else {
logger.info("Ingesting zipped mediapackage");
}
ZipArchiveInputStream zis = null;
// File names stored in the working file repository collection; used for cleanup in finally.
Set<String> collectionFilenames = new HashSet<>();
try {
// We don't need anybody to do the dispatching for us. Therefore we need to make sure that the job is never in
// QUEUED state but set it to INSTANTIATED in the beginning and then manually switch it to RUNNING.
job = serviceRegistry.createJob(JOB_TYPE, INGEST_ZIP, null, null, false, ingestZipJobLoad);
job.setStatus(Status.RUNNING);
job = serviceRegistry.updateJob(job);
// Create the working file target collection for this ingest operation
String wfrCollectionId = Long.toString(job.getId());
zis = new ZipArchiveInputStream(zipStream);
ZipArchiveEntry entry;
MediaPackage mp = null;
// Maps original zip entry names to their working-file-repository URIs so that
// mediapackage element URIs (which reference entry names) can be rewritten below.
Map<String, URI> uris = new HashMap<>();
// Sequential number to append to file names so that, if two files have the same
// name, one does not overwrite the other (see MH-9688)
int seq = 1;
// Folder name to compare with next one to figure out if there's a root folder
String folderName = null;
// Indicates if zip has a root folder or not, initialized as true
boolean hasRootFolder = true;
// While there are entries write them to a collection
while ((entry = zis.getNextZipEntry()) != null) {
try {
// Skip directories and macOS resource-fork artifacts.
if (entry.isDirectory() || entry.getName().contains("__MACOSX"))
continue;
if (entry.getName().endsWith("manifest.xml") || entry.getName().endsWith("index.xml")) {
// Build the mediapackage
// NOTE(review): ZipEntryInputStream presumably bounds reads to the entry size so the
// underlying stream is not over-consumed — confirm against its implementation.
mp = loadMediaPackageFromManifest(new ZipEntryInputStream(zis, entry.getSize()));
} else {
logger.info("Storing zip entry {}/{} in working file repository collection '{}'", job.getId(), entry.getName(), wfrCollectionId);
// Since the directory structure is not being mirrored, makes sure the file
// name is different than the previous one(s) by adding a sequential number
String fileName = FilenameUtils.getBaseName(entry.getName()) + "_" + seq++ + "." + FilenameUtils.getExtension(entry.getName());
URI contentUri = workingFileRepository.putInCollection(wfrCollectionId, fileName, new ZipEntryInputStream(zis, entry.getSize()));
collectionFilenames.add(fileName);
// Key is the zip entry name as it is
String key = entry.getName();
uris.put(key, contentUri);
ingestStatistics.add(entry.getSize());
logger.info("Zip entry {}/{} stored at {}", job.getId(), entry.getName(), contentUri);
// Figures out if there's a root folder. Does entry name starts with a folder?
int pos = entry.getName().indexOf('/');
if (pos == -1) {
// No, we can conclude there's no root folder
hasRootFolder = false;
} else if (hasRootFolder && folderName != null && !folderName.equals(entry.getName().substring(0, pos))) {
// Folder name different from previous so there's no root folder
hasRootFolder = false;
} else if (folderName == null) {
// Just initialize folder name
folderName = entry.getName().substring(0, pos);
}
}
} catch (IOException e) {
logger.warn("Unable to process zip entry {}: {}", entry.getName(), e);
throw e;
}
}
if (mp == null)
throw new MediaPackageException("No manifest found in this zip");
// Determine the mediapackage identifier
if (mp.getIdentifier() == null || isBlank(mp.getIdentifier().toString()))
mp.setIdentifier(new UUIDIdBuilderImpl().createNew());
String mediaPackageId = mp.getIdentifier().toString();
logger.info("Ingesting mediapackage {} is named '{}'", mediaPackageId, mp.getTitle());
// Make sure there are tracks in the mediapackage
if (mp.getTracks().length == 0) {
logger.warn("Mediapackage {} has no media tracks", mediaPackageId);
}
// Update the element uris to point to their working file repository location
for (MediaPackageElement element : mp.elements()) {
// Key has root folder name if there is one
URI uri = uris.get((hasRootFolder ? folderName + "/" : "") + element.getURI().toString());
if (uri == null)
throw new MediaPackageException("Unable to map element name '" + element.getURI() + "' to workspace uri");
logger.info("Ingested mediapackage element {}/{} located at {}", mediaPackageId, element.getIdentifier(), uri);
// Move the file out of the temporary collection into its mediapackage location.
URI dest = workingFileRepository.moveTo(wfrCollectionId, FilenameUtils.getName(uri.toString()), mediaPackageId, element.getIdentifier(), FilenameUtils.getName(element.getURI().toString()));
element.setURI(dest);
// TODO: This should be triggered somehow instead of being handled here
if (MediaPackageElements.SERIES.equals(element.getFlavor())) {
logger.info("Ingested mediapackage {} contains updated series information", mediaPackageId);
updateSeries(element.getURI());
}
}
// Now that all elements are in place, start with ingest
logger.info("Initiating processing of ingested mediapackage {}", mediaPackageId);
WorkflowInstance workflowInstance = ingest(mp, workflowDefinitionId, workflowConfig, workflowInstanceId);
logger.info("Ingest of mediapackage {} done", mediaPackageId);
job.setStatus(Job.Status.FINISHED);
return workflowInstance;
} catch (ServiceRegistryException e) {
throw new IngestException(e);
} catch (MediaPackageException e) {
// A malformed manifest/mediapackage is a data error, not an infrastructure failure.
job.setStatus(Job.Status.FAILED, Job.FailureReason.DATA);
throw e;
} catch (Exception e) {
if (e instanceof IngestException)
throw (IngestException) e;
throw new IngestException(e);
} finally {
IOUtils.closeQuietly(zis);
finallyUpdateJob(job);
// Remove any files left behind in the collection (those successfully moved via moveTo
// are gone already; deleteFromCollection is best-effort for the rest). If job creation
// failed, collectionFilenames is empty so job is never dereferenced here.
for (String filename : collectionFilenames) {
workingFileRepository.deleteFromCollection(Long.toString(job.getId()), filename, true);
}
}
}
use of org.opencastproject.workflow.api.WorkflowDatabaseException in project opencast by opencast.
the class JobEndpoint method getTasksAsJSON.
/**
 * Returns the list of tasks matching the given query as JSON Object
 *
 * @param query
 * The workflow query
 * @return The list of matching tasks as JSON Object
 * @throws JobEndpointException
 * if the workflow instances or their backing jobs cannot be loaded
 * @throws NotFoundException
 * if a workflow instance's main job cannot be found in the service registry
 */
public JObject getTasksAsJSON(WorkflowQuery query) throws JobEndpointException, NotFoundException {
  // Get results
  WorkflowSet workflowInstances = null;
  long totalWithoutFilters = 0;
  List<JValue> jsonList = new ArrayList<>();
  try {
    workflowInstances = workflowService.getWorkflowInstances(query);
    totalWithoutFilters = workflowService.countWorkflowInstances();
  } catch (WorkflowDatabaseException e) {
    // Chain the caught exception itself: e.getCause() may be null and would lose the stack trace.
    throw new JobEndpointException(String.format("Not able to get the list of job from the database: %s", e), e);
  }
  WorkflowInstance[] items = workflowInstances.getItems();
  for (WorkflowInstance instance : items) {
    long instanceId = instance.getId();
    String series = instance.getMediaPackage().getSeriesTitle();
    // Retrieve submission date with the workflow instance main job
    Date created;
    try {
      created = serviceRegistry.getJob(instanceId).getDateCreated();
    } catch (ServiceRegistryException e) {
      // Same here: keep the full causal chain instead of only e.getCause().
      throw new JobEndpointException(String.format("Error when retrieving job %s from the service registry: %s", instanceId, e), e);
    }
    jsonList.add(obj(f("id", v(instanceId)), f("title", v(nul(instance.getMediaPackage().getTitle()).getOr(""))), f("series", v(series, Jsons.BLANK)), f("workflow", v(instance.getTitle(), Jsons.BLANK)), f("status", v(instance.getState().toString())), f("submitted", v(created != null ? DateTimeSupport.toUTC(created.getTime()) : ""))));
  }
  JObject json = obj(f("results", arr(jsonList)), f("count", v(workflowInstances.getTotalCount())), f("offset", v(query.getStartPage())), f("limit", v(jsonList.size())), f("total", v(totalWithoutFilters)));
  return json;
}
use of org.opencastproject.workflow.api.WorkflowDatabaseException in project opencast by opencast.
the class TasksEndpoint method createNewTask.
@POST
@Path("/new")
@RestQuery(name = "createNewTask", description = "Creates a new task by the given metadata as JSON", returnDescription = "The task identifiers", restParameters = { @RestParameter(name = "metadata", isRequired = true, description = "The metadata as JSON", type = RestParameter.Type.TEXT) }, reponses = { @RestResponse(responseCode = HttpServletResponse.SC_CREATED, description = "Task sucessfully added"), @RestResponse(responseCode = SC_NOT_FOUND, description = "If the workflow definition is not found"), @RestResponse(responseCode = SC_BAD_REQUEST, description = "If the metadata is not set or couldn't be parsed") })
public Response createNewTask(@FormParam("metadata") String metadata) throws NotFoundException {
  // Creates tasks (workflow instances) for a set of events based on a JSON payload of the form
  // { "workflow": <id>, "eventIds": [...], "configuration": {...} }.
  if (StringUtils.isBlank(metadata)) {
    logger.warn("No metadata set");
    return RestUtil.R.badRequest("No metadata set");
  }
  Gson gson = new Gson();
  Map metadataJson = null;
  try {
    metadataJson = gson.fromJson(metadata, Map.class);
  } catch (Exception e) {
    logger.warn("Unable to parse metadata {}", metadata);
    return RestUtil.R.badRequest("Unable to parse metadata");
  }
  String workflowId = (String) metadataJson.get("workflow");
  if (StringUtils.isBlank(workflowId))
    return RestUtil.R.badRequest("No workflow set");
  List eventIds = (List) metadataJson.get("eventIds");
  if (eventIds == null)
    return RestUtil.R.badRequest("No eventIds set");
  Map<String, String> configuration = (Map<String, String>) metadataJson.get("configuration");
  if (configuration == null) {
    configuration = new HashMap<>();
  } else {
    // "eventIds" is payload plumbing, not a workflow configuration key — strip it.
    configuration.keySet().removeIf(confKey -> StringUtils.equalsIgnoreCase("eventIds", confKey));
  }
  WorkflowDefinition wfd;
  try {
    wfd = workflowService.getWorkflowDefinitionById(workflowId);
  } catch (WorkflowDatabaseException e) {
    // Pass the Throwable to SLF4J directly instead of stringifying the stack trace.
    logger.error("Unable to get workflow definition {}", workflowId, e);
    return RestUtil.R.serverError();
  }
  final Workflows workflows = new Workflows(assetManager, workspace, workflowService);
  final List<WorkflowInstance> instances = workflows.applyWorkflowToLatestVersion(eventIds, workflow(wfd, configuration)).toList();
  // A size mismatch means at least one event had no archived version to process.
  if (eventIds.size() != instances.size()) {
    logger.debug("Can't start one or more tasks.");
    return Response.status(Status.BAD_REQUEST).build();
  }
  return Response.status(Status.CREATED).entity(gson.toJson($(instances).map(getWorkflowIds).toList())).build();
}
use of org.opencastproject.workflow.api.WorkflowDatabaseException in project opencast by opencast.
the class TasksEndpoint method getProcessing.
@GET
@Path("processing.json")
@RestQuery(name = "getProcessing", description = "Returns all the data related to the processing tab in the new tasks modal as JSON", returnDescription = "All the data related to the tasks processing tab as JSON", restParameters = { @RestParameter(name = "tags", isRequired = false, description = "A comma separated list of tags to filter the workflow definitions", type = RestParameter.Type.STRING) }, reponses = { @RestResponse(responseCode = SC_OK, description = "Returns all the data related to the tasks processing tab as JSON") })
public Response getProcessing(@QueryParam("tags") String tagsString) {
  // Optional comma-separated tag filter; empty list means "no filtering by tags".
  List<String> tags = RestUtil.splitCommaSeparatedParam(Option.option(tagsString)).value();
  // This is the JSON Object which will be returned by this request
  List<JValue> actions = new ArrayList<>();
  try {
    List<WorkflowDefinition> workflowsDefinitions = workflowService.listAvailableWorkflowDefinitions();
    for (WorkflowDefinition wflDef : workflowsDefinitions) {
      if (wflDef.containsTag(tags)) {
        actions.add(obj(f("id", v(wflDef.getId())), f("title", v(nul(wflDef.getTitle()).getOr(""))), f("description", v(nul(wflDef.getDescription()).getOr(""))), f("configuration_panel", v(nul(wflDef.getConfigurationPanel()).getOr("")))));
      }
    }
  } catch (WorkflowDatabaseException e) {
    // Pass the Throwable to SLF4J directly instead of stringifying the stack trace.
    logger.error("Unable to get available workflow definitions", e);
    return RestUtil.R.serverError();
  }
  return okJson(arr(actions));
}
use of org.opencastproject.workflow.api.WorkflowDatabaseException in project opencast by opencast.
the class ToolsEndpoint method editVideo.
@POST
@Path("{mediapackageid}/editor.json")
@Consumes(MediaType.APPLICATION_JSON)
@RestQuery(name = "editVideo", description = "Takes editing information from the client side and processes it", returnDescription = "", pathParameters = { @RestParameter(name = "mediapackageid", description = "The id of the media package", isRequired = true, type = RestParameter.Type.STRING) }, reponses = { @RestResponse(description = "Editing information saved and processed", responseCode = HttpServletResponse.SC_OK), @RestResponse(description = "Media package not found", responseCode = HttpServletResponse.SC_NOT_FOUND), @RestResponse(description = "The editing information cannot be parsed", responseCode = HttpServletResponse.SC_BAD_REQUEST) })
public Response editVideo(@PathParam("mediapackageid") final String mediaPackageId, @Context HttpServletRequest request) throws IndexServiceException, NotFoundException {
  // Parses the editor's cutting information from the JSON request body, stores it as a SMIL
  // catalog in the archive, and optionally starts a post-processing workflow.
  String details;
  try (InputStream is = request.getInputStream()) {
    // JSON request bodies are UTF-8; the charset-less overload used the platform default.
    details = IOUtils.toString(is, "UTF-8");
  } catch (IOException e) {
    // Pass the Throwable to SLF4J directly instead of stringifying the stack trace.
    logger.error("Error reading request body", e);
    return R.serverError();
  }
  JSONParser parser = new JSONParser();
  EditingInfo editingInfo;
  try {
    JSONObject detailsJSON = (JSONObject) parser.parse(details);
    editingInfo = EditingInfo.parse(detailsJSON);
  } catch (Exception e) {
    logger.warn("Unable to parse concat information ({})", details, e);
    return R.badRequest("Unable to parse details");
  }
  final Opt<Event> optEvent = getEvent(mediaPackageId);
  if (optEvent.isNone()) {
    return R.notFound();
  } else {
    MediaPackage mediaPackage = index.getEventMediapackage(optEvent.get());
    Smil smil;
    try {
      smil = createSmilCuttingCatalog(editingInfo, mediaPackage);
    } catch (Exception e) {
      logger.warn("Unable to create a SMIL cutting catalog ({})", details, e);
      return R.badRequest("Unable to create SMIL cutting catalog");
    }
    try {
      addSmilToArchive(mediaPackage, smil);
    } catch (IOException e) {
      logger.warn("Unable to add SMIL cutting catalog to archive", e);
      return R.serverError();
    }
    if (editingInfo.getPostProcessingWorkflow().isSome()) {
      final String workflowId = editingInfo.getPostProcessingWorkflow().get();
      try {
        final Workflows workflows = new Workflows(assetManager, workspace, workflowService);
        workflows.applyWorkflowToLatestVersion($(mediaPackage.getIdentifier().toString()), ConfiguredWorkflow.workflow(workflowService.getWorkflowDefinitionById(workflowId))).run();
      } catch (AssetManagerException e) {
        logger.warn("Unable to start workflow '{}' on archived media package '{}'", workflowId, mediaPackage, e);
        return R.serverError();
      } catch (WorkflowDatabaseException e) {
        logger.warn("Unable to load workflow '{}' from workflow service", workflowId, e);
        return R.serverError();
      } catch (NotFoundException e) {
        logger.warn("Workflow '{}' not found", workflowId);
        return R.badRequest("Workflow not found");
      }
    }
  }
  return R.ok();
}
Aggregations