Example usage of org.opencastproject.workflow.api.WorkflowParsingException in the Opencast project:
class WorkflowServiceImpl, method resume.
/**
 * {@inheritDoc}
 *
 * Resumes a paused workflow, optionally merging in additional configuration properties, and
 * re-queues the job backing the current operation so the dispatcher picks it up again.
 *
 * @see org.opencastproject.workflow.api.WorkflowService#resume(long, Map)
 */
@Override
public WorkflowInstance resume(long workflowInstanceId, Map<String, String> properties) throws WorkflowException, NotFoundException, IllegalStateException, UnauthorizedException {
  WorkflowInstance workflowInstance = getWorkflowById(workflowInstanceId);
  // Only workflows in paused state may be resumed
  if (!WorkflowState.PAUSED.equals(workflowInstance.getState()))
    throw new IllegalStateException("Can not resume a workflow where the current state is not in paused");
  workflowInstance = updateConfiguration(workflowInstance, properties);
  update(workflowInstance);
  WorkflowOperationInstance currentOperation = workflowInstance.getCurrentOperation();
  // Is the workflow done?
  if (currentOperation == null) {
    // Let's make sure we didn't miss any failed operation, since the workflow state could have been
    // switched to paused while processing the error handling workflow extension
    workflowInstance.setState(SUCCEEDED);
    for (WorkflowOperationInstance op : workflowInstance.getOperations()) {
      if (op.getState().equals(WorkflowOperationInstance.OperationState.FAILED)) {
        if (op.isFailWorkflowOnException()) {
          workflowInstance.setState(FAILED);
          break;
        }
      }
    }
    // Save the resumed workflow to the database
    logger.debug("%s has %s", workflowInstance, workflowInstance.getState());
    update(workflowInstance);
    return workflowInstance;
  }
  // NOTE(review): the original comment here was truncated; presumably resume() may be invoked while
  // instantiating certain operations. In the latter case, there is no current paused operation.
  if (OperationState.INSTANTIATED.equals(currentOperation.getState())) {
    try {
      // the operation has its own job. Update that too.
      Job operationJob = serviceRegistry.createJob(JOB_TYPE, Operation.START_OPERATION.toString(), Arrays.asList(Long.toString(workflowInstanceId)), null, false, null, WORKFLOW_JOB_LOAD);
      // this method call is publicly visible, so it doesn't necessarily go through the accept method. Set the
      // workflow state manually.
      workflowInstance.setState(RUNNING);
      currentOperation.setId(operationJob.getId());
      // update the workflow and its associated job
      update(workflowInstance);
      // Now set this job to be queued so it can be dispatched
      operationJob.setStatus(Status.QUEUED);
      operationJob.setDispatchable(true);
      operationJob = serviceRegistry.updateJob(operationJob);
      return workflowInstance;
    } catch (ServiceRegistryException e) {
      throw new WorkflowDatabaseException(e);
    }
  }
  Long operationJobId = workflowInstance.getCurrentOperation().getId();
  if (operationJobId == null)
    throw new IllegalStateException("Can not resume a workflow where the current operation has no associated id");
  // Set the current operation's job to queued, so it gets picked up again
  Job workflowJob;
  try {
    workflowJob = serviceRegistry.getJob(workflowInstanceId);
    workflowJob.setStatus(Status.RUNNING);
    workflowJob.setPayload(WorkflowParser.toXml(workflowInstance));
    workflowJob = serviceRegistry.updateJob(workflowJob);
    Job operationJob = serviceRegistry.getJob(operationJobId);
    operationJob.setStatus(Status.QUEUED);
    operationJob.setDispatchable(true);
    if (properties != null) {
      // Serialize the resume properties into the operation job's argument list so the
      // dispatched operation sees them
      Properties props = new Properties();
      props.putAll(properties);
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      props.store(out, null);
      List<String> newArguments = new ArrayList<String>(operationJob.getArguments());
      newArguments.add(new String(out.toByteArray(), "UTF-8"));
      operationJob.setArguments(newArguments);
    }
    operationJob = serviceRegistry.updateJob(operationJob);
  } catch (ServiceRegistryException e) {
    throw new WorkflowDatabaseException(e);
  } catch (IOException e) {
    // Preserve the original cause for diagnosis (was previously dropped)
    throw new WorkflowParsingException("Unable to parse workflow and/or workflow properties", e);
  }
  return workflowInstance;
}
Example usage of org.opencastproject.workflow.api.WorkflowParsingException in the Opencast project:
class WorkflowServiceImpl, method start.
/**
 * {@inheritDoc}
 *
 * @see org.opencastproject.workflow.api.WorkflowService#start(org.opencastproject.workflow.api.WorkflowDefinition,
 * org.opencastproject.mediapackage.MediaPackage, Long, java.util.Map)
 */
@Override
public WorkflowInstance start(WorkflowDefinition workflowDefinition, MediaPackage sourceMediaPackage, Long parentWorkflowId, Map<String, String> properties) throws WorkflowDatabaseException, WorkflowParsingException, NotFoundException {
  // Serialize workflow creation per media package so no two workflows can be started for the
  // same media package simultaneously.
  final Lock mpLock = mediaPackageLocks.get(sourceMediaPackage.getIdentifier().toString());
  mpLock.lock();
  try {
    logger.startUnitOfWork();
    // Validate arguments before doing any work
    if (workflowDefinition == null)
      throw new IllegalArgumentException("workflow definition must not be null");
    if (sourceMediaPackage == null)
      throw new IllegalArgumentException("mediapackage must not be null");
    for (List<String> errors : MediaPackageSupport.sanityCheck(sourceMediaPackage)) {
      throw new IllegalArgumentException("Insane media package cannot be processed: " + mkString(errors, "; "));
    }
    if (parentWorkflowId == null) {
      // No parent workflow: refuse to start when another workflow is already active on this media package
      WorkflowQuery query = new WorkflowQuery().withMediaPackage(sourceMediaPackage.getIdentifier().compact());
      WorkflowSet existing = getWorkflowInstances(query);
      for (WorkflowInstance candidate : existing.getItems()) {
        if (candidate.isActive())
          throw new IllegalStateException(String.format("Can't start workflow '%s' for media package '%s' because another workflow is currently active.", workflowDefinition.getTitle(), sourceMediaPackage.getIdentifier().compact()));
      }
    } else {
      try {
        // Let NotFoundException bubble up
        getWorkflowById(parentWorkflowId);
      } catch (UnauthorizedException e) {
        throw new IllegalArgumentException("Parent workflow " + parentWorkflowId + " not visible to this user");
      }
    }
    // Resolve the security context the new workflow runs under
    User user = securityService.getUser();
    if (user == null)
      throw new SecurityException("Current user is unknown");
    Organization org = securityService.getOrganization();
    if (org == null)
      throw new SecurityException("Current organization is unknown");
    WorkflowInstance instance = new WorkflowInstanceImpl(workflowDefinition, sourceMediaPackage, parentWorkflowId, user, org, properties);
    instance = updateConfiguration(instance, properties);
    // Create and configure the workflow instance
    try {
      // Serialize everything the dispatcher job needs into its argument list
      String definitionXml = WorkflowParser.toXml(workflowDefinition);
      String instanceXml = WorkflowParser.toXml(instance);
      String mpXml = MediaPackageParser.getAsXml(sourceMediaPackage);
      List<String> jobArgs = new ArrayList<String>();
      jobArgs.add(definitionXml);
      jobArgs.add(mpXml);
      if (parentWorkflowId != null || properties != null) {
        jobArgs.add(parentWorkflowId == null ? NULL_PARENT_ID : parentWorkflowId.toString());
      }
      if (properties != null) {
        jobArgs.add(mapToString(properties));
      }
      Job job = serviceRegistry.createJob(JOB_TYPE, Operation.START_WORKFLOW.toString(), jobArgs, instanceXml, false, null, WORKFLOW_JOB_LOAD);
      // Have the workflow take on the job's identity
      instance.setId(job.getId());
      // Add the workflow to the search index and have the job enqueued for dispatch.
      // Update also sets ACL and mediapackage metadata
      update(instance);
      return instance;
    } catch (Throwable t) {
      // Best effort: record the failure on the instance before propagating
      try {
        instance.setState(FAILED);
        update(instance);
      } catch (Exception failureToFail) {
        logger.warn(failureToFail, "Unable to update workflow to failed state");
      }
      throw new WorkflowDatabaseException(t);
    }
  } finally {
    logger.endUnitOfWork();
    mpLock.unlock();
  }
}
Example usage of org.opencastproject.workflow.api.WorkflowParsingException in the Opencast project:
class WorkflowServiceImpl, method repopulate.
/**
 * Repopulates the given search index from the persisted START_WORKFLOW job payloads,
 * sending one update message per successfully parsed workflow instance and a final
 * "end of recreation" message afterwards.
 */
@Override
public void repopulate(final String indexName) throws Exception {
  List<String> payloads = serviceRegistry.getJobPayloads(Operation.START_WORKFLOW.toString());
  // Queue name is derived from the capitalized index name
  final String destinationId = WorkflowItem.WORKFLOW_QUEUE_PREFIX + indexName.substring(0, 1).toUpperCase() + indexName.substring(1);
  if (!payloads.isEmpty()) {
    final int total = payloads.size();
    logger.info("Populating index '{}' with {} workflows", indexName, total);
    // Emit a progress message roughly every percent of completion
    final int responseInterval = (total < 100) ? 1 : (total / 100);
    int current = 0;
    for (final String workflow : payloads) {
      current += 1;
      if (StringUtils.isEmpty(workflow)) {
        logger.warn("Skipping restoring of workflow no {}: Payload is empty", current);
        continue;
      }
      final WorkflowInstance instance;
      try {
        instance = WorkflowParser.parseWorkflowInstance(workflow);
      } catch (WorkflowParsingException e) {
        logger.warn("Skipping restoring of workflow. Error parsing: {}", workflow, e);
        continue;
      }
      final Organization organization = instance.getOrganization();
      SecurityUtil.runAs(securityService, organization, SecurityUtil.createSystemUser(componentContext, organization), new Effect0() {
        @Override
        public void run() {
          // Send message to update index item
          messageSender.sendObjectMessage(destinationId, MessageSender.DestinationType.Queue, WorkflowItem.updateInstance(instance));
        }
      });
      if ((current % responseInterval == 0) || (current == total)) {
        logger.info("Updating {} workflow index {}/{}: {} percent complete.", indexName, current, total, current * 100 / total);
      }
    }
  }
  logger.info("Finished populating {} index with workflows", indexName);
  // Announce completion of the recreation run under the default organization
  final Organization organization = new DefaultOrganization();
  SecurityUtil.runAs(securityService, organization, SecurityUtil.createSystemUser(componentContext, organization), new Effect0() {
    @Override
    protected void run() {
      messageSender.sendObjectMessage(IndexProducer.RESPONSE_QUEUE, MessageSender.DestinationType.Queue, IndexRecreateObject.end(indexName, IndexRecreateObject.Service.Workflow));
    }
  });
}
Example usage of org.opencastproject.workflow.api.WorkflowParsingException in the Opencast project:
class WorkflowServiceImpl, method isReadyToAccept.
/**
 * {@inheritDoc}
 *
 * If we are already running the maximum number of workflows, don't accept another START_WORKFLOW job
 *
 * @see org.opencastproject.job.api.AbstractJobProducer#isReadyToAccept(org.opencastproject.job.api.Job)
 */
@Override
public boolean isReadyToAccept(Job job) throws ServiceRegistryException, UndispatchableJobException {
  // Only restrict execution of new jobs
  if (!Operation.START_WORKFLOW.toString().equals(job.getOperation()))
    return true;
  // If the first operation is guaranteed to pause, run the job.
  if (job.getArguments().size() > 1 && job.getArguments().get(0) != null) {
    try {
      WorkflowDefinition definition = WorkflowParser.parseWorkflowDefinition(job.getArguments().get(0));
      if (!definition.getOperations().isEmpty()) {
        WorkflowOperationHandler firstHandler = getWorkflowOperationHandler(definition.getOperations().get(0).getId());
        if (firstHandler instanceof ResumableWorkflowOperationHandler
                && ((ResumableWorkflowOperationHandler) firstHandler).isAlwaysPause()) {
          return true;
        }
      }
    } catch (WorkflowParsingException e) {
      throw new UndispatchableJobException(job + " is not a proper job to start a workflow", e);
    }
  }
  WorkflowInstance workflow;
  WorkflowSet conflictingInstances;
  String mediaPackageId;
  // Fetch all workflows that are running with the current mediapackage
  try {
    workflow = getWorkflowById(job.getId());
    mediaPackageId = workflow.getMediaPackage().getIdentifier().toString();
    conflictingInstances = getWorkflowInstances(new WorkflowQuery().withMediaPackage(mediaPackageId).withState(RUNNING).withState(PAUSED).withState(FAILING));
  } catch (NotFoundException e) {
    logger.error("Trying to start workflow with id %s but no corresponding instance is available from the workflow service", job.getId());
    throw new UndispatchableJobException(e);
  } catch (UnauthorizedException e) {
    logger.error("Authorization denied while requesting to loading workflow instance %s: %s", job.getId(), e.getMessage());
    throw new UndispatchableJobException(e);
  } catch (WorkflowDatabaseException e) {
    logger.error("Error loading workflow instance %s: %s", job.getId(), e.getMessage());
    return false;
  }
  // If more than one workflow is running working on this mediapackage, then we don't start this one.
  // When exactly one matches, make sure we are not excluding ourselves.
  final int matches = conflictingInstances.size();
  boolean blocked = matches > 1
          || (matches == 1 && workflow.getId() != conflictingInstances.getItems()[0].getId());
  // Avoid running multiple workflows with same media package id at the same time
  if (!blocked)
    return true;
  if (!delayedWorkflows.contains(workflow.getId())) {
    logger.info("Delaying start of workflow %s, another workflow on media package %s is still running", workflow.getId(), mediaPackageId);
    delayedWorkflows.add(workflow.getId());
  }
  return false;
}
Example usage of org.opencastproject.workflow.api.WorkflowParsingException in the Opencast project:
class WorkflowServiceSolrIndex, method activate.
/**
 * Activates the index by configuring solr with the server url that must have been set previously.
 * If the index turns out to be empty, it is repopulated from the persisted START_WORKFLOW job payloads.
 *
 * @param systemUserName
 *          name of the system user to run the (re)indexing as
 * @throws IllegalStateException
 *           if solr cannot be set up or the workflow count cannot be determined
 */
public void activate(String systemUserName) {
  // Set up the solr server
  if (solrServerUrl != null) {
    solrServer = SolrServerFactory.newRemoteInstance(solrServerUrl);
  } else {
    try {
      setupSolr(new File(solrRoot));
    } catch (IOException | SolrServerException e) {
      // Collapsed the two identical catch blocks into a single multi-catch
      throw new IllegalStateException("Unable to connect to solr at " + solrRoot, e);
    }
  }
  // If the solr is empty, add all of the existing workflows
  long instancesInSolr = 0;
  try {
    instancesInSolr = count();
  } catch (WorkflowDatabaseException e) {
    throw new IllegalStateException(e);
  }
  if (instancesInSolr == 0) {
    logger.info("The workflow index is empty, looking for workflows to index");
    // this may be a new index, so get all of the existing workflows and index them
    List<String> workflowPayloads;
    try {
      workflowPayloads = serviceRegistry.getJobPayloads(WorkflowServiceImpl.Operation.START_WORKFLOW.toString());
    } catch (ServiceRegistryException e) {
      logger.error("Unable to load the workflows jobs: {}", e.getMessage());
      // Preserve the original cause so the stack trace is not lost (was previously dropped)
      throw new ServiceException(e.getMessage(), e);
    }
    final int total = workflowPayloads.size();
    if (total == 0) {
      logger.info("No workflows found. Repopulating index finished.");
      return;
    }
    logger.info("Populating the workflow index with {} workflows", total);
    int current = 0;
    for (String payload : workflowPayloads) {
      current++;
      WorkflowInstance instance = null;
      try {
        instance = WorkflowParser.parseWorkflowInstance(payload);
        // Index each workflow under its own organization's security context
        Organization organization = instance.getOrganization();
        securityService.setOrganization(organization);
        securityService.setUser(SecurityUtil.createSystemUser(systemUserName, organization));
        index(instance);
      } catch (WorkflowParsingException | WorkflowDatabaseException e) {
        logger.warn("Skipping restoring of workflow {}", payload, e);
      }
      if (current % 100 == 0) {
        logger.info("Indexing workflow {}/{} ({} percent done)", current, total, current * 100 / total);
      }
    }
    logger.info("Finished populating the workflow search index");
  }
}
Aggregations