Usage of org.opencastproject.workflow.api.WorkflowDatabaseException in the Opencast project:
class WorkflowServiceImpl, method update().
/**
 * {@inheritDoc}
 *
 * Persists the given workflow instance and keeps the rest of the system in sync with it:
 * verifies write permission, refreshes mediapackage metadata and the series ACL, mirrors
 * the workflow state onto the backing job, updates the index and notifies listeners.
 *
 * @see org.opencastproject.workflow.api.WorkflowService#update(org.opencastproject.workflow.api.WorkflowInstance)
 */
@Override
public void update(final WorkflowInstance workflowInstance) throws WorkflowException, UnauthorizedException {
// Serialize concurrent updates of the same workflow instance.
// NOTE(review): updateLock.get(id) appears to hand out a per-workflow lock — confirm it is a keyed/striped lock cache.
final Lock lock = updateLock.get(workflowInstance.getId());
lock.lock();
try {
WorkflowInstance originalWorkflowInstance = null;
try {
originalWorkflowInstance = getWorkflowById(workflowInstance.getId());
} catch (NotFoundException e) {
// That's fine, it's a new workflow instance
}
// Only enforce write permission for pre-existing instances; a brand-new instance
// has no stored version to check against yet.
if (originalWorkflowInstance != null) {
try {
assertPermission(originalWorkflowInstance, Permissions.Action.WRITE.toString());
} catch (MediaPackageException e) {
throw new WorkflowParsingException(e);
}
}
MediaPackage updatedMediaPackage = null;
try {
// Before we persist this, extract the metadata
updatedMediaPackage = workflowInstance.getMediaPackage();
populateMediaPackageMetadata(updatedMediaPackage);
String seriesId = updatedMediaPackage.getSeries();
if (seriesId != null && workflowInstance.getCurrentOperation() != null) {
// If the mediapackage contains a series, find the series ACLs and add the security information to the
// mediapackage
AccessControlList acl = seriesService.getSeriesAccessControl(seriesId);
// Only rewrite the series ACL when it is absent or differs from the stored one.
Option<AccessControlList> activeSeriesAcl = authorizationService.getAcl(updatedMediaPackage, AclScope.Series);
if (activeSeriesAcl.isNone() || !AccessControlUtil.equals(activeSeriesAcl.get(), acl))
authorizationService.setAcl(updatedMediaPackage, AclScope.Series, acl);
}
} catch (SeriesException e) {
throw new WorkflowDatabaseException(e);
} catch (NotFoundException e) {
logger.warn("Metadata for mediapackage {} could not be updated because it wasn't found", updatedMediaPackage, e);
} catch (Exception e) {
// Metadata/ACL refresh is best effort: failures are logged but do not abort the update.
logger.error("Metadata for mediapackage {} could not be updated", updatedMediaPackage, e);
}
// Synchronize the job status with the workflow
WorkflowState workflowState = workflowInstance.getState();
String xml;
try {
xml = WorkflowParser.toXml(workflowInstance);
} catch (Exception e) {
// Can't happen, since we are converting from an in-memory object
throw new IllegalStateException("In-memory workflow instance could not be serialized", e);
}
Job job = null;
try {
job = serviceRegistry.getJob(workflowInstance.getId());
job.setPayload(xml);
// Synchronize workflow and job state
switch(workflowState) {
case FAILED:
job.setStatus(Status.FAILED);
break;
case FAILING:
// FAILING is transient: the job keeps its current status until the workflow settles.
break;
case INSTANTIATED:
job.setDispatchable(true);
job.setStatus(Status.QUEUED);
break;
case PAUSED:
job.setStatus(Status.PAUSED);
break;
case RUNNING:
job.setStatus(Status.RUNNING);
break;
case STOPPED:
job.setStatus(Status.CANCELED);
break;
case SUCCEEDED:
job.setStatus(Status.FINISHED);
break;
default:
throw new IllegalStateException("Found a workflow state that is not handled");
}
} catch (ServiceRegistryException e) {
logger.error(e, "Unable to read workflow job %s from service registry", workflowInstance.getId());
throw new WorkflowDatabaseException(e);
} catch (NotFoundException e) {
logger.error("Job for workflow %s not found in service registry", workflowInstance.getId());
throw new WorkflowDatabaseException(e);
}
// Update both workflow and workflow job
try {
// Persist the job, broadcast the change over the message bus, then refresh the index.
job = serviceRegistry.updateJob(job);
messageSender.sendObjectMessage(WorkflowItem.WORKFLOW_QUEUE, MessageSender.DestinationType.Queue, WorkflowItem.updateInstance(workflowInstance));
index(workflowInstance);
} catch (ServiceRegistryException e) {
logger.error("Update of workflow job %s in the service registry failed, service registry and workflow index may be out of sync", workflowInstance.getId());
throw new WorkflowDatabaseException(e);
} catch (NotFoundException e) {
logger.error("Job for workflow %s not found in service registry", workflowInstance.getId());
throw new WorkflowDatabaseException(e);
} catch (Exception e) {
logger.error("Update of workflow job %s in the service registry failed, service registry and workflow index may be out of sync", job.getId());
throw new WorkflowException(e);
}
// Optionally publish statistics after a successful update.
if (workflowStatsCollect) {
workflowsStatistics.updateWorkflow(getBeanStatistics(), getHoldWorkflows());
}
try {
// Hand listeners a defensive deep copy (via XML round-trip) so they cannot mutate
// the instance that was just persisted.
WorkflowInstance clone = WorkflowParser.parseWorkflowInstance(WorkflowParser.toXml(workflowInstance));
fireListeners(originalWorkflowInstance, clone);
} catch (Exception e) {
// Can't happen, since we are converting from an in-memory object
throw new IllegalStateException("In-memory workflow instance could not be serialized", e);
}
} finally {
lock.unlock();
}
}
Usage of org.opencastproject.workflow.api.WorkflowDatabaseException in the Opencast project:
class WorkflowServiceImpl, method runWorkflow().
/**
 * Executes the workflow.
 *
 * @param workflow
 * the workflow instance
 * @return the queued job for the workflow's first operation, the rescheduled job of the
 * current operation when restarting, or <code>null</code> if no scheduling was needed
 * @throws WorkflowException
 * if there is a problem processing the workflow
 * @throws UnauthorizedException
 * if the current user is not allowed to update the workflow
 */
protected Job runWorkflow(WorkflowInstance workflow) throws WorkflowException, UnauthorizedException {
if (!INSTANTIATED.equals(workflow.getState())) {
// The workflow was started previously; the job of the current operation must be
// updated accordingly.
if (RUNNING.equals(workflow.getState())) {
WorkflowOperationInstance currentOperation = workflow.getCurrentOperation();
if (currentOperation != null) {
if (currentOperation.getId() != null) {
try {
Job operationJob = serviceRegistry.getJob(currentOperation.getId());
if (Job.Status.RUNNING.equals(operationJob.getStatus())) {
// The operation is already executing; nothing to schedule.
logger.debug("Not starting workflow %s, it is already in running state", workflow);
return null;
} else {
// Re-queue the operation job so the dispatcher picks it up again.
logger.info("Scheduling next operation of workflow %s", workflow);
operationJob.setStatus(Status.QUEUED);
operationJob.setDispatchable(true);
return serviceRegistry.updateJob(operationJob);
}
} catch (Exception e) {
// Best effort: if the job state cannot be determined, leave the workflow untouched.
logger.warn("Error determining status of current workflow operation in {}: {}", workflow, e.getMessage());
return null;
}
}
} else {
throw new IllegalStateException("Cannot start a workflow '" + workflow + "' with no current operation");
}
} else {
// Only INSTANTIATED and RUNNING workflows may be (re)started.
throw new IllegalStateException("Cannot start a workflow in state '" + workflow.getState() + "'");
}
}
// If this is a new workflow, move to the first operation
workflow.setState(RUNNING);
update(workflow);
WorkflowOperationInstance operation = workflow.getCurrentOperation();
if (operation == null)
throw new IllegalStateException("Cannot start a workflow without a current operation");
if (operation.getPosition() != 0)
throw new IllegalStateException("Current operation expected to be first");
try {
logger.info("Scheduling workflow %s for execution", workflow.getId());
// Create the dispatchable START_OPERATION job, then record its id on the operation
// so later restarts can find it (see the RUNNING branch above).
Job job = serviceRegistry.createJob(JOB_TYPE, Operation.START_OPERATION.toString(), Arrays.asList(Long.toString(workflow.getId())), null, false, null, WORKFLOW_JOB_LOAD);
operation.setId(job.getId());
update(workflow);
job.setStatus(Status.QUEUED);
job.setDispatchable(true);
return serviceRegistry.updateJob(job);
} catch (ServiceRegistryException e) {
throw new WorkflowDatabaseException(e);
} catch (NotFoundException e) {
// this should be impossible
throw new IllegalStateException("Unable to find a job that was just created");
}
}
Usage of org.opencastproject.workflow.api.WorkflowDatabaseException in the Opencast project:
class WorkflowServiceImpl, method process().
/**
 * Processes the workflow job.
 *
 * Dispatches on the job's operation type: START_WORKFLOW deserializes a workflow from the
 * job payload and starts it; RESUME continues a paused workflow (optionally with extra
 * properties passed as the last job argument); START_OPERATION executes the workflow's
 * current operation. Any failure marks both the operation job and the workflow as failed.
 *
 * @param job
 * the job
 * @return the job payload
 * @throws Exception
 * if job processing fails
 */
protected String process(Job job) throws Exception {
List<String> arguments = job.getArguments();
Operation op = null;
WorkflowInstance workflowInstance = null;
WorkflowOperationInstance wfo = null;
String operation = job.getOperation();
try {
try {
op = Operation.valueOf(operation);
switch(op) {
case START_WORKFLOW:
// The workflow instance itself travels in the job payload.
workflowInstance = WorkflowParser.parseWorkflowInstance(job.getPayload());
logger.debug("Starting new workflow %s", workflowInstance);
runWorkflow(workflowInstance);
break;
case RESUME:
workflowInstance = getWorkflowById(Long.parseLong(arguments.get(0)));
wfo = workflowInstance.getCurrentOperation();
Map<String, String> properties = null;
// An optional properties blob (java.util.Properties format) may be appended as
// the last argument; convert it to a plain string map.
if (arguments.size() > 1) {
Properties props = new Properties();
props.load(IOUtils.toInputStream(arguments.get(arguments.size() - 1)));
properties = new HashMap<String, String>();
for (Entry<Object, Object> entry : props.entrySet()) {
properties.put(entry.getKey().toString(), entry.getValue().toString());
}
}
logger.debug("Resuming %s at %s", workflowInstance, workflowInstance.getCurrentOperation());
workflowInstance.setState(RUNNING);
update(workflowInstance);
wfo = runWorkflowOperation(workflowInstance, properties);
break;
case START_OPERATION:
workflowInstance = getWorkflowById(Long.parseLong(arguments.get(0)));
wfo = workflowInstance.getCurrentOperation();
// A RUNNING/PAUSED operation here means the host died mid-operation; reset it
// so it can be executed again.
if (OperationState.RUNNING.equals(wfo.getState()) || OperationState.PAUSED.equals(wfo.getState())) {
logger.info("Reset operation state %s %s to INSTANTIATED due to job restart", workflowInstance, wfo);
wfo.setState(OperationState.INSTANTIATED);
}
wfo.setExecutionHost(job.getProcessingHost());
logger.debug("Running %s %s", workflowInstance, wfo);
wfo = runWorkflowOperation(workflowInstance, null);
updateOperationJob(job.getId(), wfo.getState());
break;
default:
throw new IllegalStateException("Don't know how to handle operation '" + operation + "'");
}
} catch (IllegalArgumentException e) {
// Operation.valueOf() failed: the operation string is not one of ours.
throw new ServiceRegistryException("This service can't handle operations of type '" + op + "'", e);
} catch (IndexOutOfBoundsException e) {
throw new ServiceRegistryException("This argument list for operation '" + op + "' does not meet expectations", e);
} catch (NotFoundException e) {
// The referenced workflow no longer exists; fail the job but don't propagate.
logger.warn(e.getMessage());
updateOperationJob(job.getId(), OperationState.FAILED);
}
return null;
} catch (Exception e) {
// Outer safety net: mark job and workflow as failed, then rethrow for the dispatcher.
logger.warn(e, "Exception while accepting job " + job);
try {
if (workflowInstance != null) {
logger.warn("Marking job {} and workflow instance {} as failed", job, workflowInstance);
updateOperationJob(job.getId(), OperationState.FAILED);
workflowInstance.setState(FAILED);
update(workflowInstance);
} else {
logger.warn(e, "Unable to parse workflow instance");
}
} catch (WorkflowDatabaseException e1) {
throw new ServiceRegistryException(e1);
}
if (e instanceof ServiceRegistryException)
throw e;
throw new ServiceRegistryException("Error handling operation '" + op + "'", e);
}
}
Usage of org.opencastproject.workflow.api.WorkflowDatabaseException in the Opencast project:
class WorkflowServiceSolrIndex, method getStatistics().
/**
 * {@inheritDoc}
 *
 * Computes workflow statistics from the Solr index. A first facet query enumerates all
 * workflow definitions and operations visible to the current organization; a follow-up
 * query per (definition, operation) pair then counts instances in each workflow state,
 * aggregating per-operation, per-definition and global totals.
 *
 * @see org.opencastproject.workflow.impl.WorkflowServiceIndex#getStatistics()
 */
@Override
public WorkflowStatistics getStatistics() throws WorkflowDatabaseException {
  // Global counters, aggregated over all definitions
  long total = 0;
  long paused = 0;
  long failed = 0;
  long failing = 0;
  long instantiated = 0;
  long running = 0;
  long stopped = 0;
  long succeeded = 0;
  WorkflowStatistics stats = new WorkflowStatistics();
  // Get all definitions and then query for the numbers and the current operation per definition
  try {
    String orgId = securityService.getOrganization().getId();
    StringBuilder queryString = new StringBuilder().append(ORG_KEY).append(":").append(escapeQueryChars(orgId));
    appendSolrAuthFragment(queryString, Permissions.Action.WRITE.toString());
    SolrQuery solrQuery = new SolrQuery(queryString.toString());
    solrQuery.addFacetField(WORKFLOW_DEFINITION_KEY);
    solrQuery.addFacetField(OPERATION_KEY);
    solrQuery.setFacetMinCount(0);
    solrQuery.setFacet(true);
    QueryResponse response = solrServer.query(solrQuery);
    FacetField templateFacet = response.getFacetField(WORKFLOW_DEFINITION_KEY);
    FacetField operationFacet = response.getFacetField(OPERATION_KEY);
    // For every template and every operation
    if (templateFacet != null && templateFacet.getValues() != null) {
      for (Count template : templateFacet.getValues()) {
        WorkflowDefinitionReport templateReport = new WorkflowDefinitionReport();
        templateReport.setId(template.getName());
        // Per-definition counters, aggregated over all of the definition's operations
        long templateTotal = 0;
        long templatePaused = 0;
        long templateFailed = 0;
        long templateFailing = 0;
        long templateInstantiated = 0;
        long templateRunning = 0;
        long templateStopped = 0;
        long templateSucceeded = 0;
        if (operationFacet != null && operationFacet.getValues() != null) {
          for (Count operation : operationFacet.getValues()) {
            OperationReport operationReport = new OperationReport();
            operationReport.setId(operation.getName());
            // Count workflows per state for this (definition, operation) pair
            StringBuilder baseSolrQuery = new StringBuilder().append(ORG_KEY).append(":").append(escapeQueryChars(orgId));
            appendSolrAuthFragment(baseSolrQuery, Permissions.Action.WRITE.toString());
            solrQuery = new SolrQuery(baseSolrQuery.toString());
            solrQuery.addFacetField(STATE_KEY);
            solrQuery.addFacetQuery(STATE_KEY + ":" + WorkflowState.FAILED);
            solrQuery.addFacetQuery(STATE_KEY + ":" + WorkflowState.FAILING);
            solrQuery.addFacetQuery(STATE_KEY + ":" + WorkflowState.INSTANTIATED);
            solrQuery.addFacetQuery(STATE_KEY + ":" + WorkflowState.PAUSED);
            solrQuery.addFacetQuery(STATE_KEY + ":" + WorkflowState.RUNNING);
            solrQuery.addFacetQuery(STATE_KEY + ":" + WorkflowState.STOPPED);
            solrQuery.addFacetQuery(STATE_KEY + ":" + WorkflowState.SUCCEEDED);
            solrQuery.addFilterQuery(WORKFLOW_DEFINITION_KEY + ":" + template.getName());
            solrQuery.addFilterQuery(OPERATION_KEY + ":" + operation.getName());
            solrQuery.setFacetMinCount(0);
            solrQuery.setFacet(true);
            response = solrServer.query(solrQuery);
            // Add the states
            FacetField stateFacet = response.getFacetField(STATE_KEY);
            // FIX: guard against a missing/empty facet. FacetField.getValues() may return
            // null; every other facet above is null-checked, but this one previously was
            // not and could throw a NullPointerException.
            if (stateFacet != null && stateFacet.getValues() != null) {
              for (Count stateValue : stateFacet.getValues()) {
                WorkflowState state = WorkflowState.valueOf(stateValue.getName().toUpperCase());
                templateTotal += stateValue.getCount();
                total += stateValue.getCount();
                switch (state) {
                  case FAILED:
                    operationReport.setFailed(stateValue.getCount());
                    templateFailed += stateValue.getCount();
                    failed += stateValue.getCount();
                    break;
                  case FAILING:
                    operationReport.setFailing(stateValue.getCount());
                    templateFailing += stateValue.getCount();
                    failing += stateValue.getCount();
                    break;
                  case INSTANTIATED:
                    operationReport.setInstantiated(stateValue.getCount());
                    templateInstantiated += stateValue.getCount();
                    instantiated += stateValue.getCount();
                    break;
                  case PAUSED:
                    operationReport.setPaused(stateValue.getCount());
                    templatePaused += stateValue.getCount();
                    paused += stateValue.getCount();
                    break;
                  case RUNNING:
                    operationReport.setRunning(stateValue.getCount());
                    templateRunning += stateValue.getCount();
                    running += stateValue.getCount();
                    break;
                  case STOPPED:
                    operationReport.setStopped(stateValue.getCount());
                    templateStopped += stateValue.getCount();
                    stopped += stateValue.getCount();
                    break;
                  case SUCCEEDED:
                    operationReport.setFinished(stateValue.getCount());
                    templateSucceeded += stateValue.getCount();
                    succeeded += stateValue.getCount();
                    break;
                  default:
                    throw new IllegalStateException("State '" + state + "' is not handled");
                }
              }
            }
            templateReport.getOperations().add(operationReport);
          }
        }
        // Update the template statistics
        templateReport.setTotal(templateTotal);
        templateReport.setFailed(templateFailed);
        templateReport.setFailing(templateFailing);
        templateReport.setInstantiated(templateInstantiated);
        templateReport.setPaused(templatePaused);
        templateReport.setRunning(templateRunning);
        templateReport.setStopped(templateStopped);
        templateReport.setFinished(templateSucceeded);
        // Add the definition report to the statistics
        stats.getDefinitions().add(templateReport);
      }
    }
  } catch (SolrServerException e) {
    throw new WorkflowDatabaseException(e);
  }
  // Publish the global totals
  stats.setTotal(total);
  stats.setFailed(failed);
  stats.setFailing(failing);
  stats.setInstantiated(instantiated);
  stats.setPaused(paused);
  stats.setRunning(running);
  stats.setStopped(stopped);
  stats.setFinished(succeeded);
  return stats;
}
Usage of org.opencastproject.workflow.api.WorkflowDatabaseException in the Opencast project:
class WorkflowServiceSolrIndex, method index().
/**
 * Adds the given workflow instance to the Solr index. Depending on the service
 * configuration this happens either synchronously on the caller's thread (with an
 * immediate commit) or asynchronously on the indexing executor (relying on Solr's
 * autoCommit).
 *
 * @param instance
 *          the workflow instance to index
 * @throws WorkflowDatabaseException
 *           if synchronous indexing fails
 */
public void index(final WorkflowInstance instance) throws WorkflowDatabaseException {
  if (!synchronousIndexing) {
    // Asynchronous mode: hand the work off to the indexing executor and return right away.
    // Indexing failures are logged only, since no caller is left to handle them.
    indexingExecutor.submit(new Runnable() {
      @Override
      public void run() {
        try {
          SolrInputDocument inputDocument = createDocument(instance);
          synchronized (solrServer) {
            solrServer.add(inputDocument);
            // Rely on solr's autoCommit feature instead of committing on each addition.
            // See http://opencast.jira.com/browse/MH-7040 and
            // http://osdir.com/ml/solr-user.lucene.apache.org/2009-09/msg00744.html
            // solrServer.commit();
          }
        } catch (Exception e) {
          WorkflowServiceSolrIndex.logger.warn("Unable to index {}: {}", instance, e);
        }
      }
    });
    return;
  }
  // Synchronous mode: index and commit before returning control to the caller.
  try {
    SolrInputDocument inputDocument = createDocument(instance);
    synchronized (solrServer) {
      solrServer.add(inputDocument);
      solrServer.commit();
    }
  } catch (Exception e) {
    throw new WorkflowDatabaseException("Unable to index workflow", e);
  }
}
Aggregations