use of org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback in project alfresco-remote-api by Alfresco.
the class RepositoryContainer method transactionedExecute.
/**
* Execute script within required level of transaction
*
* @param script WebScript
* @param scriptReq WebScriptRequest
* @param scriptRes WebScriptResponse
* @throws IOException
*/
protected void transactionedExecute(final WebScript script, final WebScriptRequest scriptReq, final WebScriptResponse scriptRes) throws IOException {
try {
final Description description = script.getDescription();
if (description.getRequiredTransaction() == RequiredTransaction.none) {
script.execute(scriptReq, scriptRes);
} else {
final BufferedRequest bufferedReq;
final BufferedResponse bufferedRes;
RequiredTransactionParameters trxParams = description.getRequiredTransactionParameters();
if (trxParams.getCapability() == TransactionCapability.readwrite) {
if (trxParams.getBufferSize() > 0) {
if (logger.isDebugEnabled())
logger.debug("Creating Transactional Response for ReadWrite transaction; buffersize=" + trxParams.getBufferSize());
// create buffered request and response that allow transaction retrying
bufferedReq = new BufferedRequest(scriptReq, streamFactory);
bufferedRes = new BufferedResponse(scriptRes, trxParams.getBufferSize());
} else {
if (logger.isDebugEnabled())
logger.debug("Transactional Response bypassed for ReadWrite - buffersize=0");
bufferedReq = null;
bufferedRes = null;
}
} else {
bufferedReq = null;
bufferedRes = null;
}
// encapsulate script within transaction
RetryingTransactionCallback<Object> work = new RetryingTransactionCallback<Object>() {
public Object execute() throws Exception {
try {
if (logger.isDebugEnabled())
logger.debug("Begin retry transaction block: " + description.getRequiredTransaction() + "," + description.getRequiredTransactionParameters().getCapability());
if (bufferedRes == null) {
script.execute(scriptReq, scriptRes);
} else {
// Reset the request and response in case of a transaction retry
bufferedReq.reset();
bufferedRes.reset();
script.execute(bufferedReq, bufferedRes);
}
} catch (Exception e) {
if (logger.isDebugEnabled()) {
logger.debug("Transaction exception: " + description.getRequiredTransaction() + ": " + e.getMessage());
// Note: user transaction shouldn't be null, but just in case inside this exception handler
UserTransaction userTrx = RetryingTransactionHelper.getActiveUserTransaction();
if (userTrx != null) {
logger.debug("Transaction status: " + userTrx.getStatus());
}
}
UserTransaction userTrx = RetryingTransactionHelper.getActiveUserTransaction();
if (userTrx != null) {
if (userTrx.getStatus() != Status.STATUS_MARKED_ROLLBACK) {
if (logger.isDebugEnabled())
logger.debug("Marking web script transaction for rollback");
try {
userTrx.setRollbackOnly();
} catch (Throwable re) {
if (logger.isDebugEnabled())
logger.debug("Caught and ignoring exception during marking for rollback: " + re.getMessage());
}
}
}
// re-throw original exception for retry
throw e;
} finally {
if (logger.isDebugEnabled())
logger.debug("End retry transaction block: " + description.getRequiredTransaction() + "," + description.getRequiredTransactionParameters().getCapability());
}
return null;
}
};
boolean readonly = description.getRequiredTransactionParameters().getCapability() == TransactionCapability.readonly;
boolean requiresNew = description.getRequiredTransaction() == RequiredTransaction.requiresnew;
// A GET request should NOT have any side effects, so requiring a readwrite transaction here is a warning sign that something may be amiss; see ALF-10179.
if (logger.isDebugEnabled() && !readonly && "GET".equalsIgnoreCase(description.getMethod())) {
logger.debug("Webscript with URL '" + scriptReq.getURL() + "' is a GET request but its descriptor has declared a readwrite transaction is required");
}
try {
RetryingTransactionHelper transactionHelper = transactionService.getRetryingTransactionHelper();
if (script instanceof LoginPost) {
// the login script requires a read-write transaction because of the authorization interceptor
transactionHelper.setForceWritable(true);
}
transactionHelper.doInTransaction(work, readonly, requiresNew);
} catch (TooBusyException e) {
// Map TooBusyException to a 503 status code
throw new WebScriptException(HttpServletResponse.SC_SERVICE_UNAVAILABLE, e.getMessage(), e);
} finally {
// Get rid of any temporary files
if (bufferedReq != null) {
bufferedReq.close();
}
}
// Ensure a response is always flushed after successful execution
if (bufferedRes != null) {
bufferedRes.writeResponse();
}
}
} catch (IOException ioe) {
Throwable socketException = ExceptionStackUtil.getCause(ioe, SocketException.class);
Class<?> clientAbortException = null;
try {
clientAbortException = Class.forName("org.apache.catalina.connector.ClientAbortException");
} catch (ClassNotFoundException e) {
// do nothing
}
// Note: if you need to look for more exceptions in the stack, then create a static array and pass it in
if ((socketException != null && socketException.getMessage().contains("Broken pipe")) || (clientAbortException != null && ExceptionStackUtil.getCause(ioe, clientAbortException) != null)) {
if (logger.isDebugEnabled()) {
logger.warn("Client has cut off communication", ioe);
} else {
logger.info("Client has cut off communication");
}
} else {
throw ioe;
}
}
}
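Distilled from the method above, the core usage pattern is small: wrap the unit of work in a RetryingTransactionCallback and hand it to the RetryingTransactionHelper obtained from the TransactionService. The following is a minimal sketch, not taken from the project; it assumes a Spring-injected TransactionService, and the class and method names (TransactionedWorkExample, runInRetryingTransaction) are illustrative only.

import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.transaction.TransactionService;

public class TransactionedWorkExample {

    // assumed to be injected via Spring, as in RepositoryContainer
    private TransactionService transactionService;

    public void setTransactionService(TransactionService transactionService) {
        this.transactionService = transactionService;
    }

    public void runInRetryingTransaction(final boolean readOnly, final boolean requiresNew) {
        // The callback may be re-executed if the transaction is retried, so it must be repeatable;
        // this is why RepositoryContainer resets its buffered request/response at the start of execute().
        RetryingTransactionCallback<Void> work = new RetryingTransactionCallback<Void>() {
            public Void execute() throws Throwable {
                // ... repository work goes here ...
                return null;
            }
        };
        RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
        // readOnly corresponds to TransactionCapability.readonly, requiresNew to RequiredTransaction.requiresnew
        txnHelper.doInTransaction(work, readOnly, requiresNew);
    }
}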
use of org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback in project alfresco-remote-api by Alfresco.
the class NodeBrowserPost method executeImpl.
@Override
protected Map<String, Object> executeImpl(WebScriptRequest req, Status status, Cache cache) {
Map<String, Object> result = new HashMap<>(16);
// gather inputs
Map<String, String> returnParams = new HashMap<>(16);
String store = req.getParameter("nodebrowser-store");
String searcher = req.getParameter("nodebrowser-search");
String query = req.getParameter("nodebrowser-query");
String maxResults = req.getParameter("nodebrowser-query-maxresults");
String skipCount = req.getParameter("nodebrowser-query-skipcount");
String error = null;
StoreRef storeRef = new StoreRef(store);
// always a list of assoc refs from some result
List<ChildAssociationRef> assocRefs = Collections.<ChildAssociationRef>emptyList();
NodeRef currentNode = null;
// what action should be processed?
long timeStart = System.currentTimeMillis();
String actionValue = req.getParameter("nodebrowser-action-value");
String action = req.getParameter("nodebrowser-action");
final String execute = req.getParameter("nodebrowser-execute");
final String executeValue = req.getParameter("nodebrowser-execute-value");
String message = null;
try {
// this is done before the view action to ensure node state is correct
if (execute != null) {
switch(execute) {
case "delete":
{
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
// delete the node using the standard NodeService
nodeService.deleteNode(new NodeRef(executeValue));
return null;
}
}, false, true);
message = "nodebrowser.message.delete";
break;
}
case "fdelete":
{
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
// delete the node - but ensure that it is not archived
NodeRef ref = new NodeRef(executeValue);
nodeService.addAspect(ref, ContentModel.ASPECT_TEMPORARY, null);
nodeService.deleteNode(ref);
return null;
}
}, false, true);
message = "nodebrowser.message.delete";
break;
}
case "restore":
{
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
nodeService.restoreNode(new NodeRef(executeValue), null, null, null);
return null;
}
}, false, true);
message = "nodebrowser.message.restore";
break;
}
case "take-ownership":
{
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
ownableService.takeOwnership(new NodeRef(executeValue));
return null;
}
}, false, true);
message = "nodebrowser.message.take-ownership";
break;
}
case "delete-permissions":
{
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
NodeRef ref = new NodeRef(executeValue);
permissionService.deletePermissions(ref);
permissionService.setInheritParentPermissions(ref, true);
return null;
}
}, false, true);
message = "nodebrowser.message.delete-permissions";
break;
}
case "delete-property":
{
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
// argument value contains "NodeRef|QName" packed string
String[] parts = executeValue.split("\\|");
nodeService.removeProperty(new NodeRef(parts[0]), QName.createQName(parts[1]));
return null;
}
}, false, true);
message = "nodebrowser.message.delete-property";
break;
}
case "unlock":
{
transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
NodeRef ref = new NodeRef(executeValue);
if (cociService.isCheckedOut(ref)) {
NodeRef wcRef = cociService.getWorkingCopy(ref);
if (wcRef != null) {
cociService.cancelCheckout(wcRef);
}
} else {
lockService.unlock(ref);
}
return null;
}
}, false, true);
message = "nodebrowser.message.unlock";
break;
}
}
}
// the 'actionValue' param provides context as may other parameters such as 'query'
switch(action) {
// on Execute btn press and query present, perform search
case "search":
{
if (query != null && query.trim().length() != 0) {
switch(searcher) {
case "noderef":
{
// ensure node exists - or throw error
NodeRef nodeRef = new NodeRef(query);
boolean exists = getNodeService().exists(nodeRef);
if (!exists) {
throw new AlfrescoRuntimeException("Node " + nodeRef + " does not exist.");
}
currentNode = nodeRef;
// this is not really a search for results, it is a direct node reference
// so gather the child assocs as usual and update the action value for the UI location
assocRefs = getNodeService().getChildAssocs(currentNode);
actionValue = query;
action = "parent";
break;
}
case "selectnodes":
{
List<NodeRef> nodes = getSearchService().selectNodes(getNodeService().getRootNode(storeRef), query, null, getNamespaceService(), false);
assocRefs = new ArrayList<>(nodes.size());
for (NodeRef node : nodes) {
assocRefs.add(getNodeService().getPrimaryParent(node));
}
break;
}
default:
{
// perform search
SearchParameters params = new SearchParameters();
params.setQuery(query);
params.addStore(storeRef);
params.setLanguage(searcher);
if (maxResults != null && maxResults.length() != 0) {
params.setMaxItems(Integer.parseInt(maxResults));
params.setLimit(Integer.parseInt(maxResults));
}
if (skipCount != null && skipCount.length() != 0) {
params.setSkipCount(Integer.parseInt(skipCount));
}
ResultSet rs = getSearchService().query(params);
assocRefs = rs.getChildAssocRefs();
break;
}
}
}
break;
}
case "root":
{
// iterate the properties and children of a store root node
currentNode = getNodeService().getRootNode(storeRef);
assocRefs = getNodeService().getChildAssocs(currentNode);
break;
}
case "parent":
case "children":
{
currentNode = new NodeRef(actionValue);
assocRefs = getNodeService().getChildAssocs(currentNode);
break;
}
}
// get the required information from the assocRefs list and wrap objects
List<ChildAssocRefWrapper> wrappers = new ArrayList<>(assocRefs.size());
for (ChildAssociationRef ref : assocRefs) {
wrappers.add(new ChildAssocRefWrapper(ref));
}
result.put("children", wrappers);
} catch (Throwable e) {
// empty child list on error - current node will still be null
result.put("children", new ArrayList<>(0));
error = e.getMessage();
}
// current node info if any
if (currentNode != null) {
// node info
Map<String, Object> info = new HashMap<>(8);
info.put("nodeRef", currentNode.toString());
info.put("path", getNodeService().getPath(currentNode).toPrefixString(getNamespaceService()));
info.put("type", getNodeService().getType(currentNode).toPrefixString(getNamespaceService()));
ChildAssociationRef parent = getNodeService().getPrimaryParent(currentNode);
info.put("parent", parent.getParentRef() != null ? parent.getParentRef().toString() : "");
result.put("info", info);
// node properties
result.put("properties", getProperties(currentNode));
// parents
List<ChildAssociationRef> parents = getNodeService().getParentAssocs(currentNode);
List<ChildAssociation> assocs = new ArrayList<ChildAssociation>(parents.size());
for (ChildAssociationRef ref : parents) {
assocs.add(new ChildAssociation(ref));
}
result.put("parents", assocs);
// aspects
List<Aspect> aspects = getAspects(currentNode);
result.put("aspects", aspects);
// target assocs
List<PeerAssociation> targetAssocs = getAssocs(currentNode);
result.put("assocs", targetAssocs);
// source assocs
List<PeerAssociation> sourceAssocs = getSourceAssocs(currentNode);
result.put("sourceAssocs", sourceAssocs);
// permissions
Map<String, Object> permissionInfo = new HashMap<String, Object>();
permissionInfo.put("entries", getPermissions(currentNode));
permissionInfo.put("owner", getOwnableService().getOwner(currentNode));
permissionInfo.put("inherit", getInheritPermissions(currentNode));
result.put("permissions", permissionInfo);
}
// store result in session for the resulting GET request webscript
final String resultId = GUID.generate();
HttpServletRequest request = ((WebScriptServletRequest) req).getHttpServletRequest();
HttpSession session = request.getSession();
session.putValue(resultId, result);
// return params
returnParams.put("resultId", resultId);
returnParams.put("action", action);
returnParams.put("actionValue", actionValue);
returnParams.put("query", query);
returnParams.put("store", store);
returnParams.put("searcher", searcher);
returnParams.put("maxResults", maxResults);
returnParams.put("skipCount", skipCount);
returnParams.put("in", Long.toString(System.currentTimeMillis() - timeStart));
returnParams.put("e", error);
returnParams.put("m", message);
// redirect as all admin console pages do (follow standard pattern)
// The logic to generate the navigation section and server meta-data is all tied into alfresco-common.lib.js
// which is great for writing JS based JMX surfaced pages, but not so great for Java backed WebScripts.
status.setCode(301);
status.setRedirect(true);
status.setLocation(buildUrl(req, returnParams, execute != null && execute.length() != 0 ? execute : action));
return null;
}
use of org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback in project alfresco-remote-api by Alfresco.
the class UserCSVUploadPost method executeImpl.
/**
* @see DeclarativeWebScript#executeImpl(org.springframework.extensions.webscripts.WebScriptRequest, org.springframework.extensions.webscripts.Status)
*/
@Override
protected Map<String, Object> executeImpl(WebScriptRequest req, Status status) {
final List<Map<QName, String>> users = new ArrayList<Map<QName, String>>();
final ResourceBundle rb = getResources();
// Try to load the user details from the upload
FormData form = (FormData) req.parseContent();
if (form == null || !form.getIsMultiPart()) {
throw new ResourceBundleWebScriptException(Status.STATUS_BAD_REQUEST, rb, ERROR_BAD_FORM);
}
boolean processed = false;
for (FormData.FormField field : form.getFields()) {
if (field.getIsFile()) {
processUpload(field.getInputStream(), field.getFilename(), users);
processed = true;
break;
}
}
if (!processed) {
throw new ResourceBundleWebScriptException(Status.STATUS_BAD_REQUEST, rb, ERROR_NO_FILE);
}
// Should we send emails?
boolean sendEmails = true;
if (req.getParameter("email") != null) {
sendEmails = Boolean.parseBoolean(req.getParameter("email"));
}
if (form.hasField("email")) {
sendEmails = Boolean.parseBoolean(form.getParameters().get("email")[0]);
}
// Now process the users
final MutableInt totalUsers = new MutableInt(0);
final MutableInt addedUsers = new MutableInt(0);
final Map<String, String> results = new HashMap<String, String>();
final boolean doSendEmails = sendEmails;
// Do the work in a new transaction, so that if we hit a problem
// during the commit stage (eg too many users) then we get to
// hear about it, and handle it ourselves.
// Otherwise, commit exceptions occur deep inside RepositoryContainer
// and we can't control the status code
RetryingTransactionCallback<Void> work = new RetryingTransactionCallback<Void>() {
public Void execute() throws Throwable {
try {
doAddUsers(totalUsers, addedUsers, results, users, rb, doSendEmails);
return null;
} catch (Throwable t) {
// Make sure we rollback from this
UserTransaction userTrx = RetryingTransactionHelper.getActiveUserTransaction();
if (userTrx != null && userTrx.getStatus() != javax.transaction.Status.STATUS_MARKED_ROLLBACK) {
try {
userTrx.setRollbackOnly();
} catch (Throwable t2) {
}
}
// Report the problem further down
throw t;
}
}
};
try {
retryingTransactionHelper.doInTransaction(work);
} catch (Throwable t) {
// Tell the client of the problem
if (t instanceof WebScriptException) {
// We've already wrapped it properly, all good
throw (WebScriptException) t;
} else {
// Return the details with a 200, so that Share does the right thing
throw new ResourceBundleWebScriptException(Status.STATUS_OK, rb, ERROR_GENERAL, t);
}
}
// If we get here, then adding the users didn't throw any exceptions,
// so tell the client which users went in and which didn't
Map<String, Object> model = new HashMap<String, Object>();
model.put("totalUsers", totalUsers);
model.put("addedUsers", addedUsers);
model.put("users", results);
return model;
}
use of org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback in project alfresco-remote-api by Alfresco.
the class WebDAVLockServiceImpl method sessionDestroyed.
@Override
@SuppressWarnings("unchecked")
public void sessionDestroyed() {
HttpSession session = currentSession.get();
if (session == null) {
if (logger.isDebugEnabled()) {
logger.debug("Couldn't find current session.");
}
return;
}
// look for locked documents list in http session
final List<Pair<String, NodeRef>> lockedResources = (List<Pair<String, NodeRef>>) session.getAttribute(LOCKED_RESOURCES);
if (lockedResources != null && lockedResources.size() > 0) {
if (logger.isDebugEnabled()) {
logger.debug("Found " + lockedResources.size() + " locked resources for session: " + session.getId());
}
for (Pair<String, NodeRef> lockedResource : lockedResources) {
String runAsUser = lockedResource.getFirst();
final NodeRef nodeRef = lockedResource.getSecond();
// there are some documents that should be forcibly unlocked
AuthenticationUtil.runAs(new RunAsWork<Void>() {
@Override
public Void doWork() throws Exception {
return transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
// check whether this document still exists in repo
if (nodeService.exists(nodeRef)) {
if (logger.isDebugEnabled()) {
logger.debug("Trying to release lock for: " + nodeRef);
}
// check the lock status of document
LockStatus lockStatus = lockService.getLockStatus(nodeRef);
// check if document was checked out
boolean hasWorkingCopy = checkOutCheckInService.getWorkingCopy(nodeRef) != null;
boolean isWorkingCopy = nodeService.hasAspect(nodeRef, ContentModel.ASPECT_WORKING_COPY);
// forcibly unlock document if it is still locked and not checked out
if ((lockStatus.equals(LockStatus.LOCKED) || lockStatus.equals(LockStatus.LOCK_OWNER)) && !hasWorkingCopy && !isWorkingCopy) {
try {
// try to unlock it
lockService.unlock(nodeRef);
if (logger.isDebugEnabled()) {
logger.debug("Lock was successfully released for: " + nodeRef);
}
} catch (Exception e) {
if (logger.isDebugEnabled()) {
logger.debug("Unable to unlock " + nodeRef + " cause: " + e.getMessage());
}
}
} else {
// document is not locked or is checked out
if (logger.isDebugEnabled()) {
logger.debug("Skip lock releasing for: " + nodeRef + " as it is not locked or is checked out");
}
}
} else {
// document no longer exists in repo
if (logger.isDebugEnabled()) {
logger.debug("Skip lock releasing for an unexisting node: " + nodeRef);
}
}
return null;
}
}, transactionService.isReadOnly());
}
}, runAsUser == null ? AuthenticationUtil.getSystemUserName() : runAsUser);
}
} else {
// there are no documents with an unexpected lock left on them
if (logger.isDebugEnabled()) {
logger.debug("No locked resources were found for session: " + session.getId());
}
}
}
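The composition used above, an AuthenticationUtil.runAs block driving a retrying transaction on behalf of the lock owner (or the System user), is worth isolating. Below is a minimal sketch under the same assumptions as the listing (Spring-injected TransactionService, NodeService and LockService); the class and method names (ForcedUnlockExample, unlockAsUser) are illustrative and not part of the Alfresco API.

import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.lock.LockService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.transaction.TransactionService;

public class ForcedUnlockExample {

    // assumed to be injected via Spring
    private TransactionService transactionService;
    private NodeService nodeService;
    private LockService lockService;

    // Unlock a node on behalf of the given user (or the System user if null), inside a retrying transaction.
    public void unlockAsUser(final NodeRef nodeRef, String runAsUser) {
        AuthenticationUtil.runAs(new RunAsWork<Void>() {
            @Override
            public Void doWork() throws Exception {
                return transactionService.getRetryingTransactionHelper().doInTransaction(
                        new RetryingTransactionCallback<Void>() {
                            @Override
                            public Void execute() throws Throwable {
                                // only attempt the unlock if the node still exists
                                if (nodeService.exists(nodeRef)) {
                                    lockService.unlock(nodeRef);
                                }
                                return null;
                            }
                        }, transactionService.isReadOnly());
            }
        }, runAsUser == null ? AuthenticationUtil.getSystemUserName() : runAsUser);
    }
}

As in the listing, transactionService.isReadOnly() is passed as the readOnly flag so the transaction type follows the repository's current read-only state.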
use of org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback in project alfresco-remote-api by Alfresco.
the class LockMethod method attemptLock.
/**
* The main lock implementation method.
*
* @throws WebDAVServerException
* @throws Exception
*/
protected void attemptLock() throws WebDAVServerException, Exception {
FileFolderService fileFolderService = getFileFolderService();
final String path = getPath();
NodeRef rootNodeRef = getRootNodeRef();
// Get the active user
final String userName = getDAVHelper().getAuthenticationService().getCurrentUserName();
if (logger.isDebugEnabled()) {
logger.debug("Locking node: \n" + " user: " + userName + "\n" + " path: " + path);
}
FileInfo lockNodeInfo = null;
try {
// Check if the path exists
lockNodeInfo = getNodeForPath(getRootNodeRef(), getPath());
} catch (FileNotFoundException e) {
if (m_conditions != null) {
// MNT-12303 fix, check whether this is a refresh lock request
for (Condition condition : m_conditions) {
List<String> lockTolensMatch = condition.getLockTokensMatch();
List<String> etagsMatch = condition.getETagsMatch();
if (m_request.getContentLength() == -1 && (lockTolensMatch != null && !lockTolensMatch.isEmpty()) || (etagsMatch != null && !etagsMatch.isEmpty())) {
// so there is nothing to refresh. Return 403 Forbidden, as the original SharePoint Server does.
throw new WebDAVServerException(HttpServletResponse.SC_FORBIDDEN);
}
}
}
// need to create it
String[] splitPath = getDAVHelper().splitPath(path);
// check
if (splitPath[1].length() == 0) {
throw new WebDAVServerException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
}
FileInfo dirInfo = null;
List<String> dirPathElements = getDAVHelper().splitAllPaths(splitPath[0]);
if (dirPathElements.size() == 0) {
// if there are no path elements we are at the root so get the root node
dirInfo = fileFolderService.getFileInfo(getRootNodeRef());
} else {
// make sure folder structure is present
dirInfo = FileFolderUtil.makeFolders(fileFolderService, rootNodeRef, dirPathElements, ContentModel.TYPE_FOLDER);
}
if (dirInfo == null) {
throw new WebDAVServerException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
}
// create the file
lockNodeInfo = createNode(dirInfo.getNodeRef(), splitPath[1], ContentModel.TYPE_CONTENT);
// ALF-10309 fix: mark the created node with the webdavNoContent aspect. We assume that a save operation
// is performed by the client; the webdavNoContent aspect is normally removed in the PUT method unless
// there is a cancel before the PUT request takes place
int lockTimeout = getLockTimeout();
if (lockTimeout > 0 && !getNodeService().hasAspect(lockNodeInfo.getNodeRef(), ContentModel.ASPECT_WEBDAV_NO_CONTENT)) {
final NodeRef nodeRef = lockNodeInfo.getNodeRef();
getNodeService().addAspect(nodeRef, ContentModel.ASPECT_WEBDAV_NO_CONTENT, null);
// Remove node after the timeout (MS Office 2003 requests 3 minutes) if the PUT or UNLOCK has not taken place
timer.schedule(new TimerTask() {
@Override
public void run() {
// run as current user
AuthenticationUtil.runAs(new RunAsWork<Void>() {
@Override
public Void doWork() throws Exception {
try {
if (getNodeService().hasAspect(nodeRef, ContentModel.ASPECT_WEBDAV_NO_CONTENT)) {
getTransactionService().getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<String>() {
public String execute() throws Throwable {
getNodeService().deleteNode(nodeRef);
if (logger.isDebugEnabled()) {
logger.debug("Timer DELETE " + path);
}
return null;
}
}, false, true);
} else if (logger.isDebugEnabled()) {
logger.debug("Timer IGNORE " + path);
}
} catch (InvalidNodeRefException e) {
// Might get this if the node is deleted. If so just ignore.
if (logger.isDebugEnabled()) {
logger.debug("Timer DOES NOT EXIST " + path);
}
}
return null;
}
}, userName);
}
}, lockTimeout * 1000);
if (logger.isDebugEnabled()) {
logger.debug("Timer START in " + lockTimeout + " seconds " + path);
}
}
if (logger.isDebugEnabled()) {
logger.debug("Created new node for lock: \n" + " path: " + path + "\n" + " node: " + lockNodeInfo);
}
m_response.setStatus(HttpServletResponse.SC_CREATED);
}
// Check if this is a new lock or a lock refresh
if (hasLockToken()) {
lockInfo = checkNode(lockNodeInfo);
if (!lockInfo.isLocked() && m_request.getContentLength() == -1) {
// see http://www.ics.uci.edu/~ejw/authoring/protocol/rfc2518.html#rfc.section.7.8
throw new WebDAVServerException(HttpServletResponse.SC_BAD_REQUEST);
}
// If no request body is defined and an "If" header is sent, createExclusive is false,
// but we need to check whether the previous LOCK was exclusive, i.e. get the property for the node.
// It has already been retrieved in the checkNode method, so we just need to get the scope from lockInfo.
// This particular case was raised as ALF-4008.
this.createExclusive = WebDAV.XML_EXCLUSIVE.equals(this.lockInfo.getScope());
// Refresh an existing lock
refreshLock(lockNodeInfo, userName);
} else {
lockInfo = checkNode(lockNodeInfo, true, createExclusive);
// Create a new lock
createLock(lockNodeInfo, userName);
}
m_response.setHeader(WebDAV.HEADER_LOCK_TOKEN, "<" + WebDAV.makeLockToken(lockNodeInfo.getNodeRef(), userName) + ">");
m_response.setHeader(WebDAV.HEADER_CONTENT_TYPE, WebDAV.XML_CONTENT_TYPE);
// We either created a new lock or refreshed an existing lock, send back the lock details
generateResponse(lockNodeInfo, userName);
}