@@ -122,7 +122,6 @@ public class PassthruCifsAuthenticator extends CifsAuthenticatorBase implements
m_sessions = new Hashtable();
}
-
public void setPassthruServers(PassthruServers servers)
{
m_passthruServers = servers;
@@ -782,6 +781,20 @@ public class PassthruCifsAuthenticator extends CifsAuthenticatorBase implements
if ( loggedOn == true) {
+ // Check for virtual circuit zero, disconnect any other sessions from this client
+
+ if ( vcNum == 0 && hasSessionCleanup()) {
+
+ // Disconnect other sessions from this client, cleanup any open files/locks/oplocks
+
+ int discCnt = sess.disconnectClientSessions();
+
+ // DEBUG
+
+ if ( discCnt > 0 && Debug.EnableInfo && sess.hasDebug(SMBSrvSession.DBG_NEGOTIATE))
+ Debug.println("[SMB] Disconnected " + discCnt + " existing sessions from client, sess=" + sess);
+ }
+
// Clear any stored session setup object for the logon
sess.removeSetupObject( client.getProcessId());
diff --git a/source/java/org/alfresco/filesys/auth/nfs/AlfrescoRpcAuthenticator.java b/source/java/org/alfresco/filesys/auth/nfs/AlfrescoRpcAuthenticator.java
index 0794f6fd22..1e5901a487 100644
--- a/source/java/org/alfresco/filesys/auth/nfs/AlfrescoRpcAuthenticator.java
+++ b/source/java/org/alfresco/filesys/auth/nfs/AlfrescoRpcAuthenticator.java
@@ -37,6 +37,7 @@ import org.alfresco.jlan.server.auth.ClientInfo;
import org.alfresco.jlan.server.config.InvalidConfigurationException;
import org.alfresco.jlan.server.config.ServerConfiguration;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
+import org.alfresco.repo.security.authentication.AuthenticationException;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.security.MutableAuthenticationService;
@@ -296,10 +297,42 @@ public class AlfrescoRpcAuthenticator implements RpcAuthenticator, InitializingB
AlfrescoClientInfo alfClient = (AlfrescoClientInfo) client;
- // Check if the authentication token has been set for the client
+ // Check if the authentication token has been set for the client
+
+ if ( alfClient.hasAuthenticationTicket()) {
+
+ // Check if the ticket is still valid
+
+ try {
+
+ // Set the authentication context for the request
+
+ getAuthenticationService().validate(alfClient.getAuthenticationTicket());
+
+ // DEBUG
+
+ if ( logger.isDebugEnabled())
+ logger.debug("Set user using auth ticket, ticket=" + alfClient.getAuthenticationTicket());
+ }
+ catch ( AuthenticationException ex) {
+
+ // Ticket not valid, re-authenticate the user
+
+ alfClient.setAuthenticationTicket( null);
+
+ // DEBUG
+
+ if ( logger.isDebugEnabled()) {
+ logger.debug("Failed to set user using auth ticket, ticket=" + alfClient.getAuthenticationTicket() + ", re-authenticating");
+ logger.debug(" Exception=" + ex.getMessage());
+ }
+ }
+ }
+
+ // If no valid authentication ticket is held at this point, authenticate the client from scratch
- if ( !alfClient.hasAuthenticationTicket() )
- {
+ if ( !alfClient.hasAuthenticationTicket() )
+ {
// ALF-9793: It's possible that the user we're about to accept doesn't even exist, yet we
// are using alfresco authentication. In such cases we must automatically create
// authentication (using a randomized password) in order to successfully authenticate.
@@ -317,18 +350,7 @@ public class AlfrescoRpcAuthenticator implements RpcAuthenticator, InitializingB
if ( logger.isDebugEnabled())
logger.debug("Set user name=" + client.getUserName() + ", ticket=" + alfClient.getAuthenticationTicket());
- }
- else
- {
- // Set the authentication context for the request
-
- getAuthenticationService().validate(alfClient.getAuthenticationTicket());
-
- // DEBUG
-
- if ( logger.isDebugEnabled())
- logger.debug("Set user using auth ticket, ticket=" + alfClient.getAuthenticationTicket());
- }
+ }
}
else
{
diff --git a/source/java/org/alfresco/filesys/repo/CacheLookupSearchContext.java b/source/java/org/alfresco/filesys/repo/CacheLookupSearchContext.java
index 24b89c5db6..4abf4bd413 100644
--- a/source/java/org/alfresco/filesys/repo/CacheLookupSearchContext.java
+++ b/source/java/org/alfresco/filesys/repo/CacheLookupSearchContext.java
@@ -63,9 +63,10 @@ public class CacheLookupSearchContext extends DotDotContentSearchContext {
String searchStr,
PseudoFileList pseudoList,
String relPath,
- FileStateCache stateCache)
+ FileStateCache stateCache,
+ boolean lockedFilesAsOffline)
{
- super(cifsHelper, results, searchStr, pseudoList, relPath);
+ super(cifsHelper, results, searchStr, pseudoList, relPath, lockedFilesAsOffline);
super.setSearchString(searchStr);
m_stateCache = stateCache;
diff --git a/source/java/org/alfresco/filesys/repo/CifsHelper.java b/source/java/org/alfresco/filesys/repo/CifsHelper.java
index 2b31c2d5c1..6ae21083a4 100644
--- a/source/java/org/alfresco/filesys/repo/CifsHelper.java
+++ b/source/java/org/alfresco/filesys/repo/CifsHelper.java
@@ -39,6 +39,9 @@ import org.alfresco.model.ContentModel;
import org.alfresco.repo.model.filefolder.HiddenAspect;
import org.alfresco.repo.model.filefolder.HiddenAspect.Visibility;
import org.alfresco.service.cmr.dictionary.DictionaryService;
+import org.alfresco.service.cmr.lock.LockService;
+import org.alfresco.service.cmr.lock.LockStatus;
+import org.alfresco.service.cmr.lock.LockType;
import org.alfresco.service.cmr.model.FileFolderService;
import org.alfresco.service.cmr.model.FileFolderUtil;
import org.alfresco.service.cmr.model.FileInfo;
@@ -52,6 +55,7 @@ import org.alfresco.service.cmr.security.AccessStatus;
import org.alfresco.service.cmr.security.PermissionService;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.FileFilterMode.Client;
+import org.alfresco.util.PropertyCheck;
import org.alfresco.util.SearchLanguageConversion;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -73,6 +77,7 @@ public class CifsHelper
private FileFolderService fileFolderService;
private MimetypeService mimetypeService;
private PermissionService permissionService;
+ private LockService lockService;
private HiddenAspect hiddenAspect;
private Set excludedTypes = new HashSet();
@@ -86,6 +91,16 @@ public class CifsHelper
{
}
+ public void init()
+ {
+ PropertyCheck.mandatory(this, "dictionaryService",dictionaryService);
+ PropertyCheck.mandatory(this, "nodeService",nodeService);
+ PropertyCheck.mandatory(this, "fileFolderService",fileFolderService);
+ PropertyCheck.mandatory(this, "permissionService",permissionService);
+ PropertyCheck.mandatory(this, "lockService",lockService);
+ PropertyCheck.mandatory(this, "mimetypeService",mimetypeService);
+ }
+
public void setDictionaryService(DictionaryService dictionaryService)
{
this.dictionaryService = dictionaryService;
@@ -244,25 +259,55 @@ public class CifsHelper
fileInfo.setAllocationSize((size + 512L) & 0xFFFFFFFFFFFFFE00L);
}
- // Check the lock status of the file
+ // Check whether the file is locked
- String lockTypeStr = (String) nodeProperties.get(ContentModel.PROP_LOCK_TYPE);
-
- if ( lockTypeStr != null )
+ if(nodeService.hasAspect(nodeRef, ContentModel.ASPECT_LOCKABLE))
{
- // File is locked so mark it as read-only and offline
-
- int attr = fileInfo.getFileAttributes();
-
- if (( attr & FileAttribute.ReadOnly) == 0)
- attr += FileAttribute.ReadOnly;
-
- if ( lockedFilesAsOffline)
- {
- attr += FileAttribute.NTOffline;
- }
+ LockType lockType = lockService.getLockType(nodeRef);
- fileInfo.setFileAttributes( attr);
+ int attr = fileInfo.getFileAttributes();
+
+ if(lockType != null)
+ {
+ switch(lockType)
+ {
+ case NODE_LOCK:
+ if (( attr & FileAttribute.ReadOnly) == 0)
+ attr += FileAttribute.ReadOnly;
+ break;
+ case WRITE_LOCK:
+ LockStatus lockStatus = lockService.getLockStatus(nodeRef);
+ if (lockStatus == LockStatus.LOCK_OWNER)
+ {
+ }
+ else
+ {
+ if (( attr & FileAttribute.ReadOnly) == 0)
+ {
+ attr += FileAttribute.ReadOnly;
+ }
+
+ if ( lockedFilesAsOffline)
+ {
+ attr += FileAttribute.NTOffline;
+ }
+ }
+ break;
+ case READ_ONLY_LOCK:
+ if (( attr & FileAttribute.ReadOnly) == 0)
+ {
+ attr += FileAttribute.ReadOnly;
+ }
+
+ if ( lockedFilesAsOffline)
+ {
+ attr += FileAttribute.NTOffline;
+ }
+ break;
+ }
+
+ fileInfo.setFileAttributes( attr);
+ }
}
// Check if it is a link node
@@ -727,4 +772,14 @@ public class CifsHelper
return false;
}
+ public void setLockService(LockService lockService)
+ {
+ this.lockService = lockService;
+ }
+
+ public LockService getLockService()
+ {
+ return lockService;
+ }
+
}
diff --git a/source/java/org/alfresco/filesys/repo/CommandExecutorImpl.java b/source/java/org/alfresco/filesys/repo/CommandExecutorImpl.java
index 55207718de..98b7b1fbdd 100644
--- a/source/java/org/alfresco/filesys/repo/CommandExecutorImpl.java
+++ b/source/java/org/alfresco/filesys/repo/CommandExecutorImpl.java
@@ -235,7 +235,7 @@ public class CommandExecutorImpl implements CommandExecutor
{
logger.debug("close file command");
CloseFileCommand c = (CloseFileCommand)command;
- repositoryDiskInterface.closeFile(c.getRootNodeRef(), c.getPath(), c.getNetworkFile());
+ repositoryDiskInterface.closeFile(sess, tree, c.getRootNodeRef(), c.getPath(), c.getNetworkFile());
}
else if(command instanceof ReduceQuotaCommand)
{
diff --git a/source/java/org/alfresco/filesys/repo/ContentDiskDriver.java b/source/java/org/alfresco/filesys/repo/ContentDiskDriver.java
index 6d9f61b7d1..1e6eb73ec6 100644
--- a/source/java/org/alfresco/filesys/repo/ContentDiskDriver.java
+++ b/source/java/org/alfresco/filesys/repo/ContentDiskDriver.java
@@ -1267,8 +1267,8 @@ public class ContentDiskDriver extends AlfrescoTxDiskDriver implements DiskInter
if ( searchFileSpec.equals( "*"))
{
// Use a cache lookup search context
-
- CacheLookupSearchContext cacheContext = new CacheLookupSearchContext(cifsHelper, results, searchFileSpec, pseudoList, paths[0], ctx.getStateCache());
+
+ CacheLookupSearchContext cacheContext = new CacheLookupSearchContext(cifsHelper, results, searchFileSpec, pseudoList, paths[0], ctx.getStateCache(), isLockedFilesAsOffline);
searchCtx = cacheContext;
// Set the '.' and '..' pseudo file entry details
@@ -1355,9 +1355,9 @@ public class ContentDiskDriver extends AlfrescoTxDiskDriver implements DiskInter
}
else {
if ( ctx.hasStateCache())
- searchCtx = new CacheLookupSearchContext(cifsHelper, results, searchFileSpec, pseudoList, paths[0], ctx.getStateCache());
+ searchCtx = new CacheLookupSearchContext(cifsHelper, results, searchFileSpec, pseudoList, paths[0], ctx.getStateCache(), isLockedFilesAsOffline);
else
- searchCtx = new ContentSearchContext(cifsHelper, results, searchFileSpec, pseudoList, paths[0]);
+ searchCtx = new ContentSearchContext(cifsHelper, results, searchFileSpec, pseudoList, paths[0], isLockedFilesAsOffline);
}
// Debug
@@ -1949,6 +1949,10 @@ public class ContentDiskDriver extends AlfrescoTxDiskDriver implements DiskInter
if ( netFile != null) {
long id = DefaultTypeConverter.INSTANCE.convert(Long.class, nodeService.getProperty(nodeRef, ContentModel.PROP_NODE_DBID));
netFile.setFileId(( int) ( id & 0xFFFFFFFFL));
+
+ // Indicate the file is open
+
+ netFile.setClosed( false);
}
// If the file has been opened for overwrite then truncate the file to zero length, this will
@@ -2120,6 +2124,10 @@ public class ContentDiskDriver extends AlfrescoTxDiskDriver implements DiskInter
netFile.truncateFile( 0L);
+ // Indicate the file is open
+
+ netFile.setClosed( false);
+
// Generate a file id for the file
if ( netFile != null) {
@@ -2499,6 +2507,23 @@ public class ContentDiskDriver extends AlfrescoTxDiskDriver implements DiskInter
if ( file.getGrantedAccess() > NetworkFile.ATTRIBUTESONLY && fstate.decrementOpenCount() == 0)
fstate.setSharedAccess( SharingMode.READWRITE + SharingMode.DELETE);
+ // Check if there is an oplock on the file
+
+ if ( file.hasOpLock()) {
+
+ // Release the oplock
+
+ OpLockInterface flIface = (OpLockInterface) this;
+ OpLockManager oplockMgr = flIface.getOpLockManager(sess, tree);
+
+ oplockMgr.releaseOpLock( file.getOpLock().getPath());
+
+ // DEBUG
+
+ if ( logger.isDebugEnabled())
+ logger.debug("Released oplock for closed file, file=" + file.getFullName());
+ }
+
// Check if there is a cached modification timestamp to be written out
if ( file.hasDeleteOnClose() == false && fstate.hasModifyDateTime() && fstate.hasFilesystemObject() && fstate.isDirectory() == false) {
@@ -3224,7 +3249,7 @@ public class ContentDiskDriver extends AlfrescoTxDiskDriver implements DiskInter
if ( logger.isDebugEnabled() && ctx.hasDebug(AlfrescoContext.DBG_RENAME))
logger.debug(" Found archived node " + archivedNode);
- if ( archivedNode != null )
+ if ( archivedNode != null && getNodeService().exists(archivedNode) )
{
// Restore the node
diff --git a/source/java/org/alfresco/filesys/repo/ContentDiskDriver2.java b/source/java/org/alfresco/filesys/repo/ContentDiskDriver2.java
index 091d6e5364..4ed6c35965 100644
--- a/source/java/org/alfresco/filesys/repo/ContentDiskDriver2.java
+++ b/source/java/org/alfresco/filesys/repo/ContentDiskDriver2.java
@@ -42,14 +42,11 @@ import org.alfresco.jlan.server.SrvSession;
import org.alfresco.jlan.server.core.DeviceContext;
import org.alfresco.jlan.server.core.DeviceContextException;
import org.alfresco.jlan.server.filesys.AccessDeniedException;
-import org.alfresco.jlan.server.filesys.AccessMode;
import org.alfresco.jlan.server.filesys.DirectoryNotEmptyException;
import org.alfresco.jlan.server.filesys.DiskDeviceContext;
import org.alfresco.jlan.server.filesys.DiskFullException;
import org.alfresco.jlan.server.filesys.DiskInterface;
import org.alfresco.jlan.server.filesys.DiskSizeInterface;
-import org.alfresco.jlan.server.filesys.FileAttribute;
-import org.alfresco.jlan.server.filesys.FileExistsException;
import org.alfresco.jlan.server.filesys.FileInfo;
import org.alfresco.jlan.server.filesys.FileName;
import org.alfresco.jlan.server.filesys.FileOpenParams;
@@ -72,7 +69,6 @@ import org.alfresco.jlan.server.locking.LockManager;
import org.alfresco.jlan.server.locking.OpLockInterface;
import org.alfresco.jlan.server.locking.OpLockManager;
import org.alfresco.jlan.smb.SMBException;
-import org.alfresco.jlan.smb.SMBStatus;
import org.alfresco.jlan.smb.server.SMBServer;
import org.alfresco.jlan.util.DataBuffer;
import org.alfresco.jlan.util.MemorySize;
@@ -792,7 +788,7 @@ public class ContentDiskDriver2 extends AlfrescoDiskDriver implements ExtendedD
pseudoList = ctx.getPseudoFileOverlay().searchPseudoFiles(dirNodeRef, searchFileSpec);
}
- DotDotContentSearchContext searchCtx = new DotDotContentSearchContext(getCifsHelper(), results, searchFileSpec, pseudoList, paths[0]);
+ DotDotContentSearchContext searchCtx = new DotDotContentSearchContext(getCifsHelper(), results, searchFileSpec, pseudoList, paths[0], isLockedFilesAsOffline);
FileInfo dotInfo = getCifsHelper().getFileInformation(searchRootNodeRef, false, isLockedFilesAsOffline);
@@ -2665,6 +2661,10 @@ public class ContentDiskDriver2 extends AlfrescoDiskDriver implements ExtendedD
{
long id = DefaultTypeConverter.INSTANCE.convert(Long.class, nodeService.getProperty(nodeRef, ContentModel.PROP_NODE_DBID));
netFile.setFileId(( int) ( id & 0xFFFFFFFFL));
+
+ // Indicate the file is open
+
+ netFile.setClosed( false);
}
if (logger.isDebugEnabled())
@@ -2715,13 +2715,30 @@ public class ContentDiskDriver2 extends AlfrescoDiskDriver implements ExtendedD
*
* @exception java.io.IOException If an error occurs.
*/
- public void closeFile(NodeRef rootNode, String path, NetworkFile file) throws IOException
+ public void closeFile(SrvSession session, TreeConnection tree, NodeRef rootNode, String path, NetworkFile file) throws IOException
{
if ( logger.isDebugEnabled())
{
logger.debug("Close file:" + path + ", readOnly=" + file.isReadOnly() );
}
+ // Check if there is an oplock on the file
+
+ if ( file.hasOpLock()) {
+
+ // Release the oplock
+
+ OpLockInterface flIface = (OpLockInterface) this;
+ OpLockManager oplockMgr = flIface.getOpLockManager(session, tree);
+
+ oplockMgr.releaseOpLock( file.getOpLock().getPath());
+
+ // DEBUG
+
+ if ( logger.isDebugEnabled())
+ logger.debug("Released oplock for closed file, file=" + file.getFullName());
+ }
+
if( file instanceof PseudoNetworkFile)
{
file.close();
diff --git a/source/java/org/alfresco/filesys/repo/ContentNetworkFile.java b/source/java/org/alfresco/filesys/repo/ContentNetworkFile.java
index 17d9ee8335..cfe6ca4398 100644
--- a/source/java/org/alfresco/filesys/repo/ContentNetworkFile.java
+++ b/source/java/org/alfresco/filesys/repo/ContentNetworkFile.java
@@ -169,7 +169,7 @@ public class ContentNetworkFile extends NodeRefNetworkFile
// If the file is read-only then only allow read access
- if ( netFile.isReadOnly())
+ if ( netFile.isReadOnly() && netFile.getGrantedAccess() == NetworkFile.READWRITE)
netFile.setGrantedAccess(NetworkFile.READONLY);
// DEBUG
@@ -222,8 +222,27 @@ public class ContentNetworkFile extends NodeRefNetworkFile
str.append( channel);
if ( channel != null)
str.append( writableChannel ? "(Write)" : "(Read)");
+
+ str.append(",");
+ str.append( getGrantedAccessAsString());
+
+ if ( hasLocks()) {
+ str.append(",locks=");
+ str.append( numberOfLocks());
+ }
+
+ if ( hasOpLock()) {
+ str.append(",oplock=");
+ str.append( getOpLock());
+ }
+
if ( modified)
- str.append( ",modified");
+ str.append( ",Modified");
+ if ( isClosed())
+ str.append(",Closed");
+ str.append(",open=");
+ str.append( getOpenCount());
+
str.append( "]");
return str.toString();
@@ -691,6 +710,10 @@ public class ContentNetworkFile extends NodeRefNetworkFile
public void openFile(boolean createFlag)
throws IOException
{
+ // Mark as open
+
+ setClosed( false);
+
// Wait for read/write before opening the content channel
}
diff --git a/source/java/org/alfresco/filesys/repo/ContentSearchContext.java b/source/java/org/alfresco/filesys/repo/ContentSearchContext.java
index d2abdcdbad..591d3eb80a 100644
--- a/source/java/org/alfresco/filesys/repo/ContentSearchContext.java
+++ b/source/java/org/alfresco/filesys/repo/ContentSearchContext.java
@@ -73,6 +73,8 @@ public class ContentSearchContext extends SearchContext
private PseudoFileList pseudoList;
private boolean donePseudoFiles = false;
+ private boolean lockedFilesAsOffline;
+
// Resume id
private int resumeId;
@@ -99,13 +101,15 @@ public class ContentSearchContext extends SearchContext
List results,
String searchStr,
PseudoFileList pseudoList,
- String relPath)
+ String relPath,
+ boolean lockedFilesAsOffline)
{
super();
super.setSearchString(searchStr);
this.cifsHelper = cifsHelper;
this.results = results;
this.pseudoList = pseudoList;
+ this.lockedFilesAsOffline = lockedFilesAsOffline;
m_relPath = relPath;
if ( m_relPath != null && m_relPath.endsWith( FileName.DOS_SEPERATOR_STR) == false)
@@ -243,7 +247,7 @@ public class ContentSearchContext extends SearchContext
// Get the file information and copy across to the caller's file info
- nextInfo = cifsHelper.getFileInformation(nextNodeRef, "", false, false);
+ nextInfo = cifsHelper.getFileInformation(nextNodeRef, "", false, lockedFilesAsOffline);
info.copyFrom(nextInfo);
/**
diff --git a/source/java/org/alfresco/filesys/repo/DotDotContentSearchContext.java b/source/java/org/alfresco/filesys/repo/DotDotContentSearchContext.java
index 7135bb1197..a42baec722 100644
--- a/source/java/org/alfresco/filesys/repo/DotDotContentSearchContext.java
+++ b/source/java/org/alfresco/filesys/repo/DotDotContentSearchContext.java
@@ -62,10 +62,11 @@ public class DotDotContentSearchContext extends ContentSearchContext {
List results,
String searchStr,
PseudoFileList pseudoList,
- String relPath)
+ String relPath,
+ boolean lockedFilesAsOffline)
{
- super(cifsHelper, results, searchStr, pseudoList, relPath);
+ super(cifsHelper, results, searchStr, pseudoList, relPath, lockedFilesAsOffline);
super.setSearchString(searchStr);
}
diff --git a/source/java/org/alfresco/filesys/repo/NonTransactionalRuleContentDiskDriver.java b/source/java/org/alfresco/filesys/repo/NonTransactionalRuleContentDiskDriver.java
index 4a66ac4f76..3b3f1cf3f9 100644
--- a/source/java/org/alfresco/filesys/repo/NonTransactionalRuleContentDiskDriver.java
+++ b/source/java/org/alfresco/filesys/repo/NonTransactionalRuleContentDiskDriver.java
@@ -157,7 +157,7 @@ public class NonTransactionalRuleContentDiskDriver implements ExtendedDiskInterf
EvaluatorContext ctx = getEvaluatorContext(driverState, folder);
- Operation o = new CloseFileOperation(file, param, rootNode, param.getFullName(), param.hasDeleteOnClose());
+ Operation o = new CloseFileOperation(file, param, rootNode, param.getFullName(), param.hasDeleteOnClose(), param.isForce());
Command c = ruleEvaluator.evaluate(ctx, o);
commandExecutor.execute(sess, tree, c);
diff --git a/source/java/org/alfresco/filesys/repo/TempNetworkFile.java b/source/java/org/alfresco/filesys/repo/TempNetworkFile.java
index 1350c59062..7e608bac09 100644
--- a/source/java/org/alfresco/filesys/repo/TempNetworkFile.java
+++ b/source/java/org/alfresco/filesys/repo/TempNetworkFile.java
@@ -30,6 +30,7 @@ public class TempNetworkFile extends JavaNetworkFile implements NetworkFileState
super(file, netPath);
setFullName(netPath);
setAttributes(FileAttribute.NTNormal);
+ setClosed(false);
}
/**
@@ -43,6 +44,7 @@ public class TempNetworkFile extends JavaNetworkFile implements NetworkFileState
super(file, netPath);
setFullName(netPath);
setAttributes(FileAttribute.NTNormal);
+ setClosed(false);
}
/**
diff --git a/source/java/org/alfresco/filesys/repo/rules/ScenarioOpenFileInstance.java b/source/java/org/alfresco/filesys/repo/rules/ScenarioOpenFileInstance.java
index c0f28327fa..833a4b639e 100644
--- a/source/java/org/alfresco/filesys/repo/rules/ScenarioOpenFileInstance.java
+++ b/source/java/org/alfresco/filesys/repo/rules/ScenarioOpenFileInstance.java
@@ -256,7 +256,7 @@ class ScenarioOpenFileInstance implements ScenarioInstance
if(isReadOnly(file))
{
// Read Only File
- if(openReadOnlyCount == 1)
+ if(openReadOnlyCount == 1 || c.isForce())
{
if(logger.isDebugEnabled())
{
@@ -303,7 +303,7 @@ class ScenarioOpenFileInstance implements ScenarioInstance
{
// This is a close of a Read Write File
// Read Only File
- if(openReadWriteCount == 1)
+ if(openReadWriteCount == 1 || c.isForce())
{
if(logger.isDebugEnabled())
{
diff --git a/source/java/org/alfresco/filesys/repo/rules/operations/CloseFileOperation.java b/source/java/org/alfresco/filesys/repo/rules/operations/CloseFileOperation.java
index 8af24bd937..5a0b351b61 100644
--- a/source/java/org/alfresco/filesys/repo/rules/operations/CloseFileOperation.java
+++ b/source/java/org/alfresco/filesys/repo/rules/operations/CloseFileOperation.java
@@ -35,14 +35,16 @@ public class CloseFileOperation implements Operation
private NetworkFile networkFile;
boolean deleteOnClose;
+ boolean force;
- public CloseFileOperation(String name, NetworkFile networkFile, NodeRef rootNodeRef, String path, boolean deleteOnClose)
+ public CloseFileOperation(String name, NetworkFile networkFile, NodeRef rootNodeRef, String path, boolean deleteOnClose, boolean force)
{
this.name = name;
this.networkFile = networkFile;
this.rootNodeRef = rootNodeRef;
this.path = path;
this.deleteOnClose = deleteOnClose;
+ this.force = force;
}
public String getName()
@@ -80,6 +82,11 @@ public class CloseFileOperation implements Operation
return deleteOnClose;
}
+ public boolean isForce()
+ {
+ return force;
+ }
+
public boolean equals(Object o)
{
if(o instanceof CloseFileOperation)
diff --git a/source/java/org/alfresco/repo/action/AsynchronousActionExecutionQueueImpl.java b/source/java/org/alfresco/repo/action/AsynchronousActionExecutionQueueImpl.java
index 87d5297b54..ec7f8e1d7c 100644
--- a/source/java/org/alfresco/repo/action/AsynchronousActionExecutionQueueImpl.java
+++ b/source/java/org/alfresco/repo/action/AsynchronousActionExecutionQueueImpl.java
@@ -26,8 +26,10 @@ import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadPoolExecutor;
+import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.error.StackTraceUtil;
import org.alfresco.repo.action.AsynchronousActionExecutionQueuePolicies.OnAsyncActionExecute;
+import org.alfresco.repo.content.transform.UnimportantTransformException;
import org.alfresco.repo.policy.ClassPolicyDelegate;
import org.alfresco.repo.policy.PolicyComponent;
import org.alfresco.repo.rule.RuleServiceImpl;
@@ -414,9 +416,19 @@ public class AsynchronousActionExecutionQueueImpl implements AsynchronousActionE
};
AuthenticationUtil.runAs(actionRunAs, userName);
}
- catch (Throwable exception)
+ catch (Throwable e)
{
- logger.error("Failed to execute asynchronous action: " + action, exception);
+ Throwable rootCause = (e instanceof AlfrescoRuntimeException) ? ((AlfrescoRuntimeException)e).getRootCause() : null;
+ String message = (rootCause == null ? null : rootCause.getMessage());
+ message = "Failed to execute asynchronous action: " + action+ (message == null ? "" : ": "+message);
+ if (rootCause instanceof UnimportantTransformException)
+ {
+ logger.debug(message);
+ }
+ else
+ {
+ logger.error(message, e);
+ }
}
handleAsyncActionIsCompleted(actionedUponNodeRef, action);
}
diff --git a/source/java/org/alfresco/repo/avm/AVMServiceTest.java b/source/java/org/alfresco/repo/avm/AVMServiceTest.java
index 6a00220c27..35535a9c60 100644
--- a/source/java/org/alfresco/repo/avm/AVMServiceTest.java
+++ b/source/java/org/alfresco/repo/avm/AVMServiceTest.java
@@ -366,7 +366,9 @@ public class AVMServiceTest extends AVMServiceTestBase
{
public Object execute() throws Throwable
{
- fService.createStore("StagingArea" + "-" + getName() + "-" + System.currentTimeMillis());
+ String storeName = "StagingArea" + "-" + getName() + "-" + System.currentTimeMillis();
+ fService.createStore(storeName);
+ fService.purgeStore(storeName);
return null;
}
};
diff --git a/source/java/org/alfresco/repo/coci/CheckOutCheckInServiceImpl.java b/source/java/org/alfresco/repo/coci/CheckOutCheckInServiceImpl.java
index d5abca02f9..c6f78684df 100644
--- a/source/java/org/alfresco/repo/coci/CheckOutCheckInServiceImpl.java
+++ b/source/java/org/alfresco/repo/coci/CheckOutCheckInServiceImpl.java
@@ -34,6 +34,7 @@ import org.alfresco.repo.coci.CheckOutCheckInServicePolicies.BeforeCheckOut;
import org.alfresco.repo.coci.CheckOutCheckInServicePolicies.OnCancelCheckOut;
import org.alfresco.repo.coci.CheckOutCheckInServicePolicies.OnCheckIn;
import org.alfresco.repo.coci.CheckOutCheckInServicePolicies.OnCheckOut;
+import org.alfresco.repo.lock.LockUtils;
import org.alfresco.repo.policy.BehaviourFilter;
import org.alfresco.repo.policy.ClassPolicyDelegate;
import org.alfresco.repo.policy.PolicyComponent;
@@ -42,6 +43,7 @@ import org.alfresco.service.cmr.coci.CheckOutCheckInService;
import org.alfresco.service.cmr.coci.CheckOutCheckInServiceException;
import org.alfresco.service.cmr.lock.LockService;
import org.alfresco.service.cmr.lock.LockType;
+import org.alfresco.service.cmr.lock.NodeLockedException;
import org.alfresco.service.cmr.lock.UnableToReleaseLockException;
import org.alfresco.service.cmr.model.FileExistsException;
import org.alfresco.service.cmr.model.FileFolderService;
@@ -373,6 +375,13 @@ public class CheckOutCheckInServiceImpl implements CheckOutCheckInService
throw new CheckOutCheckInServiceException(MSG_ERR_ALREADY_WORKING_COPY);
}
+ // Checking LockUtils.isLockedOrReadOnly alone is not sufficient when the same user edits the node both offline and online (for instance, in two open browsers). In that
+ // case the node carries ContentModel.ASPECT_LOCKABLE with LockType.WRITE_LOCK, so the following explicit lock-type check is required
+ if (lockService.getLockType(nodeRef) == LockType.WRITE_LOCK)
+ {
+ throw new NodeLockedException(nodeRef);
+ }
+
behaviourFilter.disableBehaviour(nodeRef, ContentModel.ASPECT_AUDITABLE);
behaviourFilter.disableBehaviour(destinationParentNodeRef, ContentModel.ASPECT_AUDITABLE);
try
diff --git a/source/java/org/alfresco/repo/content/ContentServiceImpl.java b/source/java/org/alfresco/repo/content/ContentServiceImpl.java
index 1f88163b09..ca9edefda2 100644
--- a/source/java/org/alfresco/repo/content/ContentServiceImpl.java
+++ b/source/java/org/alfresco/repo/content/ContentServiceImpl.java
@@ -39,7 +39,9 @@ import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.repo.content.filestore.FileContentWriter;
import org.alfresco.repo.content.transform.ContentTransformer;
import org.alfresco.repo.content.transform.ContentTransformerRegistry;
+import org.alfresco.repo.content.transform.OOXMLThumbnailContentTransformer;
import org.alfresco.repo.content.transform.TransformerDebug;
+import org.alfresco.repo.content.transform.UnimportantTransformException;
import org.alfresco.repo.node.NodeServicePolicies;
import org.alfresco.repo.policy.ClassPolicyDelegate;
import org.alfresco.repo.policy.JavaBehaviour;
@@ -706,13 +708,31 @@ public class ContentServiceImpl implements ContentService, ApplicationContextAwa
boolean first = true;
for (Exception e : exceptions)
{
+ Throwable rootCause = (e instanceof AlfrescoRuntimeException) ? ((AlfrescoRuntimeException)e).getRootCause() : null;
+ String message = (rootCause == null ? null : rootCause.getMessage());
if (done)
{
- logger.error("Transformer succeeded after previous transformer failed.", e);
+ message = "Transformer succeeded after previous transformer failed"+ (message == null ? "" : ": "+message);
+ if (rootCause instanceof UnimportantTransformException)
+ {
+ logger.debug(message);
+ }
+ else
+ {
+ logger.error(message, e);
+ }
}
- else if (!first)
+ else if (!first) // The first exception is logged later
{
- logger.error("Transformer exception.", e);
+ message = "Transformer exception"+ (message == null ? "" : ": "+message);
+ if (rootCause instanceof UnimportantTransformException)
+ {
+ logger.debug(message);
+ }
+ else
+ {
+ logger.error(message, e);
+ }
first = false;
}
}
diff --git a/source/java/org/alfresco/repo/content/transform/OOXMLThumbnailContentTransformer.java b/source/java/org/alfresco/repo/content/transform/OOXMLThumbnailContentTransformer.java
index c882bd24dd..a8e62dc8ee 100644
--- a/source/java/org/alfresco/repo/content/transform/OOXMLThumbnailContentTransformer.java
+++ b/source/java/org/alfresco/repo/content/transform/OOXMLThumbnailContentTransformer.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005-2011 Alfresco Software Limited.
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -48,6 +48,8 @@ import org.apache.poi.openxml4j.opc.PackageRelationshipTypes;
*/
public class OOXMLThumbnailContentTransformer extends AbstractContentTransformer2
{
+ public static final String NO_THUMBNAIL_PRESENT_IN_FILE = "No thumbnail present in file, unable to generate ";
+
private static final Log log = LogFactory.getLog(OOXMLThumbnailContentTransformer.class);
private static final List OOXML_MIMETYPES = Arrays.asList(new String[]{MimetypeMap.MIMETYPE_OPENXML_SPREADSHEET,
@@ -107,7 +109,7 @@ public class OOXMLThumbnailContentTransformer extends AbstractContentTransformer
else
{
log.debug("No thumbnail present in " + reader.toString());
- throw new AlfrescoRuntimeException("No thumbnail present in file, unable to generate " + targetMimetype);
+ throw new UnimportantTransformException(NO_THUMBNAIL_PRESENT_IN_FILE + targetMimetype);
}
}
catch (IOException e)
diff --git a/source/java/org/alfresco/repo/content/transform/TransformerDebug.java b/source/java/org/alfresco/repo/content/transform/TransformerDebug.java
index 7f0f3c2397..fc5320e525 100644
--- a/source/java/org/alfresco/repo/content/transform/TransformerDebug.java
+++ b/source/java/org/alfresco/repo/content/transform/TransformerDebug.java
@@ -306,7 +306,14 @@ public class TransformerDebug
{
Deque ourStack = ThreadInfo.getStack();
Frame frame = ourStack.peek();
-
+
+ // Override setDebugOutput(false) to allow debug when there are transformers but they are all unavailable
+ // Note once turned on we don't turn it off again.
+ if (transformers.size() == 0 &&
+ frame.unavailableTransformers != null &&
+ frame.unavailableTransformers.size() != 0) {
+ ThreadInfo.setDebugOutput(true);
+ }
// Log the basic info about this transformation
logBasicDetails(frame, sourceSize,
calledFrom + ((transformers.size() == 0) ? " NO transformers" : ""),
@@ -490,9 +497,8 @@ public class TransformerDebug
*/
public boolean isEnabled()
{
- return
- (logger.isDebugEnabled() && ThreadInfo.getDebugOutput()) ||
- logger.isTraceEnabled();
+ // Don't check ThreadInfo.getDebugOutput() as availableTransformers() may upgrade from trace to debug.
+ return logger.isDebugEnabled();
}
/**
diff --git a/source/java/org/alfresco/repo/content/transform/UnimportantTransformException.java b/source/java/org/alfresco/repo/content/transform/UnimportantTransformException.java
new file mode 100644
index 0000000000..dd191e5d57
--- /dev/null
+++ b/source/java/org/alfresco/repo/content/transform/UnimportantTransformException.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
+ *
+ * This file is part of Alfresco
+ *
+ * Alfresco is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Alfresco is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
+ */
+package org.alfresco.repo.content.transform;
+
+import org.alfresco.error.AlfrescoRuntimeException;
+
+/**
+ * An exception that may be thrown by a transformer that indicates that the transform
+ * could not be performed, but that a full stack trace is not required in logging as
+ * the reason is expected some of the time (for example source file does not contain an
+ * embedded image).
+ *
+ * @author Alan Davis
+ */
+public class UnimportantTransformException extends AlfrescoRuntimeException
+{
+ public UnimportantTransformException(String msgId)
+ {
+ super(msgId);
+ }
+}
diff --git a/source/java/org/alfresco/repo/domain/node/AbstractNodeDAOImpl.java b/source/java/org/alfresco/repo/domain/node/AbstractNodeDAOImpl.java
index 02d9b6f455..ea18932679 100644
--- a/source/java/org/alfresco/repo/domain/node/AbstractNodeDAOImpl.java
+++ b/source/java/org/alfresco/repo/domain/node/AbstractNodeDAOImpl.java
@@ -36,6 +36,8 @@ import java.util.Set;
import java.util.SortedSet;
import java.util.Stack;
import java.util.TreeSet;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.ibatis.BatchingDAO;
@@ -112,7 +114,6 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
private static final String CACHE_REGION_NODES = "N.N";
private static final String CACHE_REGION_ASPECTS = "N.A";
private static final String CACHE_REGION_PROPERTIES = "N.P";
- private static final String CACHE_REGION_PARENT_ASSOCS = "N.PA";
private static final String KEY_LOST_NODE_PAIRS = AbstractNodeDAOImpl.class.getName() + ".lostNodePairs";
private static final String KEY_DELETED_ASSOCS = AbstractNodeDAOImpl.class.getName() + ".deletedAssocs";
@@ -180,12 +181,13 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
*/
private EntityLookupCache, Serializable> propertiesCache;
/**
- * Cache for the Node parent assocs:
- * KEY: NodeVersionKey
- * VALUE: ParentAssocs
- * VALUE KEY: None
+ * Non-clustered cache for the Node parent assocs:
+ * KEY: (nodeId, txnId) pair
+ * VALUE: ParentAssocs
*/
- private EntityLookupCache parentAssocsCache;
+ private ParentAssocsCache parentAssocsCache;
+ private int parentAssocsCacheSize;
+ private int parentAssocsCacheLimitFactor = 8;
/**
* Cache for fast lookups of child nodes by cm:name.
@@ -205,7 +207,6 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
nodesCache = new EntityLookupCache(new NodesCacheCallbackDAO());
aspectsCache = new EntityLookupCache, Serializable>(new AspectsCallbackDAO());
propertiesCache = new EntityLookupCache, Serializable>(new PropertiesCallbackDAO());
- parentAssocsCache = new EntityLookupCache(new ParentAssocsCallbackDAO());
childByNameCache = new NullCache();
}
@@ -365,16 +366,27 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
/**
- * Set the cache that maintains the Node parent associations
+ * Sets the maximum capacity of the parent assocs cache
*
- * @param parentAssocsCache the cache
+ * @param parentAssocsCacheSize the cache size
*/
- public void setParentAssocsCache(SimpleCache parentAssocsCache)
+ public void setParentAssocsCacheSize(int parentAssocsCacheSize)
{
- this.parentAssocsCache = new EntityLookupCache(
- parentAssocsCache,
- CACHE_REGION_PARENT_ASSOCS,
- new ParentAssocsCallbackDAO());
+ this.parentAssocsCacheSize = parentAssocsCacheSize;
+ }
+
+ /**
+ * Sets the average number of parents expected per cache entry. This parameter is multiplied by the
+ * {@link #setParentAssocsCacheSize(int)} parameter to compute a limit on the total number of cached parents, which
+ * will be proportional to the cache's memory usage. The cache will be pruned when this limit is exceeded to avoid
+ * excessive memory usage.
+ *
+ * @param parentAssocsCacheLimitFactor
+ * the parentAssocsCacheLimitFactor to set
+ */
+ public void setParentAssocsCacheLimitFactor(int parentAssocsCacheLimitFactor)
+ {
+ this.parentAssocsCacheLimitFactor = parentAssocsCacheLimitFactor;
}
/**
@@ -404,6 +416,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
PropertyCheck.mandatory(this, "nodeIndexer", nodeIndexer);
this.nodePropertyHelper = new NodePropertyHelper(dictionaryService, qnameDAO, localeDAO, contentDataDAO);
+ this.parentAssocsCache = new ParentAssocsCache(this.parentAssocsCacheSize, this.parentAssocsCacheLimitFactor);
}
/*
@@ -551,8 +564,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
Node node = nodesCache.getValue(nodeId);
if (node != null)
{
- NodeVersionKey nodeVersionKey = node.getNodeVersionKey();
- invalidateNodeCaches(nodeVersionKey, true, true, true);
+ invalidateNodeCaches(node, true, true, true);
}
// Finally remove the node reference
nodesCache.removeByKey(nodeId);
@@ -561,14 +573,12 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
/**
* Invalidate specific node caches using an exact key
*
- * @param nodeVersionKey the node ID-VERSION key to use
+ * @param node the node in question
*/
- private void invalidateNodeCaches(
- NodeVersionKey nodeVersionKey,
- boolean invalidateNodeAspectsCache,
- boolean invalidateNodePropertiesCache,
- boolean invalidateParentAssocsCache)
+ private void invalidateNodeCaches(Node node, boolean invalidateNodeAspectsCache,
+ boolean invalidateNodePropertiesCache, boolean invalidateParentAssocsCache)
{
+ NodeVersionKey nodeVersionKey = node.getNodeVersionKey();
if (invalidateNodeAspectsCache)
{
aspectsCache.removeByKey(nodeVersionKey);
@@ -579,11 +589,10 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
if (invalidateParentAssocsCache)
{
- parentAssocsCache.removeByKey(nodeVersionKey);
+ invalidateParentAssocsCached(node);
}
}
-
/*
* Transactions
*/
@@ -1646,9 +1655,14 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
{
copyNodePropertiesCached(nodeVersionKey, newNodeVersionKey);
}
- if (!invalidateParentAssocsCache)
+ if (invalidateParentAssocsCache)
{
- copyParentAssocsCached(nodeVersionKey, newNodeVersionKey);
+ // Because we cache parent assocs by transaction, we must manually invalidate on this version change
+ invalidateParentAssocsCached(node);
+ }
+ else
+ {
+ copyParentAssocsCached(node);
}
}
else
@@ -1656,7 +1670,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
// The node was not touched. By definition it MUST be in the current transaction.
// We invalidate the caches as specifically requested
invalidateNodeCaches(
- nodeVersionKey,
+ node,
invalidateNodeAspectsCache,
invalidateNodePropertiesCache,
invalidateParentAssocsCache);
@@ -2197,7 +2211,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
NodeVersionKey newNodeVersionKey = getNodeNotNull(nodeId, false).getNodeVersionKey();
copyNodeAspectsCached(nodeVersionKey, newNodeVersionKey);
copyNodePropertiesCached(nodeVersionKey, newNodeVersionKey);
- copyParentAssocsCached(nodeVersionKey, newNodeVersionKey);
+ copyParentAssocsCached(node);
}
}
else if (modifyProps)
@@ -3015,8 +3029,8 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
ChildAssocEntity assoc = newChildAssocImpl(
parentNodeId, childNodeId, false, assocTypeQName, assocQName, childNodeName, false);
Long assocId = assoc.getId();
- // Touch the node; all caches are fine
- touchNode(childNodeId, null, null, false, false, false);
+ // Touch the node; parent assocs have been updated
+ touchNode(childNodeId, null, null, false, false, true);
// update cache
parentAssocInfo = parentAssocInfo.addAssoc(assocId, assoc);
setParentAssocsCached(childNodeId, parentAssocInfo);
@@ -3045,8 +3059,8 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
{
throw new ConcurrencyFailureException("Child association not deleted: " + assocId);
}
- // Touch the node; all caches are fine
- touchNode(childNodeId, null, null, false, false, false);
+ // Touch the node; parent assocs have been updated
+ touchNode(childNodeId, null, null, false, false, true);
// Update cache
parentAssocInfo = parentAssocInfo.removeAssoc(assocId);
setParentAssocsCached(childNodeId, parentAssocInfo);
@@ -3925,20 +3939,174 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
}
// done
}
-
+
/**
- * @return Returns a node's parent associations
+ * A Map-like class for storing ParentAssocsInfos. It prunes its oldest ParentAssocsInfo entries not only when a
+ * capacity is reached, but also when a total number of cached parents is reached, as this is what dictates the
+ * overall memory usage.
+ */
+ private static class ParentAssocsCache
+ {
+ private final ReadWriteLock lock = new ReentrantReadWriteLock();
+ private final int size;
+ private final int maxParentCount;
+ private final Map<Pair<Long, String>, ParentAssocsInfo> cache;
+ private final Map<Pair<Long, String>, Pair<Long, String>> nextKeys;
+ private final Map<Pair<Long, String>, Pair<Long, String>> previousKeys;
+ private Pair<Long, String> firstKey;
+ private Pair<Long, String> lastKey;
+ private int parentCount;
+
+ /**
+ * @param size
+ * @param limitFactor
+ */
+ public ParentAssocsCache(int size, int limitFactor)
+ {
+ this.size = size;
+ this.maxParentCount = size * limitFactor;
+ final int mapSize = size * 2;
+ this.cache = new HashMap<Pair<Long, String>, ParentAssocsInfo>(mapSize);
+ this.nextKeys = new HashMap<Pair<Long, String>, Pair<Long, String>>(mapSize);
+ this.previousKeys = new HashMap<Pair<Long, String>, Pair<Long, String>>(mapSize);
+ }
+
+ private ParentAssocsInfo get(Pair<Long, String> cacheKey)
+ {
+ lock.readLock().lock();
+ try
+ {
+ return cache.get(cacheKey);
+ }
+ finally
+ {
+ lock.readLock().unlock();
+ }
+ }
+
+ private void put(Pair<Long, String> cacheKey, ParentAssocsInfo parentAssocs)
+ {
+ lock.writeLock().lock();
+ try
+ {
+ // If an entry already exists, remove it and do the necessary housekeeping
+ if (cache.containsKey(cacheKey))
+ {
+ remove(cacheKey);
+ }
+
+ // Add the value and prepend the key
+ cache.put(cacheKey, parentAssocs);
+ if (firstKey == null)
+ {
+ lastKey = cacheKey;
+ }
+ else
+ {
+ nextKeys.put(cacheKey, firstKey);
+ previousKeys.put(firstKey, cacheKey);
+ }
+ firstKey = cacheKey;
+ parentCount += parentAssocs.getParentAssocs().size();
+
+ // Now prune the oldest entries whilst we have more cache entries or cached parents than desired
+ int currentSize = cache.size();
+ while (currentSize > size || parentCount > maxParentCount)
+ {
+ remove(lastKey);
+ currentSize--;
+ }
+ }
+ finally
+ {
+ lock.writeLock().unlock();
+ }
+ }
+
+ private ParentAssocsInfo remove(Pair<Long, String> cacheKey)
+ {
+ lock.writeLock().lock();
+ try
+ {
+ // Remove from the map
+ ParentAssocsInfo oldParentAssocs = cache.remove(cacheKey);
+
+ // If the object didn't exist, we are done
+ if (oldParentAssocs == null)
+ {
+ return null;
+ }
+
+ // Re-link the list
+ Pair<Long, String> previousCacheKey = previousKeys.remove(cacheKey);
+ Pair<Long, String> nextCacheKey = nextKeys.remove(cacheKey);
+ if (nextCacheKey == null)
+ {
+ if (previousCacheKey == null)
+ {
+ firstKey = lastKey = null;
+ }
+ else
+ {
+ lastKey = previousCacheKey;
+ nextKeys.remove(previousCacheKey);
+ }
+ }
+ else
+ {
+ if (previousCacheKey == null)
+ {
+ firstKey = nextCacheKey;
+ previousKeys.remove(nextCacheKey);
+ }
+ else
+ {
+ nextKeys.put(previousCacheKey, nextCacheKey);
+ previousKeys.put(nextCacheKey, previousCacheKey);
+ }
+ }
+ // Update the parent count
+ parentCount -= oldParentAssocs.getParentAssocs().size();
+ return oldParentAssocs;
+ }
+ finally
+ {
+ lock.writeLock().unlock();
+ }
+ }
+
+ private void clear()
+ {
+ lock.writeLock().lock();
+ try
+ {
+ cache.clear();
+ nextKeys.clear();
+ previousKeys.clear();
+ firstKey = lastKey = null;
+ parentCount = 0;
+ }
+ finally
+ {
+ lock.writeLock().unlock();
+ }
+ }
+ }
+
+ /**
+ * @return Returns a node's parent associations
*/
private ParentAssocsInfo getParentAssocsCached(Long nodeId)
{
- NodeVersionKey nodeVersionKey = getNodeNotNull(nodeId, false).getNodeVersionKey();
- Pair cacheEntry = parentAssocsCache.getByKey(nodeVersionKey);
- if (cacheEntry == null)
+ Node node = getNodeNotNull(nodeId, false);
+ Pair<Long, String> cacheKey = new Pair<Long, String>(nodeId, node.getTransaction().getChangeTxnId());
+ ParentAssocsInfo value = parentAssocsCache.get(cacheKey);
+ if (value == null)
{
- invalidateNodeCaches(nodeId);
- throw new DataIntegrityViolationException("Invalid node ID: " + nodeId);
+ value = loadParentAssocs(node.getNodeVersionKey());
+ parentAssocsCache.put(cacheKey, value);
}
- return cacheEntry.getSecond();
+ return value;
}
/**
@@ -3946,84 +4114,96 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
*/
private void setParentAssocsCached(Long nodeId, ParentAssocsInfo parentAssocs)
{
- NodeVersionKey nodeVersionKey = getNodeNotNull(nodeId, false).getNodeVersionKey();
- parentAssocsCache.setValue(nodeVersionKey, parentAssocs);
+ Node node = getNodeNotNull(nodeId, false);
+ Pair<Long, String> cacheKey = new Pair<Long, String>(nodeId, node.getTransaction().getChangeTxnId());
+ parentAssocsCache.put(cacheKey, parentAssocs);
}
/**
* Helper method to copy cache values from one key to another
*/
- private void copyParentAssocsCached(NodeVersionKey from, NodeVersionKey to)
+ private void copyParentAssocsCached(Node from)
{
- ParentAssocsInfo cacheEntry = parentAssocsCache.getValue(from);
+ String fromTransactionId = from.getTransaction().getChangeTxnId();
+ String toTransactionId = getCurrentTransaction().getChangeTxnId();
+ // If the node is already in this transaction, there's nothing to do
+ if (fromTransactionId.equals(toTransactionId))
+ {
+ return;
+ }
+ Pair<Long, String> cacheKey = new Pair<Long, String>(from.getId(), fromTransactionId);
+ ParentAssocsInfo cacheEntry = parentAssocsCache.get(cacheKey);
if (cacheEntry != null)
{
- parentAssocsCache.setValue(to, cacheEntry);
+ parentAssocsCache.put(new Pair<Long, String>(from.getId(), toTransactionId), cacheEntry);
}
}
/**
- * Callback to cache node parent assocs.
- *
- * @author Derek Hulley
- * @since 3.4
+ * Helper method to remove associations relating to a cached node
*/
- private class ParentAssocsCallbackDAO extends EntityLookupCallbackDAOAdaptor
+ private void invalidateParentAssocsCached(Node node)
{
- public Pair createValue(ParentAssocsInfo value)
+ // Invalidate both the node and current transaction ID, just in case
+ Long nodeId = node.getId();
+ String nodeTransactionId = node.getTransaction().getChangeTxnId();
+ parentAssocsCache.remove(new Pair<Long, String>(nodeId, nodeTransactionId));
+ if (AlfrescoTransactionSupport.getTransactionReadState() == TxnReadState.TXN_READ_WRITE)
{
- throw new UnsupportedOperationException("Nodes are created independently.");
- }
-
- public Pair findByKey(NodeVersionKey nodeVersionKey)
- {
- Long nodeId = nodeVersionKey.getNodeId();
- // Find out if it is a root or store root
- boolean isRoot = hasNodeAspect(nodeId, ContentModel.ASPECT_ROOT);
- boolean isStoreRoot = getNodeType(nodeId).equals(ContentModel.TYPE_STOREROOT);
-
- // Select all the parent associations
- List assocs = selectParentAssocs(nodeId);
-
- // Build the cache object
- ParentAssocsInfo value = new ParentAssocsInfo(isRoot, isStoreRoot, assocs);
-
- // Now check if we are seeing the correct version of the node
- if (assocs.isEmpty())
+ String currentTransactionId = getCurrentTransaction().getChangeTxnId();
+ if (!currentTransactionId.equals(nodeTransactionId))
{
- // No results. Currently Alfresco has very few parentless nodes (root nodes)
- // and the lack of parent associations will be cached, anyway.
- // But to match earlier fixes of ALF-12393, we do a double-check of the node's details
- NodeEntity nodeCheckFromDb = selectNodeById(nodeId);
- if (nodeCheckFromDb == null || !nodeCheckFromDb.getNodeVersionKey().equals(nodeVersionKey))
- {
- // The node is gone or has moved on in version
- invalidateNodeCaches(nodeId);
- throw new DataIntegrityViolationException(
- "Detected stale node entry: " + nodeVersionKey +
- " (now " + nodeCheckFromDb + ")");
- }
+ parentAssocsCache.remove(new Pair<Long, String>(nodeId, currentTransactionId));
}
- else
- {
- ChildAssocEntity childAssoc = assocs.get(0);
- // What is the real (at least to this txn) version of the child node?
- NodeVersionKey childNodeVersionKeyFromDb = childAssoc.getChildNode().getNodeVersionKey();
- if (!childNodeVersionKeyFromDb.equals(nodeVersionKey))
- {
- // This method was called with a stale version
- invalidateNodeCaches(nodeId);
- throw new DataIntegrityViolationException(
- "Detected stale node entry: " + nodeVersionKey +
- " (now " + childNodeVersionKeyFromDb + ")");
- }
- }
-
- // Done
- return new Pair(nodeVersionKey, value);
- }
+ }
}
+ private ParentAssocsInfo loadParentAssocs(NodeVersionKey nodeVersionKey)
+ {
+ Long nodeId = nodeVersionKey.getNodeId();
+ // Find out if it is a root or store root
+ boolean isRoot = hasNodeAspect(nodeId, ContentModel.ASPECT_ROOT);
+ boolean isStoreRoot = getNodeType(nodeId).equals(ContentModel.TYPE_STOREROOT);
+
+ // Select all the parent associations
+ List<ChildAssocEntity> assocs = selectParentAssocs(nodeId);
+
+ // Build the cache object
+ ParentAssocsInfo value = new ParentAssocsInfo(isRoot, isStoreRoot, assocs);
+
+ // Now check if we are seeing the correct version of the node
+ if (assocs.isEmpty())
+ {
+ // No results. Currently Alfresco has very few parentless nodes (root nodes)
+ // and the lack of parent associations will be cached, anyway.
+ // But to match earlier fixes of ALF-12393, we do a double-check of the node's details
+ NodeEntity nodeCheckFromDb = selectNodeById(nodeId);
+ if (nodeCheckFromDb == null || !nodeCheckFromDb.getNodeVersionKey().equals(nodeVersionKey))
+ {
+ // The node is gone or has moved on in version
+ invalidateNodeCaches(nodeId);
+ throw new DataIntegrityViolationException(
+ "Detected stale node entry: " + nodeVersionKey +
+ " (now " + nodeCheckFromDb + ")");
+ }
+ }
+ else
+ {
+ ChildAssocEntity childAssoc = assocs.get(0);
+ // What is the real (at least to this txn) version of the child node?
+ NodeVersionKey childNodeVersionKeyFromDb = childAssoc.getChildNode().getNodeVersionKey();
+ if (!childNodeVersionKeyFromDb.equals(nodeVersionKey))
+ {
+ // This method was called with a stale version
+ invalidateNodeCaches(nodeId);
+ throw new DataIntegrityViolationException(
+ "Detected stale node entry: " + nodeVersionKey +
+ " (now " + childNodeVersionKeyFromDb + ")");
+ }
+ }
+ return value;
+ }
+
/*
* Bulk caching
*/
diff --git a/source/java/org/alfresco/repo/domain/node/ParentAssocsInfo.java b/source/java/org/alfresco/repo/domain/node/ParentAssocsInfo.java
index 93d7001b65..70bd06de24 100644
--- a/source/java/org/alfresco/repo/domain/node/ParentAssocsInfo.java
+++ b/source/java/org/alfresco/repo/domain/node/ParentAssocsInfo.java
@@ -25,6 +25,7 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.TreeMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -64,8 +65,8 @@ public class ParentAssocsInfo implements Serializable
this.isRoot = isRoot;
this.isStoreRoot = isStoreRoot;
Long primaryAssocId = null;
- // Build map of child associations
- Map<Long, ChildAssocEntity> parentAssocsById = new HashMap<Long, ChildAssocEntity>(5);
+ // Build compact map of child associations
+ Map<Long, ChildAssocEntity> parentAssocsById = new TreeMap<Long, ChildAssocEntity>();
for (ChildAssocEntity parentAssoc : parents)
{
Long parentAssocId = parentAssoc.getId();
diff --git a/source/java/org/alfresco/repo/domain/usage/AbstractUsageDAOImpl.java b/source/java/org/alfresco/repo/domain/usage/AbstractUsageDAOImpl.java
index a7983d0287..3a765ca907 100644
--- a/source/java/org/alfresco/repo/domain/usage/AbstractUsageDAOImpl.java
+++ b/source/java/org/alfresco/repo/domain/usage/AbstractUsageDAOImpl.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005-2010 Alfresco Software Limited.
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -138,11 +138,17 @@ public abstract class AbstractUsageDAOImpl implements UsageDAO
selectUsersWithUsage(storeRef, handler);
}
+ public Long getContentSizeForStoreForUser(StoreRef storeRef, String userName)
+ {
+ return selectContentSizeForStoreForUser(storeRef, userName);
+ }
+
protected abstract UsageDeltaEntity insertUsageDeltaEntity(UsageDeltaEntity entity);
protected abstract UsageDeltaEntity selectTotalUsageDeltaSize(long nodeEntityId);
protected abstract List selectUsageDeltaNodes();
protected abstract void selectUsersWithoutUsage(StoreRef storeRef, MapHandler handler);
protected abstract void selectUsersWithUsage(StoreRef storeRef, MapHandler handler);
protected abstract void selectUserContentSizesForStore(StoreRef storeRef, MapHandler resultsCallback);
+ protected abstract Long selectContentSizeForStoreForUser(StoreRef storeRef, String userName);
protected abstract int deleteUsageDeltaEntitiesByNodeId(long nodeEntityId);
}
diff --git a/source/java/org/alfresco/repo/domain/usage/UsageDAO.java b/source/java/org/alfresco/repo/domain/usage/UsageDAO.java
index 832636d67e..08984cf791 100644
--- a/source/java/org/alfresco/repo/domain/usage/UsageDAO.java
+++ b/source/java/org/alfresco/repo/domain/usage/UsageDAO.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005-2010 Alfresco Software Limited.
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -102,6 +102,15 @@ public interface UsageDAO
StoreRef storeRef,
MapHandler resultsCallback);
+ /**
+ * Gets the calculated content usage (total content size) for a user in the given store
+ *
+ * @param storeRef the store to search in
+ * @param userName the username
+ * @return Returns the usage (total content size in bytes)
+ */
+ public Long getContentSizeForStoreForUser(StoreRef storeRef, String userName);
+
/**
* A callback handler for iterating over the String results
*/
diff --git a/source/java/org/alfresco/repo/domain/usage/ibatis/UsageDAOImpl.java b/source/java/org/alfresco/repo/domain/usage/ibatis/UsageDAOImpl.java
index 19d37a6789..38e8a5a178 100644
--- a/source/java/org/alfresco/repo/domain/usage/ibatis/UsageDAOImpl.java
+++ b/source/java/org/alfresco/repo/domain/usage/ibatis/UsageDAOImpl.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005-2010 Alfresco Software Limited.
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -50,6 +50,7 @@ public class UsageDAOImpl extends AbstractUsageDAOImpl
private static final String SELECT_USERS_WITH_USAGE = "alfresco.usage.select_GetUsersWithUsage";
private static final String SELECT_USERS_WITHOUT_USAGE = "alfresco.usage.select_GetUsersWithoutUsage";
private static final String SELECT_CONTENT_SIZES_NEW = "alfresco.usage.select_GetContentSizesForStoreNew";
+ private static final String SELECT_CONTENT_SIZE_NEW_USER = "alfresco.usage.select_GetContentSizeForStoreForUser";
private static final String DELETE_USAGE_DELTAS_BY_NODE = "alfresco.usage.delete_UsageDeltasByNodeId";
@@ -201,6 +202,35 @@ public class UsageDAOImpl extends AbstractUsageDAOImpl
}
}
+ @Override
+ protected Long selectContentSizeForStoreForUser(StoreRef storeRef, String userName)
+ {
+ Pair<Long, QName> contentTypeQNamePair = qnameDAO.getQName(ContentModel.TYPE_CONTENT);
+ Pair<Long, QName> ownerPropQNamePair = qnameDAO.getQName(ContentModel.PROP_OWNER);
+ Pair<Long, QName> contentPropQNamePair = qnameDAO.getQName(ContentModel.PROP_CONTENT);
+
+ if (contentTypeQNamePair == null || ownerPropQNamePair == null || contentPropQNamePair == null)
+ {
+ return null; // The statics have not been used, so there can be no results
+ }
+
+ Long contentTypeQNameEntityId = contentTypeQNamePair.getFirst();
+ Long ownerPropQNameEntityId = ownerPropQNamePair.getFirst();
+ Long contentPropQNameEntityId = contentPropQNamePair.getFirst();
+
+ Map<String, Object> params = new HashMap<String, Object>(6);
+ params.put("contentTypeQNameID", contentTypeQNameEntityId); // cm:content (type)
+ params.put("ownerPropQNameID", ownerPropQNameEntityId); // cm:owner (prop)
+ params.put("contentPropQNameID", contentPropQNameEntityId); // cm:content (prop)
+ params.put("storeProtocol", storeRef.getProtocol());
+ params.put("storeIdentifier", storeRef.getIdentifier());
+ params.put("userName", userName);
+ params.put("userName2", userName);
+
+ // Query for the 'new' (FK) style content data properties (stored in 'string_value')
+ return (Long)template.selectOne(SELECT_CONTENT_SIZE_NEW_USER, params);
+ }
+
/**
* Row handler for getting map of strings
*/
diff --git a/source/java/org/alfresco/repo/jscript/ScriptNode.java b/source/java/org/alfresco/repo/jscript/ScriptNode.java
index 461b5e1e15..7104a7cd2a 100644
--- a/source/java/org/alfresco/repo/jscript/ScriptNode.java
+++ b/source/java/org/alfresco/repo/jscript/ScriptNode.java
@@ -48,6 +48,7 @@ import org.alfresco.model.ContentModel;
import org.alfresco.query.PagingRequest;
import org.alfresco.query.PagingResults;
import org.alfresco.repo.action.executer.TransformActionExecuter;
+import org.alfresco.repo.content.transform.UnimportantTransformException;
import org.alfresco.repo.content.transform.magick.ImageTransformationOptions;
import org.alfresco.repo.model.filefolder.FileFolderServiceImpl.InvalidTypeException;
import org.alfresco.repo.search.QueryParameterDefImpl;
@@ -2823,16 +2824,29 @@ public class ScriptNode implements Scopeable, NamespacePrefixResolverProvider
// Have the thumbnail created
if (async == false)
{
- // Create the thumbnail
- NodeRef thumbnailNodeRef = this.services.getThumbnailService().createThumbnail(
+ try
+ {
+ // Create the thumbnail
+ NodeRef thumbnailNodeRef = this.services.getThumbnailService().createThumbnail(
this.nodeRef,
ContentModel.PROP_CONTENT,
details.getMimetype(),
details.getTransformationOptions(),
details.getName());
- // Create the thumbnail script object
- result = new ScriptThumbnail(thumbnailNodeRef, this.services, this.scope);
+ // Create the thumbnail script object
+ result = new ScriptThumbnail(thumbnailNodeRef, this.services, this.scope);
+ }
+ catch (AlfrescoRuntimeException e)
+ {
+ Throwable rootCause = e.getRootCause();
+ if (rootCause instanceof UnimportantTransformException)
+ {
+ logger.debug("Unable to create thumbnail '" + details.getName() + "' as "+rootCause.getMessage());
+ return null;
+ }
+ throw e;
+ }
}
else
{
diff --git a/source/java/org/alfresco/repo/lock/LockUtils.java b/source/java/org/alfresco/repo/lock/LockUtils.java
new file mode 100644
index 0000000000..5bf37fbb87
--- /dev/null
+++ b/source/java/org/alfresco/repo/lock/LockUtils.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2005-2012 Alfresco Software Limited.
+ *
+ * This file is part of Alfresco
+ *
+ * Alfresco is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Alfresco is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
+ */
+package org.alfresco.repo.lock;
+
+import org.alfresco.service.cmr.lock.LockService;
+import org.alfresco.service.cmr.lock.LockStatus;
+import org.alfresco.service.cmr.lock.LockType;
+import org.alfresco.service.cmr.repository.NodeRef;
+
+public class LockUtils
+{
+
+ /**
+ * Indicates if the node is locked and not writable by the current user — returns false only when the node is unlocked or the current user holds a WRITE_LOCK
+ *
+ * Ideally this would be a new method on the lockService, but cannot do this at the moment,
+ * as this method is being added as part of a hot fix, so a public service cannot change
+ * as the RM AMP might be installed and it has its own security context which would also need
+ * to reflect this change.
+ */
+ public static boolean isLockedOrReadOnly(NodeRef nodeRef, LockService lockService)
+ {
+ LockStatus lockStatus = lockService.getLockStatus(nodeRef);
+ LockType lockType = lockService.getLockType(nodeRef);
+ return ! (lockStatus == LockStatus.NO_LOCK || (lockStatus == LockStatus.LOCK_OWNER && lockType == LockType.WRITE_LOCK));
+ }
+}
diff --git a/source/java/org/alfresco/repo/node/NodeServiceTest.java b/source/java/org/alfresco/repo/node/NodeServiceTest.java
index 893ee0f979..bcc8f15166 100644
--- a/source/java/org/alfresco/repo/node/NodeServiceTest.java
+++ b/source/java/org/alfresco/repo/node/NodeServiceTest.java
@@ -41,7 +41,6 @@ import org.alfresco.repo.domain.node.Node;
import org.alfresco.repo.domain.node.NodeDAO;
import org.alfresco.repo.domain.node.NodeEntity;
import org.alfresco.repo.domain.node.NodeVersionKey;
-import org.alfresco.repo.domain.node.ParentAssocsInfo;
import org.alfresco.repo.domain.qname.QNameDAO;
import org.alfresco.repo.domain.query.CannedQueryDAO;
import org.alfresco.repo.node.NodeServicePolicies.BeforeCreateNodePolicy;
@@ -109,7 +108,6 @@ public class NodeServiceTest extends TestCase
private SimpleCache nodesCache;
private SimpleCache propsCache;
private SimpleCache aspectsCache;
- private SimpleCache parentAssocsCache;
/** populated during setup */
protected NodeRef rootNodeRef;
@@ -131,13 +129,11 @@ public class NodeServiceTest extends TestCase
nodesCache = (SimpleCache) ctx.getBean("node.nodesSharedCache");
propsCache = (SimpleCache) ctx.getBean("node.propertiesSharedCache");
aspectsCache = (SimpleCache) ctx.getBean("node.aspectsSharedCache");
- parentAssocsCache = (SimpleCache) ctx.getBean("node.parentAssocsSharedCache");
// Clear the caches to remove fluff
nodesCache.clear();
propsCache.clear();
aspectsCache.clear();
- parentAssocsCache.clear();
AuthenticationUtil.setRunAsUserSystem();
@@ -671,18 +667,15 @@ public class NodeServiceTest extends TestCase
// Get the node cached values
Map nodePropsOne = (Map) findCacheValue(propsCache, nodeKeyOne);
Set nodeAspectsOne = (Set) findCacheValue(aspectsCache, nodeKeyOne);
- ParentAssocsInfo nodeParentAssocsOne = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeyOne);
// Check the values
assertEquals("The node version is incorrect", Long.valueOf(1L), nodeKeyOne.getVersion());
assertNotNull("No cache entry for properties", nodePropsOne);
assertNotNull("No cache entry for aspects", nodeAspectsOne);
- assertNotNull("No cache entry for parent assocs", nodeParentAssocsOne);
assertEquals("Property count incorrect", 1, nodePropsOne.size());
assertNotNull("Expected a cm:name property", nodePropsOne.get(ContentModel.PROP_NAME));
assertEquals("Aspect count incorrect", 1, nodeAspectsOne.size());
assertTrue("Expected a cm:auditable aspect", nodeAspectsOne.contains(ContentModel.ASPECT_AUDITABLE));
- assertEquals("Parent assoc count incorrect", 1, nodeParentAssocsOne.getParentAssocs().size());
// Add a property
nodeService.setProperty(nodeRef, PROP_RESIDUAL, GUID.generate());
@@ -690,10 +683,8 @@ public class NodeServiceTest extends TestCase
// Get the values for the previous version
Map nodePropsOneCheck = (Map) findCacheValue(propsCache, nodeKeyOne);
Set nodeAspectsOneCheck = (Set) findCacheValue(aspectsCache, nodeKeyOne);
- ParentAssocsInfo nodeParentAssocsOneCheck = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeyOne);
assertTrue("Previous cache entries must be left alone", nodePropsOneCheck == nodePropsOne);
assertTrue("Previous cache entries must be left alone", nodeAspectsOneCheck == nodeAspectsOne);
- assertTrue("Previous cache entries must be left alone", nodeParentAssocsOneCheck == nodeParentAssocsOne);
// Get the current node cache key
Node nodeTwo = (Node) findCacheValue(nodesCache, nodeId);
@@ -703,19 +694,16 @@ public class NodeServiceTest extends TestCase
// Get the node cached values
Map nodePropsTwo = (Map) findCacheValue(propsCache, nodeKeyTwo);
Set nodeAspectsTwo = (Set) findCacheValue(aspectsCache, nodeKeyTwo);
- ParentAssocsInfo nodeParentAssocsTwo = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeyTwo);
// Check the values
assertEquals("The node version is incorrect", Long.valueOf(2L), nodeKeyTwo.getVersion());
assertNotNull("No cache entry for properties", nodePropsTwo);
assertNotNull("No cache entry for aspects", nodeAspectsTwo);
- assertNotNull("No cache entry for parent assocs", nodeParentAssocsTwo);
assertTrue("Properties must have moved on", nodePropsTwo != nodePropsOne);
assertEquals("Property count incorrect", 2, nodePropsTwo.size());
assertNotNull("Expected a cm:name property", nodePropsTwo.get(ContentModel.PROP_NAME));
assertNotNull("Expected a residual property", nodePropsTwo.get(PROP_RESIDUAL));
assertTrue("Aspects must be carried", nodeAspectsTwo == nodeAspectsOne);
- assertTrue("Parent assocs must be carried", nodeParentAssocsTwo == nodeParentAssocsOne);
// Remove a property
nodeService.removeProperty(nodeRef, PROP_RESIDUAL);
@@ -723,10 +711,8 @@ public class NodeServiceTest extends TestCase
// Get the values for the previous version
Map nodePropsTwoCheck = (Map) findCacheValue(propsCache, nodeKeyTwo);
Set nodeAspectsTwoCheck = (Set) findCacheValue(aspectsCache, nodeKeyTwo);
- ParentAssocsInfo nodeParentAssocsTwoCheck = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeyTwo);
assertTrue("Previous cache entries must be left alone", nodePropsTwoCheck == nodePropsTwo);
assertTrue("Previous cache entries must be left alone", nodeAspectsTwoCheck == nodeAspectsTwo);
- assertTrue("Previous cache entries must be left alone", nodeParentAssocsTwoCheck == nodeParentAssocsTwo);
// Get the current node cache key
Node nodeThree = (Node) findCacheValue(nodesCache, nodeId);
@@ -736,19 +722,16 @@ public class NodeServiceTest extends TestCase
// Get the node cached values
Map nodePropsThree = (Map) findCacheValue(propsCache, nodeKeyThree);
Set nodeAspectsThree = (Set) findCacheValue(aspectsCache, nodeKeyThree);
- ParentAssocsInfo nodeParentAssocsThree = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeyThree);
// Check the values
assertEquals("The node version is incorrect", Long.valueOf(3L), nodeKeyThree.getVersion());
assertNotNull("No cache entry for properties", nodePropsThree);
assertNotNull("No cache entry for aspects", nodeAspectsThree);
- assertNotNull("No cache entry for parent assocs", nodeParentAssocsThree);
assertTrue("Properties must have moved on", nodePropsThree != nodePropsTwo);
assertEquals("Property count incorrect", 1, nodePropsThree.size());
assertNotNull("Expected a cm:name property", nodePropsThree.get(ContentModel.PROP_NAME));
assertNull("Expected no residual property", nodePropsThree.get(PROP_RESIDUAL));
assertTrue("Aspects must be carried", nodeAspectsThree == nodeAspectsTwo);
- assertTrue("Parent assocs must be carried", nodeParentAssocsThree == nodeParentAssocsTwo);
// Add an aspect
nodeService.addAspect(nodeRef, ContentModel.ASPECT_TITLED, null);
@@ -756,10 +739,8 @@ public class NodeServiceTest extends TestCase
// Get the values for the previous version
Map nodePropsThreeCheck = (Map) findCacheValue(propsCache, nodeKeyThree);
Set nodeAspectsThreeCheck = (Set) findCacheValue(aspectsCache, nodeKeyThree);
- ParentAssocsInfo nodeParentAssocsThreeCheck = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeyThree);
assertTrue("Previous cache entries must be left alone", nodePropsThreeCheck == nodePropsThree);
assertTrue("Previous cache entries must be left alone", nodeAspectsThreeCheck == nodeAspectsThree);
- assertTrue("Previous cache entries must be left alone", nodeParentAssocsThreeCheck == nodeParentAssocsThree);
// Get the current node cache key
Node nodeFour = (Node) findCacheValue(nodesCache, nodeId);
@@ -769,17 +750,14 @@ public class NodeServiceTest extends TestCase
// Get the node cached values
Map nodePropsFour = (Map) findCacheValue(propsCache, nodeKeyFour);
Set nodeAspectsFour = (Set) findCacheValue(aspectsCache, nodeKeyFour);
- ParentAssocsInfo nodeParentAssocsFour = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeyFour);
// Check the values
assertEquals("The node version is incorrect", Long.valueOf(4L), nodeKeyFour.getVersion());
assertNotNull("No cache entry for properties", nodePropsFour);
assertNotNull("No cache entry for aspects", nodeAspectsFour);
- assertNotNull("No cache entry for parent assocs", nodeParentAssocsFour);
assertTrue("Properties must be carried", nodePropsFour == nodePropsThree);
assertTrue("Aspects must have moved on", nodeAspectsFour != nodeAspectsThree);
assertTrue("Expected cm:titled aspect", nodeAspectsFour.contains(ContentModel.ASPECT_TITLED));
- assertTrue("Parent assocs must be carried", nodeParentAssocsFour == nodeParentAssocsThree);
// Remove an aspect
nodeService.removeAspect(nodeRef, ContentModel.ASPECT_TITLED);
@@ -787,10 +765,8 @@ public class NodeServiceTest extends TestCase
// Get the values for the previous version
Map nodePropsFourCheck = (Map) findCacheValue(propsCache, nodeKeyFour);
Set nodeAspectsFourCheck = (Set) findCacheValue(aspectsCache, nodeKeyFour);
- ParentAssocsInfo nodeParentAssocsFourCheck = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeyFour);
assertTrue("Previous cache entries must be left alone", nodePropsFourCheck == nodePropsFour);
assertTrue("Previous cache entries must be left alone", nodeAspectsFourCheck == nodeAspectsFour);
- assertTrue("Previous cache entries must be left alone", nodeParentAssocsFourCheck == nodeParentAssocsFour);
// Get the current node cache key
Node nodeFive = (Node) findCacheValue(nodesCache, nodeId);
@@ -800,17 +776,14 @@ public class NodeServiceTest extends TestCase
// Get the node cached values
Map nodePropsFive = (Map) findCacheValue(propsCache, nodeKeyFive);
Set nodeAspectsFive = (Set) findCacheValue(aspectsCache, nodeKeyFive);
- ParentAssocsInfo nodeParentAssocsFive = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeyFive);
// Check the values
assertEquals("The node version is incorrect", Long.valueOf(5L), nodeKeyFive.getVersion());
assertNotNull("No cache entry for properties", nodePropsFive);
assertNotNull("No cache entry for aspects", nodeAspectsFive);
- assertNotNull("No cache entry for parent assocs", nodeParentAssocsFive);
assertTrue("Properties must be carried", nodePropsFive == nodePropsFour);
assertTrue("Aspects must have moved on", nodeAspectsFive != nodeAspectsFour);
assertFalse("Expected no cm:titled aspect ", nodeAspectsFive.contains(ContentModel.ASPECT_TITLED));
- assertTrue("Parent assocs must be carried", nodeParentAssocsFive == nodeParentAssocsFour);
// Add an aspect, some properties and secondary association
RetryingTransactionCallback nodeSixWork = new RetryingTransactionCallback()
@@ -835,10 +808,8 @@ public class NodeServiceTest extends TestCase
// Get the values for the previous version
Map nodePropsFiveCheck = (Map) findCacheValue(propsCache, nodeKeyFive);
Set nodeAspectsFiveCheck = (Set) findCacheValue(aspectsCache, nodeKeyFive);
- ParentAssocsInfo nodeParentAssocsFiveCheck = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeyFive);
assertTrue("Previous cache entries must be left alone", nodePropsFiveCheck == nodePropsFive);
assertTrue("Previous cache entries must be left alone", nodeAspectsFiveCheck == nodeAspectsFive);
- assertTrue("Previous cache entries must be left alone", nodeParentAssocsFiveCheck == nodeParentAssocsFive);
// Get the current node cache key
Node nodeSix = (Node) findCacheValue(nodesCache, nodeId);
@@ -848,13 +819,11 @@ public class NodeServiceTest extends TestCase
// Get the node cached values
Map nodePropsSix = (Map) findCacheValue(propsCache, nodeKeySix);
Set nodeAspectsSix = (Set) findCacheValue(aspectsCache, nodeKeySix);
- ParentAssocsInfo nodeParentAssocsSix = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeySix);
// Check the values
assertEquals("The node version is incorrect", Long.valueOf(6L), nodeKeySix.getVersion());
assertNotNull("No cache entry for properties", nodePropsSix);
assertNotNull("No cache entry for aspects", nodeAspectsSix);
- assertNotNull("No cache entry for parent assocs", nodeParentAssocsSix);
assertTrue("Properties must have moved on", nodePropsSix != nodePropsFive);
assertEquals("Property count incorrect", 3, nodePropsSix.size());
assertNotNull("Expected a cm:name property", nodePropsSix.get(ContentModel.PROP_NAME));
@@ -862,8 +831,6 @@ public class NodeServiceTest extends TestCase
assertNotNull("Expected a cm:description property", nodePropsSix.get(ContentModel.PROP_DESCRIPTION));
assertTrue("Aspects must have moved on", nodeAspectsSix != nodeAspectsFive);
assertTrue("Expected cm:titled aspect ", nodeAspectsSix.contains(ContentModel.ASPECT_TITLED));
- assertTrue("Parent assocs must have moved on", nodeParentAssocsSix != nodeParentAssocsFive);
- assertEquals("Incorrect number of parent assocs", 2, nodeParentAssocsSix.getParentAssocs().size());
// Remove an aspect, some properties and a secondary association
RetryingTransactionCallback nodeSevenWork = new RetryingTransactionCallback()
@@ -881,10 +848,8 @@ public class NodeServiceTest extends TestCase
// Get the values for the previous version
Map nodePropsSixCheck = (Map) findCacheValue(propsCache, nodeKeySix);
Set nodeAspectsSixCheck = (Set) findCacheValue(aspectsCache, nodeKeySix);
- ParentAssocsInfo nodeParentAssocsSixCheck = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeySix);
assertTrue("Previous cache entries must be left alone", nodePropsSixCheck == nodePropsSix);
assertTrue("Previous cache entries must be left alone", nodeAspectsSixCheck == nodeAspectsSix);
- assertTrue("Previous cache entries must be left alone", nodeParentAssocsSixCheck == nodeParentAssocsSix);
// Get the current node cache key
Node nodeSeven = (Node) findCacheValue(nodesCache, nodeId);
@@ -894,20 +859,16 @@ public class NodeServiceTest extends TestCase
// Get the node cached values
Map nodePropsSeven = (Map) findCacheValue(propsCache, nodeKeySeven);
Set nodeAspectsSeven = (Set) findCacheValue(aspectsCache, nodeKeySeven);
- ParentAssocsInfo nodeParentAssocsSeven = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeySeven);
// Check the values
assertEquals("The node version is incorrect", Long.valueOf(7L), nodeKeySeven.getVersion());
assertNotNull("No cache entry for properties", nodePropsSeven);
assertNotNull("No cache entry for aspects", nodeAspectsSeven);
- assertNotNull("No cache entry for parent assocs", nodeParentAssocsSeven);
assertTrue("Properties must have moved on", nodePropsSeven != nodePropsSix);
assertEquals("Property count incorrect", 1, nodePropsSeven.size());
assertNotNull("Expected a cm:name property", nodePropsSeven.get(ContentModel.PROP_NAME));
assertTrue("Aspects must have moved on", nodeAspectsSeven != nodeAspectsSix);
assertFalse("Expected no cm:titled aspect ", nodeAspectsSeven.contains(ContentModel.ASPECT_TITLED));
- assertTrue("Parent assocs must have moved on", nodeParentAssocsSeven != nodeParentAssocsSix);
- assertEquals("Incorrect number of parent assocs", 1, nodeParentAssocsSeven.getParentAssocs().size());
// Modify cm:auditable
RetryingTransactionCallback nodeEightWork = new RetryingTransactionCallback()
@@ -927,10 +888,8 @@ public class NodeServiceTest extends TestCase
// Get the values for the previous version
Map nodePropsSevenCheck = (Map) findCacheValue(propsCache, nodeKeySeven);
Set nodeAspectsSevenCheck = (Set) findCacheValue(aspectsCache, nodeKeySeven);
- ParentAssocsInfo nodeParentAssocsSevenCheck = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeySeven);
assertTrue("Previous cache entries must be left alone", nodePropsSevenCheck == nodePropsSeven);
assertTrue("Previous cache entries must be left alone", nodeAspectsSevenCheck == nodeAspectsSeven);
- assertTrue("Previous cache entries must be left alone", nodeParentAssocsSevenCheck == nodeParentAssocsSeven);
// Get the current node cache key
Node nodeEight = (Node) findCacheValue(nodesCache, nodeId);
@@ -940,17 +899,14 @@ public class NodeServiceTest extends TestCase
// Get the node cached values
Map nodePropsEight = (Map) findCacheValue(propsCache, nodeKeyEight);
Set nodeAspectsEight = (Set) findCacheValue(aspectsCache, nodeKeyEight);
- ParentAssocsInfo nodeParentAssocsEight = (ParentAssocsInfo) findCacheValue(parentAssocsCache, nodeKeyEight);
// Check the values
assertEquals("The node version is incorrect", Long.valueOf(8L), nodeKeyEight.getVersion());
assertNotNull("No cache entry for properties", nodePropsEight);
assertNotNull("No cache entry for aspects", nodeAspectsEight);
- assertNotNull("No cache entry for parent assocs", nodeParentAssocsEight);
assertEquals("Expected change to cm:modifier", "Fred", nodeEight.getAuditableProperties().getAuditModifier());
assertTrue("Properties must be carried", nodePropsEight == nodePropsSeven);
assertTrue("Aspects be carried", nodeAspectsEight == nodeAspectsSeven);
- assertTrue("Parent assocs must be carried", nodeParentAssocsEight == nodeParentAssocsSeven);
}
public void testCreateNodePolicies()
diff --git a/source/java/org/alfresco/repo/preference/PreferenceServiceImpl.java b/source/java/org/alfresco/repo/preference/PreferenceServiceImpl.java
index 12d194f6eb..5a29d8fbe8 100644
--- a/source/java/org/alfresco/repo/preference/PreferenceServiceImpl.java
+++ b/source/java/org/alfresco/repo/preference/PreferenceServiceImpl.java
@@ -1,19 +1,19 @@
/*
- * Copyright (C) 2005-2010 Alfresco Software Limited.
- *
- * This file is part of Alfresco
- *
- * Alfresco is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * Alfresco is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
+ * Copyright (C) 2005-2010 Alfresco Software Limited.
+ *
+ * This file is part of Alfresco
+ *
+ * Alfresco is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Alfresco is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see .
*/
package org.alfresco.repo.preference;
@@ -50,71 +50,71 @@ import org.json.JSONObject;
*/
public class PreferenceServiceImpl implements PreferenceService
{
- /** Node service */
+ /** Node service */
private NodeService nodeService;
-
+
/** Content service */
private ContentService contentService;
-
+
/** Person service */
private PersonService personService;
-
+
/** Permission Service */
- private PermissionService permissionService;
-
+ private PermissionService permissionService;
+
/** Authentication Service */
private AuthenticationContext authenticationContext;
-
+
/**
* Set the node service
*
- * @param nodeService the node service
+ * @param nodeService the node service
*/
public void setNodeService(NodeService nodeService)
{
this.nodeService = nodeService;
}
-
+
/**
* Set the content service
*
- * @param contentService the content service
+ * @param contentService the content service
*/
public void setContentService(ContentService contentService)
{
this.contentService = contentService;
}
-
+
/**
* Set the person service
*
- * @param personService the person service
+ * @param personService the person service
*/
public void setPersonService(PersonService personService)
{
this.personService = personService;
}
-
+
/**
* Set the permission service
*
- * @param permissionService the permission service
+ * @param permissionService the permission service
*/
public void setPermissionService(PermissionService permissionService)
{
this.permissionService = permissionService;
}
-
+
/**
* Set the authentication component
*
- * @param authenticationContext the authentication component
+ * @param authenticationContext the authentication component
*/
public void setAuthenticationContext(AuthenticationContext authenticationContext)
{
this.authenticationContext = authenticationContext;
}
-
+
/**
* @see org.alfresco.service.cmr.preference.PreferenceService#getPreferences(java.lang.String)
*/
@@ -122,75 +122,90 @@ public class PreferenceServiceImpl implements PreferenceService
{
return getPreferences(userName, null);
}
-
+
/**
- * @see org.alfresco.repo.person.PersonService#getPreferences(java.lang.String, java.lang.String)
+ * @see org.alfresco.repo.person.PersonService#getPreferences(java.lang.String,
+ * java.lang.String)
*/
@SuppressWarnings("unchecked")
public Map getPreferences(String userName, String preferenceFilter)
{
Map preferences = new HashMap(20);
-
+
// Get the user node reference
NodeRef personNodeRef = this.personService.getPerson(userName);
- if (personNodeRef == null)
- {
- throw new AlfrescoRuntimeException("Can not get preferences for " + userName + " because he/she does not exist.");
+ if (personNodeRef == null)
+ {
+ throw new AlfrescoRuntimeException("Can not get preferences for " + userName
+ + " because he/she does not exist.");
}
-
- try
+
+ String currentUserName = AuthenticationUtil.getFullyAuthenticatedUser();
+ if (authenticationContext.isSystemUserName(currentUserName) == true || userName.equals(currentUserName) == true
+ || AuthenticationUtil.getAdminUserName().equals(currentUserName))
{
- // Check for preferences aspect
- if (this.nodeService.hasAspect(personNodeRef, ContentModel.ASPECT_PREFERENCES) == true)
+ try
{
- // Get the preferences for this user
- JSONObject jsonPrefs = new JSONObject();
- ContentReader reader = this.contentService.getReader(personNodeRef, ContentModel.PROP_PREFERENCE_VALUES);
- if (reader != null)
+ // Check for preferences aspect
+ if (this.nodeService.hasAspect(personNodeRef, ContentModel.ASPECT_PREFERENCES) == true)
{
- jsonPrefs = new JSONObject(reader.getContentString());
- }
-
- // Build hash from preferences stored in the repository
- Iterator keys = jsonPrefs.keys();
- while (keys.hasNext())
- {
- String key = (String)keys.next();
-
- if (preferenceFilter == null ||
- preferenceFilter.length() == 0 ||
- matchPreferenceNames(key, preferenceFilter) == true)
+ // Get the preferences for this user
+ JSONObject jsonPrefs = new JSONObject();
+ ContentReader reader = this.contentService.getReader(personNodeRef,
+ ContentModel.PROP_PREFERENCE_VALUES);
+ if (reader != null)
{
- preferences.put(key, (Serializable)jsonPrefs.get(key));
+ jsonPrefs = new JSONObject(reader.getContentString());
}
- }
+
+ // Build hash from preferences stored in the repository
+ Iterator keys = jsonPrefs.keys();
+ while (keys.hasNext())
+ {
+ String key = (String) keys.next();
+
+ if (preferenceFilter == null || preferenceFilter.length() == 0
+ || matchPreferenceNames(key, preferenceFilter) == true)
+ {
+ preferences.put(key, (Serializable) jsonPrefs.get(key));
+ }
+ }
+ }
+ }
+ catch (JSONException exception)
+ {
+ throw new AlfrescoRuntimeException("Can not get preferences for " + userName
+ + " because there was an error parsing the JSON data.", exception);
}
}
- catch (JSONException exception)
+ else
{
- throw new AlfrescoRuntimeException("Can not get preferences for " + userName + " because there was an error pasing the JSON data.", exception);
+ // The current user does not have sufficient permissions to get
+ // the preferences for this user
+ throw new UnauthorizedAccessException("The current user " + currentUserName
+ + " does not have sufficient permissions to get the preferences of the user " + userName);
}
-
+
return preferences;
}
/**
* Matches the preference name to the partial preference name provided
*
- * @param name preference name
- * @param matchTo match to the partial preference name provided
- * @return boolean true if matches, false otherwise
+ * @param name preference name
+ * @param matchTo match to the partial preference name provided
+ * @return boolean true if matches, false otherwise
*/
private boolean matchPreferenceNames(String name, String matchTo)
{
boolean result = true;
-
+
// Split strings
name = name.replace(".", "+");
String[] nameArr = name.split("\\+");
matchTo = matchTo.replace(".", "+");
String[] matchToArr = matchTo.split("\\+");
-
+
int index = 0;
for (String matchToElement : matchToArr)
{
@@ -199,80 +214,87 @@ public class PreferenceServiceImpl implements PreferenceService
result = false;
break;
}
- index ++;
+ index++;
}
-
+
return result;
}
-
+
/**
- * @see org.alfresco.repo.person.PersonService#setPreferences(java.lang.String, java.util.HashMap)
+ * @see org.alfresco.repo.person.PersonService#setPreferences(java.lang.String,
+ * java.util.HashMap)
*/
public void setPreferences(final String userName, final Map preferences)
{
// Get the user node reference
final NodeRef personNodeRef = this.personService.getPerson(userName);
- if (personNodeRef == null)
- {
- throw new AlfrescoRuntimeException("Can not update preferences for " + userName + " because he/she does not exist.");
- }
-
- // Can only set preferences if the currently logged in user matches the user name being updated or
+ if (personNodeRef == null) { throw new AlfrescoRuntimeException("Can not update preferences for " + userName
+ + " because he/she does not exist."); }
+
+ // Can only set preferences if the currently logged in user matches the
+ // user name being updated or
// the user already has write permissions on the person node
String currentUserName = AuthenticationUtil.getFullyAuthenticatedUser();
- if (authenticationContext.isSystemUserName(currentUserName) == true ||
- permissionService.hasPermission(personNodeRef, PermissionService.WRITE) == AccessStatus.ALLOWED ||
- userName.equals(currentUserName) == true)
- {
+ if (authenticationContext.isSystemUserName(currentUserName) == true
+ || permissionService.hasPermission(personNodeRef, PermissionService.WRITE) == AccessStatus.ALLOWED
+ || userName.equals(currentUserName) == true)
+ {
AuthenticationUtil.runAs(new RunAsWork