From 21bb599e2067eb81193ef1e40b84a577f957eb3f Mon Sep 17 00:00:00 2001
From: Jan Vonka
Date: Fri, 19 Dec 2008 10:21:51 +0000
Subject: [PATCH] Merged V3.0 to HEAD

   12140: Merged V2.2 to V3.0
      11732: Fixed ETWOTWO-804: Node and Transaction Cleanup Job
      11747: Missed config for Node and Txn purging
      11826: WCM - fix ETWOTWO-817
      11951: Fixed ETWOTWO-901: NodeService cleanup must be pluggable
      11961: Merged V2.1 to V2.2
         11561: ETWOONE-224: when renaming duplicates during copy, association names were not renamed
         11583: (ALREADY PRESENT) Updated NTLM config example in web.xml - adding missing servlet mappings
         11584: Fix for ETWOONE-209 - JavaScript People.createGroup() API now correctly checks for actual group name when testing for existence
         11585: Fix for ETWOONE-214 - View In CIFS link now works even when users do not have view permissions on the parent folder
         11612: Fix for ETWOONE-91: the description textarea in the modify space properties web form eats one leading newline each time it is submitted
         11613: Fix 2.1 build and adjust implementation of ETWOONE-224 fix
         11621: Fix for ETWOONE-343
         11669: Improved debug from index tracking when exceptions occur
   12141: Avoid annoying Spring WARN messages for ClientAbortException
   12143: File that should have been deleted in CHK-5460 (rev 12140)
   12177: Fix failing FS Deployment Tests since introduction of transaction check advice.

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@12507 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
---
 .../alfresco/deployment-service-context.xml | 3 +
 config/alfresco/node-services-context.xml | 30 +++
 config/alfresco/repository.properties | 5 +
 config/alfresco/scheduled-jobs-context.xml | 9 +-
 .../alfresco/repo/copy/CopyServiceImpl.java | 54 ++--
 .../repo/copy/CopyServiceImplTest.java | 29 +++
 .../repo/deploy/DeploymentServiceImpl.java | 78 ++++--
 .../repo/domain/hibernate/Node.hbm.xml | 29 ++-
 .../repo/domain/hibernate/Transaction.hbm.xml | 27 ++
 .../org/alfresco/repo/jscript/People.java | 2 +-
 .../org/alfresco/repo/jscript/ScriptNode.java | 2 +-
 .../repo/node/AbstractNodeServiceImpl.java | 46 ----
 .../cleanup/AbstractNodeCleanupWorker.java | 147 +++++++++++
 .../NodeCleanupJob.java} | 40 ++-
 .../node/cleanup/NodeCleanupRegistry.java | 65 +++++
 .../repo/node/cleanup/NodeCleanupWorker.java | 20 ++
 .../repo/node/db/DbNodeServiceImpl.java | 142 +++-------
 .../repo/node/db/DbNodeServiceImplTest.java | 9 +-
 .../node/db/DeletedNodeCleanupWorker.java | 244 ++++++++++++++++++
 .../db/IndexChildrenWhereRequiredWorker.java | 124 +++++++++
 .../alfresco/repo/node/db/NodeDaoService.java | 58 ++++-
 .../HibernateNodeDaoServiceImpl.java | 145 +++++++++--
 .../node/index/AbstractReindexComponent.java | 17 ++
 .../service/cmr/repository/NodeService.java | 14 -
 24 files changed, 1086 insertions(+), 253 deletions(-)
 create mode 100644 source/java/org/alfresco/repo/node/cleanup/AbstractNodeCleanupWorker.java
 rename source/java/org/alfresco/repo/node/{db/NodeServiceCleanupJob.java => cleanup/NodeCleanupJob.java} (58%)
 create mode 100644 source/java/org/alfresco/repo/node/cleanup/NodeCleanupRegistry.java
 create mode 100644 source/java/org/alfresco/repo/node/cleanup/NodeCleanupWorker.java
 create mode 100644 source/java/org/alfresco/repo/node/db/DeletedNodeCleanupWorker.java
 create mode 100644 source/java/org/alfresco/repo/node/db/IndexChildrenWhereRequiredWorker.java

diff --git a/config/alfresco/deployment-service-context.xml b/config/alfresco/deployment-service-context.xml
index 50e6abc632..0b1ae607e4 100644
---
a/config/alfresco/deployment-service-context.xml +++ b/config/alfresco/deployment-service-context.xml @@ -57,6 +57,9 @@ + + + diff --git a/config/alfresco/node-services-context.xml b/config/alfresco/node-services-context.xml index 366943b51e..8c9bb6de53 100644 --- a/config/alfresco/node-services-context.xml +++ b/config/alfresco/node-services-context.xml @@ -176,6 +176,36 @@ + + + + + + + + + + + + + + + + + + + + + ${index.tracking.minRecordPurgeAgeDays} + + + diff --git a/config/alfresco/repository.properties b/config/alfresco/repository.properties index d9d294310e..ce8d943146 100644 --- a/config/alfresco/repository.properties +++ b/config/alfresco/repository.properties @@ -50,6 +50,11 @@ index.tracking.reindexLagMs=1000 index.tracking.maxRecordSetSize=1000 index.tracking.maxTransactionsPerLuceneCommit=100 index.tracking.disableInTransactionIndexing=false +# Index tracking information of a certain age is cleaned out by a scheduled job. +# Any clustered system that has been offline for longer than this period will need to be seeded +# with a more recent backup of the Lucene indexes or the indexes will have to be fully rebuilt. +# Use -1 to disable purging. This can be switched on at any stage. +index.tracking.minRecordPurgeAgeDays=30 # Change the failure behaviour of the configuration checker system.bootstrap.config_check.strict=true diff --git a/config/alfresco/scheduled-jobs-context.xml b/config/alfresco/scheduled-jobs-context.xml index 16c5f57c1a..67cd856866 100644 --- a/config/alfresco/scheduled-jobs-context.xml +++ b/config/alfresco/scheduled-jobs-context.xml @@ -153,12 +153,12 @@ - org.alfresco.repo.node.db.NodeServiceCleanupJob + org.alfresco.repo.node.cleanup.NodeCleanupJob - - + + @@ -170,9 +170,8 @@ - - 0 15 * * * ? + 0 0 21 * * ? diff --git a/source/java/org/alfresco/repo/copy/CopyServiceImpl.java b/source/java/org/alfresco/repo/copy/CopyServiceImpl.java index b5ed947820..f434b2063d 100644 --- a/source/java/org/alfresco/repo/copy/CopyServiceImpl.java +++ b/source/java/org/alfresco/repo/copy/CopyServiceImpl.java @@ -54,7 +54,6 @@ import org.alfresco.service.cmr.repository.AssociationRef; import org.alfresco.service.cmr.repository.ChildAssociationRef; import org.alfresco.service.cmr.repository.CopyService; import org.alfresco.service.cmr.repository.CopyServiceException; -import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.NodeService; import org.alfresco.service.cmr.repository.StoreRef; @@ -71,9 +70,6 @@ import org.alfresco.service.namespace.RegexQNamePattern; import org.alfresco.util.ParameterCheck; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.tools.ant.taskdefs.War; - -import freemarker.log.Logger; /** * Node operations service implmentation. 
@@ -224,6 +220,9 @@ public class CopyServiceImpl implements CopyService new JavaBehaviour(this, "onCopyComplete")); } + /** + * @see org.alfresco.service.cmr.repository.CopyService#copy(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.namespace.QName, org.alfresco.service.namespace.QName, boolean) + */ public NodeRef copy( NodeRef sourceNodeRef, NodeRef destinationParentRef, @@ -267,29 +266,36 @@ public class CopyServiceImpl implements CopyService return copy; } + /** + * @see org.alfresco.service.cmr.repository.CopyService#copyAndRename(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.namespace.QName, org.alfresco.service.namespace.QName, boolean) + */ public NodeRef copyAndRename(NodeRef sourceNodeRef, NodeRef destinationParent, QName destinationAssocTypeQName, QName destinationQName, boolean copyChildren) { - // Make a note of the source name and do the copy + // To fix ETWOONE-224 issue it is necessary to change a QName of the new node accordingly to its name. + NodeRef result = null; String sourceName = (String)this.internalNodeService.getProperty(sourceNodeRef, ContentModel.PROP_NAME); - NodeRef copy = copy(sourceNodeRef, destinationParent, destinationAssocTypeQName, destinationQName, copyChildren); - - // Do the rename, iterating until a non-duplicate name is found - boolean bDone = false; - while (bDone == false) - { - try - { - this.internalNodeService.setProperty(copy, ContentModel.PROP_NAME, sourceName); - bDone = true; - } - catch(DuplicateChildNodeNameException exception) - { - sourceName = I18NUtil.getMessage(COPY_OF_LABEL, sourceName); - } - } - - // Return the copy - return copy; + + // Find a non-duplicate name + String newName = sourceName; + while (this.internalNodeService.getChildByName(destinationParent, destinationAssocTypeQName, newName) != null) + { + newName = I18NUtil.getMessage(COPY_OF_LABEL, newName); + } + + if (destinationQName == null) + { + // Change a QName of the new node accordingly to its name + destinationQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, QName.createValidLocalName(newName)); + } + + // Make a copy + result = copy(sourceNodeRef, destinationParent, destinationAssocTypeQName, destinationQName, copyChildren); + + // Set name property + this.internalNodeService.setProperty(result, ContentModel.PROP_NAME, newName); + + // Return new NodeRef + return result; } /** diff --git a/source/java/org/alfresco/repo/copy/CopyServiceImplTest.java b/source/java/org/alfresco/repo/copy/CopyServiceImplTest.java index 99a12de0dc..8d90575bd7 100644 --- a/source/java/org/alfresco/repo/copy/CopyServiceImplTest.java +++ b/source/java/org/alfresco/repo/copy/CopyServiceImplTest.java @@ -660,6 +660,35 @@ public class CopyServiceImplTest extends BaseSpringTest assertFalse(TEST_NAME.equals(this.nodeService.getProperty(contentCopy, ContentModel.PROP_NAME))); } + /** + * https://issues.alfresco.com/jira/browse/ETWOONE-224 + */ + public void testETWOONE_244() + { + // Create a folder and content node + Map propsFolder = new HashMap(1); + propsFolder.put(ContentModel.PROP_NAME, "tempFolder"); + NodeRef folderNode = this.nodeService.createNode(this.rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "tempFolder"), ContentModel.TYPE_FOLDER, propsFolder).getChildRef(); + Map props = new HashMap(1); + props.put(ContentModel.PROP_NAME, "myDoc.txt"); + NodeRef contentNode = 
this.nodeService.createNode(folderNode, ContentModel.ASSOC_CONTAINS, QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "myDoc.txt"), ContentModel.TYPE_CONTENT, props).getChildRef(); + + NodeRef copy = this.copyService.copyAndRename(contentNode, folderNode, ContentModel.ASSOC_CONTAINS, null, false); + assertEquals("Copy of myDoc.txt", this.nodeService.getProperty(copy, ContentModel.PROP_NAME)); + QName copyQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "Copy of myDoc.txt"); + assertEquals(copyQName, this.nodeService.getPrimaryParent(copy).getQName()); + + copy = this.copyService.copyAndRename(contentNode, folderNode, ContentModel.ASSOC_CONTAINS, null, false); + assertEquals("Copy of Copy of myDoc.txt", this.nodeService.getProperty(copy, ContentModel.PROP_NAME)); + copyQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "Copy of Copy of myDoc.txt"); + assertEquals(copyQName, this.nodeService.getPrimaryParent(copy).getQName()); + + copy = this.copyService.copyAndRename(contentNode, folderNode, ContentModel.ASSOC_CONTAINS, null, false); + assertEquals("Copy of Copy of Copy of myDoc.txt", this.nodeService.getProperty(copy, ContentModel.PROP_NAME)); + copyQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "Copy of Copy of Copy of myDoc.txt"); + assertEquals(copyQName, this.nodeService.getPrimaryParent(copy).getQName()); + } + /** * Check that the copied node contains the state we are expecting * diff --git a/source/java/org/alfresco/repo/deploy/DeploymentServiceImpl.java b/source/java/org/alfresco/repo/deploy/DeploymentServiceImpl.java index 92bd9684af..558f298a18 100644 --- a/source/java/org/alfresco/repo/deploy/DeploymentServiceImpl.java +++ b/source/java/org/alfresco/repo/deploy/DeploymentServiceImpl.java @@ -57,6 +57,8 @@ import org.alfresco.repo.remote.AVMSyncServiceRemote; import org.alfresco.repo.remote.ClientTicketHolder; import org.alfresco.repo.remote.ClientTicketHolderThread; import org.alfresco.repo.security.authentication.AuthenticationUtil; +import org.alfresco.repo.transaction.RetryingTransactionHelper; +import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.service.cmr.action.ActionService; import org.alfresco.service.cmr.action.ActionServiceTransport; import org.alfresco.service.cmr.avm.AVMException; @@ -76,6 +78,7 @@ import org.alfresco.service.cmr.remote.AVMSyncServiceTransport; import org.alfresco.service.cmr.repository.ContentData; import org.alfresco.service.cmr.security.AuthenticationService; import org.alfresco.service.namespace.QName; +import org.alfresco.service.transaction.TransactionService; import org.alfresco.util.NameMatcher; import org.alfresco.util.Pair; import org.apache.commons.logging.Log; @@ -100,6 +103,11 @@ public class DeploymentServiceImpl implements DeploymentService */ private AVMService fAVMService; + /** + * The local Transaction Service Instance + */ + TransactionService trxService; + /** * The Ticket holder. */ @@ -128,6 +136,15 @@ public class DeploymentServiceImpl implements DeploymentService fAVMService = service; } + /** + * Setter. + * @param trxService The instance to set. 
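For orientation only, not part of the patch: the new TransactionService dependency exists so that copyFileToFSR (below) can run its stream copy inside a read-only retrying transaction, which is what rev 12177's transaction check advice requires. A minimal sketch of that idiom, with a placeholder callback body:

    RetryingTransactionHelper txnHelper = trxService.getRetryingTransactionHelper();
    txnHelper.setMaxRetries(1); // mirrors the single-attempt setting used in copyFileToFSR below
    Boolean ok = txnHelper.doInTransaction(new RetryingTransactionCallback<Boolean>()
    {
        public Boolean execute() throws Exception
        {
            // read-only repository work that must run inside a transaction,
            // e.g. streaming AVM file content to the deployment receiver
            return Boolean.TRUE;
        }
    }, true); // second argument 'true' requests a read-only transaction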
+ */ + public void setTransactionService(TransactionService trxService) + { + this.trxService = trxService; + } + /* * Deploy differences to an ASR * (non-Javadoc) @@ -848,7 +865,7 @@ public class DeploymentServiceImpl implements DeploymentService SendQueueWorker[] workers = new SendQueueWorker[numberOfSendingThreads]; for(int i = 0; i < numberOfSendingThreads; i++) { - workers[i] = new SendQueueWorker(currentEffectiveUser, service, fAVMService, errors, eventQueue, sendQueue, transformers); + workers[i] = new SendQueueWorker(currentEffectiveUser, service, fAVMService, trxService, errors, eventQueue, sendQueue, transformers); workers[i].setName(workers[i].getClass().getName()); workers[i].setPriority(Thread.currentThread().getPriority()); } @@ -1329,6 +1346,7 @@ public class DeploymentServiceImpl implements DeploymentService private DeploymentReceiverService service; private String userName; private AVMService avmService; + private TransactionService trxService; List errors; List transformers; @@ -1337,6 +1355,7 @@ public class DeploymentServiceImpl implements DeploymentService SendQueueWorker(String userName, DeploymentReceiverService service, AVMService avmService, + TransactionService trxService, List errors, BlockingQueue eventQueue, BlockingQueue sendQueue, @@ -1347,11 +1366,10 @@ public class DeploymentServiceImpl implements DeploymentService this.sendQueue = sendQueue; this.service = service; this.avmService = avmService; + this.trxService = trxService; this.errors = errors; this.transformers = transformers; this.userName = userName; - - } public void run() @@ -1434,31 +1452,41 @@ public class DeploymentServiceImpl implements DeploymentService * @param dstPath where to copy the file */ private void copyFileToFSR( - AVMNodeDescriptor src, - String dstPath, - String ticket) + final AVMNodeDescriptor src, + final String dstPath, + final String ticket) { try { - InputStream in = avmService.getFileInputStream(src); - - OutputStream out = service.send(ticket, dstPath, src.getGuid()); - OutputStream baseStream = out; // finish send needs out, not a decorated stream - - // Buffer the output, we don't want to send lots of small packets - out = new BufferedOutputStream(out, 10000); - - // Call content transformers here to transform from local to network format - if(transformers != null && transformers.size() > 0) { - // yes we have pay-load transformers - for(DeploymentTransportOutputFilter transformer : transformers) - { - out = transformer.addFilter(out, src.getPath()); - } - } - - copyStream(in, out); - service.finishSend(ticket, baseStream); + // Perform copy within 'read only' transaction + RetryingTransactionHelper trx = trxService.getRetryingTransactionHelper(); + trx.setMaxRetries(1); + trx.doInTransaction(new RetryingTransactionCallback() + { + public Boolean execute() throws Exception + { + InputStream in = avmService.getFileInputStream(src); + + OutputStream out = service.send(ticket, dstPath, src.getGuid()); + OutputStream baseStream = out; // finish send needs out, not a decorated stream + + // Buffer the output, we don't want to send lots of small packets + out = new BufferedOutputStream(out, 10000); + + // Call content transformers here to transform from local to network format + if(transformers != null && transformers.size() > 0) { + // yes we have pay-load transformers + for(DeploymentTransportOutputFilter transformer : transformers) + { + out = transformer.addFilter(out, src.getPath()); + } + } + + copyStream(in, out); + service.finishSend(ticket, baseStream); + return true; 
+ } + }, true); } catch (Exception e) { diff --git a/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml index c85e7ee918..118868d4a8 100644 --- a/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml +++ b/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml @@ -249,9 +249,11 @@ select - node + node, + acl from org.alfresco.repo.domain.hibernate.NodeImpl as node + left outer join node.accessControlList as acl where node.store.id = :storeId and node.uuid = :uuid @@ -531,7 +533,7 @@ assoc.id - + select parent.id, parentStore.protocol, @@ -542,8 +544,10 @@ join assoc.parent as parent join parent.store as parentStore join assoc.child as child + join child.store as childStore where - child.store.id != parent.store.id and + parentStore.id = :parentStoreId and + childStore.id != :parentStoreId and parent.id > :minNodeId and assoc.isPrimary = true order by @@ -704,4 +708,23 @@ props.serializableValue is not null + + = :minNodeId and + node.deleted = true and + txn.id <= :maxTxnId + order by + node.id asc + ]]> + + diff --git a/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml index 2accb710a4..af0902e554 100644 --- a/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml +++ b/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml @@ -88,6 +88,17 @@ org.alfresco.repo.domain.hibernate.TransactionImpl as txn + + + + + + = :minTxnId and + txn.commitTimeMs <= :maxCommitTime + order by + txn.id asc + ]]> + + diff --git a/source/java/org/alfresco/repo/jscript/People.java b/source/java/org/alfresco/repo/jscript/People.java index ce3d6c607f..5f26f6f328 100644 --- a/source/java/org/alfresco/repo/jscript/People.java +++ b/source/java/org/alfresco/repo/jscript/People.java @@ -413,7 +413,7 @@ public final class People extends BaseScopableProcessorExtension ScriptNode group = null; String actualName = services.getAuthorityService().getName(AuthorityType.GROUP, groupName); - if (authorityService.authorityExists(groupName) == false) + if (authorityService.authorityExists(actualName) == false) { String parentGroupName = null; if (parentGroup != null) diff --git a/source/java/org/alfresco/repo/jscript/ScriptNode.java b/source/java/org/alfresco/repo/jscript/ScriptNode.java index e2a137b6d9..49451d5550 100644 --- a/source/java/org/alfresco/repo/jscript/ScriptNode.java +++ b/source/java/org/alfresco/repo/jscript/ScriptNode.java @@ -1409,7 +1409,7 @@ public class ScriptNode implements Serializable, Scopeable if (destination.getNodeRef().getStoreRef().getProtocol().equals(StoreRef.PROTOCOL_WORKSPACE)) { NodeRef copyRef = this.services.getCopyService().copyAndRename(this.nodeRef, destination.getNodeRef(), - ContentModel.ASSOC_CONTAINS, getPrimaryParentAssoc().getQName(), deepCopy); + ContentModel.ASSOC_CONTAINS, null, deepCopy); copy = newInstance(copyRef, this.services, this.scope); } else diff --git a/source/java/org/alfresco/repo/node/AbstractNodeServiceImpl.java b/source/java/org/alfresco/repo/node/AbstractNodeServiceImpl.java index b4188cc8a5..41f87a98df 100644 --- a/source/java/org/alfresco/repo/node/AbstractNodeServiceImpl.java +++ b/source/java/org/alfresco/repo/node/AbstractNodeServiceImpl.java @@ -30,7 +30,6 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.locks.ReentrantLock; import org.alfresco.model.ContentModel; import 
org.alfresco.repo.node.NodeServicePolicies.BeforeAddAspectPolicy; @@ -59,8 +58,6 @@ import org.alfresco.repo.policy.AssociationPolicyDelegate; import org.alfresco.repo.policy.ClassPolicyDelegate; import org.alfresco.repo.policy.PolicyComponent; import org.alfresco.repo.search.Indexer; -import org.alfresco.repo.security.authentication.AuthenticationUtil; -import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork; import org.alfresco.service.cmr.dictionary.ClassDefinition; import org.alfresco.service.cmr.dictionary.DataTypeDefinition; import org.alfresco.service.cmr.dictionary.DictionaryService; @@ -648,47 +645,4 @@ public abstract class AbstractNodeServiceImpl implements NodeService } return properties; } - - /** - * Override to implement cleanup processes. The default does nothing. - *

- * This method will be called as the system user but without any - * additional transactions. - */ - protected List cleanupImpl() - { - // No operation - return Collections.emptyList(); - } - - /** Prevent multiple executions of the implementation method */ - private ReentrantLock cleanupLock = new ReentrantLock(); - public final List cleanup() - { - boolean locked = cleanupLock.tryLock(); - if (locked) - { - try - { - // Authenticate as system - RunAsWork> cleanupWork = new RunAsWork>() - { - public List doWork() throws Exception - { - // The current thread got the lock - return cleanupImpl(); - } - }; - return AuthenticationUtil.runAs(cleanupWork, AuthenticationUtil.SYSTEM_USER_NAME); - } - finally - { - cleanupLock.unlock(); - } - } - else - { - return Collections.emptyList(); - } - } } diff --git a/source/java/org/alfresco/repo/node/cleanup/AbstractNodeCleanupWorker.java b/source/java/org/alfresco/repo/node/cleanup/AbstractNodeCleanupWorker.java new file mode 100644 index 0000000000..ca94e17aa5 --- /dev/null +++ b/source/java/org/alfresco/repo/node/cleanup/AbstractNodeCleanupWorker.java @@ -0,0 +1,147 @@ +package org.alfresco.repo.node.cleanup; + +import java.util.Collections; +import java.util.List; +import java.util.concurrent.locks.ReentrantLock; + +import org.alfresco.error.StackTraceUtil; +import org.alfresco.repo.node.db.DbNodeServiceImpl; +import org.alfresco.repo.node.db.NodeDaoService; +import org.alfresco.repo.security.authentication.AuthenticationUtil; +import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork; +import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.service.transaction.TransactionService; +import org.alfresco.util.PropertyCheck; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Base class for Node cleaners. This class ensures calls through + * after having created a read-write transaction that is authenticated + * as system. + * + * @author Derek Hulley + * @since 2.2 SP2 + */ +public abstract class AbstractNodeCleanupWorker implements NodeCleanupWorker +{ + protected final Log logger; + private final ReentrantLock cleanupLock; + + private NodeCleanupRegistry registry; + protected TransactionService transactionService; + protected DbNodeServiceImpl dbNodeService; + protected NodeDaoService nodeDaoService; + + public AbstractNodeCleanupWorker() + { + logger = LogFactory.getLog(this.getClass()); + cleanupLock = new ReentrantLock(); + } + + public void setRegistry(NodeCleanupRegistry registry) + { + this.registry = registry; + } + + public void setTransactionService(TransactionService transactionService) + { + this.transactionService = transactionService; + } + + public void setDbNodeService(DbNodeServiceImpl dbNodeService) + { + this.dbNodeService = dbNodeService; + } + + public void setNodeDaoService(NodeDaoService nodeDaoService) + { + this.nodeDaoService = nodeDaoService; + } + + public void register() + { + PropertyCheck.mandatory(this, "registry", registry); + PropertyCheck.mandatory(this, "transactionService", transactionService); + PropertyCheck.mandatory(this, "dbNodeService", dbNodeService); + PropertyCheck.mandatory(this, "nodeDaoService", nodeDaoService); + + registry.register(this); + } + + /** + * Calls {@link #doCleanInternal()} in a System-user authenticated read-write transaction. + * This method is non-blocking but passes all second and subsequent concurrent invocations + * straight through. 
+ */ + public List doClean() + { + /** Prevent multiple executions of the implementation method */ + boolean locked = cleanupLock.tryLock(); + if (locked) + { + try + { + return doCleanWithTxn(); + } + catch (Throwable e) + { + if (logger.isDebugEnabled()) + { + StringBuilder sb = new StringBuilder(1024); + StackTraceUtil.buildStackTrace( + "Node cleanup failed: " + + " Worker: " + this.getClass().getName() + "\n" + + " Error: ", + e.getStackTrace(), + sb, + Integer.MAX_VALUE); + logger.debug(sb.toString()); + } + StringBuilder sb = new StringBuilder(1024); + StackTraceUtil.buildStackTrace( + "Node cleanup failed: " + + " Worker: " + this.getClass().getName() + "\n" + + " Error: ", + e.getStackTrace(), + sb, + 20); + return Collections.singletonList(sb.toString()); + } + finally + { + cleanupLock.unlock(); + } + } + else + { + return Collections.emptyList(); + } + } + + private List doCleanWithTxn() + { + final RetryingTransactionCallback> doCleanCallback = new RetryingTransactionCallback>() + { + public List execute() throws Throwable + { + return doCleanInternal(); + } + }; + final RunAsWork> doCleanRunAs = new RunAsWork>() + { + public List doWork() throws Exception + { + return transactionService.getRetryingTransactionHelper().doInTransaction(doCleanCallback, false, true); + } + }; + return AuthenticationUtil.runAs(doCleanRunAs, AuthenticationUtil.getSystemUserName()); + } + + /** + * Do the actual cleanup. Any errors are handled by this base class. + * + * @return Returns the cleanup messages. + */ + protected abstract List doCleanInternal() throws Throwable; +} \ No newline at end of file diff --git a/source/java/org/alfresco/repo/node/db/NodeServiceCleanupJob.java b/source/java/org/alfresco/repo/node/cleanup/NodeCleanupJob.java similarity index 58% rename from source/java/org/alfresco/repo/node/db/NodeServiceCleanupJob.java rename to source/java/org/alfresco/repo/node/cleanup/NodeCleanupJob.java index 727e159462..aee4476893 100644 --- a/source/java/org/alfresco/repo/node/db/NodeServiceCleanupJob.java +++ b/source/java/org/alfresco/repo/node/cleanup/NodeCleanupJob.java @@ -22,36 +22,50 @@ * the FLOSS exception, and it is also available here: * http://www.alfresco.com/legal/licensing" */ -package org.alfresco.repo.node.db; +package org.alfresco.repo.node.cleanup; + +import java.util.List; import org.alfresco.error.AlfrescoRuntimeException; -import org.alfresco.service.cmr.repository.NodeService; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.quartz.Job; import org.quartz.JobDataMap; import org.quartz.JobExecutionContext; import org.quartz.JobExecutionException; /** - * Prompts the Node Service to perform regular cleanup operations. - * - * @see NodeService#cleanup() + * Scheduled job to call a {@link NodeCleanupWorker}. + *
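As a sketch only (not part of the patch, class name and message are hypothetical): a concrete worker built on the new base class needs nothing beyond doCleanInternal(); the locking, System-user authentication and read-write retrying transaction all come from AbstractNodeCleanupWorker.doClean().

    import java.util.Collections;
    import java.util.List;

    import org.alfresco.repo.node.cleanup.AbstractNodeCleanupWorker;

    public class ExampleCleanupWorker extends AbstractNodeCleanupWorker
    {
        @Override
        protected List<String> doCleanInternal() throws Throwable
        {
            // Called as the System user inside a read-write retrying transaction.
            // Errors may simply be thrown; the base class turns them into log messages.
            return Collections.singletonList("ExampleCleanupWorker: nothing to clean");
        }
    }

In the Spring wiring, register() is presumably invoked as the bean's init hook so that the worker joins the shared NodeCleanupRegistry.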

+ * Job data is: nodeCleanupWorker * * @author Derek Hulley - * @since 2.1.6 + * @since 2.2SP2 */ -public class NodeServiceCleanupJob implements Job +public class NodeCleanupJob implements Job { + private static Log logger = LogFactory.getLog(NodeCleanupJob.class); + public void execute(JobExecutionContext context) throws JobExecutionException { JobDataMap jobData = context.getJobDetail().getJobDataMap(); - // extract the content cleaner to use - Object nodeServiceObj = jobData.get("nodeService"); - if (nodeServiceObj == null || !(nodeServiceObj instanceof NodeService)) + // extract the content Cleanup to use + Object nodeCleanupWorkerObj = jobData.get("nodeCleanupWorker"); + if (nodeCleanupWorkerObj == null || !(nodeCleanupWorkerObj instanceof NodeCleanupWorker)) { throw new AlfrescoRuntimeException( - "NodeServiceCleanupJob data must contain valid 'nodeService' reference"); + "NodeCleanupJob data must contain valid 'nodeCleanupWorker' reference"); + } + NodeCleanupWorker nodeCleanupWorker = (NodeCleanupWorker) nodeCleanupWorkerObj; + List cleanupLog = nodeCleanupWorker.doClean(); + // Done + if (logger.isDebugEnabled()) + { + logger.debug("Node cleanup log:"); + for (String log : cleanupLog) + { + logger.debug(log); + } } - NodeService nodeService = (NodeService) nodeServiceObj; - nodeService.cleanup(); } } diff --git a/source/java/org/alfresco/repo/node/cleanup/NodeCleanupRegistry.java b/source/java/org/alfresco/repo/node/cleanup/NodeCleanupRegistry.java new file mode 100644 index 0000000000..91a4c7c1a6 --- /dev/null +++ b/source/java/org/alfresco/repo/node/cleanup/NodeCleanupRegistry.java @@ -0,0 +1,65 @@ +package org.alfresco.repo.node.cleanup; + +import java.util.ArrayList; +import java.util.List; + +import org.alfresco.error.StackTraceUtil; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * A {@link NodeCleanupWorker worker} that aggregates any number of + * {@link #register(NodeCleanupWorker) registered} workers. + * + * @author Derek Hulley + * @since 2.2 SP2 + */ +public class NodeCleanupRegistry implements NodeCleanupWorker +{ + private static Log logger = LogFactory.getLog(NodeCleanupRegistry.class); + + private List cleanupWorkers; + + public NodeCleanupRegistry() + { + cleanupWorkers = new ArrayList(5); + } + + public void register(NodeCleanupWorker cleanupWorker) + { + cleanupWorkers.add(cleanupWorker); + } + + /** + * Calls all registered cleaners in order, without transactions or authentication. + * The return messages are aggregated. + */ + public List doClean() + { + List results = new ArrayList(100); + for (NodeCleanupWorker cleanupWorker : cleanupWorkers) + { + try + { + results.addAll(cleanupWorker.doClean()); + } + catch (Throwable e) + { + // This failed. The cleaner should be handling this, but we can't guarantee it. 
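    // Aside, not part of the patch: the only contract NodeCleanupJob has with its scheduling is that
    // the JobDataMap carries the aggregating worker under the key "nodeCleanupWorker"; the production
    // wiring is the Spring JobDetail bean in scheduled-jobs-context.xml. In plain Quartz 1.x terms,
    // with the names and the nodeCleanupRegistry reference assumed for illustration:
    JobDetail exampleDetail = new JobDetail("nodeCleanupJob", Scheduler.DEFAULT_GROUP, NodeCleanupJob.class);
    exampleDetail.getJobDataMap().put("nodeCleanupWorker", nodeCleanupRegistry);
    // the new cron expression "0 0 21 * * ?" in scheduled-jobs-context.xml then fires this once a day at 21:00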
+ logger.error( + "NodeCleanupWork doesn't handle all exception conditions: " + + cleanupWorker.getClass().getName()); + StringBuilder sb = new StringBuilder(1024); + StackTraceUtil.buildStackTrace( + "Node cleanup failed: " + + " Worker: " + cleanupWorker.getClass().getName() + "\n" + + " Error: ", + e.getStackTrace(), + sb, + 20); + results.add(sb.toString()); + } + } + return results; + } +} \ No newline at end of file diff --git a/source/java/org/alfresco/repo/node/cleanup/NodeCleanupWorker.java b/source/java/org/alfresco/repo/node/cleanup/NodeCleanupWorker.java new file mode 100644 index 0000000000..567b90e9e9 --- /dev/null +++ b/source/java/org/alfresco/repo/node/cleanup/NodeCleanupWorker.java @@ -0,0 +1,20 @@ +package org.alfresco.repo.node.cleanup; + +import java.util.List; + +/** + * Interface for classes that implement a snippet of node cleanup. + * + * @author Derek Hulley + * @since 2.2 SP2 + */ +public interface NodeCleanupWorker +{ + /** + * Perform some work to clean up data. All errors must be handled and converted + * to error messages. + * + * @return Returns a list of informational messages. + */ + List doClean(); +} \ No newline at end of file diff --git a/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java b/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java index 00930054a8..3e941fc321 100644 --- a/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java +++ b/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java @@ -42,6 +42,7 @@ import org.alfresco.model.ContentModel; import org.alfresco.repo.domain.Node; import org.alfresco.repo.node.AbstractNodeServiceImpl; import org.alfresco.repo.node.StoreArchiveMap; +import org.alfresco.repo.node.cleanup.AbstractNodeCleanupWorker; import org.alfresco.repo.node.db.NodeDaoService.NodeRefQueryCallback; import org.alfresco.repo.node.index.NodeIndexer; import org.alfresco.repo.security.authentication.AuthenticationUtil; @@ -185,7 +186,12 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl public List getStores() { // Get the ADM stores - List storeRefs = nodeDaoService.getStoreRefs(); + List> stores = nodeDaoService.getStores(); + List storeRefs = new ArrayList(50); + for (Pair pair : stores) + { + storeRefs.add(pair.getSecond()); + } // Now get the AVMStores. 
List avmStores = avmNodeService.getStores(); storeRefs.addAll(avmStores); @@ -2059,7 +2065,7 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl } } - private void indexChildren(Pair nodePair, boolean cascade) + public void indexChildren(Pair nodePair, boolean cascade) { Long nodeId = nodePair.getFirst(); // Get the node's children, but only one's that aren't in the same store @@ -2162,21 +2168,29 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl } } - @Override - protected List cleanupImpl() + public static class MoveChildrenToCorrectStore extends AbstractNodeCleanupWorker { - List moveChildrenResults = moveChildrenToCorrectStore(); - List indexChildrenResults = indexChildrenWhereRequired(); - - List allResults = new ArrayList(100); - allResults.addAll(moveChildrenResults); - allResults.addAll(indexChildrenResults); - - // Done - return allResults; - } + @Override + protected List doCleanInternal() throws Throwable + { + return dbNodeService.moveChildrenToCorrectStore(); + } + }; private List moveChildrenToCorrectStore() + { + List results = new ArrayList(1000); + // Repeat the process for each store + List> storePairs = nodeDaoService.getStores(); + for (Pair storePair : storePairs) + { + List storeResults = moveChildrenToCorrectStore(storePair.getFirst()); + results.addAll(storeResults); + } + return results; + } + + private List moveChildrenToCorrectStore(final Long storeId) { final List> parentNodePairs = new ArrayList>(100); final NodeRefQueryCallback callback = new NodeRefQueryCallback() @@ -2191,7 +2205,7 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl { public Object execute() throws Throwable { - nodeDaoService.getNodesWithChildrenInDifferentStores(Long.MIN_VALUE, 100, callback); + nodeDaoService.getNodesWithChildrenInDifferentStore(storeId, Long.MIN_VALUE, 100, callback); // Done return null; } @@ -2226,11 +2240,19 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl catch (Throwable e) { String msg = - "Failed to move child nodes to parent node's store: \n" + + "Failed to move child nodes to parent node's store." 
+ + " Set log level to WARN for this class to get exception log: \n" + " Parent node: " + parentNodePair.getFirst() + "\n" + " Error: " + e.getMessage(); - // It failed, which is not an error to consider here - logger.warn(msg, e); + // It failed; do a full log in WARN mode + if (logger.isWarnEnabled()) + { + logger.warn(msg, e); + } + else + { + logger.error(msg); + } results.add(msg); } } @@ -2248,88 +2270,4 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl } return results; } - - private List indexChildrenWhereRequired() - { - final List> parentNodePairs = new ArrayList>(100); - final NodeRefQueryCallback callback = new NodeRefQueryCallback() - { - public boolean handle(Pair nodePair) - { - parentNodePairs.add(nodePair); - return true; - } - }; - RetryingTransactionCallback getNodesCallback = new RetryingTransactionCallback() - { - public Object execute() throws Throwable - { - nodeDaoService.getNodesWithAspect(ContentModel.ASPECT_INDEX_CHILDREN, Long.MIN_VALUE, 100, callback); - // Done - return null; - } - }; - transactionService.getRetryingTransactionHelper().doInTransaction(getNodesCallback, true, true); - // Process the nodes in random order - Collections.shuffle(parentNodePairs); - // Iterate and operate - List results = new ArrayList(100); - for (final Pair parentNodePair : parentNodePairs) - { - RetryingTransactionCallback indexChildrenCallback = new RetryingTransactionCallback() - { - public String execute() throws Throwable - { - // Index children without full cascade - indexChildren(parentNodePair, true); - // Done - return null; - } - }; - RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper(); - txnHelper.setMaxRetries(1); - try - { - txnHelper.doInTransaction(indexChildrenCallback, false, true); - String msg = - "Indexed child nodes: \n" + - " Parent node: " + parentNodePair.getFirst(); - results.add(msg); - } - catch (Throwable e) - { - String msg = - "Failed to index child nodes: \n" + - " Parent node: " + parentNodePair.getFirst() + "\n" + - " Error: " + e.getMessage(); - // It failed, which is not an error to consider here - logger.warn(msg, e); - results.add(msg); - } - } - // Done - if (logger.isDebugEnabled()) - { - StringBuilder sb = new StringBuilder(256); - sb.append("Indexed child nodes: \n") - .append(" Results:\n"); - for (String msg : results) - { - sb.append(" ").append(msg).append("\n"); - } - logger.debug(sb.toString()); - } - return results; - } - - /** - * Cleans up transactions and deleted nodes that are older than the given minimum age. 
- * - * @param minAge the minimum age of a transaction or deleted node - * @return Returns log message results - */ - private List cleanUpTransactions(long minAge) - { - return null; - } } diff --git a/source/java/org/alfresco/repo/node/db/DbNodeServiceImplTest.java b/source/java/org/alfresco/repo/node/db/DbNodeServiceImplTest.java index a15c6ea3e0..6c09af5ffa 100644 --- a/source/java/org/alfresco/repo/node/db/DbNodeServiceImplTest.java +++ b/source/java/org/alfresco/repo/node/db/DbNodeServiceImplTest.java @@ -39,6 +39,7 @@ import org.alfresco.model.ContentModel; import org.alfresco.repo.content.MimetypeMap; import org.alfresco.repo.node.BaseNodeServiceTest; import org.alfresco.repo.node.StoreArchiveMap; +import org.alfresco.repo.node.cleanup.NodeCleanupRegistry; import org.alfresco.repo.node.db.NodeDaoService.NodePropertyHandler; import org.alfresco.repo.transaction.AlfrescoTransactionSupport; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; @@ -476,8 +477,14 @@ public class DbNodeServiceImplTest extends BaseNodeServiceTest setComplete(); endTransaction(); + NodeCleanupRegistry nodeCleanupRegistry = new NodeCleanupRegistry(); + DbNodeServiceImpl.MoveChildrenToCorrectStore worker = new DbNodeServiceImpl.MoveChildrenToCorrectStore(); + worker.setTransactionService(transactionService); + worker.setDbNodeService(ns); + worker.setNodeDaoService(nodeDaoService); + // Run cleanup - ns.cleanup(); + worker.doClean(); } /** diff --git a/source/java/org/alfresco/repo/node/db/DeletedNodeCleanupWorker.java b/source/java/org/alfresco/repo/node/db/DeletedNodeCleanupWorker.java new file mode 100644 index 0000000000..162fe58dbc --- /dev/null +++ b/source/java/org/alfresco/repo/node/db/DeletedNodeCleanupWorker.java @@ -0,0 +1,244 @@ +package org.alfresco.repo.node.db; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.alfresco.repo.node.cleanup.AbstractNodeCleanupWorker; +import org.alfresco.repo.node.db.NodeDaoService.NodeRefQueryCallback; +import org.alfresco.repo.transaction.RetryingTransactionHelper; +import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.util.Pair; +import org.apache.commons.lang.mutable.MutableLong; + +/** + * Cleans up deleted nodes and dangling transactions that are old enough. + * + * @author Derek Hulley + * @since 2.2 SP2 + */ +public class DeletedNodeCleanupWorker extends AbstractNodeCleanupWorker +{ + private long minPurgeAgeMs; + + /** + * Default constructor + */ + public DeletedNodeCleanupWorker() + { + minPurgeAgeMs = 7L * 24L * 3600L * 1000L; + } + + /** + * {@inheritDoc} + */ + protected List doCleanInternal() throws Throwable + { + List purgedNodes = purgeOldDeletedNodes(minPurgeAgeMs); + List purgedTxns = purgeOldEmptyTransactions(minPurgeAgeMs); + + List allResults = new ArrayList(100); + allResults.addAll(purgedNodes); + allResults.addAll(purgedTxns); + + // Done + return allResults; + } + + /** + * Set the minimum age (days) that nodes and transactions must be before they get purged. + * The default is 7 days. 
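Worked numbers for the conversion this setter performs, and for the constructor default above: 7 days is 604,800,000 ms, and the index.tracking.minRecordPurgeAgeDays=30 default added to repository.properties, assuming that property is what feeds this setter, works out to 2,592,000,000 ms.

    long sevenDaysMs  = 7L  * 24L * 3600L * 1000L;   // 604800000 ms, the built-in default
    long thirtyDaysMs = 30L * 24L * 3600L * 1000L;   // 2592000000 ms, i.e. minRecordPurgeAgeDays=30
    // a negative day count disables purging: the purge methods below return early when minAge < 0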
+ * + * @param minPurgeAgeDays the minimum age (in days) before nodes and transactions get purged + */ + public void setMinPurgeAgeDays(int minPurgeAgeDays) + { + this.minPurgeAgeMs = ((long) minPurgeAgeDays) * 24L * 3600L * 1000L; + } + + private static final int NODE_PURGE_BATCH_SIZE = 1000; + /** + * Cleans up deleted nodes that are older than the given minimum age. + * + * @param minAge the minimum age of a transaction or deleted node + * @return Returns log message results + */ + private List purgeOldDeletedNodes(long minAge) + { + if (minAge < 0) + { + return Collections.emptyList(); + } + final List results = new ArrayList(100); + final MutableLong minNodeId = new MutableLong(0L); + + final long maxCommitTime = System.currentTimeMillis() - minAge; + RetryingTransactionCallback purgeNodesCallback = new RetryingTransactionCallback() + { + public Integer execute() throws Throwable + { + final List> nodePairs = new ArrayList>(NODE_PURGE_BATCH_SIZE); + NodeRefQueryCallback callback = new NodeRefQueryCallback() + { + public boolean handle(Pair nodePair) + { + nodePairs.add(nodePair); + return true; + } + }; + nodeDaoService.getNodesDeletedInOldTxns(minNodeId.longValue(), maxCommitTime, NODE_PURGE_BATCH_SIZE, callback); + for (Pair nodePair : nodePairs) + { + Long nodeId = nodePair.getFirst(); + nodeDaoService.purgeNode(nodeId); + // Update the min node ID for the next query + if (nodeId.longValue() > minNodeId.longValue()) + { + minNodeId.setValue(nodeId.longValue()); + } + } + return nodePairs.size(); + } + }; + while (true) + { + RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper(); + txnHelper.setMaxRetries(5); // Limit number of retries + txnHelper.setRetryWaitIncrementMs(1000); // 1 second to allow other cleanups time to get through + // Get nodes to delete + Integer purgeCount = new Integer(0); + // Purge nodes + try + { + purgeCount = txnHelper.doInTransaction(purgeNodesCallback, false, true); + if (purgeCount.intValue() > 0) + { + String msg = + "Purged old nodes: \n" + + " Min node ID: " + minNodeId.longValue() + "\n" + + " Batch size: " + NODE_PURGE_BATCH_SIZE + "\n" + + " Max commit time: " + maxCommitTime + "\n" + + " Purge count: " + purgeCount; + results.add(msg); + } + } + catch (Throwable e) + { + String msg = + "Failed to purge nodes." + + " Set log level to WARN for this class to get exception log: \n" + + " Min node ID: " + minNodeId.longValue() + "\n" + + " Batch size: " + NODE_PURGE_BATCH_SIZE + "\n" + + " Max commit time: " + maxCommitTime + "\n" + + " Error: " + e.getMessage(); + // It failed; do a full log in WARN mode + if (logger.isWarnEnabled()) + { + logger.warn(msg, e); + } + else + { + logger.error(msg); + } + results.add(msg); + break; + } + if (purgeCount.intValue() == 0) + { + break; + } + } + // Done + return results; + } + + private static final int TXN_PURGE_BATCH_SIZE = 50; + /** + * Cleans up unused transactions that are older than the given minimum age. 
+ * + * @param minAge the minimum age of a transaction or deleted node + * @return Returns log message results + */ + private List purgeOldEmptyTransactions(long minAge) + { + if (minAge < 0) + { + return Collections.emptyList(); + } + final List results = new ArrayList(100); + final MutableLong minTxnId = new MutableLong(0L); + + final long maxCommitTime = System.currentTimeMillis() - minAge; + RetryingTransactionCallback purgeTxnsCallback = new RetryingTransactionCallback() + { + public Integer execute() throws Throwable + { + final List txnIds = nodeDaoService.getTxnsUnused( + minTxnId.longValue(), + maxCommitTime, + TXN_PURGE_BATCH_SIZE); + for (Long txnId : txnIds) + { + nodeDaoService.purgeTxn(txnId); + // Update the min node ID for the next query + if (txnId.longValue() > minTxnId.longValue()) + { + minTxnId.setValue(txnId.longValue()); + } + } + return txnIds.size(); + } + }; + while (true) + { + RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper(); + txnHelper.setMaxRetries(5); // Limit number of retries + txnHelper.setRetryWaitIncrementMs(1000); // 1 second to allow other cleanups time to get through + // Get nodes to delete + Integer purgeCount = new Integer(0); + // Purge nodes + try + { + purgeCount = txnHelper.doInTransaction(purgeTxnsCallback, false, true); + if (purgeCount.intValue() > 0) + { + String msg = + "Purged old txns: \n" + + " Min txn ID: " + minTxnId.longValue() + "\n" + + " Batch size: " + TXN_PURGE_BATCH_SIZE + "\n" + + " Max commit time: " + maxCommitTime + "\n" + + " Purge count: " + purgeCount; + results.add(msg); + } + } + catch (Throwable e) + { + String msg = + "Failed to purge txns." + + " Set log level to WARN for this class to get exception log: \n" + + " Min txn ID: " + minTxnId.longValue() + "\n" + + " Batch size: " + TXN_PURGE_BATCH_SIZE + "\n" + + " Max commit time: " + maxCommitTime + "\n" + + " Error: " + e.getMessage(); + // It failed; do a full log in WARN mode + if (logger.isWarnEnabled()) + { + logger.warn(msg, e); + } + else + { + logger.error(msg); + } + results.add(msg); + break; + } + if (purgeCount.intValue() == 0) + { + break; + } + } + // Done + return results; + } +} \ No newline at end of file diff --git a/source/java/org/alfresco/repo/node/db/IndexChildrenWhereRequiredWorker.java b/source/java/org/alfresco/repo/node/db/IndexChildrenWhereRequiredWorker.java new file mode 100644 index 0000000000..4ebfbcfced --- /dev/null +++ b/source/java/org/alfresco/repo/node/db/IndexChildrenWhereRequiredWorker.java @@ -0,0 +1,124 @@ +package org.alfresco.repo.node.db; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.alfresco.model.ContentModel; +import org.alfresco.repo.node.cleanup.AbstractNodeCleanupWorker; +import org.alfresco.repo.node.db.NodeDaoService.NodeRefQueryCallback; +import org.alfresco.repo.transaction.RetryingTransactionHelper; +import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.util.Pair; + +/** + * Indexes child nodes where cascade re-indexing is disabled. 
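A sketch of the other side of this contract (illustrative only; whether callers flag nodes directly like this, and the nodeService/parentFolder names, are assumptions): the worker later indexes the children of any node carrying the marker aspect that its getNodesWithAspect query looks for.

    // Hypothetical illustration: mark a folder so that its children are picked up later by
    // IndexChildrenWhereRequiredWorker instead of being indexed in the current transaction.
    nodeService.addAspect(parentFolder, ContentModel.ASPECT_INDEX_CHILDREN, null);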
+ * + * @author Derek Hulley + * @since 2.2 SP2 + */ +public class IndexChildrenWhereRequiredWorker extends AbstractNodeCleanupWorker +{ + /** + * Default constructor + */ + public IndexChildrenWhereRequiredWorker() + { + } + + /** + * {@inheritDoc} + */ + protected List doCleanInternal() throws Throwable + { + List indexChildrenResults = indexChildrenWhereRequired(); + + List allResults = new ArrayList(100); + allResults.addAll(indexChildrenResults); + + // Done + return allResults; + } + + private List indexChildrenWhereRequired() + { + final List> parentNodePairs = new ArrayList>(100); + final NodeRefQueryCallback callback = new NodeRefQueryCallback() + { + public boolean handle(Pair nodePair) + { + parentNodePairs.add(nodePair); + return true; + } + }; + RetryingTransactionCallback getNodesCallback = new RetryingTransactionCallback() + { + public Object execute() throws Throwable + { + nodeDaoService.getNodesWithAspect(ContentModel.ASPECT_INDEX_CHILDREN, Long.MIN_VALUE, 100, callback); + // Done + return null; + } + }; + transactionService.getRetryingTransactionHelper().doInTransaction(getNodesCallback, true, true); + // Process the nodes in random order + Collections.shuffle(parentNodePairs); + // Iterate and operate + List results = new ArrayList(100); + for (final Pair parentNodePair : parentNodePairs) + { + RetryingTransactionCallback indexChildrenCallback = new RetryingTransactionCallback() + { + public String execute() throws Throwable + { + // Index children without full cascade + dbNodeService.indexChildren(parentNodePair, true); + // Done + return null; + } + }; + RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper(); + txnHelper.setMaxRetries(1); + try + { + txnHelper.doInTransaction(indexChildrenCallback, false, true); + String msg = + "Indexed child nodes: \n" + + " Parent node: " + parentNodePair.getFirst(); + results.add(msg); + } + catch (Throwable e) + { + String msg = + "Failed to index child nodes." + + " Set log level to WARN for this class to get exception log: \n" + + " Parent node: " + parentNodePair.getFirst() + "\n" + + " Error: " + e.getMessage(); + // It failed; do a full log in WARN mode + if (logger.isWarnEnabled()) + { + logger.warn(msg, e); + } + else + { + logger.error(msg); + } + results.add(msg); + } + } + // Done + if (logger.isDebugEnabled()) + { + StringBuilder sb = new StringBuilder(256); + sb.append("Indexed child nodes: \n") + .append(" Results:\n"); + for (String msg : results) + { + sb.append(" ").append(msg).append("\n"); + } + logger.debug(sb.toString()); + } + return results; + } +} \ No newline at end of file diff --git a/source/java/org/alfresco/repo/node/db/NodeDaoService.java b/source/java/org/alfresco/repo/node/db/NodeDaoService.java index d977e33534..730f638e30 100644 --- a/source/java/org/alfresco/repo/node/db/NodeDaoService.java +++ b/source/java/org/alfresco/repo/node/db/NodeDaoService.java @@ -69,7 +69,7 @@ public interface NodeDaoService * @return Returns a list of stores */ @DirtySessionAnnotation(markDirty=false) - public List getStoreRefs(); + public List> getStores(); @DirtySessionAnnotation(markDirty=false) public Pair getRootNode(StoreRef storeRef); @@ -164,11 +164,19 @@ public interface NodeDaoService public boolean hasNodeAspect(Long nodeId, QName aspectQName); /** - * Deletes the node and all entities + * Deletes the node and all entities. Note that the node entry will still exist and be + * associated with a live transaction. 
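To make the two-step lifecycle concrete (sketch only; nodeDaoService and nodeId are placeholders): deletion keeps the row so that index tracking can still see which transaction removed the node, and the purge added in this patch removes the row later.

    nodeDaoService.deleteNode(nodeId);   // logical delete: row stays, flagged deleted, tied to the deleting txn
    // ... later, from DeletedNodeCleanupWorker, once the owning transaction is older than minPurgeAgeDays ...
    nodeDaoService.purgeNode(nodeId);    // physical removal; no further transaction record is written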
*/ @DirtySessionAnnotation(markDirty=true) public void deleteNode(Long nodeId); + /** + * Remove all traces of the node. This assumes that the node has been marked + * for deletion using {@link #deleteNode(Long)}. + */ + @DirtySessionAnnotation(markDirty=true) + public void purgeNode(Long nodeId); + /** * @param name the cm:name to apply to the association * @return Returns the persisted and filled association's ID @@ -286,8 +294,21 @@ public interface NodeDaoService boolean handle(Pair nodePair); } + /** + * Gets a set of nodes that have parents in the given store, but are themselves located in a different + * store. + * + * @param storeId the store of the parent nodes + * @param minNodeId the min node ID to return + * @param count the maximum number of results + * @param resultsCallback the node callback + */ @DirtySessionAnnotation(markDirty=false) - public void getNodesWithChildrenInDifferentStores(Long minNodeId, int count, NodeRefQueryCallback resultsCallback); + public void getNodesWithChildrenInDifferentStore( + Long storeId, + Long minNodeId, + int count, + NodeRefQueryCallback resultsCallback); @DirtySessionAnnotation(markDirty=false) public void getNodesWithAspect(QName aspectQName, Long minNodeId, int count, NodeRefQueryCallback resultsCallback); @@ -454,6 +475,17 @@ public interface NodeDaoService @DirtySessionAnnotation(markDirty=true) public void getPropertyValuesByActualType(DataTypeDefinition actualDataTypeDefinition, NodePropertyHandler handler); + /** + * Gets a batch of deleted nodes in old transactions. + * + * @param minNodeId the minimum node ID + * @param maxCommitTime the maximum commit time (to set a minimum transaction age) + * @param count the maximum number of results (for batching) + * @param resultsCallback the callback to pass results back + */ + @DirtySessionAnnotation(markDirty=false) + public void getNodesDeletedInOldTxns(Long minNodeId, long maxCommitTime, int count, NodeRefQueryCallback resultsCallback); + /** * Iterface to handle callbacks when iterating over properties * @@ -465,6 +497,20 @@ public interface NodeDaoService void handle(NodeRef nodeRef, QName nodeTypeQName, QName propertyQName, Serializable value); } + /** + * Retrieves the maximum transaction ID for which the commit time is less than the given time. + * + * @param maxCommitTime the max commit time (ms) + * @return the last transaction on or before the given time + */ + @DirtySessionAnnotation(markDirty=true) + public Long getMaxTxnIdByCommitTime(final long maxCommitTime); + /** + * Retrieves a specific transaction. + * + * @param txnId the unique transaction ID. 
+ * @return the requested transaction or null + */ @DirtySessionAnnotation(markDirty=true) public Transaction getTxnById(long txnId); /** @@ -518,4 +564,10 @@ public interface NodeDaoService @DirtySessionAnnotation(markDirty=false) public List getTxnChanges(final long txnId); + + @DirtySessionAnnotation(markDirty=false) + public List getTxnsUnused(Long minTxnId, long maxCommitTime, int count); + + @DirtySessionAnnotation(markDirty=true) + public void purgeTxn(Long txnId); } diff --git a/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java b/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java index 7334ba953a..8653b43e1f 100644 --- a/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java +++ b/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java @@ -107,6 +107,7 @@ import org.alfresco.util.Pair; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hibernate.Criteria; +import org.hibernate.ObjectNotFoundException; import org.hibernate.Query; import org.hibernate.ScrollMode; import org.hibernate.ScrollableResults; @@ -137,7 +138,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements private static final String QUERY_GET_CHILD_ASSOC_REFS_BY_CHILD_TYPEQNAME = "node.GetChildAssocRefsByChildTypeQName"; private static final String QUERY_GET_PRIMARY_CHILD_ASSOCS = "node.GetPrimaryChildAssocs"; private static final String QUERY_GET_PRIMARY_CHILD_ASSOCS_NOT_IN_SAME_STORE = "node.GetPrimaryChildAssocsNotInSameStore"; - private static final String QUERY_GET_NODES_WITH_CHILDREN_IN_DIFFERENT_STORES ="node.GetNodesWithChildrenInDifferentStores"; + private static final String QUERY_GET_NODES_WITH_CHILDREN_IN_DIFFERENT_STORE ="node.GetNodesWithChildrenInDifferentStore"; private static final String QUERY_GET_NODES_WITH_ASPECT ="node.GetNodesWithAspect"; private static final String QUERY_GET_PARENT_ASSOCS = "node.GetParentAssocs"; private static final String QUERY_GET_NODE_ASSOC = "node.GetNodeAssoc"; @@ -149,6 +150,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements private static final String QUERY_GET_USERS_WITHOUT_USAGE = "node.GetUsersWithoutUsage"; private static final String QUERY_GET_USERS_WITH_USAGE = "node.GetUsersWithUsage"; private static final String QUERY_GET_NODES_WITH_PROPERTY_VALUES_BY_ACTUAL_TYPE = "node.GetNodesWithPropertyValuesByActualType"; + private static final String QUERY_GET_DELETED_NODES_BY_MAX_TXNID = "node.GetDeletedNodesByMaxTxnId"; private static final String QUERY_GET_SERVER_BY_IPADDRESS = "server.getServerByIpAddress"; private static final Long NULL_CACHE_VALUE = new Long(-1); @@ -521,14 +523,14 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements * * @param nodeId the node's ID * @return the node - * @throws AlfrescoRuntimeException if the ID doesn't refer to a node. + * @throws ObjectNotFoundException if the ID doesn't refer to a node. 
*/ private Node getNodeNotNull(Long nodeId) { Node node = (Node) getHibernateTemplate().get(NodeImpl.class, nodeId); if (node == null) { - throw new AlfrescoRuntimeException("Node ID " + nodeId + " is invalid"); + throw new ObjectNotFoundException(nodeId, NodeImpl.class.getName()); } return node; } @@ -573,7 +575,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements * @see #QUERY_GET_ALL_STORES */ @SuppressWarnings("unchecked") - public List getStoreRefs() + public List> getStores() { HibernateCallback callback = new HibernateCallback() { @@ -585,10 +587,11 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements } }; List stores = (List) getHibernateTemplate().execute(callback); - List storeRefs = new ArrayList(stores.size()); + List> storeRefs = new ArrayList>(stores.size()); for (Store store : stores) { - storeRefs.add(store.getStoreRef()); + Pair storePair = new Pair(store.getId(), store.getStoreRef()); + storeRefs.add(storePair); } // done return storeRefs; @@ -714,17 +717,19 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements return query.uniqueResult(); } }; - Node node = (Node) getHibernateTemplate().execute(callback); + Object[] result = (Object[]) getHibernateTemplate().execute(callback); // Cache the value - if (node == null) + final Node node; + if (result == null) { + node = null; storeAndNodeIdCache.put(nodeRef, NULL_CACHE_VALUE); } else { + node = (Node) result[0]; storeAndNodeIdCache.put(nodeRef, node.getId()); } - // TODO: Fill cache here return node; } @@ -1336,6 +1341,18 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements recordNodeDelete(node); } + /** + * Final purge of the node entry. No transaction recording is done for this. + */ + public void purgeNode(Long nodeId) + { + Node node = (Node) getSession().get(NodeImpl.class, nodeId); + if (node != null) + { + getHibernateTemplate().delete(node); + } + } + private static final String QUERY_DELETE_PARENT_ASSOCS = "node.DeleteParentAssocs"; private static final String QUERY_DELETE_CHILD_ASSOCS = "node.DeleteChildAssocs"; private static final String QUERY_DELETE_NODE_ASSOCS = "node.DeleteNodeAssocs"; @@ -2364,14 +2381,19 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements // Done } - public void getNodesWithChildrenInDifferentStores(final Long minNodeId, final int count, NodeRefQueryCallback resultsCallback) + public void getNodesWithChildrenInDifferentStore( + final Long storeId, + final Long minNodeId, + final int count, + NodeRefQueryCallback resultsCallback) { HibernateCallback callback = new HibernateCallback() { public Object doInHibernate(Session session) { Query query = session - .getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_NODES_WITH_CHILDREN_IN_DIFFERENT_STORES) + .getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_NODES_WITH_CHILDREN_IN_DIFFERENT_STORE) + .setLong("parentStoreId", storeId) .setLong("minNodeId", minNodeId) .setMaxResults(count); DirtySessionMethodInterceptor.setQueryFlushMode(session, query); @@ -2397,10 +2419,10 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements /** *
@@ -2397,10 +2419,10 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
 
     /**
      * <pre>
-            Long parentId = (Long) row[0];
-            String parentProtocol = (String) row[1];
-            String parentIdentifier = (String) row[2];
-            String parentUuid = (String) row[3];
+            Node ID = (Long) row[0];
+            Node Protocol = (String) row[1];
+            Node Identifier = (String) row[2];
+            Node Uuid = (String) row[3];
      * </pre>
      */
     private void processNodeResults(ScrollableResults queryResults, NodeRefQueryCallback resultsCallback)
@@ -3125,12 +3147,57 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
         }
     }
 
+    @SuppressWarnings("unchecked")
+    public void getNodesDeletedInOldTxns(
+            final Long minNodeId,
+            long maxCommitTime,
+            final int count,
+            NodeRefQueryCallback resultsCallback)
+    {
+        // Get the max transaction ID
+        final Long maxTxnId = getMaxTxnIdByCommitTime(maxCommitTime);
+
+        // Shortcut
+        if (maxTxnId == null)
+        {
+            return;
+        }
+
+        HibernateCallback callback = new HibernateCallback()
+        {
+            public Object doInHibernate(Session session)
+            {
+                Query query = session.getNamedQuery(QUERY_GET_DELETED_NODES_BY_MAX_TXNID);
+                query.setLong("minNodeId", minNodeId);
+                query.setLong("maxTxnId", maxTxnId);
+                query.setMaxResults(count);
+                query.setReadOnly(true);
+                return query.scroll(ScrollMode.FORWARD_ONLY);
+            }
+        };
+        ScrollableResults queryResults = null;
+        try
+        {
+            queryResults = (ScrollableResults) getHibernateTemplate().execute(callback);
+            processNodeResults(queryResults, resultsCallback);
+        }
+        finally
+        {
+            if (queryResults != null)
+            {
+                queryResults.close();
+            }
+        }
+        // Done
+    }
+
     /*
      * Queries for transactions
      */
     private static final String QUERY_GET_TXN_BY_ID = "txn.GetTxnById";
     private static final String QUERY_GET_MIN_COMMIT_TIME = "txn.GetMinCommitTime";
     private static final String QUERY_GET_MAX_COMMIT_TIME = "txn.GetMaxCommitTime";
+    private static final String QUERY_GET_MAX_ID_BY_COMMIT_TIME = "txn.GetMaxIdByCommitTime";
     private static final String QUERY_GET_TXNS_BY_COMMIT_TIME_ASC = "txn.GetTxnsByCommitTimeAsc";
     private static final String QUERY_GET_TXNS_BY_COMMIT_TIME_DESC = "txn.GetTxnsByCommitTimeDesc";
     private static final String QUERY_GET_SELECTED_TXNS_BY_COMMIT_TIME_ASC = "txn.GetSelectedTxnsByCommitAsc";
@@ -3139,6 +3206,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
     private static final String QUERY_COUNT_TRANSACTIONS = "txn.CountTransactions";
     private static final String QUERY_GET_TXN_CHANGES_FOR_STORE = "txn.GetTxnChangesForStore";
     private static final String QUERY_GET_TXN_CHANGES = "txn.GetTxnChanges";
+    private static final String QUERY_GET_TXNS_UNUSED = "txn.GetTxnsUnused";
 
     public Transaction getTxnById(final long txnId)
     {
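Note: the deleted-node query added above and the unused-transaction query both take a commit-time horizon rather than a transaction ID; getMaxTxnIdByCommitTime translates that horizon into an ID bound internally. A small illustrative helper for deriving such a horizon from a retention period in days; the class and the 30-day figure are not part of the patch.

    public class PurgeHorizonSketch
    {
        /**
         * Derive the 'maxCommitTime' horizon used by the purge queries:
         * anything committed before this time is considered old enough to purge.
         */
        public static long purgeHorizon(long retentionDays)
        {
            long retentionMs = retentionDays * 24L * 60L * 60L * 1000L;
            return System.currentTimeMillis() - retentionMs;
        }

        public static void main(String[] args)
        {
            // e.g. purge records older than 30 days (illustrative figure)
            long maxCommitTime = purgeHorizon(30);
            System.out.println("Purge everything committed before " + maxCommitTime);
        }
    }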
@@ -3190,6 +3258,23 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
         return (commitTime == null) ? 0L : commitTime;
     }
 
+    public Long getMaxTxnIdByCommitTime(final long maxCommitTime)
+    {
+        HibernateCallback callback = new HibernateCallback()
+        {
+            public Object doInHibernate(Session session)
+            {
+                Query query = session.getNamedQuery(QUERY_GET_MAX_ID_BY_COMMIT_TIME);
+                query.setLong("maxCommitTime", maxCommitTime);
+                query.setReadOnly(true);
+                return query.uniqueResult();
+            }
+        };
+        Long txnId = (Long) getHibernateTemplate().execute(callback);
+        // done
+        return txnId;
+    }
+
     @SuppressWarnings("unchecked")
     public List<Transaction> getTxnsByMinCommitTime(final List<Long> includeTxnIds)
     {
@@ -3518,6 +3603,36 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
         return nodeRefs;
     }
 
+    @SuppressWarnings("unchecked")
+    public List<Transaction> getTxnsUnused(final Long minTxnId, final long maxCommitTime, final int count)
+    {
+        HibernateCallback callback = new HibernateCallback()
+        {
+            public Object doInHibernate(Session session)
+            {
+                Query query = session.getNamedQuery(QUERY_GET_TXNS_UNUSED);
+                query.setReadOnly(true)
+                     .setMaxResults(count)
+                     .setLong("minTxnId", minTxnId)
+                     .setLong("maxCommitTime", maxCommitTime);
+                DirtySessionMethodInterceptor.setQueryFlushMode(session, query);
+                return query.list();
+            }
+        };
+        List<Transaction> results = (List<Transaction>) getHibernateTemplate().execute(callback);
+        // done
+        return results;
+    }
+
+    public void purgeTxn(Long txnId)
+    {
+        Transaction txn = (Transaction) getSession().get(TransactionImpl.class, txnId);
+        if (txn != null)
+        {
+            getHibernateTemplate().delete(txn);
+        }
+    }
+
     //============ PROPERTY HELPER METHODS =================//
 
     public static Map convertToPersistentProperties(
diff --git a/source/java/org/alfresco/repo/node/index/AbstractReindexComponent.java b/source/java/org/alfresco/repo/node/index/AbstractReindexComponent.java
index 022d540fa0..cc05689f35 100644
--- a/source/java/org/alfresco/repo/node/index/AbstractReindexComponent.java
+++ b/source/java/org/alfresco/repo/node/index/AbstractReindexComponent.java
@@ -24,6 +24,9 @@
  */
 package org.alfresco.repo.node.index;
 
+import java.io.PrintStream;
+import java.io.PrintWriter;
+import java.io.StringWriter;
 import java.util.Iterator;
 import java.util.List;
 import java.util.concurrent.LinkedBlockingQueue;
@@ -785,6 +788,7 @@ public abstract class AbstractReindexComponent implements IndexRecovery
                             id, e.getMessage());
                     loggerOnThread.warn(msg);
+                    loggerOnThread.warn(getStackTrace(e));
                 }
                 catch (Throwable e)
                 {
@@ -793,6 +797,7 @@ public abstract class AbstractReindexComponent implements IndexRecovery
                             id, e.getMessage());
                     loggerOnThread.error(msg);
+                    loggerOnThread.warn(getStackTrace(e));
                 }
                 finally
                 {
@@ -800,6 +805,18 @@ public abstract class AbstractReindexComponent implements IndexRecovery
                     removeFromQueueAndProdHead();
                 }
             }
+
+            public String getStackTrace(Throwable t)
+            {
+                StringWriter sw = new StringWriter();
+                PrintWriter pw = new PrintWriter(sw, true);
+                t.printStackTrace(pw);
+                pw.flush();
+                sw.flush();
+                return sw.toString();
+            }
+
+
             public synchronized void reindexedNode(NodeRef nodeRef)
             {
                 // Check for forced kill
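Note: the getStackTrace helper added to the reindex thread above simply renders the full trace into a String so it can be attached to a single log message. The same pattern as a standalone utility, for reference; the class name is illustrative and not part of the patch.

    import java.io.PrintWriter;
    import java.io.StringWriter;

    public final class StackTraceText
    {
        private StackTraceText()
        {
        }

        /** Render the full stack trace of a Throwable as a String for logging. */
        public static String render(Throwable t)
        {
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw, true);
            t.printStackTrace(pw);
            pw.flush();
            return sw.toString();
        }
    }

Typical use in a catch block: log the short message first, then the rendered trace, e.g. logger.warn(msg) followed by logger.warn(StackTraceText.render(e)).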
diff --git a/source/java/org/alfresco/service/cmr/repository/NodeService.java b/source/java/org/alfresco/service/cmr/repository/NodeService.java
index 35e679107a..13c94788eb 100644
--- a/source/java/org/alfresco/service/cmr/repository/NodeService.java
+++ b/source/java/org/alfresco/service/cmr/repository/NodeService.java
@@ -62,20 +62,6 @@ import org.alfresco.service.namespace.QNamePattern;
 @PublicService
 public interface NodeService
 {
-    /**
-     * Kick off any cleanup processes relating to the the particular implementation.
-     * <p/>
-     * This must cover cleanup of orphaned data and other housekeeping tasks that may
-     * be required.
-     * <p/>
-     * NB: Implementations should guard against multithreaded entry without
-     * blocking.
-     *
-     * @return Returns a list of messages detailing what was done.
-     */
-    @Auditable(key = Auditable.Key.NO_KEY)
-    public List<String> cleanup();
-
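Note: the cleanup() contract just removed above is the kind of housekeeping that now has to live behind the service rather than on it. One hypothetical shape for a pluggable replacement, keeping the same "list of messages" result and the same warning about concurrent entry; the interface and method names are illustrative sketches, not taken from this patch.

    import java.util.List;

    /**
     * Hypothetical sketch only: a pluggable worker that takes over the housekeeping
     * previously exposed as NodeService.cleanup().
     */
    public interface CleanupWorker
    {
        /**
         * Perform one cleanup pass (orphaned data, stale records, and so on).
         * Implementations should guard against multithreaded entry without blocking.
         *
         * @return messages describing what was done
         */
        List<String> doClean();
    }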
     /**
      * Gets a list of all available node store references
      *