Merged V3.0 to HEAD

12140: Merged V2.2 to V3.0
    11732: Fixed ETWOTWO-804: Node and Transaction Cleanup Job
    11747: Missed config for Node and Txn purging
    11826: WCM - fix ETWOTWO-817
    11951: Fixed ETWOTWO-901: NodeService cleanup must be pluggable
    11961: Merged V2.1 to V2.2
      11561: ETWOONE-224: when renaming duplicates during copy, association names were not renamed
      11583: (ALREADY PRESENT) Updated NTLM config example in web.xml - adding missing servlet mappings
      11584: Fix for ETWOONE-209 - JavaScript People.createGroup() API now correctly checks for actual group name when testing for existence
      11585: Fix for ETWOONE-214 - View In CIFS link now works even when the user does not have view permissions on the parent folder
      11612: Fix for ETWOONE-91: the description textarea in the modify space properties web form eats one leading newline each time it is submitted
      11613: Fix 2.1 build and adjust implementation of ETWOONE-224 fix
      11621: Fix for ETWOONE-343
      11669: Improved debug from index tracking when exceptions occur
  12141: Avoid annoying Spring WARN messages for ClientAbortException
  12143: File that should have been deleted in CHK-5460 (rev 12140)
  12177: Fix failing FS Deployment Tests since introduction of transaction check advice.


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@12507 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
This commit is contained in:
Jan Vonka
2008-12-19 10:21:51 +00:00
parent 16861e9117
commit 21bb599e20
24 changed files with 1086 additions and 253 deletions

View File

@@ -57,6 +57,9 @@
         <property name="avmService">
             <ref bean="indexingAVMService"/>
         </property>
+        <property name="transactionService">
+            <ref bean="transactionService"/>
+        </property>
         <!-- how many files to send in parallel -->
         <property name="numberOfSendingThreads">

View File

@@ -176,6 +176,36 @@
         </property>
     </bean>
 
+    <!-- Node cleanup -->
+    <bean id="nodeCleanupRegistry" class="org.alfresco.repo.node.cleanup.NodeCleanupRegistry" />
+    <bean id="nodeCleanupBase" abstract="true" init-method="register">
+        <property name="registry">
+            <ref bean="nodeCleanupRegistry" />
+        </property>
+        <property name="transactionService">
+            <ref bean="transactionService" />
+        </property>
+        <property name="dbNodeService">
+            <ref bean="dbNodeServiceImpl" />
+        </property>
+        <property name="nodeDaoService">
+            <ref bean="nodeDaoService" />
+        </property>
+    </bean>
+    <bean id="nodeCleanup.moveChildrenToCorrectStore"
+            class="org.alfresco.repo.node.db.DbNodeServiceImpl$MoveChildrenToCorrectStore"
+            parent="nodeCleanupBase"/>
+    <bean id="nodeCleanup.indexChildrenWhereRequired"
+            class="org.alfresco.repo.node.db.IndexChildrenWhereRequiredWorker"
+            parent="nodeCleanupBase"/>
+    <bean id="nodeCleanup.deleteNodeCleanup"
+            class="org.alfresco.repo.node.db.DeletedNodeCleanupWorker"
+            parent="nodeCleanupBase">
+        <property name="minPurgeAgeDays">
+            <value>${index.tracking.minRecordPurgeAgeDays}</value>
+        </property>
+    </bean>
+
     <!-- NodeService implemented to persist to Database. Resource management enabled. -->
     <bean id="dbNodeService" class="org.springframework.aop.framework.ProxyFactoryBean">
         <property name="proxyInterfaces">
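The XML above is the whole pluggability mechanism: each worker bean inherits its mandatory dependencies from the abstract nodeCleanupBase bean and self-registers through init-method="register". A hedged sketch of the equivalent programmatic wiring, using the setters defined on AbstractNodeCleanupWorker later in this change set (the transactionService, dbNodeService and nodeDaoService variables are assumed to be in scope):

    // Hedged sketch only; Spring normally does this via the nodeCleanupBase parent bean
    NodeCleanupRegistry registry = new NodeCleanupRegistry();

    DeletedNodeCleanupWorker worker = new DeletedNodeCleanupWorker();
    worker.setRegistry(registry);
    worker.setTransactionService(transactionService); // assumed to be in scope
    worker.setDbNodeService(dbNodeService);           // assumed to be in scope
    worker.setNodeDaoService(nodeDaoService);         // assumed to be in scope
    worker.setMinPurgeAgeDays(30);                    // mirrors ${index.tracking.minRecordPurgeAgeDays}

    // what init-method="register" triggers: validates the mandatory
    // properties, then adds this worker to the registry
    worker.register();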

View File

@@ -50,6 +50,11 @@ index.tracking.reindexLagMs=1000
 index.tracking.maxRecordSetSize=1000
 index.tracking.maxTransactionsPerLuceneCommit=100
 index.tracking.disableInTransactionIndexing=false
+# Index tracking information of a certain age is cleaned out by a scheduled job.
+# Any clustered system that has been offline for longer than this period will need to be seeded
+# with a more recent backup of the Lucene indexes or the indexes will have to be fully rebuilt.
+# Use -1 to disable purging. This can be switched on at any stage.
+index.tracking.minRecordPurgeAgeDays=30
 # Change the failure behaviour of the configuration checker
 system.bootstrap.config_check.strict=true

View File

@@ -153,12 +153,12 @@
     <bean id="nodeServiceCleanupJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
         <property name="jobClass">
-            <value>org.alfresco.repo.node.db.NodeServiceCleanupJob</value>
+            <value>org.alfresco.repo.node.cleanup.NodeCleanupJob</value>
         </property>
         <property name="jobDataAsMap">
             <map>
-                <entry key="nodeService">
-                    <ref bean="nodeService" />
+                <entry key="nodeCleanupWorker">
+                    <ref bean="nodeCleanupRegistry" />
                 </entry>
             </map>
         </property>
@@ -170,9 +170,8 @@
         <property name="scheduler">
             <ref bean="schedulerFactory" />
         </property>
-        <!-- trigger hourly -->
         <property name="cronExpression">
-            <value>0 15 * * * ?</value>
+            <value>0 0 21 * * ?</value>
         </property>
     </bean>
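The deleted "trigger hourly" comment matched the old expression: "0 15 * * * ?" fires at second 0 of minute 15 every hour, while the new "0 0 21 * * ?" fires once a day at 21:00. A hedged sanity check, assuming org.quartz.CronExpression is available in the bundled Quartz version:

    import java.util.Date;
    import org.quartz.CronExpression;

    public class CronCheck
    {
        public static void main(String[] args) throws Exception
        {
            // old: "0 15 * * * ?" => second 0 of minute 15, every hour
            // new: "0 0 21 * * ?" => 21:00:00, once per day
            CronExpression cron = new CronExpression("0 0 21 * * ?");
            System.out.println(cron.getNextValidTimeAfter(new Date()));
        }
    }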

View File

@@ -54,7 +54,6 @@ import org.alfresco.service.cmr.repository.AssociationRef;
 import org.alfresco.service.cmr.repository.ChildAssociationRef;
 import org.alfresco.service.cmr.repository.CopyService;
 import org.alfresco.service.cmr.repository.CopyServiceException;
-import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException;
 import org.alfresco.service.cmr.repository.NodeRef;
 import org.alfresco.service.cmr.repository.NodeService;
 import org.alfresco.service.cmr.repository.StoreRef;
@@ -71,9 +70,6 @@ import org.alfresco.service.namespace.RegexQNamePattern;
 import org.alfresco.util.ParameterCheck;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.tools.ant.taskdefs.War;
-import freemarker.log.Logger;
 
 /**
  * Node operations service implmentation.
@@ -224,6 +220,9 @@ public class CopyServiceImpl implements CopyService
                 new JavaBehaviour(this, "onCopyComplete"));
     }
 
+    /**
+     * @see org.alfresco.service.cmr.repository.CopyService#copy(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.namespace.QName, org.alfresco.service.namespace.QName, boolean)
+     */
     public NodeRef copy(
             NodeRef sourceNodeRef,
             NodeRef destinationParentRef,
@@ -267,29 +266,36 @@ public class CopyServiceImpl implements CopyService
         return copy;
     }
 
+    /**
+     * @see org.alfresco.service.cmr.repository.CopyService#copyAndRename(org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.cmr.repository.NodeRef, org.alfresco.service.namespace.QName, org.alfresco.service.namespace.QName, boolean)
+     */
     public NodeRef copyAndRename(NodeRef sourceNodeRef, NodeRef destinationParent, QName destinationAssocTypeQName, QName destinationQName, boolean copyChildren)
     {
-        // Make a note of the source name and do the copy
+        // To fix ETWOONE-224 issue it is necessary to change a QName of the new node accordingly to its name.
+        NodeRef result = null;
         String sourceName = (String)this.internalNodeService.getProperty(sourceNodeRef, ContentModel.PROP_NAME);
-        NodeRef copy = copy(sourceNodeRef, destinationParent, destinationAssocTypeQName, destinationQName, copyChildren);
 
-        // Do the rename, iterating until a non-duplicate name is found
-        boolean bDone = false;
-        while (bDone == false)
+        // Find a non-duplicate name
+        String newName = sourceName;
+        while (this.internalNodeService.getChildByName(destinationParent, destinationAssocTypeQName, newName) != null)
         {
-            try
-            {
-                this.internalNodeService.setProperty(copy, ContentModel.PROP_NAME, sourceName);
-                bDone = true;
-            }
-            catch(DuplicateChildNodeNameException exception)
-            {
-                sourceName = I18NUtil.getMessage(COPY_OF_LABEL, sourceName);
-            }
+            newName = I18NUtil.getMessage(COPY_OF_LABEL, newName);
         }
 
-        // Return the copy
-        return copy;
+        if (destinationQName == null)
+        {
+            // Change a QName of the new node accordingly to its name
+            destinationQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, QName.createValidLocalName(newName));
+        }
+        // Make a copy
+        result = copy(sourceNodeRef, destinationParent, destinationAssocTypeQName, destinationQName, copyChildren);
+        // Set name property
+        this.internalNodeService.setProperty(result, ContentModel.PROP_NAME, newName);
+        // Return new NodeRef
+        return result;
     }
 
     /**
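A hedged usage sketch of the merged behaviour, mirroring the unit test added in the next file (copyService, contentNode and folderNode are assumed to be set up): passing a null destinationQName now makes the service derive both a unique cm:name and a matching association QName.

    // Hedged sketch; first copy of "myDoc.txt" into the same folder
    NodeRef copy = copyService.copyAndRename(
            contentNode,                  // node to copy
            folderNode,                   // destination parent
            ContentModel.ASSOC_CONTAINS,  // association type
            null,                         // null => QName derived from the de-duplicated name
            false);                       // do not copy children
    // the name becomes "Copy of myDoc.txt" and the primary association QName matches it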

View File

@@ -660,6 +660,35 @@ public class CopyServiceImplTest extends BaseSpringTest
         assertFalse(TEST_NAME.equals(this.nodeService.getProperty(contentCopy, ContentModel.PROP_NAME)));
     }
 
+    /**
+     * https://issues.alfresco.com/jira/browse/ETWOONE-224
+     */
+    public void testETWOONE_244()
+    {
+        // Create a folder and content node
+        Map<QName, Serializable> propsFolder = new HashMap<QName, Serializable>(1);
+        propsFolder.put(ContentModel.PROP_NAME, "tempFolder");
+        NodeRef folderNode = this.nodeService.createNode(this.rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "tempFolder"), ContentModel.TYPE_FOLDER, propsFolder).getChildRef();
+        Map<QName, Serializable> props = new HashMap<QName, Serializable>(1);
+        props.put(ContentModel.PROP_NAME, "myDoc.txt");
+        NodeRef contentNode = this.nodeService.createNode(folderNode, ContentModel.ASSOC_CONTAINS, QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "myDoc.txt"), ContentModel.TYPE_CONTENT, props).getChildRef();
+
+        NodeRef copy = this.copyService.copyAndRename(contentNode, folderNode, ContentModel.ASSOC_CONTAINS, null, false);
+        assertEquals("Copy of myDoc.txt", this.nodeService.getProperty(copy, ContentModel.PROP_NAME));
+        QName copyQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "Copy of myDoc.txt");
+        assertEquals(copyQName, this.nodeService.getPrimaryParent(copy).getQName());
+
+        copy = this.copyService.copyAndRename(contentNode, folderNode, ContentModel.ASSOC_CONTAINS, null, false);
+        assertEquals("Copy of Copy of myDoc.txt", this.nodeService.getProperty(copy, ContentModel.PROP_NAME));
+        copyQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "Copy of Copy of myDoc.txt");
+        assertEquals(copyQName, this.nodeService.getPrimaryParent(copy).getQName());
+
+        copy = this.copyService.copyAndRename(contentNode, folderNode, ContentModel.ASSOC_CONTAINS, null, false);
+        assertEquals("Copy of Copy of Copy of myDoc.txt", this.nodeService.getProperty(copy, ContentModel.PROP_NAME));
+        copyQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "Copy of Copy of Copy of myDoc.txt");
+        assertEquals(copyQName, this.nodeService.getPrimaryParent(copy).getQName());
+    }
+
     /**
      * Check that the copied node contains the state we are expecting
      *

View File

@@ -57,6 +57,8 @@ import org.alfresco.repo.remote.AVMSyncServiceRemote;
 import org.alfresco.repo.remote.ClientTicketHolder;
 import org.alfresco.repo.remote.ClientTicketHolderThread;
 import org.alfresco.repo.security.authentication.AuthenticationUtil;
+import org.alfresco.repo.transaction.RetryingTransactionHelper;
+import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
 import org.alfresco.service.cmr.action.ActionService;
 import org.alfresco.service.cmr.action.ActionServiceTransport;
 import org.alfresco.service.cmr.avm.AVMException;
@@ -76,6 +78,7 @@ import org.alfresco.service.cmr.remote.AVMSyncServiceTransport;
 import org.alfresco.service.cmr.repository.ContentData;
 import org.alfresco.service.cmr.security.AuthenticationService;
 import org.alfresco.service.namespace.QName;
+import org.alfresco.service.transaction.TransactionService;
 import org.alfresco.util.NameMatcher;
 import org.alfresco.util.Pair;
 import org.apache.commons.logging.Log;
@@ -100,6 +103,11 @@ public class DeploymentServiceImpl implements DeploymentService
      */
     private AVMService fAVMService;
 
+    /**
+     * The local Transaction Service Instance
+     */
+    TransactionService trxService;
+
     /**
      * The Ticket holder.
      */
@@ -128,6 +136,15 @@ public class DeploymentServiceImpl implements DeploymentService
         fAVMService = service;
     }
 
+    /**
+     * Setter.
+     * @param trxService The instance to set.
+     */
+    public void setTransactionService(TransactionService trxService)
+    {
+        this.trxService = trxService;
+    }
+
     /*
      * Deploy differences to an ASR
      * (non-Javadoc)
@@ -848,7 +865,7 @@ public class DeploymentServiceImpl implements DeploymentService
             SendQueueWorker[] workers = new SendQueueWorker[numberOfSendingThreads];
             for(int i = 0; i < numberOfSendingThreads; i++)
             {
-                workers[i] = new SendQueueWorker(currentEffectiveUser, service, fAVMService, errors, eventQueue, sendQueue, transformers);
+                workers[i] = new SendQueueWorker(currentEffectiveUser, service, fAVMService, trxService, errors, eventQueue, sendQueue, transformers);
                 workers[i].setName(workers[i].getClass().getName());
                 workers[i].setPriority(Thread.currentThread().getPriority());
             }
@@ -1329,6 +1346,7 @@ public class DeploymentServiceImpl implements DeploymentService
         private DeploymentReceiverService service;
         private String userName;
         private AVMService avmService;
+        private TransactionService trxService;
         List<Exception> errors;
         List<DeploymentTransportOutputFilter> transformers;
@@ -1337,6 +1355,7 @@ public class DeploymentServiceImpl implements DeploymentService
         SendQueueWorker(String userName,
                 DeploymentReceiverService service,
                 AVMService avmService,
+                TransactionService trxService,
                 List<Exception> errors,
                 BlockingQueue<DeploymentEvent> eventQueue,
                 BlockingQueue<DeploymentWork> sendQueue,
@@ -1347,11 +1366,10 @@ public class DeploymentServiceImpl implements DeploymentService
             this.sendQueue = sendQueue;
             this.service = service;
             this.avmService = avmService;
+            this.trxService = trxService;
             this.errors = errors;
             this.transformers = transformers;
             this.userName = userName;
         }
 
         public void run()
@@ -1434,11 +1452,18 @@ public class DeploymentServiceImpl implements DeploymentService
          * @param dstPath where to copy the file
          */
         private void copyFileToFSR(
-                AVMNodeDescriptor src,
-                String dstPath,
-                String ticket)
+                final AVMNodeDescriptor src,
+                final String dstPath,
+                final String ticket)
         {
             try
             {
+                // Perform copy within 'read only' transaction
+                RetryingTransactionHelper trx = trxService.getRetryingTransactionHelper();
+                trx.setMaxRetries(1);
+                trx.doInTransaction(new RetryingTransactionCallback<Boolean>()
+                {
+                    public Boolean execute() throws Exception
+                    {
                 InputStream in = avmService.getFileInputStream(src);
@@ -1459,6 +1484,9 @@ public class DeploymentServiceImpl implements DeploymentService
                 copyStream(in, out);
                 service.finishSend(ticket, baseStream);
+                        return true;
+                    }
+                }, true);
             }
             catch (Exception e)
             {
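The change wraps the file transfer in Alfresco's retrying-transaction idiom with the read-only flag set (the final boolean argument of doInTransaction). A minimal hedged sketch of the idiom in isolation, assuming an injected TransactionService named transactionService:

    // Hedged sketch of the read-only retrying-transaction idiom used above
    RetryingTransactionHelper helper = transactionService.getRetryingTransactionHelper();
    helper.setMaxRetries(1); // give up after the first failed attempt
    Boolean done = helper.doInTransaction(new RetryingTransactionCallback<Boolean>()
    {
        public Boolean execute() throws Exception
        {
            // do the transactional work here; a retryable failure would re-run this
            return Boolean.TRUE;
        }
    }, true); // true => read-only transaction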

View File

@@ -249,9 +249,11 @@
     <query name="node.GetNodeByStoreIdAndUuid">
         select
-            node
+            node,
+            acl
         from
             org.alfresco.repo.domain.hibernate.NodeImpl as node
+            left outer join node.accessControlList as acl
         where
             node.store.id = :storeId and
             node.uuid = :uuid
@@ -531,7 +533,7 @@
             assoc.id
     </query>
-    <query name="node.GetNodesWithChildrenInDifferentStores">
+    <query name="node.GetNodesWithChildrenInDifferentStore">
         select
             parent.id,
             parentStore.protocol,
@@ -542,8 +544,10 @@
             join assoc.parent as parent
             join parent.store as parentStore
             join assoc.child as child
+            join child.store as childStore
         where
-            child.store.id != parent.store.id and
+            parentStore.id = :parentStoreId and
+            childStore.id != :parentStoreId and
             parent.id > :minNodeId and
             assoc.isPrimary = true
         order by
@@ -704,4 +708,23 @@
             props.serializableValue is not null
     </query>
+
+    <query name="node.GetDeletedNodesByMaxTxnId">
+        <![CDATA[
+        select
+            node.id,
+            node.store.protocol,
+            node.store.identifier,
+            node.uuid
+        from
+            org.alfresco.repo.domain.hibernate.NodeImpl as node
+            join node.transaction as txn
+        where
+            node.id >= :minNodeId and
+            node.deleted = true and
+            txn.id <= :maxTxnId
+        order by
+            node.id asc
+        ]]>
+    </query>
 </hibernate-mapping>
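A hedged sketch of how the DAO side would execute the new node.GetDeletedNodesByMaxTxnId named query, with the callback style modelled on HibernateNodeDaoServiceImpl later in this change set (the concrete parameter values and the Spring Hibernate3 support package are assumptions):

    import org.hibernate.Query;
    import org.hibernate.ScrollMode;
    import org.hibernate.Session;
    import org.springframework.orm.hibernate3.HibernateCallback;

    // Hedged sketch; inside a DAO method, with getHibernateTemplate().execute(callback)
    HibernateCallback callback = new HibernateCallback()
    {
        public Object doInHibernate(Session session)
        {
            Query query = session.getNamedQuery("node.GetDeletedNodesByMaxTxnId");
            query.setLong("minNodeId", 0L);    // lower bound for batching
            query.setLong("maxTxnId", 12345L); // only nodes in old-enough transactions
            return query.scroll(ScrollMode.FORWARD_ONLY);
        }
    };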

View File

@@ -88,6 +88,17 @@
             org.alfresco.repo.domain.hibernate.TransactionImpl as txn
     </query>
+
+    <query name="txn.GetMaxIdByCommitTime">
+        <![CDATA[
+        select
+            max(txn.id)
+        from
+            org.alfresco.repo.domain.hibernate.TransactionImpl as txn
+        where
+            txn.commitTimeMs <= :maxCommitTime
+        ]]>
+    </query>
     <query name="txn.GetTxnsByCommitTimeAsc">
         <![CDATA[
         select
@@ -185,4 +196,20 @@
             node.transaction.id = :txnId
     </query>
+
+    <query name="txn.GetTxnsUnused">
+        <![CDATA[
+        select
+            txn.id
+        from
+            org.alfresco.repo.domain.hibernate.NodeImpl as node
+            right join node.transaction as txn
+        where
+            node.id is null and
+            txn.id >= :minTxnId and
+            txn.commitTimeMs <= :maxCommitTime
+        order by
+            txn.id asc
+        ]]>
+    </query>
 </hibernate-mapping>

View File

@@ -413,7 +413,7 @@ public final class People extends BaseScopableProcessorExtension
         ScriptNode group = null;
         String actualName = services.getAuthorityService().getName(AuthorityType.GROUP, groupName);
-        if (authorityService.authorityExists(groupName) == false)
+        if (authorityService.authorityExists(actualName) == false)
         {
             String parentGroupName = null;
             if (parentGroup != null)

View File

@@ -1409,7 +1409,7 @@ public class ScriptNode implements Serializable, Scopeable
         if (destination.getNodeRef().getStoreRef().getProtocol().equals(StoreRef.PROTOCOL_WORKSPACE))
         {
             NodeRef copyRef = this.services.getCopyService().copyAndRename(this.nodeRef, destination.getNodeRef(),
-                    ContentModel.ASSOC_CONTAINS, getPrimaryParentAssoc().getQName(), deepCopy);
+                    ContentModel.ASSOC_CONTAINS, null, deepCopy);
             copy = newInstance(copyRef, this.services, this.scope);
         }
         else

View File

@@ -30,7 +30,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.locks.ReentrantLock;
 
 import org.alfresco.model.ContentModel;
 import org.alfresco.repo.node.NodeServicePolicies.BeforeAddAspectPolicy;
@@ -59,8 +58,6 @@ import org.alfresco.repo.policy.AssociationPolicyDelegate;
 import org.alfresco.repo.policy.ClassPolicyDelegate;
 import org.alfresco.repo.policy.PolicyComponent;
 import org.alfresco.repo.search.Indexer;
-import org.alfresco.repo.security.authentication.AuthenticationUtil;
-import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
 import org.alfresco.service.cmr.dictionary.ClassDefinition;
 import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
 import org.alfresco.service.cmr.dictionary.DictionaryService;
@@ -648,47 +645,4 @@ public abstract class AbstractNodeServiceImpl implements NodeService
         }
         return properties;
     }
-
-    /**
-     * Override to implement cleanup processes. The default does nothing.
-     * <p>
-     * This method will be called as the <b>system</b> user but without any
-     * additional transactions.
-     */
-    protected List<String> cleanupImpl()
-    {
-        // No operation
-        return Collections.emptyList();
-    }
-
-    /** Prevent multiple executions of the implementation method */
-    private ReentrantLock cleanupLock = new ReentrantLock();
-
-    public final List<String> cleanup()
-    {
-        boolean locked = cleanupLock.tryLock();
-        if (locked)
-        {
-            try
-            {
-                // Authenticate as system
-                RunAsWork<List<String>> cleanupWork = new RunAsWork<List<String>>()
-                {
-                    public List<String> doWork() throws Exception
-                    {
-                        // The current thread got the lock
-                        return cleanupImpl();
-                    }
-                };
-                return AuthenticationUtil.runAs(cleanupWork, AuthenticationUtil.SYSTEM_USER_NAME);
-            }
-            finally
-            {
-                cleanupLock.unlock();
-            }
-        }
-        else
-        {
-            return Collections.emptyList();
-        }
-    }
 }

View File

@@ -0,0 +1,147 @@
package org.alfresco.repo.node.cleanup;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.locks.ReentrantLock;
import org.alfresco.error.StackTraceUtil;
import org.alfresco.repo.node.db.DbNodeServiceImpl;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.PropertyCheck;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Base class for Node cleaners. This class ensures calls through
* after having created a read-write transaction that is authenticated
* as system.
*
* @author Derek Hulley
* @since 2.2 SP2
*/
public abstract class AbstractNodeCleanupWorker implements NodeCleanupWorker
{
protected final Log logger;
private final ReentrantLock cleanupLock;
private NodeCleanupRegistry registry;
protected TransactionService transactionService;
protected DbNodeServiceImpl dbNodeService;
protected NodeDaoService nodeDaoService;
public AbstractNodeCleanupWorker()
{
logger = LogFactory.getLog(this.getClass());
cleanupLock = new ReentrantLock();
}
public void setRegistry(NodeCleanupRegistry registry)
{
this.registry = registry;
}
public void setTransactionService(TransactionService transactionService)
{
this.transactionService = transactionService;
}
public void setDbNodeService(DbNodeServiceImpl dbNodeService)
{
this.dbNodeService = dbNodeService;
}
public void setNodeDaoService(NodeDaoService nodeDaoService)
{
this.nodeDaoService = nodeDaoService;
}
public void register()
{
PropertyCheck.mandatory(this, "registry", registry);
PropertyCheck.mandatory(this, "transactionService", transactionService);
PropertyCheck.mandatory(this, "dbNodeService", dbNodeService);
PropertyCheck.mandatory(this, "nodeDaoService", nodeDaoService);
registry.register(this);
}
/**
* Calls {@link #doCleanInternal()} in a System-user authenticated read-write transaction.
* This method is non-blocking but passes all second and subsequent concurrent invocations
* straight through.
*/
public List<String> doClean()
{
/** Prevent multiple executions of the implementation method */
boolean locked = cleanupLock.tryLock();
if (locked)
{
try
{
return doCleanWithTxn();
}
catch (Throwable e)
{
if (logger.isDebugEnabled())
{
StringBuilder sb = new StringBuilder(1024);
StackTraceUtil.buildStackTrace(
"Node cleanup failed: " +
" Worker: " + this.getClass().getName() + "\n" +
" Error: ",
e.getStackTrace(),
sb,
Integer.MAX_VALUE);
logger.debug(sb.toString());
}
StringBuilder sb = new StringBuilder(1024);
StackTraceUtil.buildStackTrace(
"Node cleanup failed: " +
" Worker: " + this.getClass().getName() + "\n" +
" Error: ",
e.getStackTrace(),
sb,
20);
return Collections.singletonList(sb.toString());
}
finally
{
cleanupLock.unlock();
}
}
else
{
return Collections.emptyList();
}
}
private List<String> doCleanWithTxn()
{
final RetryingTransactionCallback<List<String>> doCleanCallback = new RetryingTransactionCallback<List<String>>()
{
public List<String> execute() throws Throwable
{
return doCleanInternal();
}
};
final RunAsWork<List<String>> doCleanRunAs = new RunAsWork<List<String>>()
{
public List<String> doWork() throws Exception
{
return transactionService.getRetryingTransactionHelper().doInTransaction(doCleanCallback, false, true);
}
};
return AuthenticationUtil.runAs(doCleanRunAs, AuthenticationUtil.getSystemUserName());
}
/**
* Do the actual cleanup. Any errors are handled by this base class.
*
* @return Returns the cleanup messages.
*/
protected abstract List<String> doCleanInternal() throws Throwable;
}
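
A hedged sketch of the smallest possible worker built on this base class (the class name and message are invented for illustration). doClean() supplies the system-user identity, the read-write retrying transaction, and the error handling, so a subclass only implements doCleanInternal():

    package org.alfresco.repo.node.cleanup;

    import java.util.Collections;
    import java.util.List;

    // Hypothetical example worker; wire it up like the nodeCleanup.* beans above
    public class NoOpCleanupWorker extends AbstractNodeCleanupWorker
    {
        @Override
        protected List<String> doCleanInternal() throws Throwable
        {
            // Runs as the system user inside a read-write retrying transaction
            return Collections.singletonList("Nothing to clean");
        }
    }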

View File

@@ -22,36 +22,50 @@
  * the FLOSS exception, and it is also available here:
  * http://www.alfresco.com/legal/licensing"
  */
-package org.alfresco.repo.node.db;
+package org.alfresco.repo.node.cleanup;
 
+import java.util.List;
+
 import org.alfresco.error.AlfrescoRuntimeException;
-import org.alfresco.service.cmr.repository.NodeService;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.quartz.Job;
 import org.quartz.JobDataMap;
 import org.quartz.JobExecutionContext;
 import org.quartz.JobExecutionException;
 
 /**
- * Prompts the Node Service to perform regular cleanup operations.
+ * Scheduled job to call a {@link NodeCleanupWorker}.
+ * <p>
+ * Job data is: <b>nodeCleanupWorker</b>
  *
- * @see NodeService#cleanup()
- *
  * @author Derek Hulley
- * @since 2.1.6
+ * @since 2.2SP2
  */
-public class NodeServiceCleanupJob implements Job
+public class NodeCleanupJob implements Job
 {
+    private static Log logger = LogFactory.getLog(NodeCleanupJob.class);
+
     public void execute(JobExecutionContext context) throws JobExecutionException
     {
         JobDataMap jobData = context.getJobDetail().getJobDataMap();
-        // extract the content cleaner to use
-        Object nodeServiceObj = jobData.get("nodeService");
-        if (nodeServiceObj == null || !(nodeServiceObj instanceof NodeService))
+        // extract the content Cleanup to use
+        Object nodeCleanupWorkerObj = jobData.get("nodeCleanupWorker");
+        if (nodeCleanupWorkerObj == null || !(nodeCleanupWorkerObj instanceof NodeCleanupWorker))
         {
             throw new AlfrescoRuntimeException(
-                    "NodeServiceCleanupJob data must contain valid 'nodeService' reference");
+                    "NodeCleanupJob data must contain valid 'nodeCleanupWorker' reference");
         }
-        NodeService nodeService = (NodeService) nodeServiceObj;
-        nodeService.cleanup();
+        NodeCleanupWorker nodeCleanupWorker = (NodeCleanupWorker) nodeCleanupWorkerObj;
+        List<String> cleanupLog = nodeCleanupWorker.doClean();
+        // Done
+        if (logger.isDebugEnabled())
+        {
+            logger.debug("Node cleanup log:");
+            for (String log : cleanupLog)
+            {
+                logger.debug(log);
+            }
+        }
     }
 }

View File

@@ -0,0 +1,65 @@
package org.alfresco.repo.node.cleanup;
import java.util.ArrayList;
import java.util.List;
import org.alfresco.error.StackTraceUtil;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* A {@link NodeCleanupWorker worker} that aggregates any number of
* {@link #register(NodeCleanupWorker) registered} workers.
*
* @author Derek Hulley
* @since 2.2 SP2
*/
public class NodeCleanupRegistry implements NodeCleanupWorker
{
private static Log logger = LogFactory.getLog(NodeCleanupRegistry.class);
private List<NodeCleanupWorker> cleanupWorkers;
public NodeCleanupRegistry()
{
cleanupWorkers = new ArrayList<NodeCleanupWorker>(5);
}
public void register(NodeCleanupWorker cleanupWorker)
{
cleanupWorkers.add(cleanupWorker);
}
/**
* Calls all registered cleaners in order, without transactions or authentication.
* The return messages are aggregated.
*/
public List<String> doClean()
{
List<String> results = new ArrayList<String>(100);
for (NodeCleanupWorker cleanupWorker : cleanupWorkers)
{
try
{
results.addAll(cleanupWorker.doClean());
}
catch (Throwable e)
{
// This failed. The cleaner should be handling this, but we can't guarantee it.
logger.error(
"NodeCleanupWork doesn't handle all exception conditions: " +
cleanupWorker.getClass().getName());
StringBuilder sb = new StringBuilder(1024);
StackTraceUtil.buildStackTrace(
"Node cleanup failed: " +
" Worker: " + cleanupWorker.getClass().getName() + "\n" +
" Error: ",
e.getStackTrace(),
sb,
20);
results.add(sb.toString());
}
}
return results;
}
}

View File

@@ -0,0 +1,20 @@
package org.alfresco.repo.node.cleanup;
import java.util.List;
/**
* Interface for classes that implement a snippet of node cleanup.
*
* @author Derek Hulley
* @since 2.2 SP2
*/
public interface NodeCleanupWorker
{
/**
* Perform some work to clean up data. All errors must be handled and converted
* to error messages.
*
* @return Returns a list of informational messages.
*/
List<String> doClean();
}

View File

@@ -42,6 +42,7 @@ import org.alfresco.model.ContentModel;
 import org.alfresco.repo.domain.Node;
 import org.alfresco.repo.node.AbstractNodeServiceImpl;
 import org.alfresco.repo.node.StoreArchiveMap;
+import org.alfresco.repo.node.cleanup.AbstractNodeCleanupWorker;
 import org.alfresco.repo.node.db.NodeDaoService.NodeRefQueryCallback;
 import org.alfresco.repo.node.index.NodeIndexer;
 import org.alfresco.repo.security.authentication.AuthenticationUtil;
@@ -185,7 +186,12 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
     public List<StoreRef> getStores()
     {
         // Get the ADM stores
-        List<StoreRef> storeRefs = nodeDaoService.getStoreRefs();
+        List<Pair<Long, StoreRef>> stores = nodeDaoService.getStores();
+        List<StoreRef> storeRefs = new ArrayList<StoreRef>(50);
+        for (Pair<Long, StoreRef> pair : stores)
+        {
+            storeRefs.add(pair.getSecond());
+        }
         // Now get the AVMStores.
         List<StoreRef> avmStores = avmNodeService.getStores();
         storeRefs.addAll(avmStores);
@@ -2059,7 +2065,7 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
         }
     }
 
-    private void indexChildren(Pair<Long, NodeRef> nodePair, boolean cascade)
+    public void indexChildren(Pair<Long, NodeRef> nodePair, boolean cascade)
     {
         Long nodeId = nodePair.getFirst();
         // Get the node's children, but only one's that aren't in the same store
@@ -2162,21 +2168,29 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
         }
     }
 
-    @Override
-    protected List<String> cleanupImpl()
+    public static class MoveChildrenToCorrectStore extends AbstractNodeCleanupWorker
     {
-        List<String> moveChildrenResults = moveChildrenToCorrectStore();
-        List<String> indexChildrenResults = indexChildrenWhereRequired();
-
-        List<String> allResults = new ArrayList<String>(100);
-        allResults.addAll(moveChildrenResults);
-        allResults.addAll(indexChildrenResults);
-        // Done
-        return allResults;
-    }
+        @Override
+        protected List<String> doCleanInternal() throws Throwable
+        {
+            return dbNodeService.moveChildrenToCorrectStore();
+        }
+    };
 
     private List<String> moveChildrenToCorrectStore()
+    {
+        List<String> results = new ArrayList<String>(1000);
+        // Repeat the process for each store
+        List<Pair<Long, StoreRef>> storePairs = nodeDaoService.getStores();
+        for (Pair<Long, StoreRef> storePair : storePairs)
+        {
+            List<String> storeResults = moveChildrenToCorrectStore(storePair.getFirst());
+            results.addAll(storeResults);
+        }
+        return results;
+    }
+
+    private List<String> moveChildrenToCorrectStore(final Long storeId)
     {
         final List<Pair<Long, NodeRef>> parentNodePairs = new ArrayList<Pair<Long, NodeRef>>(100);
         final NodeRefQueryCallback callback = new NodeRefQueryCallback()
@@ -2191,7 +2205,7 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
         {
             public Object execute() throws Throwable
             {
-                nodeDaoService.getNodesWithChildrenInDifferentStores(Long.MIN_VALUE, 100, callback);
+                nodeDaoService.getNodesWithChildrenInDifferentStore(storeId, Long.MIN_VALUE, 100, callback);
                 // Done
                 return null;
             }
@@ -2226,11 +2240,19 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
             catch (Throwable e)
             {
                 String msg =
-                        "Failed to move child nodes to parent node's store: \n" +
+                        "Failed to move child nodes to parent node's store." +
+                        "  Set log level to WARN for this class to get exception log: \n" +
                         "  Parent node: " + parentNodePair.getFirst() + "\n" +
                         "  Error: " + e.getMessage();
-                // It failed, which is not an error to consider here
-                logger.warn(msg, e);
+                // It failed; do a full log in WARN mode
+                if (logger.isWarnEnabled())
+                {
+                    logger.warn(msg, e);
+                }
+                else
+                {
+                    logger.error(msg);
+                }
                 results.add(msg);
             }
         }
@@ -2248,88 +2270,4 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
         }
         return results;
     }
-
-    private List<String> indexChildrenWhereRequired()
-    {
-        final List<Pair<Long, NodeRef>> parentNodePairs = new ArrayList<Pair<Long, NodeRef>>(100);
-        final NodeRefQueryCallback callback = new NodeRefQueryCallback()
-        {
-            public boolean handle(Pair<Long, NodeRef> nodePair)
-            {
-                parentNodePairs.add(nodePair);
-                return true;
-            }
-        };
-        RetryingTransactionCallback<Object> getNodesCallback = new RetryingTransactionCallback<Object>()
-        {
-            public Object execute() throws Throwable
-            {
-                nodeDaoService.getNodesWithAspect(ContentModel.ASPECT_INDEX_CHILDREN, Long.MIN_VALUE, 100, callback);
-                // Done
-                return null;
-            }
-        };
-        transactionService.getRetryingTransactionHelper().doInTransaction(getNodesCallback, true, true);
-        // Process the nodes in random order
-        Collections.shuffle(parentNodePairs);
-        // Iterate and operate
-        List<String> results = new ArrayList<String>(100);
-        for (final Pair<Long, NodeRef> parentNodePair : parentNodePairs)
-        {
-            RetryingTransactionCallback<String> indexChildrenCallback = new RetryingTransactionCallback<String>()
-            {
-                public String execute() throws Throwable
-                {
-                    // Index children without full cascade
-                    indexChildren(parentNodePair, true);
-                    // Done
-                    return null;
-                }
-            };
-            RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
-            txnHelper.setMaxRetries(1);
-            try
-            {
-                txnHelper.doInTransaction(indexChildrenCallback, false, true);
-                String msg =
-                        "Indexed child nodes: \n" +
-                        "  Parent node: " + parentNodePair.getFirst();
-                results.add(msg);
-            }
-            catch (Throwable e)
-            {
-                String msg =
-                        "Failed to index child nodes: \n" +
-                        "  Parent node: " + parentNodePair.getFirst() + "\n" +
-                        "  Error: " + e.getMessage();
-                // It failed, which is not an error to consider here
-                logger.warn(msg, e);
-                results.add(msg);
-            }
-        }
-        // Done
-        if (logger.isDebugEnabled())
-        {
-            StringBuilder sb = new StringBuilder(256);
-            sb.append("Indexed child nodes: \n")
-              .append("  Results:\n");
-            for (String msg : results)
-            {
-                sb.append("  ").append(msg).append("\n");
-            }
-            logger.debug(sb.toString());
-        }
-        return results;
-    }
-
-    /**
-     * Cleans up transactions and deleted nodes that are older than the given minimum age.
-     *
-     * @param minAge the minimum age of a transaction or deleted node
-     * @return Returns log message results
-     */
-    private List<String> cleanUpTransactions(long minAge)
-    {
-        return null;
-    }
 }

View File

@@ -39,6 +39,7 @@ import org.alfresco.model.ContentModel;
 import org.alfresco.repo.content.MimetypeMap;
 import org.alfresco.repo.node.BaseNodeServiceTest;
 import org.alfresco.repo.node.StoreArchiveMap;
+import org.alfresco.repo.node.cleanup.NodeCleanupRegistry;
 import org.alfresco.repo.node.db.NodeDaoService.NodePropertyHandler;
 import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
 import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
@@ -476,8 +477,14 @@ public class DbNodeServiceImplTest extends BaseNodeServiceTest
         setComplete();
         endTransaction();
 
+        NodeCleanupRegistry nodeCleanupRegistry = new NodeCleanupRegistry();
+        DbNodeServiceImpl.MoveChildrenToCorrectStore worker = new DbNodeServiceImpl.MoveChildrenToCorrectStore();
+        worker.setTransactionService(transactionService);
+        worker.setDbNodeService(ns);
+        worker.setNodeDaoService(nodeDaoService);
+
         // Run cleanup
-        ns.cleanup();
+        worker.doClean();
     }
 
     /**

View File

@@ -0,0 +1,244 @@
package org.alfresco.repo.node.db;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.alfresco.repo.node.cleanup.AbstractNodeCleanupWorker;
import org.alfresco.repo.node.db.NodeDaoService.NodeRefQueryCallback;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.util.Pair;
import org.apache.commons.lang.mutable.MutableLong;
/**
* Cleans up deleted nodes and dangling transactions that are old enough.
*
* @author Derek Hulley
* @since 2.2 SP2
*/
public class DeletedNodeCleanupWorker extends AbstractNodeCleanupWorker
{
private long minPurgeAgeMs;
/**
* Default constructor
*/
public DeletedNodeCleanupWorker()
{
minPurgeAgeMs = 7L * 24L * 3600L * 1000L;
}
/**
* {@inheritDoc}
*/
protected List<String> doCleanInternal() throws Throwable
{
List<String> purgedNodes = purgeOldDeletedNodes(minPurgeAgeMs);
List<String> purgedTxns = purgeOldEmptyTransactions(minPurgeAgeMs);
List<String> allResults = new ArrayList<String>(100);
allResults.addAll(purgedNodes);
allResults.addAll(purgedTxns);
// Done
return allResults;
}
/**
* Set the minimum age (days) that nodes and transactions must be before they get purged.
* The default is 7 days.
*
* @param minPurgeAgeDays the minimum age (in days) before nodes and transactions get purged
*/
public void setMinPurgeAgeDays(int minPurgeAgeDays)
{
this.minPurgeAgeMs = ((long) minPurgeAgeDays) * 24L * 3600L * 1000L;
}
private static final int NODE_PURGE_BATCH_SIZE = 1000;
/**
* Cleans up deleted nodes that are older than the given minimum age.
*
* @param minAge the minimum age of a transaction or deleted node
* @return Returns log message results
*/
private List<String> purgeOldDeletedNodes(long minAge)
{
if (minAge < 0)
{
return Collections.emptyList();
}
final List<String> results = new ArrayList<String>(100);
final MutableLong minNodeId = new MutableLong(0L);
final long maxCommitTime = System.currentTimeMillis() - minAge;
RetryingTransactionCallback<Integer> purgeNodesCallback = new RetryingTransactionCallback<Integer>()
{
public Integer execute() throws Throwable
{
final List<Pair<Long, NodeRef>> nodePairs = new ArrayList<Pair<Long, NodeRef>>(NODE_PURGE_BATCH_SIZE);
NodeRefQueryCallback callback = new NodeRefQueryCallback()
{
public boolean handle(Pair<Long, NodeRef> nodePair)
{
nodePairs.add(nodePair);
return true;
}
};
nodeDaoService.getNodesDeletedInOldTxns(minNodeId.longValue(), maxCommitTime, NODE_PURGE_BATCH_SIZE, callback);
for (Pair<Long, NodeRef> nodePair : nodePairs)
{
Long nodeId = nodePair.getFirst();
nodeDaoService.purgeNode(nodeId);
// Update the min node ID for the next query
if (nodeId.longValue() > minNodeId.longValue())
{
minNodeId.setValue(nodeId.longValue());
}
}
return nodePairs.size();
}
};
while (true)
{
RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
txnHelper.setMaxRetries(5); // Limit number of retries
txnHelper.setRetryWaitIncrementMs(1000); // 1 second to allow other cleanups time to get through
// Get nodes to delete
Integer purgeCount = new Integer(0);
// Purge nodes
try
{
purgeCount = txnHelper.doInTransaction(purgeNodesCallback, false, true);
if (purgeCount.intValue() > 0)
{
String msg =
"Purged old nodes: \n" +
" Min node ID: " + minNodeId.longValue() + "\n" +
" Batch size: " + NODE_PURGE_BATCH_SIZE + "\n" +
" Max commit time: " + maxCommitTime + "\n" +
" Purge count: " + purgeCount;
results.add(msg);
}
}
catch (Throwable e)
{
String msg =
"Failed to purge nodes." +
" Set log level to WARN for this class to get exception log: \n" +
" Min node ID: " + minNodeId.longValue() + "\n" +
" Batch size: " + NODE_PURGE_BATCH_SIZE + "\n" +
" Max commit time: " + maxCommitTime + "\n" +
" Error: " + e.getMessage();
// It failed; do a full log in WARN mode
if (logger.isWarnEnabled())
{
logger.warn(msg, e);
}
else
{
logger.error(msg);
}
results.add(msg);
break;
}
if (purgeCount.intValue() == 0)
{
break;
}
}
// Done
return results;
}
private static final int TXN_PURGE_BATCH_SIZE = 50;
/**
* Cleans up unused transactions that are older than the given minimum age.
*
* @param minAge the minimum age of a transaction or deleted node
* @return Returns log message results
*/
private List<String> purgeOldEmptyTransactions(long minAge)
{
if (minAge < 0)
{
return Collections.emptyList();
}
final List<String> results = new ArrayList<String>(100);
final MutableLong minTxnId = new MutableLong(0L);
final long maxCommitTime = System.currentTimeMillis() - minAge;
RetryingTransactionCallback<Integer> purgeTxnsCallback = new RetryingTransactionCallback<Integer>()
{
public Integer execute() throws Throwable
{
final List<Long> txnIds = nodeDaoService.getTxnsUnused(
minTxnId.longValue(),
maxCommitTime,
TXN_PURGE_BATCH_SIZE);
for (Long txnId : txnIds)
{
nodeDaoService.purgeTxn(txnId);
// Update the min node ID for the next query
if (txnId.longValue() > minTxnId.longValue())
{
minTxnId.setValue(txnId.longValue());
}
}
return txnIds.size();
}
};
while (true)
{
RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
txnHelper.setMaxRetries(5); // Limit number of retries
txnHelper.setRetryWaitIncrementMs(1000); // 1 second to allow other cleanups time to get through
// Get nodes to delete
Integer purgeCount = new Integer(0);
// Purge nodes
try
{
purgeCount = txnHelper.doInTransaction(purgeTxnsCallback, false, true);
if (purgeCount.intValue() > 0)
{
String msg =
"Purged old txns: \n" +
" Min txn ID: " + minTxnId.longValue() + "\n" +
" Batch size: " + TXN_PURGE_BATCH_SIZE + "\n" +
" Max commit time: " + maxCommitTime + "\n" +
" Purge count: " + purgeCount;
results.add(msg);
}
}
catch (Throwable e)
{
String msg =
"Failed to purge txns." +
" Set log level to WARN for this class to get exception log: \n" +
" Min txn ID: " + minTxnId.longValue() + "\n" +
" Batch size: " + TXN_PURGE_BATCH_SIZE + "\n" +
" Max commit time: " + maxCommitTime + "\n" +
" Error: " + e.getMessage();
// It failed; do a full log in WARN mode
if (logger.isWarnEnabled())
{
logger.warn(msg, e);
}
else
{
logger.error(msg);
}
results.add(msg);
break;
}
if (purgeCount.intValue() == 0)
{
break;
}
}
// Done
return results;
}
}
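
A hedged configuration sketch tying this worker back to the Spring wiring and property shown earlier: setMinPurgeAgeDays converts days to milliseconds, and any negative value (e.g. -1 from index.tracking.minRecordPurgeAgeDays) makes both purge methods return immediately.

    // Hedged sketch; Spring normally injects ${index.tracking.minRecordPurgeAgeDays}
    DeletedNodeCleanupWorker worker = new DeletedNodeCleanupWorker();
    worker.setMinPurgeAgeDays(30); // 30 days * 24 * 3600 * 1000 = 2,592,000,000 ms
    // setMinPurgeAgeDays(-1) would yield a negative age, disabling both purges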

View File

@@ -0,0 +1,124 @@
package org.alfresco.repo.node.db;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.node.cleanup.AbstractNodeCleanupWorker;
import org.alfresco.repo.node.db.NodeDaoService.NodeRefQueryCallback;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.util.Pair;
/**
* Indexes child nodes where cascade re-indexing is disabled.
*
* @author Derek Hulley
* @since 2.2 SP2
*/
public class IndexChildrenWhereRequiredWorker extends AbstractNodeCleanupWorker
{
/**
* Default constructor
*/
public IndexChildrenWhereRequiredWorker()
{
}
/**
* {@inheritDoc}
*/
protected List<String> doCleanInternal() throws Throwable
{
List<String> indexChildrenResults = indexChildrenWhereRequired();
List<String> allResults = new ArrayList<String>(100);
allResults.addAll(indexChildrenResults);
// Done
return allResults;
}
private List<String> indexChildrenWhereRequired()
{
final List<Pair<Long, NodeRef>> parentNodePairs = new ArrayList<Pair<Long, NodeRef>>(100);
final NodeRefQueryCallback callback = new NodeRefQueryCallback()
{
public boolean handle(Pair<Long, NodeRef> nodePair)
{
parentNodePairs.add(nodePair);
return true;
}
};
RetryingTransactionCallback<Object> getNodesCallback = new RetryingTransactionCallback<Object>()
{
public Object execute() throws Throwable
{
nodeDaoService.getNodesWithAspect(ContentModel.ASPECT_INDEX_CHILDREN, Long.MIN_VALUE, 100, callback);
// Done
return null;
}
};
transactionService.getRetryingTransactionHelper().doInTransaction(getNodesCallback, true, true);
// Process the nodes in random order
Collections.shuffle(parentNodePairs);
// Iterate and operate
List<String> results = new ArrayList<String>(100);
for (final Pair<Long, NodeRef> parentNodePair : parentNodePairs)
{
RetryingTransactionCallback<String> indexChildrenCallback = new RetryingTransactionCallback<String>()
{
public String execute() throws Throwable
{
// Index children without full cascade
dbNodeService.indexChildren(parentNodePair, true);
// Done
return null;
}
};
RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
txnHelper.setMaxRetries(1);
try
{
txnHelper.doInTransaction(indexChildrenCallback, false, true);
String msg =
"Indexed child nodes: \n" +
" Parent node: " + parentNodePair.getFirst();
results.add(msg);
}
catch (Throwable e)
{
String msg =
"Failed to index child nodes." +
" Set log level to WARN for this class to get exception log: \n" +
" Parent node: " + parentNodePair.getFirst() + "\n" +
" Error: " + e.getMessage();
// It failed; do a full log in WARN mode
if (logger.isWarnEnabled())
{
logger.warn(msg, e);
}
else
{
logger.error(msg);
}
results.add(msg);
}
}
// Done
if (logger.isDebugEnabled())
{
StringBuilder sb = new StringBuilder(256);
sb.append("Indexed child nodes: \n")
.append(" Results:\n");
for (String msg : results)
{
sb.append(" ").append(msg).append("\n");
}
logger.debug(sb.toString());
}
return results;
}
}

View File

@@ -69,7 +69,7 @@ public interface NodeDaoService
      * @return Returns a list of stores
      */
     @DirtySessionAnnotation(markDirty=false)
-    public List<StoreRef> getStoreRefs();
+    public List<Pair<Long, StoreRef>> getStores();
 
     @DirtySessionAnnotation(markDirty=false)
     public Pair<Long, NodeRef> getRootNode(StoreRef storeRef);
@@ -164,11 +164,19 @@ public interface NodeDaoService
     public boolean hasNodeAspect(Long nodeId, QName aspectQName);
 
     /**
-     * Deletes the node and all entities
+     * Deletes the node and all entities. Note that the node entry will still exist and be
+     * associated with a live transaction.
      */
     @DirtySessionAnnotation(markDirty=true)
     public void deleteNode(Long nodeId);
 
+    /**
+     * Remove all traces of the node. This assumes that the node has been marked
+     * for deletion using {@link #deleteNode(Long)}.
+     */
+    @DirtySessionAnnotation(markDirty=true)
+    public void purgeNode(Long nodeId);
+
     /**
      * @param name the <b>cm:name</b> to apply to the association
      * @return Returns the persisted and filled association's ID
@@ -286,8 +294,21 @@ public interface NodeDaoService
         boolean handle(Pair<Long, NodeRef> nodePair);
     }
 
+    /**
+     * Gets a set of nodes that have parents in the given store, but are themselves located in a different
+     * store.
+     *
+     * @param storeId the store of the parent nodes
+     * @param minNodeId the min node ID to return
+     * @param count the maximum number of results
+     * @param resultsCallback the node callback
+     */
     @DirtySessionAnnotation(markDirty=false)
-    public void getNodesWithChildrenInDifferentStores(Long minNodeId, int count, NodeRefQueryCallback resultsCallback);
+    public void getNodesWithChildrenInDifferentStore(
+            Long storeId,
+            Long minNodeId,
+            int count,
+            NodeRefQueryCallback resultsCallback);
 
     @DirtySessionAnnotation(markDirty=false)
     public void getNodesWithAspect(QName aspectQName, Long minNodeId, int count, NodeRefQueryCallback resultsCallback);
@@ -454,6 +475,17 @@ public interface NodeDaoService
     @DirtySessionAnnotation(markDirty=true)
     public void getPropertyValuesByActualType(DataTypeDefinition actualDataTypeDefinition, NodePropertyHandler handler);
 
+    /**
+     * Gets a batch of deleted nodes in old transactions.
+     *
+     * @param minNodeId the minimum node ID
+     * @param maxCommitTime the maximum commit time (to set a minimum transaction age)
+     * @param count the maximum number of results (for batching)
+     * @param resultsCallback the callback to pass results back
+     */
+    @DirtySessionAnnotation(markDirty=false)
+    public void getNodesDeletedInOldTxns(Long minNodeId, long maxCommitTime, int count, NodeRefQueryCallback resultsCallback);
+
     /**
      * Iterface to handle callbacks when iterating over properties
      *
@@ -465,6 +497,20 @@ public interface NodeDaoService
         void handle(NodeRef nodeRef, QName nodeTypeQName, QName propertyQName, Serializable value);
     }
 
+    /**
+     * Retrieves the maximum transaction ID for which the commit time is less than the given time.
+     *
+     * @param maxCommitTime the max commit time (ms)
+     * @return the last transaction <i>on or before</i> the given time
+     */
+    @DirtySessionAnnotation(markDirty=true)
+    public Long getMaxTxnIdByCommitTime(final long maxCommitTime);
+
+    /**
+     * Retrieves a specific transaction.
+     *
+     * @param txnId the unique transaction ID.
+     * @return the requested transaction or <tt>null</tt>
+     */
     @DirtySessionAnnotation(markDirty=true)
     public Transaction getTxnById(long txnId);
 
     /**
@@ -518,4 +564,10 @@ public interface NodeDaoService
     @DirtySessionAnnotation(markDirty=false)
     public List<NodeRef> getTxnChanges(final long txnId);
 
+    @DirtySessionAnnotation(markDirty=false)
+    public List<Long> getTxnsUnused(Long minTxnId, long maxCommitTime, int count);
+
+    @DirtySessionAnnotation(markDirty=true)
+    public void purgeTxn(Long txnId);
 }

View File

@@ -107,6 +107,7 @@ import org.alfresco.util.Pair;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.hibernate.Criteria; import org.hibernate.Criteria;
import org.hibernate.ObjectNotFoundException;
import org.hibernate.Query; import org.hibernate.Query;
import org.hibernate.ScrollMode; import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults; import org.hibernate.ScrollableResults;
@@ -137,7 +138,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
private static final String QUERY_GET_CHILD_ASSOC_REFS_BY_CHILD_TYPEQNAME = "node.GetChildAssocRefsByChildTypeQName"; private static final String QUERY_GET_CHILD_ASSOC_REFS_BY_CHILD_TYPEQNAME = "node.GetChildAssocRefsByChildTypeQName";
private static final String QUERY_GET_PRIMARY_CHILD_ASSOCS = "node.GetPrimaryChildAssocs"; private static final String QUERY_GET_PRIMARY_CHILD_ASSOCS = "node.GetPrimaryChildAssocs";
private static final String QUERY_GET_PRIMARY_CHILD_ASSOCS_NOT_IN_SAME_STORE = "node.GetPrimaryChildAssocsNotInSameStore"; private static final String QUERY_GET_PRIMARY_CHILD_ASSOCS_NOT_IN_SAME_STORE = "node.GetPrimaryChildAssocsNotInSameStore";
private static final String QUERY_GET_NODES_WITH_CHILDREN_IN_DIFFERENT_STORES ="node.GetNodesWithChildrenInDifferentStores"; private static final String QUERY_GET_NODES_WITH_CHILDREN_IN_DIFFERENT_STORE ="node.GetNodesWithChildrenInDifferentStore";
private static final String QUERY_GET_NODES_WITH_ASPECT ="node.GetNodesWithAspect"; private static final String QUERY_GET_NODES_WITH_ASPECT ="node.GetNodesWithAspect";
private static final String QUERY_GET_PARENT_ASSOCS = "node.GetParentAssocs"; private static final String QUERY_GET_PARENT_ASSOCS = "node.GetParentAssocs";
private static final String QUERY_GET_NODE_ASSOC = "node.GetNodeAssoc"; private static final String QUERY_GET_NODE_ASSOC = "node.GetNodeAssoc";
@@ -149,6 +150,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
private static final String QUERY_GET_USERS_WITHOUT_USAGE = "node.GetUsersWithoutUsage";
private static final String QUERY_GET_USERS_WITH_USAGE = "node.GetUsersWithUsage";
private static final String QUERY_GET_NODES_WITH_PROPERTY_VALUES_BY_ACTUAL_TYPE = "node.GetNodesWithPropertyValuesByActualType";
private static final String QUERY_GET_DELETED_NODES_BY_MAX_TXNID = "node.GetDeletedNodesByMaxTxnId";
private static final String QUERY_GET_SERVER_BY_IPADDRESS = "server.getServerByIpAddress";
private static final Long NULL_CACHE_VALUE = new Long(-1);
@@ -521,14 +523,14 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
*
* @param nodeId the node's ID
* @return the node
- * @throws AlfrescoRuntimeException if the ID doesn't refer to a node.
+ * @throws ObjectNotFoundException if the ID doesn't refer to a node.
*/
private Node getNodeNotNull(Long nodeId)
{
Node node = (Node) getHibernateTemplate().get(NodeImpl.class, nodeId);
if (node == null)
{
- throw new AlfrescoRuntimeException("Node ID " + nodeId + " is invalid");
+ throw new ObjectNotFoundException(nodeId, NodeImpl.class.getName());
}
return node;
}
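The change of failure mode matters to callers: a missing node now surfaces as Hibernate's ObjectNotFoundException rather than AlfrescoRuntimeException, presumably so that a stale ID can be told apart from a genuine error. A minimal caller-side sketch follows; handleMissingNode is a hypothetical reaction, not an API of this class.

    import org.hibernate.ObjectNotFoundException;

    // Hypothetical call site adjusting to the new exception type
    try
    {
        Node node = getNodeNotNull(nodeId);
        // ... work with the node ...
    }
    catch (ObjectNotFoundException e)
    {
        // The row is gone, e.g. deleted by a concurrent transaction
        handleMissingNode(nodeId);   // hypothetical recovery hook
    }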
@@ -573,7 +575,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
* @see #QUERY_GET_ALL_STORES
*/
@SuppressWarnings("unchecked")
- public List<StoreRef> getStoreRefs()
+ public List<Pair<Long, StoreRef>> getStores()
{
HibernateCallback callback = new HibernateCallback()
{
@@ -585,10 +587,11 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
};
List<Store> stores = (List) getHibernateTemplate().execute(callback);
- List<StoreRef> storeRefs = new ArrayList<StoreRef>(stores.size());
+ List<Pair<Long, StoreRef>> storeRefs = new ArrayList<Pair<Long, StoreRef>>(stores.size());
for (Store store : stores)
{
- storeRefs.add(store.getStoreRef());
+ Pair<Long, StoreRef> storePair = new Pair<Long, StoreRef>(store.getId(), store.getStoreRef());
+ storeRefs.add(storePair);
}
// done
return storeRefs;
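getStores() now pairs each StoreRef with its database ID, saving callers a second lookup before running store-scoped queries. A short usage sketch, assuming the usual getFirst()/getSecond() accessors on org.alfresco.util.Pair; processStore is hypothetical.

    List<Pair<Long, StoreRef>> stores = nodeDaoService.getStores();
    for (Pair<Long, StoreRef> pair : stores)
    {
        Long storeId = pair.getFirst();        // the database ID of the store
        StoreRef storeRef = pair.getSecond();  // the public store reference
        processStore(storeId, storeRef);       // hypothetical per-store work
    }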
@@ -714,17 +717,19 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
return query.uniqueResult();
}
};
- Node node = (Node) getHibernateTemplate().execute(callback);
+ Object[] result = (Object[]) getHibernateTemplate().execute(callback);
// Cache the value
- if (node == null)
+ final Node node;
+ if (result == null)
{
node = null;
storeAndNodeIdCache.put(nodeRef, NULL_CACHE_VALUE);
}
else
{
node = (Node) result[0];
storeAndNodeIdCache.put(nodeRef, node.getId());
}
// TODO: Fill cache here
return node;
}
@@ -1336,6 +1341,18 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
recordNodeDelete(node);
}
/**
* Final purge of the node entry. No transaction recording is done for this.
*/
public void purgeNode(Long nodeId)
{
Node node = (Node) getSession().get(NodeImpl.class, nodeId);
if (node != null)
{
getHibernateTemplate().delete(node);
}
}
private static final String QUERY_DELETE_PARENT_ASSOCS = "node.DeleteParentAssocs";
private static final String QUERY_DELETE_CHILD_ASSOCS = "node.DeleteChildAssocs";
private static final String QUERY_DELETE_NODE_ASSOCS = "node.DeleteNodeAssocs";
@@ -2364,14 +2381,19 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
// Done
}
- public void getNodesWithChildrenInDifferentStores(final Long minNodeId, final int count, NodeRefQueryCallback resultsCallback)
+ public void getNodesWithChildrenInDifferentStore(
+ final Long storeId,
+ final Long minNodeId,
+ final int count,
+ NodeRefQueryCallback resultsCallback)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session
- .getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_NODES_WITH_CHILDREN_IN_DIFFERENT_STORES)
+ .getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_NODES_WITH_CHILDREN_IN_DIFFERENT_STORE)
+ .setLong("parentStoreId", storeId)
.setLong("minNodeId", minNodeId)
.setMaxResults(count);
DirtySessionMethodInterceptor.setQueryFlushMode(session, query);
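The reworked method scopes the scan to one parent store and pages by node ID. The sketch below assumes NodeRefQueryCallback exposes a single boolean handle(Pair<Long, NodeRef>) method, which should be checked against the interface; the handler body and batch size are illustrative.

    // Illustrative consumer: scan one store, 500 nodes at a time
    NodeRefQueryCallback callback = new NodeRefQueryCallback()
    {
        public boolean handle(Pair<Long, NodeRef> nodePair)   // signature assumed
        {
            // move or re-index the child that lives in a different store
            return true;   // true = keep receiving results
        }
    };
    getNodesWithChildrenInDifferentStore(storeId, 0L, 500, callback);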
@@ -2397,10 +2419,10 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
/**
* <pre>
- Long parentId = (Long) row[0];
- String parentProtocol = (String) row[1];
- String parentIdentifier = (String) row[2];
- String parentUuid = (String) row[3];
+ Node ID = (Long) row[0];
+ Node Protocol = (String) row[1];
+ Node Identifier = (String) row[2];
+ Node Uuid = (String) row[3];
* </pre>
*/
private void processNodeResults(ScrollableResults queryResults, NodeRefQueryCallback resultsCallback)
@@ -3125,12 +3147,57 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
}
@SuppressWarnings("unchecked")
public void getNodesDeletedInOldTxns(
final Long minNodeId,
long maxCommitTime,
final int count,
NodeRefQueryCallback resultsCallback)
{
// Get the max transaction ID
final Long maxTxnId = getMaxTxnIdByCommitTime(maxCommitTime);
// Shortcut
if (maxTxnId == null)
{
return;
}
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_DELETED_NODES_BY_MAX_TXNID);
query.setLong("minNodeId", minNodeId);
query.setLong("maxTxnId", maxTxnId);
query.setMaxResults(count);
query.setReadOnly(true);
return query.scroll(ScrollMode.FORWARD_ONLY);
}
};
ScrollableResults queryResults = null;
try
{
queryResults = (ScrollableResults) getHibernateTemplate().execute(callback);
processNodeResults(queryResults, resultsCallback);
}
finally
{
if (queryResults != null)
{
queryResults.close();
}
}
// Done
}
/*
* Queries for transactions
*/
private static final String QUERY_GET_TXN_BY_ID = "txn.GetTxnById";
private static final String QUERY_GET_MIN_COMMIT_TIME = "txn.GetMinCommitTime";
private static final String QUERY_GET_MAX_COMMIT_TIME = "txn.GetMaxCommitTime";
private static final String QUERY_GET_MAX_ID_BY_COMMIT_TIME = "txn.GetMaxIdByCommitTime";
private static final String QUERY_GET_TXNS_BY_COMMIT_TIME_ASC = "txn.GetTxnsByCommitTimeAsc";
private static final String QUERY_GET_TXNS_BY_COMMIT_TIME_DESC = "txn.GetTxnsByCommitTimeDesc";
private static final String QUERY_GET_SELECTED_TXNS_BY_COMMIT_TIME_ASC = "txn.GetSelectedTxnsByCommitAsc";
@@ -3139,6 +3206,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
private static final String QUERY_COUNT_TRANSACTIONS = "txn.CountTransactions";
private static final String QUERY_GET_TXN_CHANGES_FOR_STORE = "txn.GetTxnChangesForStore";
private static final String QUERY_GET_TXN_CHANGES = "txn.GetTxnChanges";
private static final String QUERY_GET_TXNS_UNUSED = "txn.GetTxnsUnused";
public Transaction getTxnById(final long txnId)
{
@@ -3190,6 +3258,23 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
return (commitTime == null) ? 0L : commitTime;
}
public Long getMaxTxnIdByCommitTime(final long maxCommitTime)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_MAX_ID_BY_COMMIT_TIME);
query.setLong("maxCommitTime", maxCommitTime);
query.setReadOnly(true);
return query.uniqueResult();
}
};
Long txnId = (Long) getHibernateTemplate().execute(callback);
// done
return txnId;
}
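Callers will normally derive maxCommitTime from a retention window rather than pass an absolute instant around. A worked example, with the seven-day window purely illustrative:

    // Illustrative only: keep seven days of transaction history
    long minPurgeAgeDays = 7L;
    long minPurgeAgeMs = minPurgeAgeDays * 24L * 60L * 60L * 1000L;   // 604,800,000 ms
    long maxCommitTime = System.currentTimeMillis() - minPurgeAgeMs;
    Long maxTxnId = getMaxTxnIdByCommitTime(maxCommitTime);
    if (maxTxnId == null)
    {
        // Nothing committed before the cut-off, so nothing to purge
        return;
    }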
@SuppressWarnings("unchecked")
public List<Transaction> getTxnsByMinCommitTime(final List<Long> includeTxnIds)
{
@@ -3518,6 +3603,36 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
return nodeRefs;
}
@SuppressWarnings("unchecked")
public List<Long> getTxnsUnused(final Long minTxnId, final long maxCommitTime, final int count)
{
HibernateCallback callback = new HibernateCallback()
{
public Object doInHibernate(Session session)
{
Query query = session.getNamedQuery(QUERY_GET_TXNS_UNUSED);
query.setReadOnly(true)
.setMaxResults(count)
.setLong("minTxnId", minTxnId)
.setLong("maxCommitTime", maxCommitTime);
DirtySessionMethodInterceptor.setQueryFlushMode(session, query);
return query.list();
}
};
List<Long> results = (List<Long>) getHibernateTemplate().execute(callback);
// done
return results;
}
public void purgeTxn(Long txnId)
{
Transaction txn = (Transaction) getSession().get(TransactionImpl.class, txnId);
if (txn != null)
{
getHibernateTemplate().delete(txn);
}
}
//============ PROPERTY HELPER METHODS =================//
public static Map<PropertyMapKey, NodePropertyValue> convertToPersistentProperties(
View File
@@ -24,6 +24,9 @@
*/
package org.alfresco.repo.node.index;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
@@ -785,6 +788,7 @@ public abstract class AbstractReindexComponent implements IndexRecovery
id,
e.getMessage());
loggerOnThread.warn(msg);
loggerOnThread.warn(getStackTrace(e));
}
catch (Throwable e)
{
@@ -793,6 +797,7 @@ public abstract class AbstractReindexComponent implements IndexRecovery
id,
e.getMessage());
loggerOnThread.error(msg);
loggerOnThread.warn(getStackTrace(e));
}
finally
{
@@ -800,6 +805,18 @@ public abstract class AbstractReindexComponent implements IndexRecovery
removeFromQueueAndProdHead();
}
}
public String getStackTrace(Throwable t)
{
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw, true);
t.printStackTrace(pw);
pw.flush();
sw.flush();
return sw.toString();
}
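getStackTrace renders the trace into a String so that the message and the trace can be logged as separate records; commons-logging's warn(Object, Throwable) overload would attach the Throwable in one call instead. Both options for comparison; which trade-off was intended is not stated in the commit.

    // Option 1: one record, Throwable attached (commons-logging overload)
    loggerOnThread.warn(msg, e);

    // Option 2: what this class does, message and rendered trace as separate records
    loggerOnThread.warn(msg);
    loggerOnThread.warn(getStackTrace(e));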
public synchronized void reindexedNode(NodeRef nodeRef)
{
// Check for forced kill
View File
@@ -62,20 +62,6 @@ import org.alfresco.service.namespace.QNamePattern;
@PublicService
public interface NodeService
{
/**
* Kick off any cleanup processes relating to the particular implementation.
* <p>
* This must cover cleanup of orphaned data and other housekeeping tasks that may
* be required.
* <p>
* <b>NB:</b> Implementations should guard against multithreaded entry without
* blocking.
*
* @return Returns a list of messages detailing what was done.
*/
@Auditable(key = Auditable.Key.NO_KEY)
public List<String> cleanup();
/**
* Gets a list of all available node store references
*