Merged V3.2 to HEAD

17574: Merged in DEV work for ContentStoreCleaner: ETHREEOH-2813
      17432: Build up for fix of ETHREEOH-2813: ContentStoreCleaner doesn't scale
      17546: ContentStoreCleaner fixes and further tests
      17524: Unit tests and bulk queries for orphaned content
      17506: W.I.P. for content cleaner for V3.2: ETHREEOH-2813
   17575: Missed check-in (other DB create scripts look OK)
   17577: Re-activated 'contentStoreCleanerTrigger'
          - Added system property: system.content.orphanCleanup.cronExpression=0 0 4 * * ?
          - Other useful properties:
              system.content.eagerOrphanCleanup=false
              system.content.orphanProtectDays=14
   17578: Fixed MT test and sample contexts after recent content cleaner changes
   17579: Fixed DB2 unique index creation for content URLs
   17580: First pass at fix for ETHREEOH-3454: Port enterprise upgrade scripts for ContentStoreCleaner changes
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /alfresco/BRANCHES/V3.2:r17574-17575,17577-17580


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@18151 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
This commit is contained in:
Derek Hulley
2010-01-19 15:26:46 +00:00
parent a650eb2a1c
commit f89af49875
37 changed files with 1033 additions and 1031 deletions

View File

@@ -24,39 +24,32 @@
*/
package org.alfresco.repo.content.cleanup;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.domain.avm.AVMNodeDAO;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.domain.contentclean.ContentCleanDAO;
import org.alfresco.repo.domain.contentclean.ContentCleanDAO.ContentUrlBatchProcessor;
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
import org.alfresco.repo.domain.contentdata.ContentDataDAO.ContentUrlHandler;
import org.alfresco.repo.lock.JobLockService;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.node.db.NodeDaoService.NodePropertyHandler;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.springframework.extensions.surf.util.PropertyCheck;
import org.alfresco.util.VmShutdownListener;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.extensions.surf.util.Pair;
import org.springframework.extensions.surf.util.PropertyCheck;
/**
* This component is responsible for cleaning up orphaned content.
* <p/>
* <b>TODO: Fix up new comments</b>
*
* Clean-up happens at two levels.<p/>
* <u><b>Eager cleanup:</b></u> (since 3.2)<p/>
* If {@link #setEagerOrphanCleanup(boolean) eager cleanup} is activated, then this
@@ -88,30 +81,38 @@ import org.apache.commons.logging.LogFactory;
*/
public class ContentStoreCleaner
{
private static final QName LOCK_QNAME = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentStoreCleaner");
private static final long LOCK_TTL = 30000L;
private static ThreadLocal<Pair<Long, String>> lockThreadLocal = new ThreadLocal<Pair<Long, String>>();
private static Log logger = LogFactory.getLog(ContentStoreCleaner.class);
/** kept to notify the thread that it should quit */
private static VmShutdownListener vmShutdownListener = new VmShutdownListener("ContentStoreCleaner");
private EagerContentStoreCleaner eagerContentStoreCleaner;
private JobLockService jobLockService;
private ContentCleanDAO contentCleanDAO;
private ContentDataDAO contentDataDAO;
private DictionaryService dictionaryService;
private ContentService contentService;
private NodeDaoService nodeDaoService;
private AVMNodeDAO avmNodeDAO;
private TransactionService transactionService;
private List<ContentStore> stores;
private List<ContentStoreCleanerListener> listeners;
private int protectDays;
public ContentStoreCleaner()
{
this.stores = new ArrayList<ContentStore>(0);
this.listeners = new ArrayList<ContentStoreCleanerListener>(0);
this.protectDays = 7;
}
/**
* Set the component that will do the physical deleting
*/
public void setEagerContentStoreCleaner(EagerContentStoreCleaner eagerContentStoreCleaner)
{
this.eagerContentStoreCleaner = eagerContentStoreCleaner;
}
/**
* @param jobLockService service used to ensure that cleanup runs are not duplicated
*/
@@ -120,14 +121,6 @@ public class ContentStoreCleaner
this.jobLockService = jobLockService;
}
/**
* @param contentCleanDAO DAO used for manipulating content URLs
*/
public void setContentCleanDAO(ContentCleanDAO contentCleanDAO)
{
this.contentCleanDAO = contentCleanDAO;
}
/**
* @param contentDataDAO DAO used for enumerating DM content URLs
*/
@@ -176,22 +169,6 @@ public class ContentStoreCleaner
this.transactionService = transactionService;
}
/**
* @param stores the content stores to clean
*/
public void setStores(List<ContentStore> stores)
{
this.stores = stores;
}
/**
* @param listeners the listeners that can react to deletions
*/
public void setListeners(List<ContentStoreCleanerListener> listeners)
{
this.listeners = listeners;
}
/**
* Set the minimum number of days old that orphaned content must be
* before deletion is possible. The default is 7 days.
@@ -217,14 +194,13 @@ public class ContentStoreCleaner
private void checkProperties()
{
PropertyCheck.mandatory(this, "jobLockService", jobLockService);
PropertyCheck.mandatory(this, "contentCleanerDAO", contentCleanDAO);
PropertyCheck.mandatory(this, "contentDataDAO", contentDataDAO);
PropertyCheck.mandatory(this, "dictionaryService", dictionaryService);
PropertyCheck.mandatory(this, "contentService", contentService);
PropertyCheck.mandatory(this, "nodeDaoService", nodeDaoService);
PropertyCheck.mandatory(this, "avmNodeDAO", avmNodeDAO);
PropertyCheck.mandatory(this, "transactionService", transactionService);
PropertyCheck.mandatory(this, "listeners", listeners);
PropertyCheck.mandatory(this, "eagerContentStoreCleaner", eagerContentStoreCleaner);
// check the protect days
if (protectDays < 0)
@@ -235,156 +211,70 @@ public class ContentStoreCleaner
{
logger.warn(
"Property 'protectDays' is set to 0. " +
"It is possible that in-transaction content will be deleted.");
"Please ensure that your backup strategy is appropriate for this setting.");
}
}
private void removeContentUrlsPresentInMetadata(final ContentUrlBatchProcessor urlRemover)
/**
* Lazily update the job lock
*/
private void refreshLock()
{
RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
// Remove all the Content URLs for the ADM repository
// Handlers that record the URLs
final ContentDataDAO.ContentUrlHandler contentUrlHandler = new ContentDataDAO.ContentUrlHandler()
Pair<Long, String> lockPair = lockThreadLocal.get();
if (lockPair == null)
{
long lastLock = 0L;
public void handle(String contentUrl)
String lockToken = jobLockService.getLock(LOCK_QNAME, LOCK_TTL);
Long lastLock = new Long(System.currentTimeMillis());
// We have not locked before
lockPair = new Pair<Long, String>(lastLock, lockToken);
lockThreadLocal.set(lockPair);
}
else
{
long now = System.currentTimeMillis();
long lastLock = lockPair.getFirst().longValue();
String lockToken = lockPair.getSecond();
// Only refresh the lock if we are past a threshold
if (now - lastLock > (long)(LOCK_TTL/2L))
{
if (vmShutdownListener.isVmShuttingDown())
{
throw new VmShutdownException();
}
urlRemover.processContentUrl(contentUrl);
// Check lock
long now = System.currentTimeMillis();
if (now - lastLock > (long)(LOCK_TTL/2L))
{
jobLockService.getTransactionalLock(LOCK_QNAME, LOCK_TTL);
lastLock = now;
}
jobLockService.refreshLock(lockToken, LOCK_QNAME, LOCK_TTL);
lastLock = System.currentTimeMillis();
lockPair = new Pair<Long, String>(lastLock, lockToken);
}
};
final NodePropertyHandler nodePropertyHandler = new NodePropertyHandler()
{
long lastLock = 0L;
public void handle(NodeRef nodeRef, QName nodeTypeQName, QName propertyQName, Serializable value)
{
if (vmShutdownListener.isVmShuttingDown())
{
throw new VmShutdownException();
}
// Convert the values to ContentData and extract the URLs
ContentData contentData = DefaultTypeConverter.INSTANCE.convert(ContentData.class, value);
String contentUrl = contentData.getContentUrl();
if (contentUrl != null)
{
urlRemover.processContentUrl(contentUrl);
}
// Check lock
long now = System.currentTimeMillis();
if (now - lastLock > (long)(LOCK_TTL/2L))
{
jobLockService.getTransactionalLock(LOCK_QNAME, LOCK_TTL);
lastLock = now;
}
}
};
final DataTypeDefinition contentDataType = dictionaryService.getDataType(DataTypeDefinition.CONTENT);
// execute in READ-WRITE txn
RetryingTransactionCallback<Void> getUrlsCallback = new RetryingTransactionCallback<Void>()
{
public Void execute() throws Exception
{
contentDataDAO.getAllContentUrls(contentUrlHandler);
nodeDaoService.getPropertyValuesByActualType(contentDataType, nodePropertyHandler);
return null;
};
};
txnHelper.doInTransaction(getUrlsCallback);
// Do the same for the AVM repository.
final AVMNodeDAO.ContentUrlHandler handler = new AVMNodeDAO.ContentUrlHandler()
{
long lastLock = 0L;
public void handle(String contentUrl)
{
if (vmShutdownListener.isVmShuttingDown())
{
throw new VmShutdownException();
}
urlRemover.processContentUrl(contentUrl);
// Check lock
long now = System.currentTimeMillis();
if (now - lastLock > (long)(LOCK_TTL/2L))
{
jobLockService.getTransactionalLock(LOCK_QNAME, LOCK_TTL);
lastLock = now;
}
}
};
// execute in READ-WRITE txn
RetryingTransactionCallback<Void> getAVMUrlsCallback = new RetryingTransactionCallback<Void>()
{
public Void execute() throws Exception
{
avmNodeDAO.getContentUrls(handler);
return null;
}
};
txnHelper.doInTransaction(getAVMUrlsCallback);
}
private void addContentUrlsPresentInStores(final ContentUrlBatchProcessor urlInserter)
{
org.alfresco.repo.content.ContentStore.ContentUrlHandler handler = new org.alfresco.repo.content.ContentStore.ContentUrlHandler()
{
long lastLock = 0L;
public void handle(String contentUrl)
{
if (vmShutdownListener.isVmShuttingDown())
{
throw new VmShutdownException();
}
urlInserter.processContentUrl(contentUrl);
// Check lock
long now = System.currentTimeMillis();
if (now - lastLock > (long)(LOCK_TTL/2L))
{
jobLockService.getTransactionalLock(LOCK_QNAME, LOCK_TTL);
lastLock = now;
}
}
};
Date checkAllBeforeDate = new Date(System.currentTimeMillis() - (long) protectDays * 3600L * 1000L * 24L);
for (ContentStore store : stores)
{
store.getUrls(null, checkAllBeforeDate, handler);
}
}
private static final QName LOCK_QNAME = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentStoreCleaner");
private static final long LOCK_TTL = 30000L;
/**
 * Release the job lock held by the current thread, if any, after the job completes.
 * <p/>
 * Reads the (token, last-refresh-time) pair from {@code lockThreadLocal}; if no pair is
 * present, this thread never acquired the lock and there is nothing to release.
 * The thread-local is always cleared, even if the release call itself fails.
 */
private void releaseLock()
{
Pair<Long, String> lockPair = lockThreadLocal.get();
if (lockPair != null)
{
// We can't release without a token
try
{
// Second element of the pair is the lock token handed out by the JobLockService
jobLockService.releaseLock(lockPair.getSecond(), LOCK_QNAME);
}
finally
{
// Reset the thread-local so a later run on this thread re-acquires cleanly
lockThreadLocal.set(null);
}
}
// else: We can't release without a token
}
public void execute()
{
checkProperties();
RetryingTransactionCallback<Void> executeCallback = new RetryingTransactionCallback<Void>()
{
public Void execute() throws Exception
{
logger.debug("Content store cleanup started.");
// Get the lock without any waiting
// The lock will be refreshed, but the first lock starts the process
jobLockService.getTransactionalLock(LOCK_QNAME, LOCK_TTL);
executeInternal();
return null;
}
};
try
{
RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
txnHelper.setMaxRetries(0);
txnHelper.doInTransaction(executeCallback);
logger.debug("Content store cleanup started.");
refreshLock();
executeInternal();
// Done
if (logger.isDebugEnabled())
{
@@ -399,103 +289,67 @@ public class ContentStoreCleaner
logger.debug(" Content store cleanup aborted.");
}
}
finally
{
releaseLock();
}
}
public void executeInternal()
private void executeInternal()
{
final ContentUrlBatchProcessor storeUrlDeleteHandler = new ContentUrlBatchProcessor()
{
long lastLock = 0L;
public void start()
{
}
public void processContentUrl(String contentUrl)
{
for (ContentStore store : stores)
{
if (vmShutdownListener.isVmShuttingDown())
{
throw new VmShutdownException();
}
if (logger.isDebugEnabled())
{
if (store.isWriteSupported())
{
logger.debug(" Deleting content URL: " + contentUrl);
}
}
for (ContentStoreCleanerListener listener : listeners)
{
listener.beforeDelete(store, contentUrl);
}
// Delete
store.delete(contentUrl);
// Check lock
long now = System.currentTimeMillis();
if (now - lastLock > (long)(LOCK_TTL/2L))
{
jobLockService.getTransactionalLock(LOCK_QNAME, LOCK_TTL);
lastLock = now;
}
}
}
public void end()
{
}
};
// execute in READ-WRITE txn
RetryingTransactionCallback<Void> executeCallback = new RetryingTransactionCallback<Void>()
RetryingTransactionCallback<Integer> getAndDeleteWork = new RetryingTransactionCallback<Integer>()
{
public Void execute() throws Exception
public Integer execute() throws Exception
{
// Clean up
contentCleanDAO.cleanUp();
// Push all store URLs in
ContentUrlBatchProcessor urlInserter = contentCleanDAO.getUrlInserter();
try
{
urlInserter.start();
addContentUrlsPresentInStores(urlInserter);
}
finally
{
urlInserter.end();
}
// Delete all content URLs
ContentUrlBatchProcessor urlRemover = contentCleanDAO.getUrlRemover();
try
{
urlRemover.start();
removeContentUrlsPresentInMetadata(urlRemover);
}
finally
{
urlRemover.end();
}
// Any remaining URLs are URLs present in the stores but not in the metadata
contentCleanDAO.listAllUrls(storeUrlDeleteHandler);
// Clean up
contentCleanDAO.cleanUp();
return null;
return cleanBatch(1000);
};
};
try
while (true)
{
transactionService.getRetryingTransactionHelper().doInTransaction(executeCallback);
// Done
refreshLock();
Integer deleted = transactionService.getRetryingTransactionHelper().doInTransaction(getAndDeleteWork);
if (vmShutdownListener.isVmShuttingDown())
{
throw new VmShutdownException();
}
if (deleted.intValue() == 0)
{
// There is no more to process
break;
}
// There is still more to delete, so continue
if (logger.isDebugEnabled())
{
logger.debug(" Content store cleanup completed.");
logger.debug(" Removed " + deleted.intValue() + " orphaned content URLs.");
}
}
catch (VmShutdownException e)
// Done
}
private int cleanBatch(final int batchSize)
{
final List<Long> idsToDelete = new ArrayList<Long>(batchSize);
ContentUrlHandler contentUrlHandler = new ContentUrlHandler()
{
// Aborted
if (logger.isDebugEnabled())
public void handle(Long id, String contentUrl, Long orphanTime)
{
logger.debug(" Content store cleanup aborted.");
// Pass the content URL to the eager cleaner for post-commit handling
eagerContentStoreCleaner.registerOrphanedContentUrl(contentUrl, true);
idsToDelete.add(id);
}
};
final long maxOrphanTime = System.currentTimeMillis() - (protectDays * 24 * 3600 * 1000);
contentDataDAO.getContentUrlsOrphaned(contentUrlHandler, maxOrphanTime, batchSize);
// All the URLs have been passed off for eventual deletion.
// Just delete the DB data
int size = idsToDelete.size();
if (size > 0)
{
contentDataDAO.deleteContentUrls(idsToDelete);
}
// Done
return size;
}
/**

View File

@@ -30,6 +30,7 @@ import java.lang.reflect.Method;
import java.util.Collections;
import java.util.Date;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.AbstractContentStore;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.EmptyContentReader;
@@ -48,11 +49,13 @@ import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.tools.Repository;
import org.alfresco.tools.ToolException;
import org.alfresco.util.GUID;
import org.alfresco.util.TempFileProvider;
import org.alfresco.util.VmShutdownListener;
import org.apache.commons.lang.mutable.MutableInt;
@@ -105,9 +108,9 @@ public class ContentStoreCleanerScalabilityRunner extends Repository
nodeDaoService = (NodeDaoService) ctx.getBean("nodeDaoService");
dictionaryService = (DictionaryService) ctx.getBean("dictionaryService");
int orphanCount = 100000;
int orphanCount = 1000;
contentStore = new NullContentStore(orphanCount);
contentStore = new NullContentStore(10000);
loadData(orphanCount);
@@ -220,11 +223,12 @@ public class ContentStoreCleanerScalabilityRunner extends Repository
}
}
};
// We use the default cleaner, but fix it up a bit
// We use the default cleaners, but fix them up a bit
EagerContentStoreCleaner eagerCleaner = (EagerContentStoreCleaner) ctx.getBean("eagerContentStoreCleaner");
eagerCleaner.setListeners(Collections.singletonList(listener));
eagerCleaner.setStores(Collections.singletonList(contentStore));
cleaner = (ContentStoreCleaner) ctx.getBean("contentStoreCleaner");
cleaner.setListeners(Collections.singletonList(listener));
cleaner.setProtectDays(0);
cleaner.setStores(Collections.singletonList(contentStore));
// The cleaner has its own txns
cleaner.execute();
@@ -301,22 +305,10 @@ public class ContentStoreCleanerScalabilityRunner extends Repository
private class HibernateHelper extends HibernateDaoSupport
{
private Method methodMakeNode;
private QName dataTypeDefContent;
private QName contentQName;
public HibernateHelper()
{
Class<HibernateHelper> clazz = HibernateHelper.class;
try
{
methodMakeNode = clazz.getMethod("makeNode", new Class[] {ContentData.class});
}
catch (NoSuchMethodException e)
{
throw new RuntimeException("Failed to get methods");
}
dataTypeDefContent = DataTypeDefinition.CONTENT;
contentQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "realContent");
}
/**
@@ -324,17 +316,9 @@ public class ContentStoreCleanerScalabilityRunner extends Repository
*/
public void makeNode(ContentData contentData)
{
throw new UnsupportedOperationException("Fix this method up");
// StoreRef storeRef = new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore");
// Long nodeId = nodeDaoService.newNode(storeRef, GUID.generate(), ContentModel.TYPE_CONTENT).getFirst();
// Node node = (Node) getHibernateTemplate().get(NodeImpl.class, nodeId);
//
// PropertyValue propertyValue = new PropertyValue(dataTypeDefContent, contentData);
// node.getProperties().put(contentQName, propertyValue);
// // persist the node
// getHibernateTemplate().save(node);
//
// txnResourceInterceptor.performManualCheck(methodMakeNode, 10);
StoreRef storeRef = new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore");
Long nodeId = nodeDaoService.newNode(storeRef, GUID.generate(), ContentModel.TYPE_CONTENT).getFirst();
nodeDaoService.addNodeProperty(nodeId, contentQName, contentData);
}
}
}

View File

@@ -34,11 +34,9 @@ import java.util.Map;
import junit.framework.TestCase;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.domain.avm.AVMNodeDAO;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.repo.domain.contentclean.ContentCleanDAO;
import org.alfresco.repo.domain.avm.AVMNodeDAO;
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
import org.alfresco.repo.lock.JobLockService;
import org.alfresco.repo.node.db.NodeDaoService;
@@ -58,9 +56,7 @@ import org.alfresco.service.namespace.QName;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.GUID;
import org.alfresco.util.TempFileProvider;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationEventPublisher;
/**
* @see org.alfresco.repo.content.cleanup.ContentStoreCleaner
@@ -95,33 +91,30 @@ public class ContentStoreCleanerTest extends TestCase
DictionaryService dictionaryService = serviceRegistry.getDictionaryService();
NodeDaoService nodeDaoService = (NodeDaoService) ctx.getBean("nodeDaoService");
AVMNodeDAO avmNodeDAO = (AVMNodeDAO) ctx.getBean("newAvmNodeDAO");
ContentCleanDAO contentCleanDAO = (ContentCleanDAO) ctx.getBean("contentCleanDAO");
ContentDataDAO contentDataDAO = (ContentDataDAO) ctx.getBean("contentDataDAO");
ApplicationEventPublisher applicationEventPublisher = (ApplicationEventPublisher) ctx
.getBean("applicationEventPublisher");
eagerCleaner = (EagerContentStoreCleaner) ctx.getBean("eagerContentStoreCleaner");
eagerCleaner.setEagerOrphanCleanup(false);
// we need a store
store = new FileContentStore(applicationEventPublisher, TempFileProvider.getTempDir().getAbsolutePath());
store = (ContentStore) ctx.getBean("fileContentStore");
// and a listener
listener = new DummyCleanerListener();
// initialise record of deleted URLs
deletedUrls = new ArrayList<String>(5);
// construct the test cleaner
// Construct the test cleaners
eagerCleaner = (EagerContentStoreCleaner) ctx.getBean("eagerContentStoreCleaner");
eagerCleaner.setEagerOrphanCleanup(false);
eagerCleaner.setStores(Collections.singletonList(store));
eagerCleaner.setListeners(Collections.singletonList(listener));
cleaner = new ContentStoreCleaner();
cleaner.setEagerContentStoreCleaner(eagerCleaner);
cleaner.setJobLockService(jobLockService);
cleaner.setContentCleanDAO(contentCleanDAO);
cleaner.setContentDataDAO(contentDataDAO);
cleaner.setTransactionService(transactionService);
cleaner.setDictionaryService(dictionaryService);
cleaner.setContentService(contentService);
cleaner.setNodeDaoService(nodeDaoService);
cleaner.setAvmNodeDAO(avmNodeDAO);
cleaner.setStores(Collections.singletonList(store));
cleaner.setListeners(Collections.singletonList(listener));
}
public void tearDown() throws Exception
@@ -293,18 +286,49 @@ public class ContentStoreCleanerTest extends TestCase
public void testImmediateRemoval() throws Exception
{
cleaner.setProtectDays(0);
// add some content to the store
ContentWriter writer = store.getWriter(ContentStore.NEW_CONTENT_CONTEXT);
writer.putContent("ABC");
String contentUrl = writer.getContentUrl();
eagerCleaner.setEagerOrphanCleanup(false);
final StoreRef storeRef = nodeService.createStore("test", getName() + "-" + GUID.generate());
RetryingTransactionCallback<ContentData> testCallback = new RetryingTransactionCallback<ContentData>()
{
public ContentData execute() throws Throwable
{
// Create some content
NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
Map<QName, Serializable> properties = new HashMap<QName, Serializable>(13);
properties.put(ContentModel.PROP_NAME, (Serializable)"test.txt");
NodeRef contentNodeRef = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
ContentModel.ASSOC_CHILDREN,
ContentModel.TYPE_CONTENT,
properties).getChildRef();
ContentWriter writer = contentService.getWriter(contentNodeRef, ContentModel.PROP_CONTENT, true);
writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
writer.putContent("INITIAL CONTENT");
ContentData contentData = writer.getContentData();
// Delete the first node
nodeService.deleteNode(contentNodeRef);
// Done
return contentData;
}
};
ContentData contentData = transactionService.getRetryingTransactionHelper().doInTransaction(testCallback);
// Make sure that the content URL still exists
ContentReader reader = contentService.getRawReader(contentData.getContentUrl());
assertNotNull(reader);
assertTrue("Content should not have been eagerly deleted.", reader.exists());
// fire the cleaner
cleaner.setProtectDays(0);
cleaner.execute();
reader = contentService.getRawReader(contentData.getContentUrl());
// the content should have disappeared as it is not in the database
assertFalse("Unprotected content was not deleted", store.exists(contentUrl));
assertTrue("Content listener was not called", deletedUrls.contains(contentUrl));
assertFalse("Unprotected content was not deleted", reader.exists());
assertTrue("Content listener was not called", deletedUrls.contains(reader.getContentUrl()));
}
public void testProtectedRemoval() throws Exception

View File

@@ -143,10 +143,6 @@ public class EagerContentStoreCleaner extends TransactionListenerAdapter
*/
public void registerNewContentUrl(String contentUrl)
{
if (!eagerOrphanCleanup)
{
return;
}
Set<String> urlsToDelete = TransactionalResourceHelper.getSet(KEY_POST_ROLLBACK_DELETION_URLS);
urlsToDelete.add(contentUrl);
// Register to listen for transaction rollback
@@ -158,7 +154,18 @@ public class EagerContentStoreCleaner extends TransactionListenerAdapter
*/
public void registerOrphanedContentUrl(String contentUrl)
{
if (!eagerOrphanCleanup)
registerOrphanedContentUrl(contentUrl, false);
}
/**
* Queues orphaned content for post-transaction removal
*
* @param force <tt>true</tt> for force the post-commit URL deletion
* regardless of the setting {@link #setEagerOrphanCleanup(boolean)}.
*/
public void registerOrphanedContentUrl(String contentUrl, boolean force)
{
if (!eagerOrphanCleanup && !force)
{
return;
}