Merged V2.2 to HEAD

8372: Merged V2.1 to V2.2
      8314: Merged V2.0 to V2.1
         7750: Fix for ACT-475: ContentStoreCleaner causes OutOfMemoryError
      8332: Made content URL column larger to accommodate the extra locale info present in 2.1
      8334: Build fix: V2.1 tighter on authentication for getTempWriter
   8376: Merged V2.1 to V2.2
      8325: Fix for AWC-1089
      8361: Workaround for WCM-882: All metadata extracters can now handle zero length files


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@8497 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
This commit is contained in:
Derek Hulley
2008-03-11 06:22:28 +00:00
parent ceed05d26f
commit cda4e6105f
33 changed files with 1102 additions and 246 deletions

View File

@@ -25,7 +25,6 @@
package org.alfresco.repo.content;
import java.util.Date;
import java.util.Set;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
@@ -150,27 +149,24 @@ public abstract class AbstractContentStore implements ContentStore
}
/**
* Searches for URLs using null dates.
*
* @see ContentStore#getUrls(java.util.Date, java.util.Date)
* @see #getUrls(Date, Date, ContentUrlHandler)
*/
public final Set<String> getUrls()
public final void getUrls(ContentUrlHandler handler) throws ContentIOException
{
return getUrls(null, null);
getUrls(null, null, handler);
}
/**
* Override if the derived class supports the operation.
* Override to provide an implementation. If no implementation is supplied, then the store will not support
* cleaning of orphaned content binaries.
*
* @throws UnsupportedOperationException always
*
* @since 2.1
* @throws UnsupportedOperationException always
*/
public Set<String> getUrls(Date createdAfter, Date createdBefore)
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler) throws ContentIOException
{
throw new UnsupportedOperationException();
}
/**
* Implement to supply a store-specific writer for the given existing content
* and optional target content URL.
@@ -255,6 +251,16 @@ public abstract class AbstractContentStore implements ContentStore
}
return writer;
}
/**
* @see ContentContext
* @see ContentStore#getWriter(ContentContext)
*/
public final ContentWriter getWriter(ContentReader existingContentReader, String newContentUrl)
{
ContentContext ctx = new ContentContext(existingContentReader, newContentUrl);
return getWriter(ctx);
}
/**
* Simple implementation that uses the
@@ -266,14 +272,4 @@ public abstract class AbstractContentStore implements ContentStore
ContentReader reader = getReader(contentUrl);
return reader.exists();
}
/**
* @see ContentContext
* @see ContentStore#getWriter(ContentContext)
*/
public final ContentWriter getWriter(ContentReader existingContentReader, String newContentUrl)
{
ContentContext ctx = new ContentContext(existingContentReader, newContentUrl);
return getWriter(ctx);
}
}

View File

@@ -27,12 +27,14 @@ package org.alfresco.repo.content;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.util.HashSet;
import java.util.Set;
import javax.transaction.UserTransaction;
import junit.framework.TestCase;
import org.alfresco.repo.content.ContentStore.ContentUrlHandler;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
@@ -118,7 +120,18 @@ public abstract class AbstractReadOnlyContentStoreTest extends TestCase
ContentStore store = getStore();
try
{
Set<String> contentUrls = store.getUrls();
final Set<String> contentUrls = new HashSet<String>(5);
ContentUrlHandler handler = new ContentUrlHandler()
{
public void handle(String contentUrl)
{
if (contentUrls.size() < 50)
{
contentUrls.add(contentUrl);
}
}
};
store.getUrls(handler);
if (contentUrls.size() > 0)
{
return (String) contentUrls.toArray()[0];

View File

@@ -25,9 +25,7 @@
package org.alfresco.repo.content;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
@@ -357,50 +355,38 @@ public abstract class AbstractRoutingContentStore implements ContentStore
return writer;
}
/**
* @see
*/
public ContentWriter getWriter(ContentReader existingContentReader, String newContentUrl) throws ContentIOException
{
return getWriter(new ContentContext(existingContentReader, newContentUrl));
}
/**
* Compile a set of URLs from all stores.
* @see #getUrls(Date, Date, ContentUrlHandler)
*/
public Set<String> getUrls() throws ContentIOException
public void getUrls(ContentUrlHandler handler) throws ContentIOException
{
Set<String> urls = new HashSet<String>(1139);
List<ContentStore> stores = getAllStores();
for (ContentStore store : stores)
{
Set<String> storeUrls = store.getUrls();
urls.addAll(storeUrls);
}
if (logger.isDebugEnabled())
{
logger.debug("Found " + urls.size() + " URLs from " + stores.size() + " stores");
}
return urls;
getUrls(null, null, handler);
}
/**
* Compile a set of URLs from all stores given the date range.
* Passes the call to each of the stores wrapped by this store
*
* @see ContentStore#getUrls(Date, Date, ContentUrlHandler)
*/
public Set<String> getUrls(Date createdAfter, Date createdBefore) throws ContentIOException
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler) throws ContentIOException
{
Set<String> urls = new HashSet<String>(1139);
List<ContentStore> stores = getAllStores();
for (ContentStore store : stores)
{
Set<String> storeUrls = store.getUrls(createdAfter, createdBefore);
urls.addAll(storeUrls);
try
{
store.getUrls(createdAfter, createdBefore, handler);
}
catch (UnsupportedOperationException e)
{
// Support of this is not mandatory
}
}
if (logger.isDebugEnabled())
{
logger.debug("Found " + urls.size() + " URLs from " + stores.size() + " stores");
}
return urls;
}
/**

View File

@@ -36,8 +36,8 @@ import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.util.Date;
import java.util.Locale;
import java.util.Set;
import org.alfresco.repo.content.ContentStore.ContentUrlHandler;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentStreamListener;
@@ -329,23 +329,34 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
assertTrue("After-content reader should be closed after reading", readerAfterWrite.isClosed());
}
/**
* Helper method to check if a store contains a particular URL using the getUrl method
*/
private boolean searchForUrl(ContentStore store, final String contentUrl, Date from, Date to)
{
final boolean[] found = new boolean[] {false};
ContentUrlHandler handler = new ContentUrlHandler()
{
public void handle(String checkContentUrl)
{
if (contentUrl.equals(checkContentUrl))
{
found[0] = true;
}
}
};
getStore().getUrls(from, to, handler);
return found[0];
}
public void testGetUrls()
{
ContentStore store = getStore();
try
{
store.getUrls();
}
catch (UnsupportedOperationException e)
{
logger.warn("Store test " + getName() + " not possible on " + store.getClass().getName());
return;
}
ContentWriter writer = getWriter();
writer.putContent("Content for " + getName());
Set<String> contentUrls = store.getUrls();
String contentUrl = writer.getContentUrl();
assertTrue("New content not found in URL set", contentUrls.contains(contentUrl));
final String contentUrl = writer.getContentUrl();
ContentStore store = getStore();
boolean inStore = searchForUrl(store, contentUrl, null, null);
assertTrue("New content not found in URL set", inStore);
}
public void testDeleteSimple() throws Exception
@@ -603,7 +614,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
// Ensure that this test can be done
try
{
store.getUrls();
searchForUrl(store, "abc", null, null);
}
catch (UnsupportedOperationException e)
{
@@ -612,10 +623,10 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
}
// Proceed with the test
ContentWriter writer = getWriter();
Set<String> contentUrls = store.getUrls();
String contentUrl = writer.getContentUrl();
assertTrue("Writer URL not listed by store", contentUrls.contains(contentUrl));
boolean inStore = searchForUrl(store, contentUrl, null, null);
assertTrue("Writer URL not listed by store", inStore);
Date yesterday = new Date(System.currentTimeMillis() - 3600L * 1000L * 24L);
@@ -623,12 +634,12 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
writer.putContent("The quick brown fox...");
// check again
contentUrls = store.getUrls();
assertTrue("Writer URL not listed by store", contentUrls.contains(contentUrl));
inStore = searchForUrl(store, contentUrl, null, null);
assertTrue("Writer URL not listed by store", inStore);
// check that the query for content created before this time yesterday doesn't return the URL
contentUrls = store.getUrls(null, yesterday);
assertFalse("URL was younger than required, but still shows up", contentUrls.contains(contentUrl));
inStore = searchForUrl(store, contentUrl, null, yesterday);
assertFalse("URL was younger than required, but still shows up", inStore);
}
/**

View File

@@ -25,7 +25,6 @@
package org.alfresco.repo.content;
import java.util.Date;
import java.util.Set;
import org.alfresco.service.cmr.repository.ContentAccessor;
import org.alfresco.service.cmr.repository.ContentIOException;
@@ -195,24 +194,23 @@ public interface ContentStore
*
 * @see #getUrls(Date, Date, ContentUrlHandler)
*/
public Set<String> getUrls();
public void getUrls(ContentUrlHandler handler) throws ContentIOException;
/**
* Get a set of all content URLs in the store. This indicates all content
* available for reads.
* Get a set of all content URLs in the store. This indicates all content available for reads.
*
* @param createdAfter
* all URLs returned must have been created after this date. May be null.
* @param createdBefore
* all URLs returned must have been created before this date. May be null.
* @return
* Returns a complete set of the unique URLs of all available content in the store
* @param handler
 *            the callback that will be passed each URL
* @throws ContentIOException
* if an error occurs
* @throws UnsupportedOperationException
* if the store is unable to provide the information
* @throws ContentIOException
* if an IO error occurs
*/
public Set<String> getUrls(Date createdAfter, Date createdBefore);
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler) throws ContentIOException;
/**
* Deletes the content at the given URL.
@@ -229,8 +227,19 @@ public interface ContentStore
* if the store is unable to perform the action
* @throws UnsupportedContentUrlException
* if the content URL supplied is not supported by the store
* @throws ContentIOException
* @throws ContentIOException if an error occurs
* if an IO error occurs
*/
public boolean delete(String contentUrl);
/**
 * Interface to use during iteration over content URLs.
*
* @author Derek Hulley
* @since 2.0
*/
public interface ContentUrlHandler
{
void handle(String contentUrl);
}
}

View File

@@ -27,6 +27,7 @@ package org.alfresco.repo.content;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import net.sf.ehcache.Cache;
@@ -34,6 +35,7 @@ import net.sf.ehcache.CacheManager;
import org.alfresco.repo.cache.EhCacheAdapter;
import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.util.TempFileProvider;
@@ -217,6 +219,12 @@ public class RoutingContentStoreTest extends AbstractWritableContentStoreTest
{
return fileStore.getReader(contentUrl);
}
@Override
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler) throws ContentIOException
{
fileStore.getUrls(createdAfter, createdBefore, handler);
}
}
/**
@@ -239,5 +247,11 @@ public class RoutingContentStoreTest extends AbstractWritableContentStoreTest
{
throw new UnsupportedContentUrlException(this, contentUrl);
}
@Override
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler) throws ContentIOException
{
throw new UnsupportedOperationException("getUrls not supported");
}
}
}

View File

@@ -20,31 +20,31 @@
* and Open Source Software ("FLOSS") applications as described in Alfresco's
 * FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
* http://www.alfresco.com/legal/licensing
*/
package org.alfresco.repo.content.cleanup;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.avm.AVMNodeDAO;
import org.alfresco.repo.avm.AVMNodeDAO.ContentUrlHandler;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.domain.ContentUrlDAO;
import org.alfresco.repo.domain.Node;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.node.db.NodeDaoService.NodePropertyHandler;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.PropertyCheck;
import org.alfresco.util.VmShutdownListener;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -60,12 +60,11 @@ public class ContentStoreCleaner
{
private static Log logger = LogFactory.getLog(ContentStoreCleaner.class);
private static VmShutdownListener vmShutdownListener = new VmShutdownListener(ContentStoreCleaner.class.getName());
private DictionaryService dictionaryService;
private NodeDaoService nodeDaoService;
private TransactionService transactionService;
private AVMNodeDAO avmNodeDAO;
private ContentUrlDAO contentUrlDAO;
private TransactionService transactionService;
private List<ContentStore> stores;
private List<ContentStoreCleanerListener> listeners;
private int protectDays;
@@ -94,7 +93,6 @@ public class ContentStoreCleaner
}
/**
* Setter for Spring.
* @param avmNodeDAO The AVM Node DAO to get urls with.
*/
public void setAvmNodeDAO(AVMNodeDAO avmNodeDAO)
@@ -102,6 +100,14 @@ public class ContentStoreCleaner
this.avmNodeDAO = avmNodeDAO;
}
/**
* @param contentUrlDAO DAO for recording valid <b>Content URLs</b>
*/
public void setContentUrlDAO(ContentUrlDAO contentUrlDAO)
{
this.contentUrlDAO = contentUrlDAO;
}
/**
* @param transactionService the component to ensure proper transactional wrapping
*/
@@ -144,6 +150,8 @@ public class ContentStoreCleaner
{
PropertyCheck.mandatory(this, "dictionaryService", dictionaryService);
PropertyCheck.mandatory(this, "nodeDaoService", nodeDaoService);
PropertyCheck.mandatory(this, "avmNodeDAO", avmNodeDAO);
PropertyCheck.mandatory(this, "contentUrlDAO", contentUrlDAO);
PropertyCheck.mandatory(this, "transactionService", transactionService);
PropertyCheck.mandatory(this, "listeners", listeners);
@@ -160,126 +168,124 @@ public class ContentStoreCleaner
}
}
private Set<String> getValidUrls()
private void removeContentUrlsPresentInMetadata()
{
RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
final DataTypeDefinition contentDataType = dictionaryService.getDataType(DataTypeDefinition.CONTENT);
// wrap to make the request in a transaction
RetryingTransactionCallback<List<Serializable>> getUrlsCallback = new RetryingTransactionCallback<List<Serializable>>()
// Remove all the Content URLs for the ADM repository
// Handler that records the URL
final NodePropertyHandler nodePropertyHandler = new NodePropertyHandler()
{
public List<Serializable> execute() throws Throwable
public void handle(Node node, Serializable value)
{
return nodeDaoService.getPropertyValuesByActualType(contentDataType);
// Convert the values to ContentData and extract the URLs
ContentData contentData = DefaultTypeConverter.INSTANCE.convert(ContentData.class, value);
String contentUrl = contentData.getContentUrl();
if (contentUrl != null)
{
contentUrlDAO.deleteContentUrl(contentUrl);
}
}
};
// execute in READ-ONLY txn
List<Serializable> values = txnHelper.doInTransaction(getUrlsCallback, true);
final DataTypeDefinition contentDataType = dictionaryService.getDataType(DataTypeDefinition.CONTENT);
// execute in READ-WRITE txn
RetryingTransactionCallback<Object> getUrlsCallback = new RetryingTransactionCallback<Object>()
{
public Object execute() throws Exception
{
nodeDaoService.getPropertyValuesByActualType(contentDataType, nodePropertyHandler);
return null;
};
};
txnHelper.doInTransaction(getUrlsCallback);
// Do the same for the AVM repository.
RetryingTransactionCallback<List<String>> getAVMUrlsCallback = new RetryingTransactionCallback<List<String>>()
final ContentUrlHandler handler = new ContentUrlHandler()
{
public List<String> execute() throws Exception
public void handle(String contentUrl)
{
return avmNodeDAO.getContentUrls();
contentUrlDAO.deleteContentUrl(contentUrl);
}
};
// execute in READ-ONLY txn
List<String> avmContentUrls = txnHelper.doInTransaction(getAVMUrlsCallback, true);
// get all valid URLs
Set<String> validUrls = new HashSet<String>(values.size());
// convert the strings to objects and extract the URL
for (Serializable value : values)
// execute in READ-WRITE txn
RetryingTransactionCallback<Object> getAVMUrlsCallback = new RetryingTransactionCallback<Object>()
{
ContentData contentData = (ContentData) value;
if (contentData.getContentUrl() != null)
public Object execute() throws Exception
{
// a URL was present
validUrls.add(contentData.getContentUrl());
avmNodeDAO.getContentUrls(handler);
return null;
}
}
// put all the avm urls into validUrls.
for (String url : avmContentUrls)
};
txnHelper.doInTransaction(getAVMUrlsCallback);
}
private void addContentUrlsPresentInStores()
{
org.alfresco.repo.content.ContentStore.ContentUrlHandler handler = new org.alfresco.repo.content.ContentStore.ContentUrlHandler()
{
validUrls.add(url);
}
// done
if (logger.isDebugEnabled())
public void handle(String contentUrl)
{
contentUrlDAO.createContentUrl(contentUrl);
}
};
Date checkAllBeforeDate = new Date(System.currentTimeMillis() - (long) protectDays * 3600L * 1000L * 24L);
for (ContentStore store : stores)
{
logger.debug("Found " + validUrls.size() + " valid URLs in metadata");
store.getUrls(null, checkAllBeforeDate, handler);
}
return validUrls;
}
public void execute()
{
checkProperties();
try
if (logger.isDebugEnabled())
{
Set<String> validUrls = getValidUrls();
// now clean each store in turn
for (ContentStore store : stores)
logger.debug("Starting content store cleanup.");
}
// This handler removes the URLs from all the stores
final org.alfresco.repo.domain.ContentUrlDAO.ContentUrlHandler handler = new org.alfresco.repo.domain.ContentUrlDAO.ContentUrlHandler()
{
public void handle(String contentUrl)
{
try
for (ContentStore store : stores)
{
clean(validUrls, store);
}
catch (UnsupportedOperationException e)
{
throw new ContentIOException(
"Unable to clean store as the necessary operations are not supported: " + store,
e);
if (logger.isDebugEnabled())
{
if (store.isWriteSupported())
{
logger.debug(" Deleting content URL: " + contentUrl);
}
}
store.delete(contentUrl);
}
}
}
catch (ContentIOException e)
};
// execute in READ-WRITE txn
RetryingTransactionCallback<Object> executeCallback = new RetryingTransactionCallback<Object>()
{
throw e;
}
catch (Throwable e)
public Object execute() throws Exception
{
// Delete all the URLs
contentUrlDAO.deleteAllContentUrls();
// Populate the URLs from the content stores
addContentUrlsPresentInStores();
// Remove URLs present in the metadata
removeContentUrlsPresentInMetadata();
// Any remaining URLs are URLs present in the stores but not in the metadata
contentUrlDAO.getAllContentUrls(handler);
// Delete all the URLs
contentUrlDAO.deleteAllContentUrls();
return null;
};
};
transactionService.getRetryingTransactionHelper().doInTransaction(executeCallback);
// Done
if (logger.isDebugEnabled())
{
// If the VM is shutting down, then ignore
if (vmShutdownListener.isVmShuttingDown())
{
// Ignore
}
else
{
logger.error("Exception during cleanup of content", e);
}
}
}
private void clean(Set<String> validUrls, ContentStore store)
{
Date checkAllBeforeDate = new Date(System.currentTimeMillis() - (long) protectDays * 3600L * 1000L * 24L);
// get the store's URLs
Set<String> storeUrls = store.getUrls(null, checkAllBeforeDate);
// remove all URLs that occur in the validUrls
storeUrls.removeAll(validUrls);
// now clean the store
for (String url : storeUrls)
{
ContentReader sourceReader = store.getReader(url);
// announce this to the listeners
for (ContentStoreCleanerListener listener : listeners)
{
// get a fresh reader
ContentReader listenerReader = sourceReader.getReader();
// call it
listener.beforeDelete(listenerReader);
}
// delete it
store.delete(url);
if (logger.isDebugEnabled())
{
logger.debug("Removed URL from store: \n" +
" Store: " + store + "\n" +
" URL: " + url);
}
logger.debug(" Content store cleanup completed.");
}
}
}

View File

@@ -27,10 +27,12 @@ package org.alfresco.repo.content.cleanup;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import org.alfresco.repo.avm.AVMNodeDAO;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.repo.domain.ContentUrlDAO;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.dictionary.DictionaryService;
@@ -66,6 +68,7 @@ public class ContentStoreCleanerTest extends TestCase
DictionaryService dictionaryService = serviceRegistry.getDictionaryService();
NodeDaoService nodeDaoService = (NodeDaoService) ctx.getBean("nodeDaoService");
AVMNodeDAO avmNodeDAO = (AVMNodeDAO) ctx.getBean("avmNodeDAO");
ContentUrlDAO contentUrlDAO = (ContentUrlDAO) ctx.getBean("contentUrlDAO");
// we need a store
store = new FileContentStore(TempFileProvider.getTempDir().getAbsolutePath());
@@ -80,6 +83,7 @@ public class ContentStoreCleanerTest extends TestCase
cleaner.setDictionaryService(dictionaryService);
cleaner.setNodeDaoService(nodeDaoService);
cleaner.setAvmNodeDAO(avmNodeDAO);
cleaner.setContentUrlDAO(contentUrlDAO);
cleaner.setStores(Collections.singletonList(store));
cleaner.setListeners(Collections.singletonList(listener));
}
@@ -97,7 +101,6 @@ public class ContentStoreCleanerTest extends TestCase
// the content should have disappeared as it is not in the database
assertFalse("Unprotected content was not deleted", store.exists(contentUrl));
assertTrue("Content listener was not called with deletion", deletedUrls.contains(contentUrl));
}
public void testProtectedRemoval() throws Exception
@@ -116,6 +119,29 @@ public class ContentStoreCleanerTest extends TestCase
assertFalse("Content listener was called with deletion of protected URL", deletedUrls.contains(contentUrl));
}
public void testConcurrentRemoval() throws Exception
{
int threadCount = 2;
final CountDownLatch endLatch = new CountDownLatch(threadCount);
// Kick off the threads
for (int i = 0; i < threadCount; i++)
{
Thread thread = new Thread()
{
@Override
public void run()
{
cleaner.execute();
// Notify of completion
endLatch.countDown();
}
};
thread.start();
}
// Wait for them all to be done
endLatch.await();
}
private class DummyCleanerListener implements ContentStoreCleanerListener
{
public void beforeDelete(ContentReader reader) throws ContentIOException

View File

@@ -29,8 +29,6 @@ import java.io.IOException;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashSet;
import java.util.Set;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.content.AbstractContentStore;
@@ -398,29 +396,26 @@ public class FileContentStore extends AbstractContentStore
}
}
public Set<String> getUrls(Date createdAfter, Date createdBefore)
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler)
{
// recursively get all files within the root
Set<String> contentUrls = new HashSet<String>(1000);
getUrls(rootDirectory, contentUrls, createdAfter, createdBefore);
getUrls(rootDirectory, handler, createdAfter, createdBefore);
// done
if (logger.isDebugEnabled())
{
logger.debug("Listed all content URLS: \n" +
" store: " + this + "\n" +
" count: " + contentUrls.size());
" store: " + this);
}
return contentUrls;
}
/**
* @param directory the current directory to get the files from
* @param contentUrls the list of current content URLs to add to
* @param handler the callback to use for each URL
* @param createdAfter only get URLs for content create after this date
* @param createdBefore only get URLs for content created before this date
* @return Returns a list of all files within the given directory and all subdirectories
*/
private void getUrls(File directory, Set<String> contentUrls, Date createdAfter, Date createdBefore)
private void getUrls(File directory, ContentUrlHandler handler, Date createdAfter, Date createdBefore)
{
File[] files = directory.listFiles();
if (files == null)
@@ -433,7 +428,7 @@ public class FileContentStore extends AbstractContentStore
if (file.isDirectory())
{
// we have a subdirectory - recurse
getUrls(file, contentUrls, createdAfter, createdBefore);
getUrls(file, handler, createdAfter, createdBefore);
}
else
{
@@ -451,7 +446,8 @@ public class FileContentStore extends AbstractContentStore
}
// found a file - create the URL
String contentUrl = makeContentUrl(file);
contentUrls.add(contentUrl);
// Callback
handler.handle(contentUrl);
}
}
}

View File

@@ -626,7 +626,16 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
Map<QName, Serializable> changedProperties = null;
try
{
Map<String, Serializable> rawMetadata = extractRaw(reader);
Map<String, Serializable> rawMetadata = null;
// Check that the content has some meat
if (reader.getSize() > 0 && reader.exists())
{
rawMetadata = extractRaw(reader);
}
else
{
rawMetadata = new HashMap<String, Serializable>(1);
}
// Convert to system properties (standalone)
Map<QName, Serializable> systemProperties = mapRawToSystem(rawMetadata);
// Convert the properties according to the dictionary types

View File

@@ -28,6 +28,7 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import junit.framework.TestCase;
@@ -35,12 +36,15 @@ import junit.framework.TestCase;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.filestore.FileContentReader;
import org.alfresco.repo.content.filestore.FileContentWriter;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.PropertyMap;
import org.alfresco.util.TempFileProvider;
import org.springframework.context.ApplicationContext;
@@ -134,4 +138,29 @@ public abstract class AbstractMetadataExtracterTest extends TestCase
QUICK_DESCRIPTION,
DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(ContentModel.PROP_DESCRIPTION)));
}
public void testZeroLengthFile() throws Exception
{
MetadataExtracter extractor = getExtracter();
File file = TempFileProvider.createTempFile(getName(), ".bin");
ContentWriter writer = new FileContentWriter(file);
writer.getContentOutputStream().close();
ContentReader reader = writer.getReader();
// Try the zero length file against all supported mimetypes.
// Note: Normally the reader would need to be fetched for each access, but we need to be sure
// that the content is not accessed on the reader AT ALL.
PropertyMap properties = new PropertyMap();
List<String> mimetypes = mimetypeMap.getMimetypes();
for (String mimetype : mimetypes)
{
if (!extractor.isSupported(mimetype))
{
// Not interested
continue;
}
reader.setMimetype(mimetype);
extractor.extract(reader, properties);
assertEquals("There should not be any new properties", 0, properties.size());
}
}
}

View File

@@ -24,9 +24,8 @@
*/
package org.alfresco.repo.content.replication;
import java.util.Set;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.ContentStore.ContentUrlHandler;
import org.alfresco.repo.node.index.IndexRecovery;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter;
@@ -149,23 +148,28 @@ public class ContentStoreReplicator
}
}
/**
* Handler that does the actual replication
*
* @author Derek Hulley
* @since 2.0
*/
private class ReplicatingHandler implements ContentUrlHandler
{
public void handle(String contentUrl)
{
replicate(contentUrl);
}
}
/**
* Perform a full replication of all source to target URLs.
*/
private void replicate()
{
// get all the URLs from the source
Set<String> sourceUrls = sourceStore.getUrls();
// get all the URLs from the target
Set<String> targetUrls = targetStore.getUrls();
// remove source URLs that are present in the target
sourceUrls.removeAll(targetUrls);
// ensure that each remaining source URL is present in the target
for (String contentUrl : sourceUrls)
{
replicate(contentUrl);
}
ReplicatingHandler handler = new ReplicatingHandler();
// Iterate over all the URLs
sourceStore.getUrls(handler);
}
/**

View File

@@ -25,6 +25,7 @@
package org.alfresco.repo.content.replication;
import java.io.File;
import java.util.HashSet;
import java.util.Set;
import junit.framework.TestCase;
@@ -32,6 +33,7 @@ import junit.framework.TestCase;
import org.alfresco.repo.content.AbstractContentStore;
import org.alfresco.repo.content.ContentContext;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.ContentStore.ContentUrlHandler;
import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.util.GUID;
@@ -106,6 +108,21 @@ public class ContentStoreReplicatorTest extends TestCase
targetStore.exists(writer.getContentUrl()));
}
/**
* Handler that merely records the URL
*
* @author Derek Hulley
* @since 2.0
*/
private class UrlRecorder implements ContentUrlHandler
{
public Set<String> urls = new HashSet<String>(1027);
public void handle(String contentUrl)
{
urls.add(contentUrl);
}
}
/**
* Adds content to the source while the replicator is going as fast as possible.
* Just to make it more interesting, the content is sometimes put in the target
@@ -150,11 +167,13 @@ public class ContentStoreReplicatorTest extends TestCase
}
// check that we have an exact match of URLs
Set<String> sourceUrls = sourceStore.getUrls();
Set<String> targetUrls = targetStore.getUrls();
UrlRecorder sourceUrls = new UrlRecorder();
UrlRecorder targetUrls = new UrlRecorder();
sourceStore.getUrls(sourceUrls);
targetStore.getUrls(targetUrls);
sourceUrls.containsAll(targetUrls);
targetUrls.contains(sourceUrls);
sourceUrls.urls.containsAll(targetUrls.urls);
targetUrls.urls.contains(sourceUrls.urls);
}
/**

View File

@@ -25,9 +25,7 @@
package org.alfresco.repo.content.replication;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
@@ -370,32 +368,26 @@ public class ReplicatingContentStore extends AbstractContentStore
}
/**
* @return Returns the results as given by the primary store, and if inbound
* replication is active, merges the URLs from the secondary stores.
* Iterates over results as given by the primary store and all secondary stores. It is up to the handler to eliminate
* duplicates that will occur between the primary and secondary stores.
*/
public Set<String> getUrls(Date createdAfter, Date createdBefore) throws ContentIOException
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler) throws ContentIOException
{
Set<String> urls = new HashSet<String>(1024);
// add in URLs from primary store
Set<String> primaryUrls = primaryStore.getUrls(createdAfter, createdBefore);
urls.addAll(primaryUrls);
primaryStore.getUrls(createdAfter, createdBefore, handler);
// add in URLs from secondary stores (they are visible for reads)
for (ContentStore secondaryStore : secondaryStores)
{
Set<String> secondaryUrls = secondaryStore.getUrls(createdAfter, createdBefore);
// merge them
urls.addAll(secondaryUrls);
secondaryStore.getUrls(createdAfter, createdBefore, handler);
}
// done
if (logger.isDebugEnabled())
{
logger.debug("Found " + urls.size() + " URLs, of which " + primaryUrls.size() + " are primary: \n" +
logger.debug("Iterated over content URLs: \n" +
" created after: " + createdAfter + "\n" +
" created before: " + createdBefore);
}
return urls;
}
/**

View File

@@ -26,6 +26,7 @@ package org.alfresco.repo.content.replication;
import java.io.File;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.SynchronousQueue;
@@ -35,6 +36,7 @@ import java.util.concurrent.TimeUnit;
import org.alfresco.repo.content.AbstractWritableContentStoreTest;
import org.alfresco.repo.content.ContentContext;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.ContentStore.ContentUrlHandler;
import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
@@ -135,7 +137,15 @@ public class ReplicatingContentStoreTest extends AbstractWritableContentStoreTes
// check that the URL is present for each of the stores
for (ContentStore store : secondaryStores)
{
Set<String> urls = store.getUrls();
final Set<String> urls = new HashSet<String>(1027);
ContentUrlHandler handler = new ContentUrlHandler()
{
public void handle(String contentUrl)
{
urls.add(contentUrl);
}
};
store.getUrls(handler);
assertTrue("URL of new content not present in store", urls.contains(contentUrl) == mustExist);
}
}