Merged V2.2 to HEAD

8372: Merged V2.1 to V2.2
      8314: Merged V2.0 to V2.1
         7750: Fix for ACT-475: ContentStoreCleaner causes OutOfMemoryError
      8332: Made content URL column larger to accommodate the extra locale info present in 2.1
      8334: Build fix: V2.1 tighter on authentication for getTempWriter
   8376: Merged V2.1 to V2.2
      8325: Fix for AWC-1089
      8361: Workaround for WCM-882: All metadata extracters can now handle zero length files


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@8497 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
This commit is contained in:
Derek Hulley
2008-03-11 06:22:28 +00:00
parent ceed05d26f
commit cda4e6105f
33 changed files with 1102 additions and 246 deletions

View File

@@ -83,11 +83,22 @@ public interface AVMNodeDAO
public List<AVMNode> getOrphans(int batchSize);
/**
* Get all content urls in he AVM Repository.
* @return A List of URL Strings.
* Get all content urls in the AVM Repository.
* @param contentUrlHandler the handler that will be called with the URLs
*/
public List<String> getContentUrls();
public void getContentUrls(ContentUrlHandler handler);
/**
* A callback handler for iterating over the content URLs
*
* @author Derek Hulley
* @since 2.0
*/
public interface ContentUrlHandler
{
void handle(String contentUrl);
}
/**
* Get all the nodes that are new in the given store.
* @param store The store to query.

View File

@@ -38,6 +38,8 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.CacheMode;
import org.hibernate.Query;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
/**
@@ -157,10 +159,15 @@ class AVMNodeDAOHibernate extends HibernateDaoSupport implements
* @return A List of URL Strings.
*/
@SuppressWarnings("unchecked")
public List<String> getContentUrls()
public void getContentUrls(ContentUrlHandler handler)
{
Query query = getSession().getNamedQuery("PlainFileNode.GetContentUrls");
return (List<String>)query.list();
ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
while (results.next())
{
String contentUrl = results.getText(0);
handler.handle(contentUrl);
}
}
/**

View File

@@ -25,7 +25,6 @@
package org.alfresco.repo.content;
import java.util.Date;
import java.util.Set;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
@@ -150,27 +149,24 @@ public abstract class AbstractContentStore implements ContentStore
}
/**
* Searches for URLs using null dates.
*
* @see ContentStore#getUrls(java.util.Date, java.util.Date)
* @see #getUrls(Date, Date, ContentUrlHandler)
*/
public final Set<String> getUrls()
public final void getUrls(ContentUrlHandler handler) throws ContentIOException
{
return getUrls(null, null);
getUrls(null, null, handler);
}
/**
* Override if the derived class supports the operation.
* Override to provide an implementation. If no implementation is supplied, then the store will not support
* cleaning of orphaned content binaries.
*
* @throws UnsupportedOperationException always
*
* @since 2.1
* @throws UnsupportedOperationException always
*/
public Set<String> getUrls(Date createdAfter, Date createdBefore)
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler) throws ContentIOException
{
throw new UnsupportedOperationException();
}
/**
* Implement to supply a store-specific writer for the given existing content
* and optional target content URL.
@@ -255,6 +251,16 @@ public abstract class AbstractContentStore implements ContentStore
}
return writer;
}
/**
* @see ContentContext
* @see ContentStore#getWriter(ContentContext)
*/
public final ContentWriter getWriter(ContentReader existingContentReader, String newContentUrl)
{
ContentContext ctx = new ContentContext(existingContentReader, newContentUrl);
return getWriter(ctx);
}
/**
* Simple implementation that uses the
@@ -266,14 +272,4 @@ public abstract class AbstractContentStore implements ContentStore
ContentReader reader = getReader(contentUrl);
return reader.exists();
}
/**
* @see ContentContext
* @see ContentStore#getWriter(ContentContext)
*/
public final ContentWriter getWriter(ContentReader existingContentReader, String newContentUrl)
{
ContentContext ctx = new ContentContext(existingContentReader, newContentUrl);
return getWriter(ctx);
}
}

View File

@@ -27,12 +27,14 @@ package org.alfresco.repo.content;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.util.HashSet;
import java.util.Set;
import javax.transaction.UserTransaction;
import junit.framework.TestCase;
import org.alfresco.repo.content.ContentStore.ContentUrlHandler;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
@@ -118,7 +120,18 @@ public abstract class AbstractReadOnlyContentStoreTest extends TestCase
ContentStore store = getStore();
try
{
Set<String> contentUrls = store.getUrls();
final Set<String> contentUrls = new HashSet<String>(5);
ContentUrlHandler handler = new ContentUrlHandler()
{
public void handle(String contentUrl)
{
if (contentUrls.size() < 50)
{
contentUrls.add(contentUrl);
}
}
};
store.getUrls(handler);
if (contentUrls.size() > 0)
{
return (String) contentUrls.toArray()[0];

View File

@@ -25,9 +25,7 @@
package org.alfresco.repo.content;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
@@ -357,50 +355,38 @@ public abstract class AbstractRoutingContentStore implements ContentStore
return writer;
}
/**
* @see
*/
public ContentWriter getWriter(ContentReader existingContentReader, String newContentUrl) throws ContentIOException
{
return getWriter(new ContentContext(existingContentReader, newContentUrl));
}
/**
* Compile a set of URLs from all stores.
* @see #getUrls(Date, Date, ContentUrlHandler)
*/
public Set<String> getUrls() throws ContentIOException
public void getUrls(ContentUrlHandler handler) throws ContentIOException
{
Set<String> urls = new HashSet<String>(1139);
List<ContentStore> stores = getAllStores();
for (ContentStore store : stores)
{
Set<String> storeUrls = store.getUrls();
urls.addAll(storeUrls);
}
if (logger.isDebugEnabled())
{
logger.debug("Found " + urls.size() + " URLs from " + stores.size() + " stores");
}
return urls;
getUrls(null, null, handler);
}
/**
* Compile a set of URLs from all stores given the date range.
* Passes the call to each of the stores wrapped by this store
*
* @see ContentStore#getUrls(Date, Date, ContentUrlHandler)
*/
public Set<String> getUrls(Date createdAfter, Date createdBefore) throws ContentIOException
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler) throws ContentIOException
{
Set<String> urls = new HashSet<String>(1139);
List<ContentStore> stores = getAllStores();
for (ContentStore store : stores)
{
Set<String> storeUrls = store.getUrls(createdAfter, createdBefore);
urls.addAll(storeUrls);
try
{
store.getUrls(createdAfter, createdBefore, handler);
}
catch (UnsupportedOperationException e)
{
// Support of this is not mandatory
}
}
if (logger.isDebugEnabled())
{
logger.debug("Found " + urls.size() + " URLs from " + stores.size() + " stores");
}
return urls;
}
/**

View File

@@ -36,8 +36,8 @@ import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.util.Date;
import java.util.Locale;
import java.util.Set;
import org.alfresco.repo.content.ContentStore.ContentUrlHandler;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentStreamListener;
@@ -329,23 +329,34 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
assertTrue("After-content reader should be closed after reading", readerAfterWrite.isClosed());
}
/**
* Helper method to check if a store contains a particular URL using the getUrl method
*/
private boolean searchForUrl(ContentStore store, final String contentUrl, Date from, Date to)
{
final boolean[] found = new boolean[] {false};
ContentUrlHandler handler = new ContentUrlHandler()
{
public void handle(String checkContentUrl)
{
if (contentUrl.equals(checkContentUrl))
{
found[0] = true;
}
}
};
getStore().getUrls(from, to, handler);
return found[0];
}
public void testGetUrls()
{
ContentStore store = getStore();
try
{
store.getUrls();
}
catch (UnsupportedOperationException e)
{
logger.warn("Store test " + getName() + " not possible on " + store.getClass().getName());
return;
}
ContentWriter writer = getWriter();
writer.putContent("Content for " + getName());
Set<String> contentUrls = store.getUrls();
String contentUrl = writer.getContentUrl();
assertTrue("New content not found in URL set", contentUrls.contains(contentUrl));
final String contentUrl = writer.getContentUrl();
ContentStore store = getStore();
boolean inStore = searchForUrl(store, contentUrl, null, null);
assertTrue("New content not found in URL set", inStore);
}
public void testDeleteSimple() throws Exception
@@ -603,7 +614,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
// Ensure that this test can be done
try
{
store.getUrls();
searchForUrl(store, "abc", null, null);
}
catch (UnsupportedOperationException e)
{
@@ -612,10 +623,10 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
}
// Proceed with the test
ContentWriter writer = getWriter();
Set<String> contentUrls = store.getUrls();
String contentUrl = writer.getContentUrl();
assertTrue("Writer URL not listed by store", contentUrls.contains(contentUrl));
boolean inStore = searchForUrl(store, contentUrl, null, null);
assertTrue("Writer URL not listed by store", inStore);
Date yesterday = new Date(System.currentTimeMillis() - 3600L * 1000L * 24L);
@@ -623,12 +634,12 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
writer.putContent("The quick brown fox...");
// check again
contentUrls = store.getUrls();
assertTrue("Writer URL not listed by store", contentUrls.contains(contentUrl));
inStore = searchForUrl(store, contentUrl, null, null);
assertTrue("Writer URL not listed by store", inStore);
// check that the query for content created before this time yesterday doesn't return the URL
contentUrls = store.getUrls(null, yesterday);
assertFalse("URL was younger than required, but still shows up", contentUrls.contains(contentUrl));
inStore = searchForUrl(store, contentUrl, null, yesterday);
assertFalse("URL was younger than required, but still shows up", inStore);
}
/**

View File

@@ -25,7 +25,6 @@
package org.alfresco.repo.content;
import java.util.Date;
import java.util.Set;
import org.alfresco.service.cmr.repository.ContentAccessor;
import org.alfresco.service.cmr.repository.ContentIOException;
@@ -195,24 +194,23 @@ public interface ContentStore
*
* @see #getUrls(Date, Date)
*/
public Set<String> getUrls();
public void getUrls(ContentUrlHandler handler) throws ContentIOException;
/**
* Get a set of all content URLs in the store. This indicates all content
* available for reads.
* Get a set of all content URLs in the store. This indicates all content available for reads.
*
* @param createdAfter
* all URLs returned must have been created after this date. May be null.
* @param createdBefore
* all URLs returned must have been created before this date. May be null.
* @return
* Returns a complete set of the unique URLs of all available content in the store
* @param handler
* the callback that will be passed each URL
* @throws ContentIOException
* if an error occurs
* @throws UnsupportedOperationException
* if the store is unable to provide the information
* @throws ContentIOException
* if an IO error occurs
*/
public Set<String> getUrls(Date createdAfter, Date createdBefore);
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler) throws ContentIOException;
/**
* Deletes the content at the given URL.
@@ -229,8 +227,19 @@ public interface ContentStore
* if the store is unable to perform the action
* @throws UnsupportedContentUrlException
* if the content URL supplied is not supported by the store
* @throws ContentIOException
* @throws ContentIOException if an error occurs
* if an IO error occurs
*/
public boolean delete(String contentUrl);
/**
* Interface to use during iteration over content URLs.
*
* @author Derek Hulley
* @since 2.0
*/
public interface ContentUrlHandler
{
void handle(String contentUrl);
}
}

View File

@@ -27,6 +27,7 @@ package org.alfresco.repo.content;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import net.sf.ehcache.Cache;
@@ -34,6 +35,7 @@ import net.sf.ehcache.CacheManager;
import org.alfresco.repo.cache.EhCacheAdapter;
import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.util.TempFileProvider;
@@ -217,6 +219,12 @@ public class RoutingContentStoreTest extends AbstractWritableContentStoreTest
{
return fileStore.getReader(contentUrl);
}
@Override
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler) throws ContentIOException
{
fileStore.getUrls(createdAfter, createdBefore, handler);
}
}
/**
@@ -239,5 +247,11 @@ public class RoutingContentStoreTest extends AbstractWritableContentStoreTest
{
throw new UnsupportedContentUrlException(this, contentUrl);
}
@Override
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler) throws ContentIOException
{
throw new UnsupportedOperationException("getUrls not supported");
}
}
}

View File

@@ -20,31 +20,31 @@
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
* http://www.alfresco.com/legal/licensing
*/
package org.alfresco.repo.content.cleanup;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.avm.AVMNodeDAO;
import org.alfresco.repo.avm.AVMNodeDAO.ContentUrlHandler;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.domain.ContentUrlDAO;
import org.alfresco.repo.domain.Node;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.node.db.NodeDaoService.NodePropertyHandler;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.PropertyCheck;
import org.alfresco.util.VmShutdownListener;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -60,12 +60,11 @@ public class ContentStoreCleaner
{
private static Log logger = LogFactory.getLog(ContentStoreCleaner.class);
private static VmShutdownListener vmShutdownListener = new VmShutdownListener(ContentStoreCleaner.class.getName());
private DictionaryService dictionaryService;
private NodeDaoService nodeDaoService;
private TransactionService transactionService;
private AVMNodeDAO avmNodeDAO;
private ContentUrlDAO contentUrlDAO;
private TransactionService transactionService;
private List<ContentStore> stores;
private List<ContentStoreCleanerListener> listeners;
private int protectDays;
@@ -94,7 +93,6 @@ public class ContentStoreCleaner
}
/**
* Setter for Spring.
* @param avmNodeDAO The AVM Node DAO to get urls with.
*/
public void setAvmNodeDAO(AVMNodeDAO avmNodeDAO)
@@ -102,6 +100,14 @@ public class ContentStoreCleaner
this.avmNodeDAO = avmNodeDAO;
}
/**
* @param contentUrlDAO DAO for recording valid <b>Content URLs</b>
*/
public void setContentUrlDAO(ContentUrlDAO contentUrlDAO)
{
this.contentUrlDAO = contentUrlDAO;
}
/**
* @param transactionService the component to ensure proper transactional wrapping
*/
@@ -144,6 +150,8 @@ public class ContentStoreCleaner
{
PropertyCheck.mandatory(this, "dictionaryService", dictionaryService);
PropertyCheck.mandatory(this, "nodeDaoService", nodeDaoService);
PropertyCheck.mandatory(this, "avmNodeDAO", avmNodeDAO);
PropertyCheck.mandatory(this, "contentUrlDAO", contentUrlDAO);
PropertyCheck.mandatory(this, "transactionService", transactionService);
PropertyCheck.mandatory(this, "listeners", listeners);
@@ -160,126 +168,124 @@ public class ContentStoreCleaner
}
}
private Set<String> getValidUrls()
private void removeContentUrlsPresentInMetadata()
{
RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
final DataTypeDefinition contentDataType = dictionaryService.getDataType(DataTypeDefinition.CONTENT);
// wrap to make the request in a transaction
RetryingTransactionCallback<List<Serializable>> getUrlsCallback = new RetryingTransactionCallback<List<Serializable>>()
// Remove all the Content URLs for the ADM repository
// Handler that records the URL
final NodePropertyHandler nodePropertyHandler = new NodePropertyHandler()
{
public List<Serializable> execute() throws Throwable
public void handle(Node node, Serializable value)
{
return nodeDaoService.getPropertyValuesByActualType(contentDataType);
// Convert the values to ContentData and extract the URLs
ContentData contentData = DefaultTypeConverter.INSTANCE.convert(ContentData.class, value);
String contentUrl = contentData.getContentUrl();
if (contentUrl != null)
{
contentUrlDAO.deleteContentUrl(contentUrl);
}
}
};
// execute in READ-ONLY txn
List<Serializable> values = txnHelper.doInTransaction(getUrlsCallback, true);
final DataTypeDefinition contentDataType = dictionaryService.getDataType(DataTypeDefinition.CONTENT);
// execute in READ-WRITE txn
RetryingTransactionCallback<Object> getUrlsCallback = new RetryingTransactionCallback<Object>()
{
public Object execute() throws Exception
{
nodeDaoService.getPropertyValuesByActualType(contentDataType, nodePropertyHandler);
return null;
};
};
txnHelper.doInTransaction(getUrlsCallback);
// Do the same for the AVM repository.
RetryingTransactionCallback<List<String>> getAVMUrlsCallback = new RetryingTransactionCallback<List<String>>()
final ContentUrlHandler handler = new ContentUrlHandler()
{
public List<String> execute() throws Exception
public void handle(String contentUrl)
{
return avmNodeDAO.getContentUrls();
contentUrlDAO.deleteContentUrl(contentUrl);
}
};
// execute in READ-ONLY txn
List<String> avmContentUrls = txnHelper.doInTransaction(getAVMUrlsCallback, true);
// get all valid URLs
Set<String> validUrls = new HashSet<String>(values.size());
// convert the strings to objects and extract the URL
for (Serializable value : values)
// execute in READ-WRITE txn
RetryingTransactionCallback<Object> getAVMUrlsCallback = new RetryingTransactionCallback<Object>()
{
ContentData contentData = (ContentData) value;
if (contentData.getContentUrl() != null)
public Object execute() throws Exception
{
// a URL was present
validUrls.add(contentData.getContentUrl());
avmNodeDAO.getContentUrls(handler);
return null;
}
}
// put all the avm urls into validUrls.
for (String url : avmContentUrls)
};
txnHelper.doInTransaction(getAVMUrlsCallback);
}
private void addContentUrlsPresentInStores()
{
org.alfresco.repo.content.ContentStore.ContentUrlHandler handler = new org.alfresco.repo.content.ContentStore.ContentUrlHandler()
{
validUrls.add(url);
}
// done
if (logger.isDebugEnabled())
public void handle(String contentUrl)
{
contentUrlDAO.createContentUrl(contentUrl);
}
};
Date checkAllBeforeDate = new Date(System.currentTimeMillis() - (long) protectDays * 3600L * 1000L * 24L);
for (ContentStore store : stores)
{
logger.debug("Found " + validUrls.size() + " valid URLs in metadata");
store.getUrls(null, checkAllBeforeDate, handler);
}
return validUrls;
}
public void execute()
{
checkProperties();
try
if (logger.isDebugEnabled())
{
Set<String> validUrls = getValidUrls();
// now clean each store in turn
for (ContentStore store : stores)
logger.debug("Starting content store cleanup.");
}
// This handler removes the URLs from all the stores
final org.alfresco.repo.domain.ContentUrlDAO.ContentUrlHandler handler = new org.alfresco.repo.domain.ContentUrlDAO.ContentUrlHandler()
{
public void handle(String contentUrl)
{
try
for (ContentStore store : stores)
{
clean(validUrls, store);
}
catch (UnsupportedOperationException e)
{
throw new ContentIOException(
"Unable to clean store as the necessary operations are not supported: " + store,
e);
if (logger.isDebugEnabled())
{
if (store.isWriteSupported())
{
logger.debug(" Deleting content URL: " + contentUrl);
}
}
store.delete(contentUrl);
}
}
}
catch (ContentIOException e)
};
// execute in READ-WRITE txn
RetryingTransactionCallback<Object> executeCallback = new RetryingTransactionCallback<Object>()
{
throw e;
}
catch (Throwable e)
public Object execute() throws Exception
{
// Delete all the URLs
contentUrlDAO.deleteAllContentUrls();
// Populate the URLs from the content stores
addContentUrlsPresentInStores();
// Remove URLs present in the metadata
removeContentUrlsPresentInMetadata();
// Any remaining URLs are URLs present in the stores but not in the metadata
contentUrlDAO.getAllContentUrls(handler);
// Delete all the URLs
contentUrlDAO.deleteAllContentUrls();
return null;
};
};
transactionService.getRetryingTransactionHelper().doInTransaction(executeCallback);
// Done
if (logger.isDebugEnabled())
{
// If the VM is shutting down, then ignore
if (vmShutdownListener.isVmShuttingDown())
{
// Ignore
}
else
{
logger.error("Exception during cleanup of content", e);
}
}
}
private void clean(Set<String> validUrls, ContentStore store)
{
Date checkAllBeforeDate = new Date(System.currentTimeMillis() - (long) protectDays * 3600L * 1000L * 24L);
// get the store's URLs
Set<String> storeUrls = store.getUrls(null, checkAllBeforeDate);
// remove all URLs that occur in the validUrls
storeUrls.removeAll(validUrls);
// now clean the store
for (String url : storeUrls)
{
ContentReader sourceReader = store.getReader(url);
// announce this to the listeners
for (ContentStoreCleanerListener listener : listeners)
{
// get a fresh reader
ContentReader listenerReader = sourceReader.getReader();
// call it
listener.beforeDelete(listenerReader);
}
// delete it
store.delete(url);
if (logger.isDebugEnabled())
{
logger.debug("Removed URL from store: \n" +
" Store: " + store + "\n" +
" URL: " + url);
}
logger.debug(" Content store cleanup completed.");
}
}
}

View File

@@ -27,10 +27,12 @@ package org.alfresco.repo.content.cleanup;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import org.alfresco.repo.avm.AVMNodeDAO;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.repo.domain.ContentUrlDAO;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.dictionary.DictionaryService;
@@ -66,6 +68,7 @@ public class ContentStoreCleanerTest extends TestCase
DictionaryService dictionaryService = serviceRegistry.getDictionaryService();
NodeDaoService nodeDaoService = (NodeDaoService) ctx.getBean("nodeDaoService");
AVMNodeDAO avmNodeDAO = (AVMNodeDAO) ctx.getBean("avmNodeDAO");
ContentUrlDAO contentUrlDAO = (ContentUrlDAO) ctx.getBean("contentUrlDAO");
// we need a store
store = new FileContentStore(TempFileProvider.getTempDir().getAbsolutePath());
@@ -80,6 +83,7 @@ public class ContentStoreCleanerTest extends TestCase
cleaner.setDictionaryService(dictionaryService);
cleaner.setNodeDaoService(nodeDaoService);
cleaner.setAvmNodeDAO(avmNodeDAO);
cleaner.setContentUrlDAO(contentUrlDAO);
cleaner.setStores(Collections.singletonList(store));
cleaner.setListeners(Collections.singletonList(listener));
}
@@ -97,7 +101,6 @@ public class ContentStoreCleanerTest extends TestCase
// the content should have disappeared as it is not in the database
assertFalse("Unprotected content was not deleted", store.exists(contentUrl));
assertTrue("Content listener was not called with deletion", deletedUrls.contains(contentUrl));
}
public void testProtectedRemoval() throws Exception
@@ -116,6 +119,29 @@ public class ContentStoreCleanerTest extends TestCase
assertFalse("Content listener was called with deletion of protected URL", deletedUrls.contains(contentUrl));
}
public void testConcurrentRemoval() throws Exception
{
int threadCount = 2;
final CountDownLatch endLatch = new CountDownLatch(threadCount);
// Kick off the threads
for (int i = 0; i < threadCount; i++)
{
Thread thread = new Thread()
{
@Override
public void run()
{
cleaner.execute();
// Notify of completion
endLatch.countDown();
}
};
thread.start();
}
// Wait for them all to be done
endLatch.await();
}
private class DummyCleanerListener implements ContentStoreCleanerListener
{
public void beforeDelete(ContentReader reader) throws ContentIOException

View File

@@ -29,8 +29,6 @@ import java.io.IOException;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashSet;
import java.util.Set;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.content.AbstractContentStore;
@@ -398,29 +396,26 @@ public class FileContentStore extends AbstractContentStore
}
}
public Set<String> getUrls(Date createdAfter, Date createdBefore)
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler)
{
// recursively get all files within the root
Set<String> contentUrls = new HashSet<String>(1000);
getUrls(rootDirectory, contentUrls, createdAfter, createdBefore);
getUrls(rootDirectory, handler, createdAfter, createdBefore);
// done
if (logger.isDebugEnabled())
{
logger.debug("Listed all content URLS: \n" +
" store: " + this + "\n" +
" count: " + contentUrls.size());
" store: " + this);
}
return contentUrls;
}
/**
* @param directory the current directory to get the files from
* @param contentUrls the list of current content URLs to add to
* @param handler the callback to use for each URL
* @param createdAfter only get URLs for content create after this date
* @param createdBefore only get URLs for content created before this date
* @return Returns a list of all files within the given directory and all subdirectories
*/
private void getUrls(File directory, Set<String> contentUrls, Date createdAfter, Date createdBefore)
private void getUrls(File directory, ContentUrlHandler handler, Date createdAfter, Date createdBefore)
{
File[] files = directory.listFiles();
if (files == null)
@@ -433,7 +428,7 @@ public class FileContentStore extends AbstractContentStore
if (file.isDirectory())
{
// we have a subdirectory - recurse
getUrls(file, contentUrls, createdAfter, createdBefore);
getUrls(file, handler, createdAfter, createdBefore);
}
else
{
@@ -451,7 +446,8 @@ public class FileContentStore extends AbstractContentStore
}
// found a file - create the URL
String contentUrl = makeContentUrl(file);
contentUrls.add(contentUrl);
// Callback
handler.handle(contentUrl);
}
}
}

View File

@@ -626,7 +626,16 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
Map<QName, Serializable> changedProperties = null;
try
{
Map<String, Serializable> rawMetadata = extractRaw(reader);
Map<String, Serializable> rawMetadata = null;
// Check that the content has some meat
if (reader.getSize() > 0 && reader.exists())
{
rawMetadata = extractRaw(reader);
}
else
{
rawMetadata = new HashMap<String, Serializable>(1);
}
// Convert to system properties (standalone)
Map<QName, Serializable> systemProperties = mapRawToSystem(rawMetadata);
// Convert the properties according to the dictionary types

View File

@@ -28,6 +28,7 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import junit.framework.TestCase;
@@ -35,12 +36,15 @@ import junit.framework.TestCase;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.content.filestore.FileContentReader;
import org.alfresco.repo.content.filestore.FileContentWriter;
import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.PropertyMap;
import org.alfresco.util.TempFileProvider;
import org.springframework.context.ApplicationContext;
@@ -134,4 +138,29 @@ public abstract class AbstractMetadataExtracterTest extends TestCase
QUICK_DESCRIPTION,
DefaultTypeConverter.INSTANCE.convert(String.class, properties.get(ContentModel.PROP_DESCRIPTION)));
}
public void testZeroLengthFile() throws Exception
{
MetadataExtracter extractor = getExtracter();
File file = TempFileProvider.createTempFile(getName(), ".bin");
ContentWriter writer = new FileContentWriter(file);
writer.getContentOutputStream().close();
ContentReader reader = writer.getReader();
// Try the zero length file against all supported mimetypes.
// Note: Normally the reader would need to be fetched for each access, but we need to be sure
// that the content is not accessed on the reader AT ALL.
PropertyMap properties = new PropertyMap();
List<String> mimetypes = mimetypeMap.getMimetypes();
for (String mimetype : mimetypes)
{
if (!extractor.isSupported(mimetype))
{
// Not interested
continue;
}
reader.setMimetype(mimetype);
extractor.extract(reader, properties);
assertEquals("There should not be any new properties", 0, properties.size());
}
}
}

View File

@@ -24,9 +24,8 @@
*/
package org.alfresco.repo.content.replication;
import java.util.Set;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.ContentStore.ContentUrlHandler;
import org.alfresco.repo.node.index.IndexRecovery;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentWriter;
@@ -149,23 +148,28 @@ public class ContentStoreReplicator
}
}
/**
* Handler that does the actual replication
*
* @author Derek Hulley
* @since 2.0
*/
private class ReplicatingHandler implements ContentUrlHandler
{
public void handle(String contentUrl)
{
replicate(contentUrl);
}
}
/**
* Perform a full replication of all source to target URLs.
*/
private void replicate()
{
// get all the URLs from the source
Set<String> sourceUrls = sourceStore.getUrls();
// get all the URLs from the target
Set<String> targetUrls = targetStore.getUrls();
// remove source URLs that are present in the target
sourceUrls.removeAll(targetUrls);
// ensure that each remaining source URL is present in the target
for (String contentUrl : sourceUrls)
{
replicate(contentUrl);
}
ReplicatingHandler handler = new ReplicatingHandler();
// Iterate over all the URLs
sourceStore.getUrls(handler);
}
/**

View File

@@ -25,6 +25,7 @@
package org.alfresco.repo.content.replication;
import java.io.File;
import java.util.HashSet;
import java.util.Set;
import junit.framework.TestCase;
@@ -32,6 +33,7 @@ import junit.framework.TestCase;
import org.alfresco.repo.content.AbstractContentStore;
import org.alfresco.repo.content.ContentContext;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.ContentStore.ContentUrlHandler;
import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.util.GUID;
@@ -106,6 +108,21 @@ public class ContentStoreReplicatorTest extends TestCase
targetStore.exists(writer.getContentUrl()));
}
/**
 * Handler that merely records the URLs it is given, allowing the test to
 * compare the full URL populations of the source and target stores.
 *
 * @author Derek Hulley
 * @since 2.0
 */
private class UrlRecorder implements ContentUrlHandler
{
    // All URLs seen by this handler; accessed directly by the test code
    public Set<String> urls = new HashSet<String>(1027);
    // Record the URL; duplicates are collapsed by the Set
    public void handle(String contentUrl)
    {
        urls.add(contentUrl);
    }
}
/**
* Adds content to the source while the replicator is going as fast as possible.
* Just to make it more interesting, the content is sometimes put in the target
@@ -150,11 +167,13 @@ public class ContentStoreReplicatorTest extends TestCase
}
// check that we have an exact match of URLs
Set<String> sourceUrls = sourceStore.getUrls();
Set<String> targetUrls = targetStore.getUrls();
UrlRecorder sourceUrls = new UrlRecorder();
UrlRecorder targetUrls = new UrlRecorder();
sourceStore.getUrls(sourceUrls);
targetStore.getUrls(targetUrls);
sourceUrls.containsAll(targetUrls);
targetUrls.contains(sourceUrls);
sourceUrls.urls.containsAll(targetUrls.urls);
targetUrls.urls.contains(sourceUrls.urls);
}
/**

View File

@@ -25,9 +25,7 @@
package org.alfresco.repo.content.replication;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
@@ -370,32 +368,26 @@ public class ReplicatingContentStore extends AbstractContentStore
}
/**
* @return Returns the results as given by the primary store, and if inbound
* replication is active, merges the URLs from the secondary stores.
* Iterates over results as given by the primary store and all secondary stores. It is up to the handler to eliminate
* duplicates that will occur between the primary and secondary stores.
*/
public Set<String> getUrls(Date createdAfter, Date createdBefore) throws ContentIOException
public void getUrls(Date createdAfter, Date createdBefore, ContentUrlHandler handler) throws ContentIOException
{
Set<String> urls = new HashSet<String>(1024);
// add in URLs from primary store
Set<String> primaryUrls = primaryStore.getUrls(createdAfter, createdBefore);
urls.addAll(primaryUrls);
primaryStore.getUrls(createdAfter, createdBefore, handler);
// add in URLs from secondary stores (they are visible for reads)
for (ContentStore secondaryStore : secondaryStores)
{
Set<String> secondaryUrls = secondaryStore.getUrls(createdAfter, createdBefore);
// merge them
urls.addAll(secondaryUrls);
secondaryStore.getUrls(createdAfter, createdBefore, handler);
}
// done
if (logger.isDebugEnabled())
{
logger.debug("Found " + urls.size() + " URLs, of which " + primaryUrls.size() + " are primary: \n" +
logger.debug("Iterated over content URLs: \n" +
" created after: " + createdAfter + "\n" +
" created before: " + createdBefore);
}
return urls;
}
/**

View File

@@ -26,6 +26,7 @@ package org.alfresco.repo.content.replication;
import java.io.File;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.SynchronousQueue;
@@ -35,6 +36,7 @@ import java.util.concurrent.TimeUnit;
import org.alfresco.repo.content.AbstractWritableContentStoreTest;
import org.alfresco.repo.content.ContentContext;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.ContentStore.ContentUrlHandler;
import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentReader;
@@ -135,7 +137,15 @@ public class ReplicatingContentStoreTest extends AbstractWritableContentStoreTes
// check that the URL is present for each of the stores
for (ContentStore store : secondaryStores)
{
Set<String> urls = store.getUrls();
final Set<String> urls = new HashSet<String>(1027);
ContentUrlHandler handler = new ContentUrlHandler()
{
public void handle(String contentUrl)
{
urls.add(contentUrl);
}
};
store.getUrls(handler);
assertTrue("URL of new content not present in store", urls.contains(contentUrl) == mustExist);
}
}

View File

@@ -0,0 +1,49 @@
/*
* Copyright (C) 2005-2007 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have recieved a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing
*/
package org.alfresco.repo.domain;
/**
 * Interface for persistent <b>Content URL</b> objects.
 * <p>
 * Instances represent physically stored content.
 *
 * @author Derek Hulley
 * @since 2.0
 */
public interface ContentUrl
{
    /**
     * @return Returns the auto-generated ID
     */
    Long getId();
    
    /**
     * @return Returns the content URL identifying the stored content
     */
    String getContentUrl();
    
    /**
     * @param contentUrl the content URL identifying the stored content
     */
    void setContentUrl(String contentUrl);
    
//    // NOTE(review): orphan tracking is not implemented yet - kept commented out for future use
//    boolean isOrphaned();
//    
//    void setOrphaned(boolean orphaned);
}

View File

@@ -0,0 +1,73 @@
/*
* Copyright (C) 2005-2007 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have recieved a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing */
package org.alfresco.repo.domain;
import java.util.Set;
/**
 * Abstraction for manipulating <b>Content URL</b> entities.
 *
 * @author Derek Hulley
 * @since 2.0
 */
public interface ContentUrlDAO
{
    /**
     * Create a new <b>Content URL</b> or get an existing instance.
     *
     * @param contentUrl the URL of the stored content
     * @return the persistent entity for the URL
     */
    ContentUrl createContentUrl(String contentUrl);
    
    /**
     * Enumerate all the available <b>Content URLs</b>, calling back to the given handler.
     *
     * @param handler the component that will be called with each URL
     */
    void getAllContentUrls(ContentUrlHandler handler);
    
    /**
     * Delete the <b>Content URL</b>.
     *
     * @param contentUrl the URL of the entity to delete
     */
    void deleteContentUrl(String contentUrl);
    
    /**
     * Delete a set of <b>Content URLs</b>.
     *
     * @param contentUrls the URLs of the entities to delete
     */
    void deleteContentUrls(Set<String> contentUrls);
    
    /**
     * Delete all <b>Content URL</b> entities.
     */
    void deleteAllContentUrls();
    
    /**
     * A callback interface to handle <b>Content URLs</b> produced by iteration.
     *
     * @author Derek Hulley
     * @since 2.0
     */
    public interface ContentUrlHandler
    {
        void handle(String contentUrl);
    }
}

View File

@@ -0,0 +1,212 @@
package org.alfresco.repo.domain;
import java.util.HashSet;
import java.util.Set;
import javax.transaction.UserTransaction;
import junit.framework.TestCase;
import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper;
import org.springframework.context.ApplicationContext;
/**
 * Tests the DAO for <b>Content URL</b> entities: creation (including duplicates),
 * iteration via the callback handler, and the three deletion variants.
 *
 * @see org.alfresco.repo.domain.ContentUrlDAO
 *
 * @author Derek Hulley
 * @since 2.1
 */
public class ContentUrlDAOTest extends TestCase
{
    private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
    
    // DAO under test, resolved from the Spring context by bean name
    private ContentUrlDAO dao;
    // Used to demarcate a user transaction around each test
    private TransactionService transactionService;
    // Used to generate a genuine content URL via a temporary writer
    private ContentService contentService;
    
    @Override
    protected void setUp() throws Exception
    {
        ServiceRegistry serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY);
        dao = (ContentUrlDAO) ctx.getBean("contentUrlDAO");
        contentService = serviceRegistry.getContentService();
        transactionService = serviceRegistry.getTransactionService();
    }
    
    @Override
    protected void tearDown() throws Exception
    {
        // Nothing to clean up: each test manages (and rolls back on failure) its own transaction
    }
    
    /**
     * Checks that the same content URL can be persisted twice, each time
     * yielding a distinct entity with its own ID.
     */
    public void testCreateContentUrl() throws Throwable
    {
        UserTransaction txn = transactionService.getUserTransaction();
        try
        {
            txn.begin();
            // getTempWriter requires an authenticated caller, so run as SYSTEM
            RunAsWork<String> getTempWriterWork = new RunAsWork<String>()
            {
                public String doWork() throws Exception
                {
                    return contentService.getTempWriter().getContentUrl();
                }
            };
            String contentUrl = AuthenticationUtil.runAs(getTempWriterWork, AuthenticationUtil.SYSTEM_USER_NAME);
            // Make sure that it can be written in duplicate
            ContentUrl entity1 = dao.createContentUrl(contentUrl);
            ContentUrl entity2 = dao.createContentUrl(contentUrl);
            assertNotSame("Assigned IDs must be new", entity1.getId(), entity2.getId());
            txn.commit();
        }
        catch (Throwable e)
        {
            try { txn.rollback(); } catch (Throwable ee) {}
            throw e;
        }
    }
    
    /**
     * Persists {@code count} synthetic content URLs (deterministic names derived
     * from the test name) and returns the set of URLs created.
     */
    private Set<String> makeUrls(int count) throws Throwable
    {
        final Set<String> urls = new HashSet<String>(count);
        for (int i = 0; i < count; i++)
        {
            String contentUrl = String.format("%s%s/%04d", FileContentStore.STORE_PROTOCOL, getName(), i);
            dao.createContentUrl(contentUrl);
            urls.add(contentUrl);
        }
        return urls;
    }
    
    /**
     * Checks that iteration visits every URL that was created: each visited URL
     * is removed from the expectation set, which must end up empty.
     */
    public void testGetAllContentUrls() throws Throwable
    {
        UserTransaction txn = transactionService.getUserTransaction();
        try
        {
            txn.begin();
            final Set<String> urls = makeUrls(1000);
            // Now iterate over them in the same transaction
            ContentUrlDAO.ContentUrlHandler handler = new ContentUrlDAO.ContentUrlHandler()
            {
                public void handle(String contentUrl)
                {
                    urls.remove(contentUrl);
                }
            };
            dao.getAllContentUrls(handler);
            assertEquals("Not all content URLs were enumerated", 0, urls.size());
            txn.commit();
        }
        catch (Throwable e)
        {
            try { txn.rollback(); } catch (Throwable ee) {}
            throw e;
        }
    }
    
    /**
     * Deletes the created URLs one at a time and checks that none of them are
     * visited by a subsequent iteration (the expectation set stays full).
     */
    public void testDeleteContentUrl() throws Throwable
    {
        UserTransaction txn = transactionService.getUserTransaction();
        try
        {
            txn.begin();
            final Set<String> urls = makeUrls(1000);
            // Delete them
            for (String url : urls)
            {
                dao.deleteContentUrl(url);
            }
            // Now iterate over them in the same transaction
            ContentUrlDAO.ContentUrlHandler handler = new ContentUrlDAO.ContentUrlHandler()
            {
                public void handle(String contentUrl)
                {
                    urls.remove(contentUrl);
                }
            };
            dao.getAllContentUrls(handler);
            // All the URLs previously deleted will not have been removed from the Set
            assertEquals("Specific content URLs were not deleted", 1000, urls.size());
            txn.commit();
        }
        catch (Throwable e)
        {
            try { txn.rollback(); } catch (Throwable ee) {}
            throw e;
        }
    }
    
    /**
     * Deletes the created URLs in a single bulk call and checks that none of
     * them are visited by a subsequent iteration.
     */
    public void testDeleteContentUrls() throws Throwable
    {
        UserTransaction txn = transactionService.getUserTransaction();
        try
        {
            txn.begin();
            final Set<String> urls = makeUrls(1000);
            // Delete them
            dao.deleteContentUrls(urls);
            // Now iterate over them in the same transaction
            ContentUrlDAO.ContentUrlHandler handler = new ContentUrlDAO.ContentUrlHandler()
            {
                public void handle(String contentUrl)
                {
                    urls.remove(contentUrl);
                }
            };
            dao.getAllContentUrls(handler);
            // All the URLs previously deleted will not have been removed from the Set
            assertEquals("Specific content URLs were not deleted", 1000, urls.size());
            txn.commit();
        }
        catch (Throwable e)
        {
            try { txn.rollback(); } catch (Throwable ee) {}
            throw e;
        }
    }
    
    /**
     * Wipes all URL entities and checks that a subsequent iteration produces
     * no callbacks at all.
     */
    public void testDeleteAllContentUrls() throws Throwable
    {
        UserTransaction txn = transactionService.getUserTransaction();
        try
        {
            txn.begin();
            makeUrls(1000);
            // Delete them
            dao.deleteAllContentUrls();
            // Check that there are none left
            // Now iterate over them in the same transaction
            ContentUrlDAO.ContentUrlHandler handler = new ContentUrlDAO.ContentUrlHandler()
            {
                public void handle(String contentUrl)
                {
                    fail("There should not be any URLs remaining.");
                }
            };
            dao.getAllContentUrls(handler);
            txn.commit();
        }
        catch (Throwable e)
        {
            try { txn.rollback(); } catch (Throwable ee) {}
            throw e;
        }
    }
}

View File

@@ -0,0 +1,56 @@
<?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE hibernate-mapping PUBLIC
'-//Hibernate/Hibernate Mapping DTD 3.0//EN'
'http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd'>
<hibernate-mapping>
    <!-- Mapping for persistent Content URL entities (table alf_content_url). -->
    <class
            name="org.alfresco.repo.domain.hibernate.ContentUrlImpl"
            proxy="org.alfresco.repo.domain.ContentUrl"
            table="alf_content_url"
            dynamic-update="false"
            dynamic-insert="false"
            select-before-update="false"
            optimistic-lock="none" >
        <!-- auto-generated ID -->
        <!-- NOTE(review): the 'increment' generator keeps its counter in the JVM;
             confirm single-node usage or consider a database-native generator. -->
        <id name="id" column="id" type="long" >
            <generator class="increment" />
        </id>
        <!-- Indexed, non-unique: the same URL may be stored more than once. -->
        <property name="contentUrl" column="content_url" type="string" length="240" not-null="true" index="idx_alf_con_urls" />
        <!-- Orphan tracking is not implemented yet; column mapping kept for future use.
        <property name="isOrphaned" column="is_orphaned" type="boolean" not-null="true" />
        -->
    </class>
    <!-- Projection of all URLs; used for scrollable iteration. -->
    <query name="contentUrl.GetAll">
        select
            entity.contentUrl
        from
            org.alfresco.repo.domain.hibernate.ContentUrlImpl entity
    </query>
    <!-- Bulk delete of the URLs in the given list. -->
    <query name="contentUrl.DeleteInList">
        delete
        from
            org.alfresco.repo.domain.hibernate.ContentUrlImpl entity
        where
            entity.contentUrl in (:contentUrls)
    </query>
    <!-- Bulk delete of all entities matching a single URL. -->
    <query name="contentUrl.DeleteByUrl">
        delete
        from
            org.alfresco.repo.domain.hibernate.ContentUrlImpl entity
        where
            entity.contentUrl = :contentUrl
    </query>
    <!-- Bulk delete of every Content URL entity. -->
    <query name="contentUrl.DeleteAll">
        delete
        from
            org.alfresco.repo.domain.hibernate.ContentUrlImpl entity
    </query>
</hibernate-mapping>

View File

@@ -0,0 +1,86 @@
/*
* Copyright (C) 2005-2007 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have recieved a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing
*/
package org.alfresco.repo.domain.hibernate;
import java.io.Serializable;
import org.alfresco.repo.domain.ContentUrl;
/**
 * Bean containing all the persistence data representing a <b>Content Url</b>.
 * <p>
 * This implementation of the {@link org.alfresco.repo.domain.ContentUrl ContentUrl}
 * interface is Hibernate specific.
 *
 * @author Derek Hulley
 * @since 2.0
 */
public class ContentUrlImpl extends LifecycleAdapter implements ContentUrl, Serializable
{
    private static final long serialVersionUID = -7368859912728834288L;
    
    // Auto-generated primary key; assigned by Hibernate on save
    private Long id;
    // The URL of the physically stored content
    private String contentUrl;
//    private boolean isOrphaned;
    
    public ContentUrlImpl()
    {
//        isOrphaned = false;
    }
    
    public Long getId()
    {
        return id;
    }
    
    /**
     * For Hibernate Use
     */
    @SuppressWarnings("unused")
    private void setId(Long id)
    {
        this.id = id;
    }
    
    public String getContentUrl()
    {
        return contentUrl;
    }
    
    public void setContentUrl(String contentUrl)
    {
        this.contentUrl = contentUrl;
    }
    
//    // NOTE(review): orphan tracking is not implemented yet - kept commented out for future use
//    public boolean isOrphaned()
//    {
//        return isOrphaned;
//    }
//    
//    public void setOrphaned(boolean isOrphaned)
//    {
//        this.isOrphaned = isOrphaned;
//    }
}

View File

@@ -0,0 +1,123 @@
package org.alfresco.repo.domain.hibernate;
import java.util.Set;
import org.alfresco.repo.domain.ContentUrl;
import org.alfresco.repo.domain.ContentUrlDAO;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.CacheMode;
import org.hibernate.Query;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;
import org.hibernate.type.TypeFactory;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
/**
 * Hibernate-specific implementation of the DAO layer for <b>Content URLs</b>.
 * <p>
 * Iteration uses a forward-only scrollable cursor so the full result set is
 * never held in memory; deletions are executed as bulk HQL updates.
 *
 * @author Derek Hulley
 * @since 2.0
 */
public class HibernateContentUrlDAOImpl extends HibernateDaoSupport implements ContentUrlDAO
{
    private static final String QUERY_GET_ALL = "contentUrl.GetAll";
    private static final String UPDATE_DELETE_BY_URL = "contentUrl.DeleteByUrl";
    private static final String UPDATE_DELETE_IN_LIST = "contentUrl.DeleteInList";
    private static final String UPDATE_DELETE_ALL = "contentUrl.DeleteAll";
    
    private static Log logger = LogFactory.getLog(HibernateContentUrlDAOImpl.class);
    
    /**
     * Creates and saves a new entity for the given URL.  A new row is always
     * created, even if an entity for the URL already exists.
     *
     * @param contentUrl the URL to persist
     * @return the newly-saved entity
     */
    public ContentUrl createContentUrl(String contentUrl)
    {
        ContentUrl entity = new ContentUrlImpl();
        entity.setContentUrl(contentUrl);
        getSession().save(entity);
        return entity;
    }
    
    /**
     * Iterates over all content URLs, calling the handler for each one.  The
     * database cursor is closed even if the handler throws.
     */
    public void getAllContentUrls(final ContentUrlHandler handler)
    {
        HibernateCallback callback = new HibernateCallback()
        {
            public Object doInHibernate(Session session)
            {
                Query query = session
                        .getNamedQuery(HibernateContentUrlDAOImpl.QUERY_GET_ALL)
                        .setCacheMode(CacheMode.IGNORE);        // bypass the cache for the bulk read
                return query.scroll(ScrollMode.FORWARD_ONLY);
            }
        };
        ScrollableResults results = (ScrollableResults) getHibernateTemplate().execute(callback);
        try
        {
            while (results.next())
            {
                String contentUrl = results.getText(0);
                handler.handle(contentUrl);
            }
        }
        finally
        {
            // Fix: release the underlying database cursor (previously leaked)
            results.close();
        }
    }
    
    /**
     * Bulk-deletes all entities carrying the given URL.
     */
    public void deleteContentUrl(final String contentUrl)
    {
        HibernateCallback callback = new HibernateCallback()
        {
            public Object doInHibernate(Session session)
            {
                // Push any pending saves so the bulk delete sees them
                session.flush();
                Query query = session
                        .getNamedQuery(HibernateContentUrlDAOImpl.UPDATE_DELETE_BY_URL)
                        .setCacheMode(CacheMode.IGNORE)
                        .setString("contentUrl", contentUrl);
                return (Integer) query.executeUpdate();
            }
        };
        Integer deletedCount = (Integer) getHibernateTemplate().execute(callback);
        logDeleted(deletedCount);
    }
    
    /**
     * Bulk-deletes all entities whose URL is in the given set.  An empty set
     * is a no-op.
     */
    public void deleteContentUrls(final Set<String> contentUrls)
    {
        // Fix: an empty IN (...) parameter list produces invalid SQL, so bail out early
        if (contentUrls.isEmpty())
        {
            return;
        }
        HibernateCallback callback = new HibernateCallback()
        {
            public Object doInHibernate(Session session)
            {
                // Push any pending saves so the bulk delete sees them
                session.flush();
                Query query = session
                        .getNamedQuery(HibernateContentUrlDAOImpl.UPDATE_DELETE_IN_LIST)
                        .setCacheMode(CacheMode.IGNORE)
                        .setParameterList("contentUrls", contentUrls, TypeFactory.basic("string"));
                return (Integer) query.executeUpdate();
            }
        };
        Integer deletedCount = (Integer) getHibernateTemplate().execute(callback);
        logDeleted(deletedCount);
    }
    
    /**
     * Bulk-deletes every <b>Content URL</b> entity.
     */
    public void deleteAllContentUrls()
    {
        HibernateCallback callback = new HibernateCallback()
        {
            public Object doInHibernate(Session session)
            {
                // Push any pending saves so the bulk delete sees them
                session.flush();
                Query query = session
                        .getNamedQuery(HibernateContentUrlDAOImpl.UPDATE_DELETE_ALL)
                        .setCacheMode(CacheMode.IGNORE);
                return (Integer) query.executeUpdate();
            }
        };
        Integer deletedCount = (Integer) getHibernateTemplate().execute(callback);
        logDeleted(deletedCount);
    }
    
    /**
     * Debug-logs the number of entities removed by a bulk delete.
     */
    private void logDeleted(Integer deletedCount)
    {
        if (logger.isDebugEnabled())
        {
            logger.debug("Deleted " + deletedCount + " ContentUrl entities.");
        }
    }
}

View File

@@ -39,6 +39,7 @@ import org.alfresco.repo.domain.ChildAssoc;
import org.alfresco.repo.domain.Node;
import org.alfresco.repo.domain.NodeStatus;
import org.alfresco.repo.node.BaseNodeServiceTest;
import org.alfresco.repo.node.db.NodeDaoService.NodePropertyHandler;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
@@ -297,7 +298,15 @@ public class DbNodeServiceImplTest extends BaseNodeServiceTest
collection);
// get a list of all content values
List<Serializable> allContentDatas = nodeDaoService.getPropertyValuesByActualType(contentDataType);
final List<Serializable> allContentDatas = new ArrayList<Serializable>(500);
NodePropertyHandler handler = new NodePropertyHandler()
{
public void handle(Node node, Serializable value)
{
allContentDatas.add(value);
}
};
nodeDaoService.getPropertyValuesByActualType(contentDataType, handler);
assertTrue("At least two instances expected", allContentDatas.size() >= 2);
assertTrue("Single content data not present in results",
allContentDatas.contains(contentDataSingle));

View File

@@ -277,12 +277,20 @@ public interface NodeDaoService
public void deleteNodeAssoc(NodeAssoc assoc);
/**
* Fetch all property values for the given type definition. This will also dig out values that
* Iterate over all property values for the given type definition. This will also dig out values that
* were persisted as type <b>d:any</b>.
*
* @param actualDataTypeDefinition the persisted type to retrieve
* @param handler the callback to use while iterating over the URLs
* @return Returns the values for the given type definition
*/
public List<Serializable> getPropertyValuesByActualType(DataTypeDefinition actualDataTypeDefinition);
public void getPropertyValuesByActualType(DataTypeDefinition actualDataTypeDefinition, NodePropertyHandler handler);
/**
* Get properties with the given type and string value.
* TODO: Refactor as in getPropertyValuesByActualType
*/
public Collection<Node> getNodesWithPropertyStringValueForStore(StoreRef storeRef, QName propQName, String propStringValue);
/**
* @return Returns the total number of nodes in the ADM repository
@@ -293,7 +301,16 @@ public interface NodeDaoService
*/
public int getNodeCount(final StoreRef storeRef);
public Collection<Node> getNodesWithPropertyStringValueForStore(final StoreRef storeRef, final QName propQName, final String propStringValue);
/**
 * Interface to handle callbacks when iterating over properties.
 *
 * @author Derek Hulley
 * @since 2.0
 */
public interface NodePropertyHandler
{
    void handle(Node node, Serializable value);
}
public Transaction getTxnById(long txnId);
/**

View File

@@ -1376,7 +1376,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
getHibernateTemplate().delete(assoc);
}
public List<Serializable> getPropertyValuesByActualType(DataTypeDefinition actualDataTypeDefinition)
public void getPropertyValuesByActualType(DataTypeDefinition actualDataTypeDefinition, NodePropertyHandler handler)
{
// get the in-database string representation of the actual type
QName typeQName = actualDataTypeDefinition.getName();
@@ -1393,7 +1393,6 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
};
ScrollableResults results = (ScrollableResults) getHibernateTemplate().execute(callback);
// Loop through, extracting content URLs
List<Serializable> convertedValues = new ArrayList<Serializable>(1000);
TypeConverter converter = DefaultTypeConverter.INSTANCE;
int unflushedCount = 0;
while(results.next())
@@ -1418,16 +1417,19 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{
continue;
}
Serializable convertedValue = null;
try
{
Serializable convertedValue = (Serializable) converter.convert(actualDataTypeDefinition, value);
// it converted, so add it
convertedValues.add(convertedValue);
convertedValue = (Serializable) converter.convert(actualDataTypeDefinition, value);
}
catch (Throwable e)
{
// The value can't be converted - forget it
}
if (convertedValue != null)
{
handler.handle(node, convertedValue);
}
}
}
unflushedCount++;
@@ -1438,7 +1440,6 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
unflushedCount = 0;
}
}
return convertedValues;
}
/**