Mirror of https://github.com/Alfresco/alfresco-community-repo.git
Merged V3.2 to HEAD
19109: Merged V3.2.0 to V3.2
   19098: ALF-1960: Fixed issues when syncing with deeply nested LDAP groups

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@19116 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
@@ -33,7 +33,9 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
 import org.alfresco.error.AlfrescoRuntimeException;
+import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
 import org.alfresco.repo.transaction.RetryingTransactionHelper;
+import org.alfresco.repo.transaction.TransactionListenerAdapter;
 import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
 import org.alfresco.service.cmr.rule.RuleService;
 import org.apache.commons.logging.Log;
@@ -80,6 +82,9 @@ public class BatchProcessor<T> implements BatchMonitor
     /** The current entry id. */
     private String currentEntryId;
 
+    /** The number of batches currently executing. */
+    private int executingCount;
+
     /** The last error. */
     private Throwable lastError;
 
@@ -420,7 +425,7 @@ public class BatchProcessor<T> implements BatchMonitor
     /**
     * A callback that invokes a worker on a batch, optionally in a new transaction.
     */
-    class TxnCallback implements RetryingTransactionCallback<Object>, Runnable
+    class TxnCallback extends TransactionListenerAdapter implements RetryingTransactionCallback<Object>, Runnable
     {
 
        /**
@@ -464,6 +469,9 @@ public class BatchProcessor<T> implements BatchMonitor
        /** The last error entry id. */
        private String txnLastErrorEntryId;
 
+        /** Has a retryable failure occurred ? */
+        private boolean hadRetryFailure;
+
        /*
         * (non-Javadoc)
         * @see org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback#execute ()
@@ -471,13 +479,41 @@ public class BatchProcessor<T> implements BatchMonitor
        public Object execute() throws Throwable
        {
            reset();
+            if (this.batch.isEmpty())
+            {
+                return null;
+            }
+
+            // Bind this instance to the transaction
+            AlfrescoTransactionSupport.bindListener(this);
+
+            synchronized (BatchProcessor.this)
+            {
+                // If we are retrying after failure, assume there are cross-dependencies and wait for other
+                // executing batches to complete
+                if (this.hadRetryFailure)
+                {
+                    while (BatchProcessor.this.executingCount > 0)
+                    {
+                        if (BatchProcessor.this.logger.isDebugEnabled())
+                        {
+                            BatchProcessor.this.logger.debug(Thread.currentThread().getName()
+                                    + " Recoverable failure: waiting for other batches to complete");
+                        }
+                        BatchProcessor.this.wait();
+                    }
+                    if (BatchProcessor.this.logger.isDebugEnabled())
+                    {
+                        BatchProcessor.this.logger.debug(Thread.currentThread().getName() + " ready to execute");
+                    }
+                }
+                BatchProcessor.this.currentEntryId = this.worker.getIdentifier(this.batch.get(0));
+                BatchProcessor.this.executingCount++;
+            }
+
            for (T entry : this.batch)
            {
                this.txnEntryId = this.worker.getIdentifier(entry);
-                synchronized (BatchProcessor.this)
-                {
-                    BatchProcessor.this.currentEntryId = this.txnEntryId;
-                }
                try
                {
                    this.worker.process(entry);
@@ -498,6 +534,8 @@ public class BatchProcessor<T> implements BatchMonitor
                    }
                    else
                    {
+                        // Next time we retry, we will wait for other executing batches to complete
+                        this.hadRetryFailure = true;
                        throw t;
                    }
                }
@@ -621,6 +659,28 @@ public class BatchProcessor<T> implements BatchMonitor
                reset();
            }
        }
+
+        @Override
+        public void afterCommit()
+        {
+            // Wake up any waiting batches
+            synchronized (BatchProcessor.this)
+            {
+                BatchProcessor.this.executingCount--;
+                BatchProcessor.this.notifyAll();
+            }
+        }
+
+        @Override
+        public void afterRollback()
+        {
+            // Wake up any waiting batches
+            synchronized (BatchProcessor.this)
+            {
+                BatchProcessor.this.executingCount--;
+                BatchProcessor.this.notifyAll();
+            }
+        }
    }
 
 }
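Taken together, the BatchProcessor hunks above bind each batch transaction as a transaction listener and make a batch that is retried after a recoverable failure wait until no other batch is executing, on the assumption that the failure came from cross-dependencies between concurrently running batches. A minimal, self-contained sketch of that coordination pattern (hypothetical class and method names, not the Alfresco BatchProcessor API):

    // Sketch only: a retried batch waits until no other batch is executing,
    // and every finishing batch wakes the waiters.
    class BatchGate
    {
        private int executingCount;

        synchronized void acquire(boolean hadRetryFailure) throws InterruptedException
        {
            if (hadRetryFailure)
            {
                // Assume cross-dependencies: let the retry run on its own
                while (this.executingCount > 0)
                {
                    wait();
                }
            }
            this.executingCount++;
        }

        synchronized void release()
        {
            this.executingCount--;
            notifyAll();
        }
    }

In the commit itself, the acquire step corresponds to the synchronized block at the top of execute(), and the release step to afterCommit()/afterRollback() on the bound transaction listener.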
@@ -38,8 +38,6 @@ import org.alfresco.repo.node.AbstractNodeServiceImpl;
 import org.alfresco.repo.node.StoreArchiveMap;
 import org.alfresco.repo.node.index.NodeIndexer;
 import org.alfresco.repo.security.authentication.AuthenticationUtil;
-import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
-import org.alfresco.repo.transaction.TransactionListenerAdapter;
 import org.alfresco.repo.transaction.TransactionalResourceHelper;
 import org.alfresco.service.cmr.dictionary.AspectDefinition;
 import org.alfresco.service.cmr.dictionary.AssociationDefinition;
@@ -336,13 +334,6 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
        addMissingAspects(childNodePair, propertiesBefore, propertiesAfter);
        addMissingAspects(parentNodePair, assocTypeQName);
 
-        /**
-         * track new node ref so we can validate its path.
-         *
-         * it may be valid now, but who knows what will happen between
-         * now and commit!
-         */
-        trackNewNodeRef(childAssocRef.getChildRef());
        untrackDeletedNodeRef(childAssocRef.getChildRef());
 
        // Index
@@ -352,27 +343,6 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
        return childAssocRef;
    }
 
-    /**
-     * Track a new node ref so we can validate its path at commit time.
-     *
-     * It may have a valid path now, but who knows what will happen between
-     * now and commit!
-     *
-     * @param newNodeRef the node to track
-     */
-    private void trackNewNodeRef(NodeRef newNodeRef)
-    {
-        // bind a pre-commit listener to validate any new node associations
-        Set<NodeRef> newNodes = TransactionalResourceHelper.getSet(KEY_PRE_COMMIT_ADD_NODE);
-        if (newNodes.size() == 0)
-        {
-            PreCommitNewNodeListener listener = new PreCommitNewNodeListener();
-            AlfrescoTransactionSupport.bindListener(listener);
-        }
-        newNodes.add(newNodeRef);
-    }
-
-
 
    /**
     * Track a deleted node
@@ -427,59 +397,6 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
        }
    }
 
-    private class PreCommitNewNodeListener extends TransactionListenerAdapter
-    {
-        public void afterCommit()
-        {
-            // NO-OP
-        }
-
-        public void afterRollback()
-        {
-            // NO-OP
-
-        }
-
-        public void beforeCommit(boolean readOnly)
-        {
-            if (readOnly)
-            {
-                return;
-            }
-            Set<NodeRef> nodeRefs = TransactionalResourceHelper.getSet(KEY_PRE_COMMIT_ADD_NODE);
-            for (NodeRef nodeRef : nodeRefs)
-            {
-                // Need to check for exists since the node may be created
-                // and deleted within the same transaction
-                if(exists(nodeRef))
-                {
-                    try
-                    {
-                        // Check that the primary path is valid for this node
-                        getPaths(nodeRef, false);
-                    }
-                    catch (AlfrescoRuntimeException are)
-                    {
-                        throw new AlfrescoRuntimeException("Error while validating path:" + are.toString(), are);
-                    }
-                }
-            }
-            nodeRefs.clear();
-        }
-
-        public void beforeCompletion()
-        {
-            // NO-OP
-
-        }
-
-        public void flush()
-        {
-            // NO-OP
-        }
-    }
-
-
    /**
     * Adds all the default aspects and properties required for the given type.
     * Existing values will not be overridden.
@@ -1042,6 +959,10 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
        List<Pair<Long, NodeRef>> parentNodePairs = new ArrayList<Pair<Long, NodeRef>>(parentRefs.size());
        for (NodeRef parentRef : parentRefs)
        {
+            if (isDeletedNodeRef(parentRef))
+            {
+                throw new InvalidNodeRefException("The parent node has been deleted", parentRef);
+            }
            Pair<Long, NodeRef> parentNodePair = getNodePairNotNull(parentRef);
            Long parentNodeId = parentNodePair.getFirst();
            parentNodePairs.add(parentNodePair);
@@ -2231,6 +2152,11 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
            QName assocTypeQName,
            QName assocQName)
    {
+        if (isDeletedNodeRef(newParentRef))
+        {
+            throw new InvalidNodeRefException("The parent node has been deleted", newParentRef);
+        }
+
        Pair<Long, NodeRef> nodeToMovePair = getNodePairNotNull(nodeToMoveRef);
        Pair<Long, NodeRef> parentNodePair = getNodePairNotNull(newParentRef);
 
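The two DbNodeServiceImpl hunks above replace the removed commit-time path validation (trackNewNodeRef and PreCommitNewNodeListener) with an upfront check: a parent that has already been deleted in the current transaction is rejected immediately with InvalidNodeRefException. A rough, simplified sketch of the fail-fast idea (hypothetical class; the real implementation tracks deleted refs in transactional resources rather than a plain field):

    import java.util.HashSet;
    import java.util.Set;

    // Sketch only: remember which refs were deleted in this unit of work
    // and refuse to use them as parents.
    public class DeletedRefGuard
    {
        private final Set<String> deletedRefs = new HashSet<String>();

        public void trackDeleted(String ref)
        {
            this.deletedRefs.add(ref);
        }

        public void checkParent(String parentRef)
        {
            if (this.deletedRefs.contains(parentRef))
            {
                throw new IllegalStateException("The parent node has been deleted: " + parentRef);
            }
        }
    }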
@@ -2331,7 +2257,6 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
 
        // Check that there is not a cyclic relationship
        getPaths(newNodeToMoveRef, false);
-        trackNewNodeRef(newNodeToMoveRef);
 
        // Call behaviours
        if (movingStore)
@@ -32,6 +32,7 @@ import java.util.Collection;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.LinkedHashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Locale;
@@ -595,7 +596,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
        Map<QName, Serializable> properties = nodeService.getProperties(nodeRef);
        NodeRef.Status nodeStatus = nodeService.getNodeStatus(nodeRef);
 
-        Collection<Path> directPaths = nodeService.getPaths(nodeRef, false);
+        Collection<Path> directPaths = new LinkedHashSet<Path>(nodeService.getPaths(nodeRef, false));
        Collection<Pair<Path, QName>> categoryPaths = getCategoryPaths(nodeRef, properties);
        Collection<Pair<Path, QName>> paths = new ArrayList<Pair<Path, QName>>(directPaths.size() + categoryPaths.size());
        for (Path path : directPaths)
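Wrapping the result of nodeService.getPaths() in a LinkedHashSet de-duplicates the direct paths while preserving their order, so a path reported more than once is only processed once by the indexer. A quick illustration of that behaviour (example values only):

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.LinkedHashSet;
    import java.util.List;

    public class DedupExample
    {
        public static void main(String[] args)
        {
            // Duplicate entries collapse; first-seen order is kept.
            List<String> paths = Arrays.asList("/a/b", "/a/c", "/a/b");
            Collection<String> unique = new LinkedHashSet<String>(paths);
            System.out.println(unique); // [/a/b, /a/c]
        }
    }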
@@ -625,6 +626,8 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
        }
 
        boolean isRoot = nodeRef.equals(tenantService.getName(nodeService.getRootNode(nodeRef.getStoreRef())));
+        boolean mayHaveChildren = includeDirectoryDocuments && mayHaveChildren(nodeRef);
+        boolean isCategory = isCategory(getDictionaryService().getType(nodeService.getType(nodeRef)));
 
        StringBuilder qNameBuffer = new StringBuilder(64);
        StringBuilder assocTypeQNameBuffer = new StringBuilder(64);
@@ -683,28 +686,25 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl<NodeRef> imp
 
            // check for child associations
 
-            if (includeDirectoryDocuments)
+            if (mayHaveChildren)
            {
-                if (mayHaveChildren(nodeRef))
+                if (directPaths.contains(pair.getFirst()))
                {
-                    if (directPaths.contains(pair.getFirst()))
+                    Document directoryEntry = new Document();
+                    directoryEntry.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
+                    directoryEntry.add(new Field("PATH", pathString, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
+                    for (NodeRef parent : getParents(pair.getFirst()))
                    {
-                        Document directoryEntry = new Document();
-                        directoryEntry.add(new Field("ID", nodeRef.toString(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
-                        directoryEntry.add(new Field("PATH", pathString, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
-                        for (NodeRef parent : getParents(pair.getFirst()))
-                        {
-                            directoryEntry.add(new Field("ANCESTOR", tenantService.getName(parent).toString(), Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
-                        }
-                        directoryEntry.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
-
-                        if (isCategory(getDictionaryService().getType(nodeService.getType(nodeRef))))
-                        {
-                            directoryEntry.add(new Field("ISCATEGORY", "T", Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
-                        }
-
-                        docs.add(directoryEntry);
+                        directoryEntry.add(new Field("ANCESTOR", tenantService.getName(parent).toString(), Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
                    }
+                    directoryEntry.add(new Field("ISCONTAINER", "T", Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
+
+                    if (isCategory)
+                    {
+                        directoryEntry.add(new Field("ISCATEGORY", "T", Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
+                    }
+
+                    docs.add(directoryEntry);
                }
            }
        }
@@ -25,6 +25,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.Date;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -1152,10 +1153,23 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl
                // Filter out associations to unknown parent authorities
                associations.retainAll(allAuthorities);
                int insertIndex = authorityPath.size();
-                for (String parentAuthority : associations)
+                Iterator<String> i = associations.iterator();
+                while (i.hasNext())
                {
+                    String parentAuthority = i.next();
+
                    // Prevent cyclic paths
-                    if (!authorityPath.contains(parentAuthority))
+                    if (authorityPath.contains(parentAuthority))
+                    {
+                        if (ChainingUserRegistrySynchronizer.logger.isWarnEnabled())
+                        {
+                            ChainingUserRegistrySynchronizer.logger.warn("Detected cyclic dependencies in group '"
+                                    + ChainingUserRegistrySynchronizer.this.authorityService.getShortName(parentAuthority)
+                                    + "'");
+                        }
+                        i.remove();
+                    }
+                    else
                    {
                        authorityPath.add(parentAuthority);
                        visitGroupAssociations(authorityPath, allAuthorities, associationsOld, associationsNew);
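This hunk is the heart of the ALF-1960 fix: while walking group-to-parent associations, a parent that already appears on the current authority path indicates a cycle, so it is logged and pruned with Iterator.remove() instead of being recursed into, which previously could recurse without bound on deeply nested or cyclic LDAP group structures. A generic sketch of the same visited-path pruning technique (hypothetical data structures, not the ChainingUserRegistrySynchronizer API):

    import java.util.Iterator;
    import java.util.Map;
    import java.util.Set;

    // Sketch only: prune parents already on the current path instead of
    // recursing into them. Assumes the parent sets are mutable.
    public class GroupWalker
    {
        public static void visit(Set<String> path, String group, Map<String, Set<String>> parentsByGroup)
        {
            Set<String> parents = parentsByGroup.get(group);
            if (parents == null)
            {
                return;
            }
            Iterator<String> i = parents.iterator();
            while (i.hasNext())
            {
                String parent = i.next();
                if (path.contains(parent))
                {
                    // Cyclic membership detected: drop the association and carry on
                    i.remove();
                }
                else
                {
                    path.add(parent);
                    visit(path, parent, parentsByGroup);
                    path.remove(parent);
                }
            }
        }
    }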
@@ -29,6 +29,7 @@ import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Random;
 import java.util.Set;
 
 import junit.framework.TestCase;
@@ -363,7 +364,7 @@ public class ChainingUserRegistrySynchronizerTest extends TestCase
    public void testVolume() throws Exception
    {
        List<NodeDescription> persons = new ArrayList<NodeDescription>(new RandomPersonCollection(100));
-        List<NodeDescription> groups = new ArrayList<NodeDescription>(new RandomGroupCollection(100, persons));
+        List<NodeDescription> groups = new ArrayList<NodeDescription>(new RandomGroupCollection(50, persons));
        this.applicationContextManager.setUserRegistries(new MockUserRegistry("Z0", persons, groups));
        this.synchronizer.synchronize(true, true, true);
        tearDownTestUsersAndGroups();
@@ -777,6 +778,8 @@ public class ChainingUserRegistrySynchronizerTest extends TestCase
     */
    public class RandomGroupCollection extends AbstractCollection<NodeDescription>
    {
+        /** Use a fixed seed to give this class deterministic behaviour */
+        private Random generator = new Random(1628876500L);
 
        /** The collection size. */
        private final int size;
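Replacing Math.random() with a java.util.Random seeded with a fixed value makes RandomGroupCollection, and therefore the volume test, reproducible: the same seed always yields the same pseudo-random sequence. For illustration:

    import java.util.Random;

    public class SeededRandomExample
    {
        public static void main(String[] args)
        {
            // Two generators with the same seed produce identical sequences,
            // which is what makes a randomised test repeatable.
            Random a = new Random(1628876500L);
            Random b = new Random(1628876500L);
            System.out.println(a.nextInt(100) == b.nextInt(100)); // true
            System.out.println(a.nextInt(100) == b.nextInt(100)); // true
        }
    }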
@@ -839,12 +842,16 @@ public class ChainingUserRegistrySynchronizerTest extends TestCase
                    String[] authorityNames = new String[17];
                    for (int i = 0; i < authorityNames.length; i++)
                    {
+                        // Choose an authority at random from the list of known authorities
+                        int index = RandomGroupCollection.this.generator.nextInt(RandomGroupCollection.this.authorities
+                                .size());
                        authorityNames[i] = ChainingUserRegistrySynchronizerTest.this.authorityService
-                                .getShortName((String) RandomGroupCollection.this.authorities
-                                .get((int) (Math.random() * (double) (RandomGroupCollection.this.authorities
-                                .size() - 1))));
+                                .getShortName((String) RandomGroupCollection.this.authorities.get(index));
                    }
-                    return newGroup("G" + GUID.generate(), authorityNames);
+                    NodeDescription group = newGroup("G" + GUID.generate(), authorityNames);
+                    // Make this group a candidate for adding to other groups
+                    RandomGroupCollection.this.authorities.add((String) group.getProperties().get(ContentModel.PROP_AUTHORITY_NAME));
+                    return group;
                }
 
                public void remove()