diff --git a/config/alfresco/authentication-services-context.xml b/config/alfresco/authentication-services-context.xml
index 6184cae3e4..3cc770f437 100644
--- a/config/alfresco/authentication-services-context.xml
+++ b/config/alfresco/authentication-services-context.xml
@@ -1,273 +1,278 @@
   [hunk body not recoverable: the XML bean definitions were reduced to bare +/- markers. Values still legible on the removed and added sides: ${authentication.chain}, org.alfresco.repo.security.authentication.MutableAuthenticationDao, org.alfresco.jlan.server.SessionListener, cifsAuthenticator, org.alfresco.jlan.server.auth.ICifsAuthenticator, org.alfresco.repo.management.subsystems.ActivateableBean, ftpAuthenticator, org.alfresco.jlan.ftp.FTPAuthenticator, localAuthenticationService, org.alfresco.repo.security.authentication.AuthenticationComponent, ${server.transaction.mode.default}, authenticationComponent, org.alfresco.repo.security.sync.UserRegistrySynchronizer, ${user.name.caseSensitive}, ${domain.name.caseSensitive}, ${domain.separator}]
@@ -280,54 +285,55 @@
   [hunk body not recoverable; legible values: ${home.folder.creation.eager} (added side), ${spaces.store}, ${server.transaction.allow-writes}, true, SPLIT, false]
@@ -346,288 +352,310 @@
   [hunk body not recoverable; legible values: All, ${home.folder.creation.eager}, org.alfresco.repo.security.person.HomeFolderManager, ${spaces.user_homes.regex.key}, ${spaces.user_homes.regex.pattern}, ${spaces.user_homes.regex.group_order}, /${spaces.company_home.childname}, /${spaces.company_home.childname}/${spaces.guest_home.childname}, /${spaces.company_home.childname}/${spaces.user_homes.childname}, ${spaces.store}, Consumer, false, org.alfresco.repo.security.authentication.TicketComponent, ${authentication.ticket.validDuration}, ${authentication.ticket.ticketsExpire}, ${authentication.ticket.expiryMode}, %firstName%_%lastName%, 10, 8, ${alfresco_user_store.adminusername}, ${alfresco_user_store.guestusername}]
diff --git a/config/alfresco/bootstrap-context.xml b/config/alfresco/bootstrap-context.xml
index 30224b04f3..cfcc721df1 100644
--- a/config/alfresco/bootstrap-context.xml
+++ b/config/alfresco/bootstrap-context.xml
@@ -695,7 +695,6 @@
@@ -703,6 +702,15 @@
   [added bean property elements not recoverable beyond their values: ${home_folder_provider_synchronizer.enabled}, ${home_folder_provider_synchronizer.override_provider}, ${home_folder_provider_synchronizer.keep_empty_parents}]
diff --git a/config/alfresco/core-services-context.xml b/config/alfresco/core-services-context.xml
index 439a6745c4..79899178cf 100644
--- a/config/alfresco/core-services-context.xml
+++ b/config/alfresco/core-services-context.xml
@@ -758,8 +758,9 @@
alfresco/messages/forum-model
alfresco/messages/email-server-model
alfresco/messages/data-list-model
- alfresco/messages/transfer-model
- alfresco/messages/publishing-model
+ alfresco/messages/transfer-model
+ alfresco/messages/wcmapp-model
+ alfresco/messages/publishing-model
@@ -1235,7 +1236,7 @@
- search.LuceneFullTextSearchIndexer
+ search.luceneFullTextSearchIndexer
diff --git a/config/alfresco/ibatis/org.hibernate.dialect.Dialect/node-common-SqlMap.xml b/config/alfresco/ibatis/org.hibernate.dialect.Dialect/node-common-SqlMap.xml
index 1a24475249..6da6cd4f01 100644
--- a/config/alfresco/ibatis/org.hibernate.dialect.Dialect/node-common-SqlMap.xml
+++ b/config/alfresco/ibatis/org.hibernate.dialect.Dialect/node-common-SqlMap.xml
@@ -132,6 +132,9 @@
@@ -847,6 +850,10 @@
assoc.is_primary as is_primary,
assoc.assoc_index as assoc_index
+ ,childNode.transaction_id as childNodeTxnId
from
alf_child_assoc assoc
@@ -1070,8 +1077,8 @@
)
diff --git a/config/alfresco/repository.properties b/config/alfresco/repository.properties
index 03482b98e8..7ac609d102 100644
--- a/config/alfresco/repository.properties
+++ b/config/alfresco/repository.properties
@@ -364,6 +364,11 @@ alfresco_user_store.adminpassword=209c6174da490caeb422f3fa5a7ae634
# note: default guest username - should not be changed after installation
alfresco_user_store.guestusername=guest
+# Used to move home folders to a new location
+home_folder_provider_synchronizer.enabled=false
+home_folder_provider_synchronizer.override_provider=
+home_folder_provider_synchronizer.keep_empty_parents=false
+
# Spaces Archive Configuration
spaces.archive.store=archive://SpacesStore
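For anyone trying the new setting, a minimal override (assuming the usual alfresco-global.properties override mechanism; the provider bean name below is hypothetical) might look like:

    # Illustrative only: switch the one-off home folder move on
    home_folder_provider_synchronizer.enabled=true
    # Optionally force every user onto a single provider (bean name is an assumption)
    home_folder_provider_synchronizer.override_provider=userHomesHomeFolderProvider
    # Controls whether now-empty source folders are preserved; false matches the default above
    home_folder_provider_synchronizer.keep_empty_parents=false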
diff --git a/config/alfresco/subsystems/Search/lucene/index-recovery-context.xml b/config/alfresco/subsystems/Search/lucene/index-recovery-context.xml
index 93090f8718..3a3d359145 100644
--- a/config/alfresco/subsystems/Search/lucene/index-recovery-context.xml
+++ b/config/alfresco/subsystems/Search/lucene/index-recovery-context.xml
@@ -32,7 +32,7 @@
diff --git a/config/alfresco/subsystems/Search/lucene/lucene-search-context.xml b/config/alfresco/subsystems/Search/lucene/lucene-search-context.xml
index e57a3a5c81..6e7f8d6c4f 100644
--- a/config/alfresco/subsystems/Search/lucene/lucene-search-context.xml
+++ b/config/alfresco/subsystems/Search/lucene/lucene-search-context.xml
@@ -317,6 +317,9 @@
@@ -365,6 +368,9 @@
@@ -392,6 +398,9 @@
@@ -454,28 +463,13 @@
   [hunk body not recoverable; the removed bean wiring still shows org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer and ${server.transaction.mode.default}]
${fts.indexer.batchSize}
diff --git a/config/alfresco/subsystems/Search/lucene/scheduled-jobs-context.xml b/config/alfresco/subsystems/Search/lucene/scheduled-jobs-context.xml
index 172f42e807..49c2e1c6af 100644
--- a/config/alfresco/subsystems/Search/lucene/scheduled-jobs-context.xml
+++ b/config/alfresco/subsystems/Search/lucene/scheduled-jobs-context.xml
@@ -12,7 +12,7 @@
diff --git a/config/alfresco/workflow/wcm-workflow-messages.properties b/config/alfresco/workflow/wcm-workflow-messages.properties
index d6b81dbed6..b983ae3199 100644
--- a/config/alfresco/workflow/wcm-workflow-messages.properties
+++ b/config/alfresco/workflow/wcm-workflow-messages.properties
@@ -26,6 +26,8 @@ wcmwf_submit.node.submitpending.transition.cancel.title=Abort Submission
wcmwf_submit.node.submitpending.transition.cancel.description=Abort Submission
wcmwf_submit.node.submitpending.transition.launch.title=Submit Now
wcmwf_submit.node.submitpending.transition.launch.description=Submit Now
+wcmwf_submit.node.submitted.transition.onsubmit.title=On Submit
+wcmwf_submit.node.submitted.transition.onsubmit.description=On Submit
wcmwf_submitdirect.workflow.title=Web Site Submission (Direct)
wcmwf_submitdirect.workflow.description=Submit changes direct to staging sandbox
diff --git a/config/alfresco/workflow/wcm-workflow-messages_de.properties b/config/alfresco/workflow/wcm-workflow-messages_de.properties
index 31f0efc998..945df2c994 100755
--- a/config/alfresco/workflow/wcm-workflow-messages_de.properties
+++ b/config/alfresco/workflow/wcm-workflow-messages_de.properties
@@ -26,6 +26,8 @@ wcmwf_submit.node.submitpending.transition.cancel.title=Vorlage abbrechen
wcmwf_submit.node.submitpending.transition.cancel.description=Vorlage abbrechen
wcmwf_submit.node.submitpending.transition.launch.title=Jetzt vorlegen
wcmwf_submit.node.submitpending.transition.launch.description=Jetzt vorlegen
+wcmwf_submit.node.submitted.transition.onsubmit.title=Bei Vorlage
+wcmwf_submit.node.submitted.transition.onsubmit.description=Bei Vorlage
wcmwf_submitdirect.workflow.title=Site Vorlage (direkt)
wcmwf_submitdirect.workflow.description=Der Staging Sandbox die \u00c4nderungen direkt vorlegen
diff --git a/config/alfresco/workflow/wcm-workflow-messages_es.properties b/config/alfresco/workflow/wcm-workflow-messages_es.properties
index b3df4bded7..51467e86dc 100755
--- a/config/alfresco/workflow/wcm-workflow-messages_es.properties
+++ b/config/alfresco/workflow/wcm-workflow-messages_es.properties
@@ -26,6 +26,8 @@ wcmwf_submit.node.submitpending.transition.cancel.title=Abortar env\u00edo
wcmwf_submit.node.submitpending.transition.cancel.description=Abortar env\u00edo
wcmwf_submit.node.submitpending.transition.launch.title=Enviar ahora
wcmwf_submit.node.submitpending.transition.launch.description=Enviar ahora
+wcmwf_submit.node.submitted.transition.onsubmit.title=En env\u00edo
+wcmwf_submit.node.submitted.transition.onsubmit.description=En env\u00edo
wcmwf_submitdirect.workflow.title=Env\u00edo de sitio Web (directo)
wcmwf_submitdirect.workflow.description=Enviar cambios directamente al espacio de prueba inicial
diff --git a/config/alfresco/workflow/wcm-workflow-messages_fr.properties b/config/alfresco/workflow/wcm-workflow-messages_fr.properties
index 5a81fd5d50..f2be917f92 100755
--- a/config/alfresco/workflow/wcm-workflow-messages_fr.properties
+++ b/config/alfresco/workflow/wcm-workflow-messages_fr.properties
@@ -26,6 +26,8 @@ wcmwf_submit.node.submitpending.transition.cancel.title=Abandonner la soumission
wcmwf_submit.node.submitpending.transition.cancel.description=Abandonner la soumission
wcmwf_submit.node.submitpending.transition.launch.title=Soumettre maintenant
wcmwf_submit.node.submitpending.transition.launch.description=Soumettre maintenant
+wcmwf_submit.node.submitted.transition.onsubmit.title=A la soumission
+wcmwf_submit.node.submitted.transition.onsubmit.description=A la soumission
wcmwf_submitdirect.workflow.title=Soumission de site Web (directe)
wcmwf_submitdirect.workflow.description=Soumettre les modifications directement dans le bac \u00e0 sable de r\u00e9serve
diff --git a/config/alfresco/workflow/wcm-workflow-messages_it.properties b/config/alfresco/workflow/wcm-workflow-messages_it.properties
index 10f3142926..537558394d 100755
--- a/config/alfresco/workflow/wcm-workflow-messages_it.properties
+++ b/config/alfresco/workflow/wcm-workflow-messages_it.properties
@@ -26,6 +26,8 @@ wcmwf_submit.node.submitpending.transition.cancel.title=Interrompi invio
wcmwf_submit.node.submitpending.transition.cancel.description=Interrompi invio
wcmwf_submit.node.submitpending.transition.launch.title=Invia ora
wcmwf_submit.node.submitpending.transition.launch.description=Invia ora
+wcmwf_submit.node.submitted.transition.onsubmit.title=In sottomissione
+wcmwf_submit.node.submitted.transition.onsubmit.description=In sottomissione
wcmwf_submitdirect.workflow.title=Invio sito web (diretto)
wcmwf_submitdirect.workflow.description=Invia le modifiche direttamente alla sandbox temporanea
diff --git a/config/alfresco/workflow/wcm-workflow-messages_ja.properties b/config/alfresco/workflow/wcm-workflow-messages_ja.properties
index 823be36729..e9e1f28ca0 100755
--- a/config/alfresco/workflow/wcm-workflow-messages_ja.properties
+++ b/config/alfresco/workflow/wcm-workflow-messages_ja.properties
@@ -26,6 +26,8 @@ wcmwf_submit.node.submitpending.transition.cancel.title=\u63d0\u51fa\u306e\u505c
wcmwf_submit.node.submitpending.transition.cancel.description=\u63d0\u51fa\u306e\u505c\u6b62
wcmwf_submit.node.submitpending.transition.launch.title=\u4eca\u3059\u3050\u9001\u4fe1
wcmwf_submit.node.submitpending.transition.launch.description=\u4eca\u3059\u3050\u9001\u4fe1
+wcmwf_submit.node.submitted.transition.onsubmit.title=\u9001\u4fe1\u3057\u307e\u3059
+wcmwf_submit.node.submitted.transition.onsubmit.description=\u9001\u4fe1\u3057\u307e\u3059
wcmwf_submitdirect.workflow.title=Web\u30b5\u30a4\u30c8\u63d0\u51fa\uff08\u76f4\u63a5\uff09
wcmwf_submitdirect.workflow.description=\u30b9\u30c6\u30fc\u30b8\u30f3\u30b0\u30fb\u30b5\u30f3\u30c9\u30dc\u30c3\u30af\u30b9\u306b\u5909\u66f4\u3092\u76f4\u63a5\u9001\u4fe1
diff --git a/source/java/org/alfresco/repo/action/executer/CopyActionExecuter.java b/source/java/org/alfresco/repo/action/executer/CopyActionExecuter.java
index 7d8d40e9ca..1a1a852a90 100644
--- a/source/java/org/alfresco/repo/action/executer/CopyActionExecuter.java
+++ b/source/java/org/alfresco/repo/action/executer/CopyActionExecuter.java
@@ -54,24 +54,29 @@ public class CopyActionExecuter extends ActionExecuterAbstractBase
public static final String PARAM_OVERWRITE_COPY = "overwrite-copy";
private CopyService copyService;
- private NodeService nodeService;
- private CheckOutCheckInService checkOutCheckInService;
+ /**
+ * The node service
+ */
+ private NodeService nodeService;
+ private CheckOutCheckInService checkOutCheckInService;
+
/**
* Sets the node service
*/
- public void setNodeService(NodeService nodeService)
- {
- this.nodeService = nodeService;
- }
-
+ public void setNodeService(NodeService nodeService)
+ {
+ this.nodeService = nodeService;
+ }
+
/**
* Sets the copy service
*/
- public void setCopyService(CopyService copyService)
- {
- this.copyService = copyService;
- }
+ public void setCopyService(CopyService copyService)
+ {
+ this.copyService = copyService;
+ }
+
/**
* Service to determine check-in or check-out status
@@ -82,30 +87,30 @@ public class CopyActionExecuter extends ActionExecuterAbstractBase
}
@Override
- protected void addParameterDefinitions(List<ParameterDefinition> paramList)
- {
- paramList.add(new ParameterDefinitionImpl(PARAM_DESTINATION_FOLDER, DataTypeDefinition.NODE_REF, true, getParamDisplayLabel(PARAM_DESTINATION_FOLDER)));
- paramList.add(new ParameterDefinitionImpl(PARAM_DEEP_COPY, DataTypeDefinition.BOOLEAN, false, getParamDisplayLabel(PARAM_DEEP_COPY)));
+ protected void addParameterDefinitions(List<ParameterDefinition> paramList)
+ {
+ paramList.add(new ParameterDefinitionImpl(PARAM_DESTINATION_FOLDER, DataTypeDefinition.NODE_REF, true, getParamDisplayLabel(PARAM_DESTINATION_FOLDER)));
+ paramList.add(new ParameterDefinitionImpl(PARAM_DEEP_COPY, DataTypeDefinition.BOOLEAN, false, getParamDisplayLabel(PARAM_DEEP_COPY)));
paramList.add(new ParameterDefinitionImpl(PARAM_OVERWRITE_COPY, DataTypeDefinition.BOOLEAN, false, getParamDisplayLabel(PARAM_OVERWRITE_COPY)));
- }
+ }
@Override
public void executeImpl(Action ruleAction, NodeRef actionedUponNodeRef)
{
if (!nodeService.exists(actionedUponNodeRef))
- {
+ {
return;
}
- NodeRef destinationParent = (NodeRef)ruleAction.getParameterValue(PARAM_DESTINATION_FOLDER);
-
- // Get the deep copy value
- boolean deepCopy = false;
+ NodeRef destinationParent = (NodeRef)ruleAction.getParameterValue(PARAM_DESTINATION_FOLDER);
+
+ // Get the deep copy value
+ boolean deepCopy = false;
Boolean deepCopyValue = (Boolean)ruleAction.getParameterValue(PARAM_DEEP_COPY);
if (deepCopyValue != null)
{
deepCopy = deepCopyValue.booleanValue();
}
-
+
// Get the overwrite value
boolean overwrite = true;
Boolean overwriteValue = (Boolean)ruleAction.getParameterValue(PARAM_OVERWRITE_COPY);
@@ -154,11 +159,11 @@ public class CopyActionExecuter extends ActionExecuterAbstractBase
ChildAssociationRef originalAssoc = nodeService.getPrimaryParent(actionedUponNodeRef);
// Create a new copy of the node
this.copyService.copyAndRename(
- actionedUponNodeRef,
- destinationParent,
- originalAssoc.getTypeQName(),
- originalAssoc.getQName(),
- deepCopy);
+ actionedUponNodeRef,
+ destinationParent,
+ originalAssoc.getTypeQName(),
+ originalAssoc.getQName(),
+ deepCopy);
}
- }
+ }
}
diff --git a/source/java/org/alfresco/repo/domain/node/AbstractNodeDAOImpl.java b/source/java/org/alfresco/repo/domain/node/AbstractNodeDAOImpl.java
index bc6730c6e0..27cb1f1006 100644
--- a/source/java/org/alfresco/repo/domain/node/AbstractNodeDAOImpl.java
+++ b/source/java/org/alfresco/repo/domain/node/AbstractNodeDAOImpl.java
@@ -519,7 +519,20 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
selectChildAssocs(parentNodeId, null, null, null, null, null, callback);
}
}
-
+
+ /**
+ * Invalidates all cached artefacts for a particular node, forcing a refresh.
+ *
+ * @param nodeId the node ID
+ */
+ private void invalidateNodeCaches(Long nodeId)
+ {
+ invalidateCachesByNodeId(null, nodeId, nodesCache);
+ invalidateCachesByNodeId(null, nodeId, propertiesCache);
+ invalidateCachesByNodeId(null, nodeId, aspectsCache);
+ invalidateCachesByNodeId(null, nodeId, parentAssocsCache);
+ }
+
/*
* Transactions
*/
@@ -792,27 +805,52 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
public Status getNodeRefStatus(NodeRef nodeRef)
{
- // First check the cache of live nodes
- Node node = new NodeEntity(nodeRef);
- Pair<Long, Node> pair = nodesCache.getByValue(node);
- if (pair == null)
+ Node node = null;
+
+ // Stage 1: check the cache without reading through
+ Long nodeId = nodesCache.getKey(nodeRef);
+ if (nodeId != null)
{
- // It's not there, so select ignoring the 'deleted' flag
- node = selectNodeByNodeRef(nodeRef, null);
+ node = nodesCache.getValue(nodeId);
+ // If the node isn't for the current transaction, we are probably reindexing. So invalidate the cache,
+ // forcing a read through.
+ if (node == null || AlfrescoTransactionSupport.getTransactionReadState() != TxnReadState.TXN_READ_WRITE
+ || !getCurrentTransaction().getId().equals(node.getTransaction().getId())
+ || !node.getNodeRef().equals(nodeRef))
+ {
+ invalidateNodeCaches(nodeId);
+ node = null;
+ }
}
- else
+
+ // Stage 2, read through to the database, caching results if appropriate
+ if (node == null)
{
- node = pair.getSecond();
+ Pair<Long, Node> pair = nodesCache.getByValue(new NodeEntity(nodeRef));
+ if (pair == null)
+ {
+ // It's not there, so select ignoring the 'deleted' flag
+ node = selectNodeByNodeRef(nodeRef, null);
+ if (node != null)
+ {
+ // Invalidate anything cached for this node ID, just in case it has moved store, etc.
+ invalidateNodeCaches(node.getId());
+ }
+ }
+ else
+ {
+ // We have successfully populated the cache
+ node = pair.getSecond();
+ }
}
+
if (node == null)
{
return null;
}
- else
- {
- Transaction txn = node.getTransaction();
- return new NodeRef.Status(nodeRef, txn.getChangeTxnId(), txn.getId(), node.getDeleted());
- }
+
+ Transaction txn = node.getTransaction();
+ return new NodeRef.Status(nodeRef, txn.getChangeTxnId(), txn.getId(), node.getDeleted());
}
public Pair getNodePair(NodeRef nodeRef)
@@ -930,7 +968,8 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
// There will be no other parent assocs
boolean isRoot = false;
boolean isStoreRoot = nodeTypeQName.equals(ContentModel.TYPE_STOREROOT);
- ParentAssocsInfo parentAssocsInfo = new ParentAssocsInfo(isRoot, isStoreRoot, assoc);
+ ParentAssocsInfo parentAssocsInfo = new ParentAssocsInfo(node.getTransaction().getId(), isRoot, isStoreRoot,
+ assoc);
parentAssocsCache.setValue(nodeId, parentAssocsInfo);
// Ensure that cm:auditable values are propagated, if required
@@ -3156,6 +3195,22 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
if (!hasParents && !parentAssocInfo.isRoot())
{
+ // We appear to have an orphaned node. But we may just have a temporarily out of sync clustered cache or a
+ // transaction that started ages before the one that committed the cache content! So double check the node
+ // isn't actually deleted.
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("Stale cache detected for Node #" + currentNodeId + ": removing from cache.");
+ }
+ invalidateNodeCaches(currentNodeId);
+
+ Status currentNodeStatus = getNodeRefStatus(currentNodeRef);
+ if (currentNodeStatus == null || currentNodeStatus.isDeleted())
+ {
+ // Force a retry. The cached node was stale
+ throw new DataIntegrityViolationException("Stale cache detected for Node #" + currentNodeId);
+ }
+ // We have a corrupt repository
throw new RuntimeException("Node without parents does not have root aspect: " + currentNodeRef);
}
// walk up each parent association
@@ -3210,12 +3265,40 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
*/
private ParentAssocsInfo getParentAssocsCached(Long nodeId)
{
- Pair<Long, ParentAssocsInfo> cacheEntry = parentAssocsCache.getByKey(nodeId);
- if (cacheEntry == null)
+ // We try to protect here against 'skew' between a cached node and its parent associations
+ // Unfortunately due to overlapping DB transactions and consistent read behaviour a thread
+ // can end up loading old associations and succeed in committing them to the shared cache
+ // without any conflicts
+
+ // Allow for a single retry after cache validation
+ for (int i = 0; i < 2; i++)
{
- throw new DataIntegrityViolationException("Invalid node ID: " + nodeId);
+ Pair<Long, ParentAssocsInfo> cacheEntry = parentAssocsCache.getByKey(nodeId);
+ if (cacheEntry == null)
+ {
+ throw new DataIntegrityViolationException("Invalid node ID: " + nodeId);
+ }
+ Node child = getNodeNotNull(nodeId);
+ ParentAssocsInfo parentAssocsInfo = cacheEntry.getSecond();
+ // Validate that we aren't pairing up a cached node with historic parent associations from an old
+ // transaction (or the other way around)
+ Long txnId = parentAssocsInfo.getTxnId();
+ if (txnId != null && !txnId.equals(child.getTransaction().getId()))
+ {
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("Stale cached node #" + nodeId
+ + " detected loading parent associations. Cached transaction ID: "
+ + child.getTransaction().getId() + ", actual transaction ID: " + txnId);
+ }
+ invalidateNodeCaches(nodeId);
+ }
+ else
+ {
+ return parentAssocsInfo;
+ }
}
- return cacheEntry.getSecond();
+ throw new DataIntegrityViolationException("Stale cache detected for Node #" + nodeId);
}
private ParentAssocsInfo getParentAssocsCacheOnly(Long nodeId)
@@ -3253,9 +3336,13 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
// Select all the parent associations
List<ChildAssocEntity> assocs = selectParentAssocs(nodeId);
+
+ // Retrieve the transaction ID from the DB for validation purposes - prevents skew between a cached node and
+ // its parent assocs
+ Long txnId = assocs.isEmpty() ? null : assocs.get(0).getChildNode().getTransaction().getId();
// Build the cache object
- ParentAssocsInfo value = new ParentAssocsInfo(isRoot, isStoreRoot, assocs);
+ ParentAssocsInfo value = new ParentAssocsInfo(txnId, isRoot, isStoreRoot, assocs);
// Done
return new Pair<Long, ParentAssocsInfo>(nodeId, value);
}
@@ -3569,6 +3656,7 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
{
Long nodeId = node.getId();
Node cached = nodesCache.getValue(nodeId);
+ ParentAssocsInfo cachedParents = parentAssocsCache.getValue(nodeId);
if (cached != null && !txnId.equals(cached.getTransaction().getId()))
{
if (logger.isDebugEnabled())
@@ -3577,11 +3665,36 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
+ " detected during transaction tracking. Cached transaction ID: "
+ cached.getTransaction().getId() + ", actual transaction ID: " + txnId);
}
- invalidateCachesByNodeId(null, nodeId, nodesCache);
- invalidateCachesByNodeId(null, nodeId, parentAssocsCache);
+ invalidateNodeCaches(nodeId);
}
+ else if (cachedParents != null && !txnId.equals(cachedParents.getTxnId()))
+ {
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("Stale cached parent associations for node #" + nodeId
+ + " detected during transaction tracking. Cached transaction ID: "
+ + cachedParents.getTxnId() + ", actual transaction ID: " + txnId);
+ }
+ invalidateNodeCaches(nodeId);
+ }
+
+ // It's possible that a noderef has been remapped (e.g. node moved store) so make sure we don't have a stale
+ // mapping for this noderef either
+ Long oldNodeId = nodesCache.getKey(node.getNodeRef());
+ if (oldNodeId != null && !(oldNodeId.equals(nodeId)))
+ {
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("Stale cached noderef " + node.getNodeRef()
+ + " detected during transaction tracking. Cached node ID: "
+ + oldNodeId + ", actual node ID: " + nodeId);
+ }
+ invalidateNodeCaches(oldNodeId);
+ }
+
nodeStatuses.add(node.getNodeStatus());
}
+
// Done
return nodeStatuses;
}
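The additions above repeatedly apply one idea: each cached artefact carries the transaction ID that produced it, and a mismatch against the node's current transaction invalidates the entry instead of trusting it. A minimal, self-contained sketch of that tagging pattern (names are illustrative, not the DAO's actual fields):

    // Sketch of the txn-tagged cache entry idea used above; not the actual cache implementation.
    final class TxnTaggedEntry<V>
    {
        private final Long txnId;   // transaction that committed this value
        private final V value;

        TxnTaggedEntry(Long txnId, V value)
        {
            this.txnId = txnId;
            this.value = value;
        }

        /** Returns the value only if it was written by the given transaction; null signals a stale entry. */
        V getIfCurrent(Long currentTxnId)
        {
            return (txnId != null && txnId.equals(currentTxnId)) ? value : null;
        }
    }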
diff --git a/source/java/org/alfresco/repo/domain/node/ParentAssocsInfo.java b/source/java/org/alfresco/repo/domain/node/ParentAssocsInfo.java
index b4c08683a3..466b8665a2 100644
--- a/source/java/org/alfresco/repo/domain/node/ParentAssocsInfo.java
+++ b/source/java/org/alfresco/repo/domain/node/ParentAssocsInfo.java
@@ -30,7 +30,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
- * Object to keep hold of a node and it's parent associations.
+ * Object to keep hold of a node and its parent associations.
*
* @author David Ward
* @author Derek Hulley
@@ -44,6 +44,7 @@ import org.apache.commons.logging.LogFactory;
private static Set warnedDuplicateParents = new HashSet(3);
+ private final Long txnId;
private final boolean isRoot;
private final boolean isStoreRoot;
private final Long primaryAssocId;
@@ -52,15 +53,16 @@ import org.apache.commons.logging.LogFactory;
/**
* Constructor to provide clean initial version of a Node's parent association
*/
- ParentAssocsInfo(boolean isRoot, boolean isStoreRoot, ChildAssocEntity parent)
+ ParentAssocsInfo(Long txnId, boolean isRoot, boolean isStoreRoot, ChildAssocEntity parent)
{
- this(isRoot, isStoreRoot, Collections.singletonList(parent));
+ this(txnId, isRoot, isStoreRoot, Collections.singletonList(parent));
}
/**
* Constructor to provide clean initial version of a Node's parent associations
*/
- ParentAssocsInfo(boolean isRoot, boolean isStoreRoot, List<? extends ChildAssocEntity> parents)
+ ParentAssocsInfo(Long txnId, boolean isRoot, boolean isStoreRoot, List<? extends ChildAssocEntity> parents)
{
+ this.txnId = txnId;
this.isRoot = isRoot;
this.isStoreRoot = isStoreRoot;
Long primaryAssocId = null;
@@ -105,11 +107,13 @@ import org.apache.commons.logging.LogFactory;
* Private constructor used to copy existing values.
*/
private ParentAssocsInfo(
+ Long txnId,
boolean isRoot,
boolean isStoreRoot,
Map<Long, ChildAssocEntity> parentAssocsById,
Long primaryAssocId)
{
+ this.txnId = txnId;
this.isRoot = isRoot;
this.isStoreRoot = isStoreRoot;
this.parentAssocsById = Collections.unmodifiableMap(parentAssocsById);
@@ -121,7 +125,8 @@ import org.apache.commons.logging.LogFactory;
{
StringBuilder builder = new StringBuilder();
builder.append("ParentAssocsInfo ")
- .append("[isRoot=").append(isRoot)
+ .append("[txnId=").append(txnId)
+ .append(", isRoot=").append(isRoot)
.append(", isStoreRoot=").append(isStoreRoot)
.append(", parentAssocsById=").append(parentAssocsById)
.append(", primaryAssocId=").append(primaryAssocId)
@@ -129,6 +134,11 @@ import org.apache.commons.logging.LogFactory;
return builder.toString();
}
+ public Long getTxnId()
+ {
+ return txnId;
+ }
+
public boolean isRoot()
{
return isRoot;
@@ -151,25 +161,25 @@ import org.apache.commons.logging.LogFactory;
public ParentAssocsInfo changeIsRoot(boolean isRoot)
{
- return new ParentAssocsInfo(isRoot, this.isRoot, parentAssocsById, primaryAssocId);
+ return new ParentAssocsInfo(this.txnId, isRoot, this.isRoot, parentAssocsById, primaryAssocId);
}
public ParentAssocsInfo changeIsStoreRoot(boolean isStoreRoot)
{
- return new ParentAssocsInfo(this.isRoot, isStoreRoot, parentAssocsById, primaryAssocId);
+ return new ParentAssocsInfo(this.txnId, this.isRoot, isStoreRoot, parentAssocsById, primaryAssocId);
}
public ParentAssocsInfo addAssoc(Long assocId, ChildAssocEntity parentAssoc)
{
Map<Long, ChildAssocEntity> parentAssocs = new HashMap<Long, ChildAssocEntity>(parentAssocsById);
parentAssocs.put(parentAssoc.getId(), parentAssoc);
- return new ParentAssocsInfo(isRoot, isStoreRoot, parentAssocs, primaryAssocId);
+ return new ParentAssocsInfo(this.txnId, isRoot, isStoreRoot, parentAssocs, primaryAssocId);
}
public ParentAssocsInfo removeAssoc(Long assocId)
{
Map<Long, ChildAssocEntity> parentAssocs = new HashMap<Long, ChildAssocEntity>(parentAssocsById);
parentAssocs.remove(assocId);
- return new ParentAssocsInfo(isRoot, isStoreRoot, parentAssocs, primaryAssocId);
+ return new ParentAssocsInfo(this.txnId, isRoot, isStoreRoot, parentAssocs, primaryAssocId);
}
}
diff --git a/source/java/org/alfresco/repo/jscript/ScriptNode.java b/source/java/org/alfresco/repo/jscript/ScriptNode.java
index 676103d479..b1f8e5d859 100644
--- a/source/java/org/alfresco/repo/jscript/ScriptNode.java
+++ b/source/java/org/alfresco/repo/jscript/ScriptNode.java
@@ -1863,7 +1863,14 @@ public class ScriptNode implements Serializable, Scopeable, NamespacePrefixResol
*/
public ScriptNode copy(ScriptNode destination)
{
- return copy(destination, false);
+ ScriptNode copy = copy(destination, false);
+
+ // ALF-9517 fix
+ if (copy != null && copy.hasAspect(ContentModel.ASPECT_VERSIONABLE.toString()))
+ {
+ copy.ensureVersioningEnabled(true, true);
+ }
+ return copy;
}
/**
diff --git a/source/java/org/alfresco/repo/node/index/AbstractReindexComponent.java b/source/java/org/alfresco/repo/node/index/AbstractReindexComponent.java
index 4fcd20338a..1bf5c35bc1 100644
--- a/source/java/org/alfresco/repo/node/index/AbstractReindexComponent.java
+++ b/source/java/org/alfresco/repo/node/index/AbstractReindexComponent.java
@@ -20,9 +20,13 @@ package org.alfresco.repo.node.index;
import java.io.PrintWriter;
import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
+import java.util.LinkedList;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
@@ -36,6 +40,8 @@ import org.alfresco.repo.domain.node.NodeDAO;
import org.alfresco.repo.domain.node.Transaction;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.impl.lucene.AbstractLuceneQueryParser;
+import org.alfresco.repo.search.impl.lucene.LuceneQueryParser;
+import org.alfresco.repo.search.impl.lucene.LuceneResultSetRow;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
import org.alfresco.repo.security.authentication.AuthenticationComponent;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
@@ -49,14 +55,19 @@ import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
+import org.alfresco.service.cmr.repository.NodeRef.Status;
import org.alfresco.service.cmr.search.ResultSet;
+import org.alfresco.service.cmr.search.ResultSetRow;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
+import org.alfresco.util.Pair;
import org.alfresco.util.ParameterCheck;
import org.alfresco.util.PropertyCheck;
import org.alfresco.util.VmShutdownListener;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
/**
* Abstract helper for reindexing.
@@ -345,16 +356,16 @@ public abstract class AbstractReindexComponent implements IndexRecovery
* Helper method that caches ADM store references to prevent repeated and unnecessary calls to the
* NodeService for this list.
*/
- private List<StoreRef> getAdmStoreRefs()
+ private Set<StoreRef> getAdmStoreRefs()
{
- List<StoreRef> storeRefs = (List<StoreRef>) AlfrescoTransactionSupport.getResource(KEY_STORE_REFS);
+ Set<StoreRef> storeRefs = (Set<StoreRef>) AlfrescoTransactionSupport.getResource(KEY_STORE_REFS);
if (storeRefs != null)
{
return storeRefs;
}
else
{
- storeRefs = nodeService.getStores();
+ storeRefs = new HashSet<StoreRef>(nodeService.getStores());
Iterator<StoreRef> storeRefsIterator = storeRefs.iterator();
while (storeRefsIterator.hasNext())
{
@@ -377,17 +388,6 @@ public abstract class AbstractReindexComponent implements IndexRecovery
}
}
- // Change the ordering to favour the most common stores
- if (storeRefs.contains(StoreRef.STORE_REF_ARCHIVE_SPACESSTORE))
- {
- storeRefs.remove(StoreRef.STORE_REF_ARCHIVE_SPACESSTORE);
- storeRefs.add(0, StoreRef.STORE_REF_ARCHIVE_SPACESSTORE);
- }
- if (storeRefs.contains(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE))
- {
- storeRefs.remove(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
- storeRefs.add(0, StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
- }
// Bind it in
AlfrescoTransactionSupport.bindResource(KEY_STORE_REFS, storeRefs);
}
@@ -400,6 +400,7 @@ public abstract class AbstractReindexComponent implements IndexRecovery
* @param txn a specific transaction
* @return Returns true if the transaction is definitely in the index
*/
+ @SuppressWarnings("unchecked")
public InIndex isTxnPresentInIndex(final Transaction txn)
{
if (txn == null)
@@ -413,72 +414,180 @@ public abstract class AbstractReindexComponent implements IndexRecovery
logger.trace("Checking for transaction in index: " + txnId);
}
- // Check if the txn ID is present in any store's index
- boolean foundInIndex = false;
- List storeRefs = getAdmStoreRefs();
- for (StoreRef storeRef : storeRefs)
+
+ // Let's scan the changes for this transaction, and group together changes for applicable stores
+ List<NodeRef.Status> nodeStatuses = nodeDAO.getTxnChanges(txnId);
+ Set<StoreRef> admStoreRefs = getAdmStoreRefs();
+ Map<StoreRef, List<NodeRef.Status>> storeStatusMap = new HashMap<StoreRef, List<NodeRef.Status>>(admStoreRefs.size() * 2);
+ for (NodeRef.Status nodeStatus : nodeStatuses)
{
- boolean inStore = isTxnIdPresentInIndex(storeRef, txn);
- if (inStore)
+ StoreRef storeRef = nodeStatus.getNodeRef().getStoreRef();
+ if (admStoreRefs.contains(storeRef))
{
- // found in a particular store
- foundInIndex = true;
- break;
+ List<NodeRef.Status> storeStatuses = storeStatusMap.get(storeRef);
+ if (storeStatuses == null)
+ {
+ storeStatuses = new LinkedList<NodeRef.Status>();
+ storeStatusMap.put(storeRef, storeStatuses);
+ }
+ storeStatuses.add(nodeStatus);
}
}
- InIndex result = InIndex.NO;
- if (!foundInIndex)
+
+ // Default decision is indeterminate, unless all established to be in index (YES) or one established to be missing (NO)
+ InIndex result = InIndex.INDETERMINATE;
+
+ // Check if the txn ID is present in every applicable store's index
+ for (Map.Entry<StoreRef, List<NodeRef.Status>> entry : storeStatusMap.entrySet())
{
- // If none of the stores have the transaction, then that might be because it consists of 0 modifications
- int updateCount = nodeDAO.getTxnUpdateCount(txnId);
-
- if ((updateCount > 0) && (! allUpdatedNodesCanBeIgnored(txnId)))
+ StoreRef storeRef = entry.getKey();
+ List<NodeRef.Status> storeStatuses = entry.getValue();
+
+ // Establish the number of deletes and updates for this storeRef
+ int deleteCount = 0;
+ int updateCount = 0;
+ for (NodeRef.Status nodeStatus : storeStatuses)
{
- // There were updates, but there is no sign in the indexes
- result = InIndex.NO;
- }
- else
- {
- // We're now in the case where there were no updates
- int deleteCount = nodeDAO.getTxnDeleteCount(txnId);
- if (deleteCount == 0)
+ if (nodeStatus.isDeleted())
{
- // There are no updates or deletes and no entry in the indexes.
- // There are outdated nodes in the index.
- result = InIndex.INDETERMINATE;
+ deleteCount++;
}
else
{
- // There were deleted nodes only. Check that all the deleted nodes were
- // removed from the index otherwise it is out of date.
- // If all nodes have been removed from the index then the result is that the index is OK
- // ETWOTWO-1387
- // ALF-1989 - even if the nodes have not been found it is no good to use for AUTO index checking
- result = InIndex.INDETERMINATE;
- for (StoreRef storeRef : storeRefs)
+ updateCount++;
+ }
+ }
+
+ if (updateCount > 0)
+ {
+ // Check the index
+ if (isTxnIdPresentInIndex(storeRef, txn))
+ {
+ result = InIndex.YES;
+ }
+ // There were updates, but there is no sign in the indexes
+ else
+ {
+ result = InIndex.NO;
+ break;
+ }
+ }
+ // There were deleted nodes only. Check that all the deleted nodes were removed from the index otherwise it
+ // is out of date. If all nodes have been removed from the index then the result is that the index is OK
+ // ETWOTWO-1387
+ // ALF-1989 - even if the nodes have not been found it is no good to use for AUTO index checking
+ else if (deleteCount > 0 && !haveNodesBeenRemovedFromIndex(storeRef, storeStatuses, txn))
+ {
+ result = InIndex.NO;
+ break;
+ }
+ }
+
+ // done
+ if (logger.isDebugEnabled())
+ {
+ if (result == InIndex.NO)
+ {
+ logger.debug("Transaction " + txnId + " not present in indexes");
+
+ logger.debug(nodeStatuses.size() + " nodes in DB transaction");
+ for (NodeRef.Status nodeStatus : nodeStatuses)
+ {
+ NodeRef nodeRef = nodeStatus.getNodeRef();
+ if (nodeStatus.isDeleted())
{
- if (!haveNodesBeenRemovedFromIndex(storeRef, txn))
+ logger.debug(" DELETED TX " + nodeStatus.getChangeTxnId() + ": " + nodeRef);
+ }
+ else
+ {
+ logger.debug(" UPDATED / MOVED TX " + nodeStatus.getChangeTxnId() + ": " + nodeRef);
+ logger.debug(" " + nodeService.getProperties(nodeRef));
+ }
+ ResultSet results = null;
+ SearchParameters sp = new SearchParameters();
+ sp.setLanguage(SearchService.LANGUAGE_LUCENE);
+ sp.addStore(nodeRef.getStoreRef());
+ try
+ {
+ sp.setQuery("ID:" + LuceneQueryParser.escape(nodeRef.toString()));
+
+ results = searcher.query(sp);
+ for (ResultSetRow row : results)
{
- result = InIndex.NO;
- break;
+ StringBuilder builder = new StringBuilder(1024).append(" STILL INDEXED: {");
+ Document lrsDoc = ((LuceneResultSetRow) row).getDocument();
+ Iterator<Field> fields = ((List<Field>) lrsDoc.getFields()).iterator();
+ if (fields.hasNext())
+ {
+ Field field = fields.next();
+ builder.append(field.name()).append("=").append(field.stringValue());
+ while (fields.hasNext())
+ {
+ field = fields.next();
+ builder.append(", ").append(field.name()).append("=").append(field.stringValue());
+ }
+ }
+ builder.append("}");
+ logger.debug(builder.toString());
}
}
+ finally
+ {
+ if (results != null) { results.close(); }
+ }
+ try
+ {
+ sp.setQuery("FTSREF:" + LuceneQueryParser.escape(nodeRef.toString()));
+
+ results = searcher.query(sp);
+ for (ResultSetRow row : results)
+ {
+ StringBuilder builder = new StringBuilder(1024).append(" FTSREF: {");
+ Document lrsDoc = ((LuceneResultSetRow) row).getDocument();
+ Iterator<Field> fields = ((List<Field>) lrsDoc.getFields()).iterator();
+ if (fields.hasNext())
+ {
+ Field field = fields.next();
+ builder.append(field.name()).append("=").append(field.stringValue());
+ while (fields.hasNext())
+ {
+ field = fields.next();
+ builder.append(", ").append(field.name()).append("=").append(field.stringValue());
+ }
+ }
+ builder.append("}");
+ logger.debug(builder.toString());
+ }
+ }
+ finally
+ {
+ if (results != null) { results.close(); }
+ }
}
}
- }
- else
- {
- result = InIndex.YES;
- }
-
- // done
- if (logger.isDebugEnabled())
- {
- logger.debug("Transaction " + txnId + " present in indexes: " + result);
+ else
+ {
+ if (logger.isTraceEnabled())
+ {
+ logger.trace("Transaction " + txnId + " present in indexes: " + result);
+ }
+ }
}
return result;
}
+ public InIndex isTxnPresentInIndex(final Transaction txn, final boolean readThrough)
+ {
+ return transactionService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<InIndex>()
+ {
+ @Override
+ public InIndex execute() throws Throwable
+ {
+ return isTxnPresentInIndex(txn);
+ }
+ }, true, readThrough);
+ }
+
/**
* @return Returns true if the given transaction is present in the index
*/
@@ -522,39 +631,10 @@ public abstract class AbstractReindexComponent implements IndexRecovery
}
}
- protected boolean allUpdatedNodesCanBeIgnored(Long txnId)
- {
- ParameterCheck.mandatory("txnId", txnId);
-
- boolean allUpdatedNodesCanBeIgnored = false;
-
- List<NodeRef.Status> nodeStatuses = nodeDAO.getTxnChanges(txnId);
-
- allUpdatedNodesCanBeIgnored = true;
- for (NodeRef.Status nodeStatus : nodeStatuses)
- {
- NodeRef nodeRef = nodeStatus.getNodeRef();
- if (! nodeStatus.isDeleted())
- {
- // updated node (ie. not deleted)
- StoreRef storeRef = nodeRef.getStoreRef();
- if (!isIgnorableStore(storeRef))
- {
- allUpdatedNodesCanBeIgnored = false;
- break;
- }
- }
- }
-
- return allUpdatedNodesCanBeIgnored;
- }
-
- private boolean haveNodesBeenRemovedFromIndex(final StoreRef storeRef, final Transaction txn)
+ private boolean haveNodesBeenRemovedFromIndex(final StoreRef storeRef, List<NodeRef.Status> nodeStatuses, final Transaction txn)
{
final Long txnId = txn.getId();
// there have been deletes, so we have to ensure that none of the nodes deleted are present in the index
- // get all node refs for the transaction
- List<NodeRef.Status> nodeStatuses = nodeDAO.getTxnChangesForStore(storeRef, txnId);
boolean foundNodeRef = false;
for (NodeRef.Status nodeStatus : nodeStatuses)
{
@@ -658,18 +738,41 @@ public abstract class AbstractReindexComponent implements IndexRecovery
throw new AlfrescoRuntimeException("Reindex work must be done in the context of a read-only transaction");
}
- // get the node references pertinent to the transaction
- List<NodeRef.Status> nodeStatuses = nodeDAO.getTxnChanges(txnId);
+ // The indexer will 'read through' to the latest database changes for the rest of this transaction
+ indexer.setReadThrough(true);
+
+ // get the node references pertinent to the transaction - We need to 'read through' here too
+ List<Pair<NodeRef.Status, ChildAssociationRef>> nodePairs = transactionService.getRetryingTransactionHelper().doInTransaction(
+ new RetryingTransactionCallback<List<Pair<NodeRef.Status, ChildAssociationRef>>>()
+ {
+
+ @Override
+ public List<Pair<NodeRef.Status, ChildAssociationRef>> execute() throws Throwable
+ {
+ List<NodeRef.Status> nodeStatuses = nodeDAO.getTxnChanges(txnId);
+ List<Pair<NodeRef.Status, ChildAssociationRef>> nodePairs = new ArrayList<Pair<NodeRef.Status, ChildAssociationRef>>(nodeStatuses.size());
+ for (NodeRef.Status nodeStatus : nodeStatuses)
+ {
+ if (nodeStatus == null)
+ {
+ // it's not there any more
+ continue;
+ }
+
+ ChildAssociationRef parent = nodeStatus.isDeleted() ? null : nodeService.getPrimaryParent(nodeStatus.getNodeRef());
+ nodePairs.add(new Pair<NodeRef.Status, ChildAssociationRef>(nodeStatus, parent));
+ }
+ return nodePairs;
+ }
+ }, true, true);
+
// reindex each node
int nodeCount = 0;
- for (NodeRef.Status nodeStatus : nodeStatuses)
+ for (Pair<NodeRef.Status, ChildAssociationRef> nodePair : nodePairs)
{
+ NodeRef.Status nodeStatus = nodePair.getFirst();
NodeRef nodeRef = nodeStatus.getNodeRef();
- if (nodeStatus == null)
- {
- // it's not there any more
- continue;
- }
+
if (nodeStatus.isDeleted()) // node deleted
{
if(isFull == false)
@@ -681,6 +784,10 @@ public abstract class AbstractReindexComponent implements IndexRecovery
null,
nodeRef);
indexer.deleteNode(assocRef);
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("DELETE: " + nodeRef);
+ }
}
}
else // node created
@@ -693,11 +800,31 @@ public abstract class AbstractReindexComponent implements IndexRecovery
null,
nodeRef);
indexer.createNode(assocRef);
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("CREATE: " + nodeRef);
+ }
}
else
{
- // reindex
- indexer.updateNode(nodeRef);
+ // reindex - force a cascade reindex if possible (to account for a possible move)
+ ChildAssociationRef parent = nodePair.getSecond();
+ if (parent == null)
+ {
+ indexer.updateNode(nodeRef);
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("UPDATE: " + nodeRef);
+ }
+ }
+ else
+ {
+ indexer.createChildRelationship(parent);
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("MOVE: " + nodeRef + ", " + parent);
+ }
+ }
}
}
// Make the callback
diff --git a/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponent.java b/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponent.java
index 69e6688fc9..cfe6d49d38 100644
--- a/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponent.java
+++ b/source/java/org/alfresco/repo/node/index/FullIndexRecoveryComponent.java
@@ -241,12 +241,13 @@ public class FullIndexRecoveryComponent extends AbstractReindexComponent
for (Transaction txn : txns)
{
count++;
- if (isTxnPresentInIndex(txn) == InIndex.NO)
+ InIndex inIndex = isTxnPresentInIndex(txn, true);
+ if (inIndex == InIndex.NO)
{
// Missing txn
return InIndex.NO;
}
- if (isTxnPresentInIndex(txn) == InIndex.YES)
+ if (inIndex == InIndex.YES)
{
yesCount++;
if((yesCount > 1) && (count >= 10))
@@ -265,7 +266,7 @@ public class FullIndexRecoveryComponent extends AbstractReindexComponent
for (Transaction txn : txns)
{
count++;
- current = isTxnPresentInIndex(txn);
+ current = isTxnPresentInIndex(txn, true);
if (current == InIndex.NO)
{
// Missing txn
diff --git a/source/java/org/alfresco/repo/node/index/IndexTransactionTracker.java b/source/java/org/alfresco/repo/node/index/IndexTransactionTracker.java
index 72d7ef960e..75ea7fbd1e 100644
--- a/source/java/org/alfresco/repo/node/index/IndexTransactionTracker.java
+++ b/source/java/org/alfresco/repo/node/index/IndexTransactionTracker.java
@@ -524,7 +524,7 @@ found:
{
continue;
}
- else if ((inIndex = isTxnPresentInIndex(txn)) != InIndex.NO)
+ else if ((inIndex = isTxnPresentInIndex(txn, true)) != InIndex.NO)
{
if (logger.isDebugEnabled())
{
@@ -645,7 +645,7 @@ found:
// Reindex the transaction if we are forcing it or if it isn't in the index already
InIndex inIndex = InIndex.INDETERMINATE;
- if (forceReindex || (inIndex = isTxnPresentInIndex(txn)) == InIndex.NO)
+ if (forceReindex || (inIndex = isTxnPresentInIndex(txn, true)) == InIndex.NO)
{
// From this point on, until the tracker has caught up, all transactions need to be indexed
forceReindex = true;
diff --git a/source/java/org/alfresco/repo/search/Indexer.java b/source/java/org/alfresco/repo/search/Indexer.java
index 8282a4bd1a..c21111b934 100644
--- a/source/java/org/alfresco/repo/search/Indexer.java
+++ b/source/java/org/alfresco/repo/search/Indexer.java
@@ -110,5 +110,14 @@ public interface Indexer
public void deleteIndex(StoreRef storeRef);
- public void flushPending();
+ public void flushPending();
+
+ /**
+ * Activates 'read through' behaviour for this indexer. Rather than accessing the database through the current
+ * (potentially old) transaction, it will use a discrete read only transaction for each node it indexes. This avoids
+ * 'stale' nodes building up in the caches during long reindex runs.
+ *
+ * @param isReadThrough
+ */
+ public void setReadThrough(boolean isReadThrough);
}
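The intended call pattern is visible in AbstractReindexComponent above: switch the flag on before walking a transaction's nodes so that each one is re-read in a fresh read-only transaction. A hedged sketch (the method and its wiring are illustrative, not part of the patch):

    // Illustrative reindex pass using the new flag; nodeDAO and indexer wiring assumed as elsewhere in the patch.
    void reindexTransaction(Long txnId, NodeDAO nodeDAO, Indexer indexer)
    {
        indexer.setReadThrough(true); // read each node through a discrete read-only transaction
        for (NodeRef.Status status : nodeDAO.getTxnChanges(txnId))
        {
            if (!status.isDeleted())
            {
                indexer.updateNode(status.getNodeRef());
            }
        }
    }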
diff --git a/source/java/org/alfresco/repo/search/IndexerComponent.java b/source/java/org/alfresco/repo/search/IndexerComponent.java
index 216e234e3f..50de7a0059 100644
--- a/source/java/org/alfresco/repo/search/IndexerComponent.java
+++ b/source/java/org/alfresco/repo/search/IndexerComponent.java
@@ -19,6 +19,7 @@
package org.alfresco.repo.search;
import org.alfresco.repo.service.StoreRedirectorProxyFactory;
+import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
@@ -40,6 +41,7 @@ public class IndexerComponent extends AbstractLifecycleBean implements Indexer
{
private StoreRedirectorProxyFactory storeRedirectorProxyFactory;
private IndexerAndSearcher indexerAndSearcherFactory;
+ private static final String KEY_READ_THROUGH = IndexerComponent.class.getName() + "READ_THROUGH";
public void setStoreRedirectorProxyFactory(StoreRedirectorProxyFactory storeRedirectorProxyFactory)
{
@@ -62,44 +64,58 @@ public class IndexerComponent extends AbstractLifecycleBean implements Indexer
this.indexerAndSearcherFactory = indexerAndSearcherFactory;
}
+ public void setReadThrough(boolean isReadThrough)
+ {
+ if (isReadThrough)
+ {
+ AlfrescoTransactionSupport.bindResource(KEY_READ_THROUGH, Boolean.TRUE);
+ }
+ else
+ {
+ AlfrescoTransactionSupport.unbindResource(KEY_READ_THROUGH);
+ }
+ }
+
+ private Indexer getIndexer(StoreRef storeRef)
+ {
+ Indexer indexer = indexerAndSearcherFactory.getIndexer(storeRef);
+ indexer.setReadThrough(AlfrescoTransactionSupport.getResource(KEY_READ_THROUGH) == Boolean.TRUE);
+ return indexer;
+ }
+
public void createNode(ChildAssociationRef relationshipRef)
{
- Indexer indexer = indexerAndSearcherFactory.getIndexer(
- relationshipRef.getChildRef().getStoreRef());
+ Indexer indexer = getIndexer(relationshipRef.getChildRef().getStoreRef());
indexer.createNode(relationshipRef);
}
public void updateNode(NodeRef nodeRef)
{
- Indexer indexer = indexerAndSearcherFactory.getIndexer(nodeRef.getStoreRef());
+ Indexer indexer = getIndexer(nodeRef.getStoreRef());
indexer.updateNode(nodeRef);
}
public void deleteNode(ChildAssociationRef relationshipRef)
{
- Indexer indexer = indexerAndSearcherFactory.getIndexer(
- relationshipRef.getChildRef().getStoreRef());
+ Indexer indexer = getIndexer(relationshipRef.getChildRef().getStoreRef());
indexer.deleteNode(relationshipRef);
}
public void createChildRelationship(ChildAssociationRef relationshipRef)
{
- Indexer indexer = indexerAndSearcherFactory.getIndexer(
- relationshipRef.getChildRef().getStoreRef());
+ Indexer indexer = getIndexer(relationshipRef.getChildRef().getStoreRef());
indexer.createChildRelationship(relationshipRef);
}
public void updateChildRelationship(ChildAssociationRef relationshipBeforeRef, ChildAssociationRef relationshipAfterRef)
{
- Indexer indexer = indexerAndSearcherFactory.getIndexer(
- relationshipBeforeRef.getChildRef().getStoreRef());
+ Indexer indexer = getIndexer(relationshipBeforeRef.getChildRef().getStoreRef());
indexer.updateChildRelationship(relationshipBeforeRef, relationshipAfterRef);
}
public void deleteChildRelationship(ChildAssociationRef relationshipRef)
{
- Indexer indexer = indexerAndSearcherFactory.getIndexer(
- relationshipRef.getChildRef().getStoreRef());
+ Indexer indexer = getIndexer(relationshipRef.getChildRef().getStoreRef());
indexer.deleteChildRelationship(relationshipRef);
}
@@ -108,7 +124,7 @@ public class IndexerComponent extends AbstractLifecycleBean implements Indexer
*/
public void deleteIndex(StoreRef storeRef)
{
- Indexer indexer = indexerAndSearcherFactory.getIndexer(storeRef);
+ Indexer indexer = getIndexer(storeRef);
indexer.deleteIndex(storeRef);
}
diff --git a/source/java/org/alfresco/repo/search/impl/NoActionIndexer.java b/source/java/org/alfresco/repo/search/impl/NoActionIndexer.java
index e88e922f36..bb7f4dbdf1 100644
--- a/source/java/org/alfresco/repo/search/impl/NoActionIndexer.java
+++ b/source/java/org/alfresco/repo/search/impl/NoActionIndexer.java
@@ -34,6 +34,10 @@ import org.alfresco.service.cmr.repository.StoreRef;
*/
public class NoActionIndexer implements Indexer
{
+ public void setReadThrough(boolean isReadThrough)
+ {
+ return;
+ }
public void createNode(ChildAssociationRef relationshipRef)
{
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerAndSearcherFactory.java b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerAndSearcherFactory.java
index 70a08c509a..d7ac74c972 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerAndSearcherFactory.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerAndSearcherFactory.java
@@ -20,17 +20,16 @@ package org.alfresco.repo.search.impl.lucene;
import java.util.List;
-import org.alfresco.cmis.CMISQueryService;
import org.alfresco.repo.search.SearcherException;
import org.alfresco.repo.search.SupportsBackgroundIndexing;
import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer;
-import org.alfresco.repo.search.impl.querymodel.QueryEngine;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespaceService;
+import org.alfresco.service.transaction.TransactionService;
/**
* Factory for ADM indexers and searchers
@@ -48,6 +47,8 @@ public class ADMLuceneIndexerAndSearcherFactory extends AbstractLuceneIndexerAnd
protected FullTextSearchIndexer fullTextSearchIndexer;
protected ContentService contentService;
+
+ protected TransactionService transactionService;
/**
* Set the dictionary service
@@ -90,6 +91,11 @@ public class ADMLuceneIndexerAndSearcherFactory extends AbstractLuceneIndexerAnd
{
this.contentService = contentService;
}
+
+ public void setTransactionService(TransactionService transactionService)
+ {
+ this.transactionService = transactionService;
+ }
protected LuceneIndexer createIndexer(StoreRef storeRef, String deltaId)
{
@@ -102,6 +108,7 @@ public class ADMLuceneIndexerAndSearcherFactory extends AbstractLuceneIndexerAnd
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setFullTextSearchIndexer(fullTextSearchIndexer);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
indexer.setMaxAtomicTransformationTime(getMaxTransformationTime());
return indexer;
}
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerImpl.java b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerImpl.java
index a98377f338..05a7526a56 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerImpl.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneIndexerImpl.java
@@ -29,9 +29,12 @@ import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.Collections;
import java.util.Date;
+import java.util.Deque;
import java.util.HashMap;
import java.util.Iterator;
+import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
@@ -130,7 +133,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp
/**
* A list of stuff that requires non atomic indexing
*/
- private ArrayList<Helper> toFTSIndex = new ArrayList<Helper>();
+ private Map<String, Deque<Helper>> toFTSIndex = Collections.emptyMap();
/**
* Default construction
@@ -427,11 +430,14 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp
mainSearcher = new IndexSearcher(mainReader);
deltaSearcher = new IndexSearcher(deltaReader);
- for (Helper helper : toFTSIndex)
+ for (Map.Entry<String, Deque<Helper>> entry : toFTSIndex.entrySet())
{
- // Delete both the document and the supplementary FTSSTATUS document (if there is one)
- deletions.add(helper.ref);
- deletions.add(helper.id);
+ // Delete both the document and the supplementary FTSSTATUS documents (if there are any)
+ deletions.add(entry.getKey());
+ for (Helper helper : entry.getValue())
+ {
+ deletions.add(helper.id);
+ }
}
}
@@ -570,7 +576,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp
}
}
- public List<Document> createDocuments(final String stringNodeRef, final boolean isNew, final boolean indexAllProperties, final boolean includeDirectoryDocuments)
+ public List<Document> createDocuments(final String stringNodeRef, final FTSStatus ftsStatus, final boolean indexAllProperties, final boolean includeDirectoryDocuments)
{
if (tenantService.isEnabled() && ((AuthenticationUtil.getRunAsUser() == null) || (AuthenticationUtil.isRunAsUserTheSystemUser())))
{
@@ -580,25 +586,36 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp
{
public List<Document> doWork()
{
- return createDocumentsImpl(stringNodeRef, isNew, indexAllProperties, includeDirectoryDocuments);
+ return createDocumentsImpl(stringNodeRef, ftsStatus, indexAllProperties, includeDirectoryDocuments);
}
}, tenantService.getDomainUser(AuthenticationUtil.getSystemUserName(), tenantService.getDomain(new NodeRef(stringNodeRef).getStoreRef().getIdentifier())));
}
else
{
- return createDocumentsImpl(stringNodeRef, isNew, indexAllProperties, includeDirectoryDocuments);
+ return createDocumentsImpl(stringNodeRef, ftsStatus, indexAllProperties, includeDirectoryDocuments);
}
}
- private List<Document> createDocumentsImpl(String stringNodeRef, boolean isNew, boolean indexAllProperties, boolean includeDirectoryDocuments)
+ private List<Document> createDocumentsImpl(String stringNodeRef, FTSStatus ftsStatus, boolean indexAllProperties, boolean includeDirectoryDocuments)
{
NodeRef nodeRef = new NodeRef(stringNodeRef);
+ NodeRef.Status nodeStatus = nodeService.getNodeStatus(nodeRef); // DH: Let me know if this field gets dropped (performance)
+ List<Document> docs = new LinkedList<Document>();
+ if (nodeStatus == null)
+ {
+ throw new InvalidNodeRefException("Node does not exist: " + nodeRef, nodeRef);
+ }
+ else if (nodeStatus.isDeleted())
+ {
+ // If we are being called in non FTS mode on a deleted node, we must still create a new FTS marker
+ // document, in case FTS is currently in progress and about to restore our node!
+ addFtsStatusDoc(docs, ftsStatus, nodeRef, nodeStatus);
+ return docs;
+ }
Map nodeCounts = getNodeCounts(nodeRef);
- List<Document> docs = new ArrayList<Document>();
ChildAssociationRef qNameRef = null;
Map<QName, Serializable> properties = nodeService.getProperties(nodeRef);
- NodeRef.Status nodeStatus = nodeService.getNodeStatus(nodeRef); // DH: Let me know if this field gets dropped (performance)
Collection<Path> directPaths = new LinkedHashSet<Path>(nodeService.getPaths(nodeRef, false));
Collection<Pair<Path, QName>> categoryPaths = getCategoryPaths(nodeRef, properties);
@@ -782,19 +799,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp
// forget about it if FTS is already in progress for an earlier transaction!
if (!isAtomic && !indexAllProperties)
{
- Document ftsStatus = new Document();
- ftsStatus.add(new Field("ID", GUID.generate(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
- ftsStatus.add(new Field("FTSREF", nodeRef.toString(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
- ftsStatus.add(new Field("TX", nodeStatus.getChangeTxnId(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
- if (isNew)
- {
- ftsStatus.add(new Field("FTSSTATUS", "New", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
- }
- else
- {
- ftsStatus.add(new Field("FTSSTATUS", "Dirty", Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
- }
- docs.add(ftsStatus);
+ addFtsStatusDoc(docs, ftsStatus, nodeRef, nodeStatus);
}
// {
@@ -805,6 +810,24 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp
return docs;
}
+ private void addFtsStatusDoc(List<Document> docs, FTSStatus ftsStatus, NodeRef nodeRef,
+ NodeRef.Status nodeStatus)
+ {
+ // If we are being called during FTS failover, then don't bother generating a new doc
+ if (ftsStatus == FTSStatus.Clean)
+ {
+ return;
+ }
+ Document doc = new Document();
+ doc.add(new Field("ID", GUID.generate(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
+ doc.add(new Field("FTSREF", nodeRef.toString(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
+ doc
+ .add(new Field("TX", nodeStatus.getChangeTxnId(), Field.Store.YES, Field.Index.NO_NORMS,
+ Field.TermVector.NO));
+ doc.add(new Field("FTSSTATUS", ftsStatus.name(), Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
+ docs.add(doc);
+ }
+
private Serializable convertForMT(QName propertyName, Serializable inboundValue)
{
if (!tenantService.isEnabled())
@@ -1561,9 +1584,7 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp
// }
try
{
- NodeRef lastId = null;
-
- toFTSIndex = new ArrayList<Helper>(size);
+ toFTSIndex = new LinkedHashMap<String, Deque<Helper>>(size * 2);
BooleanQuery booleanQuery = new BooleanQuery();
booleanQuery.add(new TermQuery(new Term("FTSSTATUS", "Dirty")), Occur.SHOULD);
booleanQuery.add(new TermQuery(new Term("FTSSTATUS", "New")), Occur.SHOULD);
@@ -1596,20 +1617,18 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp
// For backward compatibility with existing indexes, cope with FTSSTATUS being stored directly on
// the real document without an FTSREF
Field ftsRef = doc.getField("FTSREF");
- Field id = doc.getField("ID");
- Helper helper;
- if (ftsRef == null)
+ String id = doc.getField("ID").stringValue();
+ String ref = ftsRef == null ? id : ftsRef.stringValue();
+ Helper helper = new Helper(id, doc.getField("TX").stringValue());
+ Deque<Helper> helpers = toFTSIndex.get(ref);
+ if (helpers == null)
{
- // Old style - we only have a node ref
- helper = new Helper(id.stringValue(), id.stringValue(), doc.getField("TX").stringValue());
+ helpers = new LinkedList<Helper>();
+ toFTSIndex.put(ref, helpers);
+ count++;
}
- else
- {
- // New style - we have a unique FTS ID and a noderef
- helper = new Helper(id.stringValue(), ftsRef.stringValue(), doc.getField("TX").stringValue());
- }
- toFTSIndex.add(helper);
- if (++count >= size)
+ helpers.add(helper);
+ if (count >= size)
{
break;
}
@@ -1640,24 +1659,24 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp
try
{
writer = getDeltaWriter();
- for (Helper helper : toFTSIndex)
+ int done = 0;
+ for (Map.Entry<String, Deque<Helper>> entry : toFTSIndex.entrySet())
{
// Document document = helper.document;
- NodeRef ref = new NodeRef(helper.ref);
- // bypass nodes that have disappeared
- if (!nodeService.exists(ref))
- {
- continue;
- }
-
- // bypass out of date transactions
- NodeRef.Status nodeStatus = nodeService.getNodeStatus(ref);
- if (nodeStatus == null || !helper.tx.equals(nodeStatus.getChangeTxnId()))
- {
- continue;
- }
+ NodeRef ref = new NodeRef(entry.getKey());
+ done += entry.getValue().size();
- List<Document> docs = createDocuments(ref.toString(), false, true, false);
+ List<Document> docs;
+ try
+ {
+ docs = readDocuments(ref.toString(), FTSStatus.Clean, true, false);
+ }
+ catch (Throwable t)
+ {
+ // Try to recover from failure
+ s_logger.error("FTS index of " + ref + " failed. Reindexing without FTS", t);
+ docs = readDocuments(ref.toString(), FTSStatus.Clean, false, false);
+ }
for (Document doc : docs)
{
try
@@ -1672,23 +1691,12 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp
}
}
- // Need to do all the current id in the TX - should all
- // be
- // together so skip until id changes
if (writer.docCount() > size)
{
- if (lastId == null)
- {
- lastId = ref;
- }
- if (!lastId.equals(ref))
- {
- break;
- }
+ break;
}
}
- int done = writer.docCount();
remainingCount = count - done;
return done;
}
@@ -1726,14 +1734,11 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp
{
String id;
- String ref;
-
String tx;
- Helper(String id, String ref, String tx)
+ Helper(String id, String tx)
{
this.id = id;
- this.ref = ref;
this.tx = tx;
}
}
@@ -1747,7 +1752,6 @@ public class ADMLuceneIndexerImpl extends AbstractLuceneIndexerImpl imp
protected void doPrepare() throws IOException
{
- saveDelta();
flushPending();
// prepareToMergeIntoMain();
}
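The ADMLuceneIndexerImpl changes above replace the flat list of FTS work items with a map keyed by node reference, so that every FTSSTATUS marker ever written for a node is cleared in the same pass that reindexes it. A rough, hypothetical sketch of that grouping structure (Helper mirrors the patched inner class; all other names are illustrative, not part of the patch):

    import java.util.Deque;
    import java.util.LinkedHashMap;
    import java.util.LinkedList;
    import java.util.Map;

    class FtsWorkList
    {
        static class Helper
        {
            final String id; // Lucene document ID of the FTSSTATUS marker
            final String tx; // change transaction id recorded on the marker

            Helper(String id, String tx)
            {
                this.id = id;
                this.tx = tx;
            }
        }

        // node ref string -> all outstanding FTSSTATUS markers for that node, in discovery order
        private final Map<String, Deque<Helper>> byNode = new LinkedHashMap<String, Deque<Helper>>();

        void add(String nodeRef, Helper marker)
        {
            Deque<Helper> markers = byNode.get(nodeRef);
            if (markers == null)
            {
                markers = new LinkedList<Helper>();
                byNode.put(nodeRef, markers);
            }
            markers.add(marker);
        }
    }

When a node is finally reindexed (or found to be deleted), both the node's own document and every marker id in its deque are added to the deletions set, which is what the updated entrySet() loop above does.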
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneTest.java b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneTest.java
index 54858b2889..188ea2c239 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneTest.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneTest.java
@@ -3730,6 +3730,7 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener
indexer.setTenantService(tenantService);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
indexer.prepare();
indexer.commit();
@@ -3755,6 +3756,7 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener
indexer.setTenantService(tenantService);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
// //indexer.clearIndex();
@@ -3937,6 +3939,7 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener
indexer.setTenantService(tenantService);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
// indexer.clearIndex();
indexer.createNode(new ChildAssociationRef(null, null, null, rootNodeRef));
indexer.createNode(new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef, QName.createQName("{namespace}one"), n1));
@@ -6418,6 +6421,7 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener
indexer.setTenantService(tenantService);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
indexer.updateNode(rootNodeRef);
indexer.updateNode(n1);
@@ -6677,6 +6681,7 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener
indexer.setTenantService(tenantService);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
ChildAssociationRef car = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}" + COMPLEX_LOCAL_NAME), testSuperType);
indexer.createNode(car);
@@ -6708,6 +6713,7 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener
indexer.setTenantService(tenantService);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
ChildAssociationRef car = nodeService.createNode(rootNodeRef, ContentModel.ASSOC_CHILDREN, QName.createQName("{namespace}" + COMPLEX_LOCAL_NAME), testSuperType);
indexer.createNode(car);
@@ -6740,6 +6746,7 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener
indexer.setTenantService(tenantService);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
testTX = transactionService.getUserTransaction();
testTX.begin();
@@ -6957,6 +6964,7 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener
indexer.setTenantService(tenantService);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
nodeService.removeChild(n2, n13);
indexer.deleteChildRelationship(new ChildAssociationRef(ASSOC_TYPE_QNAME, n2, QName.createQName("{namespace}link"), n13));
@@ -7209,6 +7217,7 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener
indexer.setTenantService(tenantService);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
nodeService.removeChild(n2, n13);
nodeService.addChild(n2, n13, ASSOC_TYPE_QNAME, QName.createQName("{namespace}renamed_link"));
@@ -7272,6 +7281,7 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener
indexer.setTenantService(tenantService);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
indexer.updateFullTextSearch(1000);
indexer.prepare();
indexer.commit();
@@ -7341,6 +7351,7 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener
indexer.setTenantService(tenantService);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
indexer.updateFullTextSearch(1000);
indexer.prepare();
indexer.commit();
@@ -7742,6 +7753,7 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener
indexer.setTenantService(tenantService);
indexer.setFullTextSearchIndexer(luceneFTS);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
if (clear)
{
// indexer.clearIndex();
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneUnIndexedIndexAndSearcherFactory.java b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneUnIndexedIndexAndSearcherFactory.java
index 6a140ec32d..9d81fe9601 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneUnIndexedIndexAndSearcherFactory.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneUnIndexedIndexAndSearcherFactory.java
@@ -33,6 +33,7 @@ public class ADMLuceneUnIndexedIndexAndSearcherFactory extends ADMLuceneIndexerA
// indexer.setLuceneIndexLock(luceneIndexLock);
indexer.setFullTextSearchIndexer(fullTextSearchIndexer);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
indexer.setMaxAtomicTransformationTime(getMaxTransformationTime());
return indexer;
}
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/AVMLuceneIndexerAndSearcherFactory.java b/source/java/org/alfresco/repo/search/impl/lucene/AVMLuceneIndexerAndSearcherFactory.java
index 519f50527e..3be202a280 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/AVMLuceneIndexerAndSearcherFactory.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/AVMLuceneIndexerAndSearcherFactory.java
@@ -37,6 +37,7 @@ import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespaceService;
+import org.alfresco.service.transaction.TransactionService;
/**
* Factory for AVM indexers and searchers
@@ -49,6 +50,8 @@ public class AVMLuceneIndexerAndSearcherFactory extends AbstractLuceneIndexerAnd
private DictionaryService dictionaryService;
private NamespaceService nameSpaceService;
private ContentService contentService;
+ protected TransactionService transactionService;
+
private AVMService avmService;
private AVMSyncService avmSyncService;
private NodeService nodeService;
@@ -85,6 +88,11 @@ public class AVMLuceneIndexerAndSearcherFactory extends AbstractLuceneIndexerAnd
this.contentService = contentService;
}
+ public void setTransactionService(TransactionService transactionService)
+ {
+ this.transactionService = transactionService;
+ }
+
/**
* Set the AVM service
*/
@@ -131,6 +139,7 @@ public class AVMLuceneIndexerAndSearcherFactory extends AbstractLuceneIndexerAnd
AVMLuceneIndexerImpl indexer = AVMLuceneIndexerImpl.getUpdateIndexer(storeRef, deltaId, this);
indexer.setDictionaryService(dictionaryService);
indexer.setContentService(contentService);
+ indexer.setTransactionService(transactionService);
indexer.setMaxAtomicTransformationTime(getMaxTransformationTime());
indexer.setAvmService(avmService);
indexer.setAvmSyncService(avmSyncService);
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/AVMLuceneIndexerImpl.java b/source/java/org/alfresco/repo/search/impl/lucene/AVMLuceneIndexerImpl.java
index 7cd3ad2627..231be2f170 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/AVMLuceneIndexerImpl.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/AVMLuceneIndexerImpl.java
@@ -431,7 +431,7 @@ public class AVMLuceneIndexerImpl extends AbstractLuceneIndexerImpl impl
}
@Override
- protected List<Document> createDocuments(String stringNodeRef, boolean isNew, boolean indexAllProperties, boolean includeDirectoryDocuments)
+ protected List<Document> createDocuments(String stringNodeRef, FTSStatus ftsStatus, boolean indexAllProperties, boolean includeDirectoryDocuments)
{
List<Document> docs = new ArrayList<Document>();
if (stringNodeRef.startsWith("\u0000"))
@@ -1233,14 +1233,13 @@ public class AVMLuceneIndexerImpl extends AbstractLuceneIndexerImpl impl
protected void doPrepare() throws IOException
{
AuthenticationUtil.runAs(new RunAsWork<String>()
- {
+ {
public String doWork() throws Exception
- {
- saveDelta();
+ {
flushPending();
return null;
}
- }, AuthenticationUtil.getSystemUserName());
+ }, AuthenticationUtil.getSystemUserName());
}
@Override
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/AbstractLuceneIndexerImpl.java b/source/java/org/alfresco/repo/search/impl/lucene/AbstractLuceneIndexerImpl.java
index b99c2f0945..2af659a046 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/AbstractLuceneIndexerImpl.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/AbstractLuceneIndexerImpl.java
@@ -33,7 +33,9 @@ import javax.transaction.xa.XAResource;
import org.alfresco.repo.search.IndexerException;
import org.alfresco.repo.search.impl.lucene.index.TransactionStatus;
+import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.repository.InvalidNodeRefException;
+import org.alfresco.service.transaction.TransactionService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.lucene.document.Document;
@@ -41,6 +43,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
+import org.springframework.dao.ConcurrencyFailureException;
/**
* Common support for indexing across implementations
@@ -93,9 +96,25 @@ public abstract class AbstractLuceneIndexerImpl extends AbstractLuceneBase
protected enum IndexDeleteMode {REINDEX, DELETE, MOVE};
- protected long docs;
+ protected enum FTSStatus {New, Dirty, Clean};
- // Failure codes to index when problems occur indexing content
+ protected long docs;
+
+ // An indexer with read through activated can only see already-committed documents in the database. Useful when
+ // reindexing lots of old documents and not wanting to pollute the caches with stale versions of nodes.
+ private boolean isReadThrough;
+
+ protected TransactionService transactionService;
+
+ public void setReadThrough(boolean isReadThrough)
+ {
+ this.isReadThrough = isReadThrough;
+ }
+
+ public void setTransactionService(TransactionService transactionService)
+ {
+ this.transactionService = transactionService;
+ }
protected static class Command
{
@@ -157,7 +176,6 @@ public abstract class AbstractLuceneIndexerImpl extends AbstractLuceneBase
/**
* Logger
*/
- @SuppressWarnings("unused")
private static Log s_logger = LogFactory.getLog(AbstractLuceneIndexerImpl.class);
protected static Set<String> deletePrimary(Collection<String> nodeRefs, IndexReader reader, boolean delete)
@@ -611,9 +629,41 @@ public abstract class AbstractLuceneIndexerImpl extends AbstractLuceneBase
protected abstract void doSetRollbackOnly() throws IOException;
- protected abstract List<Document> createDocuments(String stringNodeRef, boolean isNew, boolean indexAllProperties,
+ protected abstract List<Document> createDocuments(String stringNodeRef, FTSStatus ftsStatus, boolean indexAllProperties,
boolean includeDirectoryDocuments);
+ protected List<Document> readDocuments(final String stringNodeRef, final FTSStatus ftsStatus,
+ final boolean indexAllProperties, final boolean includeDirectoryDocuments)
+ {
+ if (isReadThrough)
+ {
+ return transactionService.getRetryingTransactionHelper().doInTransaction(
+ new RetryingTransactionCallback<List<Document>>()
+ {
+ @Override
+ public List<Document> execute() throws Throwable
+ {
+ try
+ {
+ return createDocuments(stringNodeRef, ftsStatus, indexAllProperties,
+ includeDirectoryDocuments);
+ }
+ catch (InvalidNodeRefException e)
+ {
+ // Turn InvalidNodeRefExceptions into retryable exceptions.
+ throw new ConcurrencyFailureException(
+ "Possible cache integrity issue during reindexing", e);
+ }
+
+ }
+ }, true, true);
+ }
+ else
+ {
+ return createDocuments(stringNodeRef, ftsStatus, indexAllProperties, includeDirectoryDocuments);
+ }
+ }
+
protected Set<String> deleteImpl(String nodeRef, IndexDeleteMode mode, boolean cascade, IndexReader mainReader)
throws LuceneIndexException, IOException
@@ -741,7 +791,7 @@ public abstract class AbstractLuceneIndexerImpl extends AbstractLuceneBase
try
{
- List<Document> docs = createDocuments(nodeRef, isNew, false, true);
+ List<Document> docs = readDocuments(nodeRef, isNew ? FTSStatus.New : FTSStatus.Dirty, false, true);
for (Document doc : docs)
{
try
@@ -867,11 +917,18 @@ public abstract class AbstractLuceneIndexerImpl extends AbstractLuceneBase
*/
public void flushPending() throws LuceneIndexException
{
- // Make sure the in flush deletion list is clear at the start
- deletionsSinceFlush.clear();
IndexReader mainReader = null;
try
{
+ saveDelta();
+
+ // Make sure the in flush deletion list is clear at the start
+ deletionsSinceFlush.clear();
+ if (commandList.isEmpty())
+ {
+ return;
+ }
+
mainReader = getReader();
Set forIndex = new LinkedHashSet();
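The new readDocuments(...) wrapper in AbstractLuceneIndexerImpl is what makes background reindexing resilient: with read-through enabled it runs document generation in its own read-only, requires-new retrying transaction and converts InvalidNodeRefException into a retryable ConcurrencyFailureException. A distilled sketch of that pattern, not a drop-in method (loadDocs(...) is a hypothetical stand-in for the abstract createDocuments(...)):

    private List<Document> readWithRetry(final String nodeRef)
    {
        return transactionService.getRetryingTransactionHelper().doInTransaction(
                new RetryingTransactionCallback<List<Document>>()
                {
                    public List<Document> execute() throws Throwable
                    {
                        try
                        {
                            return loadDocs(nodeRef); // hypothetical; stands in for createDocuments(...)
                        }
                        catch (InvalidNodeRefException e)
                        {
                            // Re-thrown as a retryable exception so the helper backs off and retries
                            throw new ConcurrencyFailureException("Stale node encountered during reindex", e);
                        }
                    }
                }, true, true); // read-only, and always in a new transaction
    }

Running each read in a fresh transaction is what keeps a long FTS pass from serving stale node data out of the shared caches.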
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/FilterIndexReaderByStringId.java b/source/java/org/alfresco/repo/search/impl/lucene/FilterIndexReaderByStringId.java
index 118b648e4a..d92a5d93b6 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/FilterIndexReaderByStringId.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/FilterIndexReaderByStringId.java
@@ -19,8 +19,9 @@
package org.alfresco.repo.search.impl.lucene;
import java.io.IOException;
-import java.util.BitSet;
import java.util.Set;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.alfresco.error.AlfrescoRuntimeException;
import org.apache.commons.logging.Log;
@@ -49,9 +50,12 @@ public class FilterIndexReaderByStringId extends FilterIndexReader
{
private static Log s_logger = LogFactory.getLog(FilterIndexReaderByStringId.class);
- OpenBitSet deletedDocuments;
+ private OpenBitSet deletedDocuments;
+ private final Set<String> deletions;
+ private final boolean deleteNodesOnly;
+ private final ReadWriteLock lock = new ReentrantReadWriteLock();
- private String id;
+ private final String id;
/**
* Apply the filter
@@ -66,21 +70,44 @@ public class FilterIndexReaderByStringId extends FilterIndexReader
super(reader);
reader.incRef();
this.id = id;
+ this.deletions = deletions;
+ this.deleteNodesOnly = deleteNodesOnly;
- deletedDocuments = new OpenBitSet(reader.maxDoc());
-
if (s_logger.isDebugEnabled())
{
s_logger.debug("Applying deletions FOR "+id +" (the index ito which these are applied is the previous one ...)");
}
+ }
+
+ public OpenBitSet getDeletedDocuments()
+ {
+ lock.readLock().lock();
try
{
+ if (deletedDocuments != null)
+ {
+ return deletedDocuments;
+ }
+ }
+ finally
+ {
+ lock.readLock().unlock();
+ }
+ lock.writeLock().lock();
+ try
+ {
+ if (deletedDocuments != null)
+ {
+ return deletedDocuments;
+ }
+ deletedDocuments = new OpenBitSet(in.maxDoc());
+
if (!deleteNodesOnly)
{
for (String stringRef : deletions)
{
- TermDocs td = reader.termDocs(new Term("ID", stringRef));
+ TermDocs td = in.termDocs(new Term("ID", stringRef));
while (td.next())
{
deletedDocuments.set(td.doc());
@@ -91,7 +118,7 @@ public class FilterIndexReaderByStringId extends FilterIndexReader
else
{
- Searcher searcher = new IndexSearcher(reader);
+ Searcher searcher = new IndexSearcher(in);
for (String stringRef : deletions)
{
TermQuery query = new TermQuery(new Term("ID", stringRef));
@@ -112,12 +139,18 @@ public class FilterIndexReaderByStringId extends FilterIndexReader
}
// searcher does not need to be closed, the reader is live
}
+ return deletedDocuments;
}
catch (IOException e)
{
- s_logger.error("Error initialising "+id);
- throw new AlfrescoRuntimeException("Failed to construct filtering index reader", e);
+ s_logger.error("Error initialising "+id, e);
+ throw new AlfrescoRuntimeException("Failed to find deleted documents to filter", e);
}
+ finally
+ {
+ lock.writeLock().unlock();
+ }
+
}
// Prevent from actually setting the closed flag
@@ -133,10 +166,8 @@ public class FilterIndexReaderByStringId extends FilterIndexReader
* @author andyh
*
*/
- public static class FilterTermDocs implements TermDocs
+ public class FilterTermDocs implements TermDocs
{
- OpenBitSet deletedDocuments;
-
protected TermDocs in;
String id;
@@ -146,10 +177,9 @@ public class FilterIndexReaderByStringId extends FilterIndexReader
* @param in
* @param deletedDocuments
*/
- public FilterTermDocs(String id, TermDocs in, OpenBitSet deletedDocuments)
+ public FilterTermDocs(String id, TermDocs in)
{
this.in = in;
- this.deletedDocuments = deletedDocuments;
}
public void seek(Term term) throws IOException
@@ -180,15 +210,20 @@ public class FilterIndexReaderByStringId extends FilterIndexReader
{
try
{
- while (in.next())
- {
- if (!deletedDocuments.get(in.doc()))
+ if (!in.next())
+ {
+ return false;
+ }
+ OpenBitSet deletedDocuments = getDeletedDocuments();
+ while (deletedDocuments.get(in.doc()))
+ {
+ if (!in.next())
{
- // Not masked
- return true;
- }
- }
- return false;
+ return false;
+ }
+ }
+ // Not masked
+ return true;
}
catch(IOException ioe)
{
@@ -209,10 +244,17 @@ public class FilterIndexReaderByStringId extends FilterIndexReader
return 0;
}
- if (allDeleted(innerDocs, count))
+ OpenBitSet deletedDocuments = getDeletedDocuments();
+ while (allDeleted(innerDocs, count, deletedDocuments))
{
- // Did not find anything - try again
- return read(docs, freqs);
+
+ count = in.read(innerDocs, innerFreq);
+
+ // Is the stream exhausted
+ if (count == 0)
+ {
+ return 0;
+ }
}
// Add non deleted
@@ -231,7 +273,7 @@ public class FilterIndexReaderByStringId extends FilterIndexReader
return insertPosition;
}
- private boolean allDeleted(int[] docs, int fillSize)
+ private boolean allDeleted(int[] docs, int fillSize, OpenBitSet deletedDocuments)
{
for (int i = 0; i < fillSize; i++)
{
@@ -250,6 +292,7 @@ public class FilterIndexReaderByStringId extends FilterIndexReader
return false;
}
+ OpenBitSet deletedDocuments = getDeletedDocuments();
while (deletedDocuments.get(in.doc()))
{
if (!in.next())
@@ -268,7 +311,7 @@ public class FilterIndexReaderByStringId extends FilterIndexReader
}
/** Base class for filtering {@link TermPositions} implementations. */
- public static class FilterTermPositions extends FilterTermDocs implements TermPositions
+ public class FilterTermPositions extends FilterTermDocs implements TermPositions
{
TermPositions tp;
@@ -278,9 +321,9 @@ public class FilterIndexReaderByStringId extends FilterIndexReader
* @param in
* @param deletedDocuments
*/
- public FilterTermPositions(String id, TermPositions in, OpenBitSet deletedDocuements)
+ public FilterTermPositions(String id, TermPositions in)
{
- super(id, in, deletedDocuements);
+ super(id, in);
tp = in;
}
@@ -308,18 +351,18 @@ public class FilterIndexReaderByStringId extends FilterIndexReader
@Override
public int numDocs()
{
- return super.numDocs() - (int)deletedDocuments.cardinality();
+ return super.numDocs() - (int)getDeletedDocuments().cardinality();
}
@Override
public TermDocs termDocs() throws IOException
{
- return new FilterTermDocs(id, super.termDocs(), deletedDocuments);
+ return new FilterTermDocs(id, super.termDocs());
}
@Override
public TermPositions termPositions() throws IOException
{
- return new FilterTermPositions(id, super.termPositions(), deletedDocuments);
+ return new FilterTermPositions(id, super.termPositions());
}
}
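FilterIndexReaderByStringId now builds its deletedDocuments bit set lazily, on first use, guarded by a ReadWriteLock rather than eagerly in the constructor. The underlying pattern is the usual read-lock / re-check-under-write-lock idiom; a generic sketch of that idiom (not the patched class itself):

    import java.util.concurrent.locks.ReadWriteLock;
    import java.util.concurrent.locks.ReentrantReadWriteLock;

    abstract class LazyLockGuarded<T>
    {
        private final ReadWriteLock lock = new ReentrantReadWriteLock();
        private T value;

        protected abstract T build(); // e.g. scan the wrapped reader and mark deleted documents

        public T get()
        {
            lock.readLock().lock();
            try
            {
                if (value != null)
                {
                    return value;
                }
            }
            finally
            {
                lock.readLock().unlock();
            }
            lock.writeLock().lock();
            try
            {
                if (value == null) // another thread may have built it while we waited
                {
                    value = build();
                }
                return value;
            }
            finally
            {
                lock.writeLock().unlock();
            }
        }
    }

The re-check under the write lock is what stops two threads from scanning the wrapped reader twice.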
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/LuceneResultSet.java b/source/java/org/alfresco/repo/search/impl/lucene/LuceneResultSet.java
index 8c5bdcf257..9759c2ce0d 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/LuceneResultSet.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/LuceneResultSet.java
@@ -23,6 +23,7 @@ import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;
+import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.node.NodeBulkLoader;
import org.alfresco.repo.search.AbstractResultSet;
import org.alfresco.repo.search.ResultSetRowIterator;
@@ -196,8 +197,15 @@ public class LuceneResultSet extends AbstractResultSet
prefetch.set(next);
Document doc = hits.doc(next);
String nodeRefStr = doc.get("ID");
- NodeRef nodeRef = tenantService.getBaseName(new NodeRef(nodeRefStr));
- fetchList.add(nodeRef);
+ try
+ {
+ NodeRef nodeRef = tenantService.getBaseName(new NodeRef(nodeRefStr));
+ fetchList.add(nodeRef);
+ }
+ catch (AlfrescoRuntimeException e)
+ {
+ // Ignore IDs that don't parse as NodeRefs, e.g. FTSREF docs
+ }
}
// Now bulk fetch
if (fetchList.size() > 1)
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/fts/FullTextSearchIndexerImpl.java b/source/java/org/alfresco/repo/search/impl/lucene/fts/FullTextSearchIndexerImpl.java
index 4b9f055b08..192fd2a71a 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/fts/FullTextSearchIndexerImpl.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/fts/FullTextSearchIndexerImpl.java
@@ -26,8 +26,9 @@ import org.alfresco.repo.search.BackgroundIndexerAware;
import org.alfresco.repo.search.Indexer;
import org.alfresco.repo.search.IndexerAndSearcher;
import org.alfresco.repo.search.SupportsBackgroundIndexing;
-import org.alfresco.repo.search.impl.lucene.index.IndexInfo;
+import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.repository.StoreRef;
+import org.alfresco.service.transaction.TransactionService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.BeansException;
@@ -51,6 +52,8 @@ public class FullTextSearchIndexerImpl implements FTSIndexerAware, FullTextSearc
private static Set<StoreRef> indexing = new HashSet<StoreRef>();
private IndexerAndSearcher indexerAndSearcherFactory;
+
+ private TransactionService transactionService;
private int pauseCount = 0;
@@ -201,30 +204,42 @@ public class FullTextSearchIndexerImpl implements FTSIndexerAware, FullTextSearc
int done = 0;
while (done == 0)
{
- StoreRef toIndex = getNextRef();
+ final StoreRef toIndex = getNextRef();
if (toIndex != null)
{
if(s_logger.isDebugEnabled())
{
s_logger.debug("FTS Indexing "+toIndex+" at "+(new java.util.Date()));
}
- Indexer indexer = indexerAndSearcherFactory.getIndexer(toIndex);
- if(indexer instanceof BackgroundIndexerAware)
+ try
{
- BackgroundIndexerAware backgroundIndexerAware = (BackgroundIndexerAware)indexer;
- backgroundIndexerAware.registerCallBack(this);
- try
- {
- done += backgroundIndexerAware.updateFullTextSearch(batchSize);
- }
- catch (Exception ex)
- {
- if(s_logger.isWarnEnabled())
- {
- s_logger.warn("FTS Job threw exception", ex);
- }
- done = 1; // better luck next time
- }
+ done += transactionService.getRetryingTransactionHelper().doInTransaction(
+ new RetryingTransactionCallback<Integer>()
+ {
+ @Override
+ public Integer execute() throws Throwable
+ {
+ Indexer indexer = indexerAndSearcherFactory.getIndexer(toIndex);
+ // Activate database 'read through' behaviour so that we don't end up with stale
+ // caches during this potentially long running transaction
+ indexer.setReadThrough(true);
+ if (indexer instanceof BackgroundIndexerAware)
+ {
+ BackgroundIndexerAware backgroundIndexerAware = (BackgroundIndexerAware) indexer;
+ backgroundIndexerAware.registerCallBack(FullTextSearchIndexerImpl.this);
+ return backgroundIndexerAware.updateFullTextSearch(batchSize);
+ }
+ return 0;
+ }
+ });
+ }
+ catch (Exception ex)
+ {
+ if (s_logger.isWarnEnabled())
+ {
+ s_logger.warn("FTS Job threw exception", ex);
+ }
+ done = 1; // better luck next time
}
}
else
@@ -277,6 +292,14 @@ public class FullTextSearchIndexerImpl implements FTSIndexerAware, FullTextSearc
{
this.indexerAndSearcherFactory = indexerAndSearcherFactory;
}
+
+ /**
+ * @param transactionService
+ */
+ public void setTransactionService(TransactionService transactionService)
+ {
+ this.transactionService = transactionService;
+ }
/**
* @param args
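In FullTextSearchIndexerImpl each background batch now runs inside its own retrying transaction, with the indexer switched to read-through mode inside it, and any failure still simply counts as work done so the loop moves on. The wrapper reduces to the standard RetryingTransactionHelper idiom; a minimal sketch (runBatch() is hypothetical and stands in for obtaining the indexer and calling updateFullTextSearch(batchSize)):

    Integer done = transactionService.getRetryingTransactionHelper().doInTransaction(
            new RetryingTransactionCallback<Integer>()
            {
                public Integer execute() throws Throwable
                {
                    return runBatch(); // hypothetical stand-in for the indexer call
                }
            });

The surrounding transaction also gives the node reads performed during document generation a proper transactional context.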
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/index/IndexInfo.java b/source/java/org/alfresco/repo/search/impl/lucene/index/IndexInfo.java
index 0f01b93b9a..be4c6b18f9 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/index/IndexInfo.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/index/IndexInfo.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2005-2010 Alfresco Software Limited.
+ * Copyright (C) 2005-2011 Alfresco Software Limited.
*
* This file is part of Alfresco
*
@@ -348,6 +348,7 @@ public class IndexInfo implements IndexMonitor
* Main index reader
*/
private IndexReader mainIndexReader;
+ private Map<String, IndexReader> mainIndexReaders = new HashMap<String, IndexReader>();
/**
* Index writers for deltas
@@ -505,6 +506,7 @@ public class IndexInfo implements IndexMonitor
}
}
// Delete entries that are not required
+ invalidateMainReadersFromFirst(deletable);
for (String id : deletable)
{
indexEntries.remove(id);
@@ -845,7 +847,6 @@ public class IndexInfo implements IndexMonitor
}
entry.setStatus(TransactionStatus.COMMITTED);
registerReferenceCountingIndexReader(entry.getName(), buildReferenceCountingIndexReader(entry.getName(), entry.getDocumentCount()));
- mainIndexReader = null;
break;
// States that require no action
case COMMITTED:
@@ -857,6 +858,7 @@ public class IndexInfo implements IndexMonitor
}
}
// Delete entries that are not required
+ invalidateMainReadersFromFirst(deletable);
for (String id : deletable)
{
indexEntries.remove(id);
@@ -1281,6 +1283,36 @@ public class IndexInfo implements IndexMonitor
}
}
+ private void invalidateMainReadersFromFirst(Set<String> ids) throws IOException
+ {
+ boolean found = false;
+ for (String id : indexEntries.keySet())
+ {
+ if (!found && ids.contains(id))
+ {
+ found = true;
+ }
+ if (found)
+ {
+ IndexReader main = mainIndexReaders.remove(id);
+ if (main != null)
+ {
+ ((ReferenceCounting) main).setInvalidForReuse();
+ }
+ }
+ }
+
+ if (found)
+ {
+ if(mainIndexReader != null)
+ {
+ ((ReferenceCounting) mainIndexReader).setInvalidForReuse();
+ mainIndexReader = null;
+ }
+ }
+
+ }
+
/**
* Get the main reader for committed index data
*
@@ -1300,6 +1332,10 @@ public class IndexInfo implements IndexMonitor
getWriteLock();
try
{
+ if (mainIndexReader != null)
+ {
+ ((ReferenceCounting)mainIndexReader).setInvalidForReuse();
+ }
mainIndexReader = null;
}
finally
@@ -1397,6 +1433,10 @@ public class IndexInfo implements IndexMonitor
getWriteLock();
try
{
+ if (mainIndexReader != null)
+ {
+ ((ReferenceCounting)mainIndexReader).setInvalidForReuse();
+ }
mainIndexReader = null;
}
finally
@@ -1738,6 +1778,7 @@ public class IndexInfo implements IndexMonitor
reordered.put(entry.getName(), entry);
reordered.put(current.getName(), current);
addedPreparedEntry = true;
+ invalidateMainReadersFromFirst(Collections.singleton(current.getName()));
}
else if (current.getName().equals(entry.getName()))
{
@@ -1843,6 +1884,7 @@ public class IndexInfo implements IndexMonitor
if (TransactionStatus.COMMITTED.follows(entry.getStatus()))
{
// Do the deletions
+ invalidateMainReadersFromFirst(Collections.singleton(id));
if ((entry.getDocumentCount() + entry.getDeletions()) == 0)
{
registerReferenceCountingIndexReader(id, tl.get());
@@ -2004,6 +2046,7 @@ public class IndexInfo implements IndexMonitor
if (TransactionStatus.DELETABLE.follows(entry.getStatus()))
{
+ invalidateMainReadersFromFirst(Collections.singleton(id));
indexEntries.remove(id);
writeStatus();
clearOldReaders();
@@ -2147,7 +2190,7 @@ public class IndexInfo implements IndexMonitor
clearInvalid(inValid);
}
- private void clearInvalid(HashSet<String> inValid) throws IOException
+ private void clearInvalid(Set<String> inValid) throws IOException
{
boolean hasInvalid = false;
for (String id : inValid)
@@ -2157,13 +2200,24 @@ public class IndexInfo implements IndexMonitor
{
s_logger.debug("... invalidating sub reader " + id);
}
- ReferenceCounting referenceCounting = (ReferenceCounting) reader;
- referenceCounting.setInvalidForReuse();
- deletableReaders.add(reader);
- hasInvalid = true;
+ if (reader != null)
+ {
+ ReferenceCounting referenceCounting = (ReferenceCounting) reader;
+ referenceCounting.setInvalidForReuse();
+ deletableReaders.add(reader);
+ hasInvalid = true;
+ }
}
if (hasInvalid)
{
+ for (String id : inValid)
+ {
+ IndexReader main = mainIndexReaders.remove(id);
+ if (main != null)
+ {
+ ((ReferenceCounting) main).setInvalidForReuse();
+ }
+ }
if (mainIndexReader != null)
{
if (s_logger.isDebugEnabled())
@@ -2179,6 +2233,7 @@ public class IndexInfo implements IndexMonitor
private IndexReader createMainIndexReader() throws IOException
{
IndexReader reader = null;
+ IndexReader oldReader = null;
for (String id : indexEntries.keySet())
{
IndexEntry entry = indexEntries.get(id);
@@ -2188,33 +2243,35 @@ public class IndexInfo implements IndexMonitor
if (reader == null)
{
reader = subReader;
- reader.incRef();
}
else
{
- if (entry.getType() == IndexType.INDEX)
+ boolean oldReaderIsSubReader = oldReader == null;
+ oldReader = reader;
+ reader = mainIndexReaders.get(id);
+ if (reader == null)
{
- IndexReader oldReader = reader;
- reader = new MultiReader(new IndexReader[] { oldReader, subReader }, false);
- // Cancel out the incRef on the old reader
- oldReader.decRef();
- }
- else if (entry.getType() == IndexType.DELTA)
- {
- try
+ if (entry.getType() == IndexType.INDEX)
{
- IndexReader oldReader = reader;
- IndexReader filterReader = new FilterIndexReaderByStringId(id, oldReader, getDeletions(entry.getName()), entry.isDeletOnlyNodes());
- reader = new MultiReader(new IndexReader[] { filterReader, subReader }, false);
- // Cancel out the incRef on the old readers
- oldReader.decRef();
- filterReader.decRef();
+ reader = new MultiReader(new IndexReader[] { oldReader, subReader }, false);
}
- catch (IOException ioe)
+ else if (entry.getType() == IndexType.DELTA)
{
- s_logger.error("Failed building filter reader beneath " + entry.getName(), ioe);
- throw ioe;
+ try
+ {
+ IndexReader filterReader = new FilterIndexReaderByStringId(id, oldReader, getDeletions(entry.getName()), entry.isDeletOnlyNodes());
+ reader = new MultiReader(new IndexReader[] { filterReader, subReader }, false);
+ // Cancel out the incRef on the filter reader
+ filterReader.decRef();
+ }
+ catch (IOException ioe)
+ {
+ s_logger.error("Failed building filter reader beneath " + entry.getName(), ioe);
+ throw ioe;
+ }
}
+ reader = ReferenceCountingReadOnlyIndexReaderFactory.createReader(id+"multi", reader, true, config);
+ mainIndexReaders.put(id, reader);
}
}
}
@@ -2223,6 +2280,12 @@ public class IndexInfo implements IndexMonitor
{
reader = IndexReader.open(emptyIndex);
}
+ else
+ {
+ // Keep this reader open whilst it is referenced by mainIndexReaders / referenceCountingReadOnlyIndexReaders
+ reader.incRef();
+ }
+
reader = ReferenceCountingReadOnlyIndexReaderFactory.createReader(MAIN_READER, reader, false, config);
return reader;
}
@@ -2237,8 +2300,9 @@ public class IndexInfo implements IndexMonitor
return reader;
}
- private void registerReferenceCountingIndexReader(String id, IndexReader reader)
+ private void registerReferenceCountingIndexReader(String id, IndexReader reader) throws IOException
{
+ clearInvalid(Collections.singleton(id));
ReferenceCounting referenceCounting = (ReferenceCounting) reader;
if (!referenceCounting.getId().equals(id))
{
@@ -3160,6 +3224,14 @@ public class IndexInfo implements IndexMonitor
deleteQueue.add(refCounting.getId());
i.remove();
}
+ else if (s_logger.isTraceEnabled() && refCounting.getCreationTime() < System.currentTimeMillis() - 120000)
+ {
+ for (Throwable t : refCounting.getReferences())
+ {
+ s_logger.trace(t.getMessage(), t);
+ }
+ }
+
}
Iterator j = deleteQueue.iterator();
@@ -3641,6 +3713,7 @@ public class IndexInfo implements IndexMonitor
}
// Delete entries that are not required
+ invalidateMainReadersFromFirst(deletable);
for (String id : deletable)
{
indexEntries.remove(id);
@@ -3767,96 +3840,100 @@ public class IndexInfo implements IndexMonitor
}
// Build readers
- final HashSet<String> invalidIndexes = new HashSet<String>();
+ int size = 2 * (toDelete.size() + indexes.size());
+ final HashSet<String> invalidIndexes = new HashSet<String>(size);
- final HashMap newIndexCounts = new HashMap();
-
- LinkedHashMap<String, IndexReader> readers = new LinkedHashMap<String, IndexReader>();
- for (IndexEntry entry : indexes.values())
- {
- File location = new File(indexDirectory, entry.getName()).getCanonicalFile();
- IndexReader reader;
- if (IndexReader.indexExists(location))
- {
- reader = IndexReader.open(location);
- }
- else
- {
- reader = IndexReader.open(emptyIndex);
- }
- readers.put(entry.getName(), reader);
- }
+ final HashMap newIndexCounts = new HashMap(size);
+ LinkedHashMap<String, IndexReader> readers = new LinkedHashMap<String, IndexReader>(size);
for (IndexEntry currentDelete : toDelete.values())
{
Set<String> deletions = getDeletions(currentDelete.getName());
- for (String key : readers.keySet())
+ if (!deletions.isEmpty())
{
- IndexReader reader = readers.get(key);
- for (String stringRef : deletions)
+ for (String key : indexes.keySet())
{
- if (currentDelete.isDeletOnlyNodes())
+ IndexReader reader = getReferenceCountingIndexReader(key);
+ Searcher searcher = new IndexSearcher(reader);
+ try
{
- Searcher searcher = new IndexSearcher(reader);
-
- TermQuery query = new TermQuery(new Term("ID", stringRef));
- Hits hits = searcher.search(query);
- if (hits.length() > 0)
+ for (String stringRef : deletions)
{
- for (int i = 0; i < hits.length(); i++)
+ TermQuery query = new TermQuery(new Term("ID", stringRef));
+ Hits hits = searcher.search(query);
+ if (hits.length() > 0)
{
- Document doc = hits.doc(i);
- if (doc.getField("ISCONTAINER") == null)
+ IndexReader writeableReader = readers.get(key);
+ if (writeableReader == null)
{
- reader.deleteDocument(hits.id(i));
- invalidIndexes.add(key);
- // There should only be one thing to
- // delete
- // break;
+ File location = new File(indexDirectory, key).getCanonicalFile();
+ if (IndexReader.indexExists(location))
+ {
+ writeableReader = IndexReader.open(location);
+ }
+ else
+ {
+ continue;
+ }
+ readers.put(key, writeableReader);
+ }
+
+ if (currentDelete.isDeletOnlyNodes())
+ {
+ Searcher writeableSearcher = new IndexSearcher(writeableReader);
+ hits = writeableSearcher.search(query);
+ if (hits.length() > 0)
+ {
+ for (int i = 0; i < hits.length(); i++)
+ {
+ Document doc = hits.doc(i);
+ if (doc.getField("ISCONTAINER") == null)
+ {
+ writeableReader.deleteDocument(hits.id(i));
+ invalidIndexes.add(key);
+ // There should only be one thing to
+ // delete
+ // break;
+ }
+ }
+ }
+ writeableSearcher.close();
+ }
+ else
+ {
+ int deletedCount = 0;
+ try
+ {
+ deletedCount = writeableReader.deleteDocuments(new Term("ID", stringRef));
+ }
+ catch (IOException ioe)
+ {
+ if (s_logger.isDebugEnabled())
+ {
+ s_logger.debug("IO Error for " + key);
+ throw ioe;
+ }
+ }
+ if (deletedCount > 0)
+ {
+ if (s_logger.isDebugEnabled())
+ {
+ s_logger.debug("Deleted " + deletedCount + " from " + key + " for id " + stringRef + " remaining docs " + writeableReader.numDocs());
+ }
+ invalidIndexes.add(key);
+ }
}
}
}
- searcher.close();
-
}
- else
+ finally
{
- int deletedCount = 0;
- try
- {
- deletedCount = reader.deleteDocuments(new Term("ID", stringRef));
- }
- catch (IOException ioe)
- {
- if (s_logger.isDebugEnabled())
- {
- s_logger.debug("IO Error for " + key);
- throw ioe;
- }
- }
- if (deletedCount > 0)
- {
- if (s_logger.isDebugEnabled())
- {
- s_logger.debug("Deleted " + deletedCount + " from " + key + " for id " + stringRef + " remaining docs " + reader.numDocs());
- }
- invalidIndexes.add(key);
- }
- }
+ searcher.close();
+ }
}
-
}
- File location = new File(indexDirectory, currentDelete.getName()).getCanonicalFile();
- IndexReader reader;
- if (IndexReader.indexExists(location))
- {
- reader = IndexReader.open(location);
- }
- else
- {
- reader = IndexReader.open(emptyIndex);
- }
- readers.put(currentDelete.getName(), reader);
+ // The delta we have just processed now must be included when we process the deletions of its successor
+ indexes.put(currentDelete.getName(), currentDelete);
}
// Close all readers holding the write lock - so no one tries to
@@ -3929,25 +4006,10 @@ public class IndexInfo implements IndexMonitor
IndexReader newReader = newReaders.get(id);
registerReferenceCountingIndexReader(id, newReader);
}
- if (invalidIndexes.size() > 0)
- {
- if (mainIndexReader != null)
- {
- if (s_logger.isDebugEnabled())
- {
- s_logger.debug("... invalidating main index reader after applying deletions");
- }
- ((ReferenceCounting) mainIndexReader).setInvalidForReuse();
- }
- else
- {
- if (s_logger.isDebugEnabled())
- {
- s_logger.debug("... no main index reader to invalidate after applying deletions");
- }
- }
- mainIndexReader = null;
- }
+
+ // Invalidate all main index readers from the first invalid index onwards
+ invalidateMainReadersFromFirst(invalidIndexes);
+
if (s_logger.isDebugEnabled())
{
@@ -4047,6 +4109,7 @@ public class IndexInfo implements IndexMonitor
set.put(guid, target);
// rebuild merged index elements
LinkedHashMap reordered = new LinkedHashMap();
+ invalidateMainReadersFromFirst(Collections.singleton(firstMergeId));
for (IndexEntry current : indexEntries.values())
{
if (current.getName().equals(firstMergeId))
@@ -4215,6 +4278,7 @@ public class IndexInfo implements IndexMonitor
}
}
+ invalidateMainReadersFromFirst(toDelete);
for (String id : toDelete)
{
indexEntries.remove(id);
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCounting.java b/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCounting.java
index 963a09c7fb..4f50a81033 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCounting.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCounting.java
@@ -19,6 +19,7 @@
package org.alfresco.repo.search.impl.lucene.index;
import java.io.IOException;
+import java.util.List;
/**
* Reference counting and caching for read only index access.
@@ -31,6 +32,10 @@ import java.io.IOException;
*/
public interface ReferenceCounting
{
+ public long getCreationTime();
+
+ public List<Throwable> getReferences();
+
/**
* Get the number of references
* @return
diff --git a/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCountingReadOnlyIndexReaderFactory.java b/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCountingReadOnlyIndexReaderFactory.java
index b55289480c..5200c98c45 100644
--- a/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCountingReadOnlyIndexReaderFactory.java
+++ b/source/java/org/alfresco/repo/search/impl/lucene/index/ReferenceCountingReadOnlyIndexReaderFactory.java
@@ -22,6 +22,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
+import java.util.LinkedList;
import java.util.List;
import java.util.Timer;
import java.util.WeakHashMap;
@@ -124,6 +125,9 @@ public class ReferenceCountingReadOnlyIndexReaderFactory
private LuceneConfig config;
+ private final long creationTime;
+ private final List<Throwable> references;
+
static
{
Class c = IndexReader.class;
@@ -146,6 +150,9 @@ public class ReferenceCountingReadOnlyIndexReaderFactory
ReferenceCountingReadOnlyIndexReader(String id, IndexReader indexReader, boolean enableCaching, LuceneConfig config)
{
super(indexReader);
+ this.creationTime = System.currentTimeMillis();
+ this.references = new LinkedList<Throwable>();
+ references.add(new Exception(this.refCount + ": " + indexReader.toString()));
this.id = id;
if (enableCaching && (config != null))
{
@@ -154,6 +161,18 @@ public class ReferenceCountingReadOnlyIndexReaderFactory
this.config = config;
}
+ @Override
+ public synchronized long getCreationTime()
+ {
+ return this.creationTime;
+ }
+
+ @Override
+ public synchronized List<Throwable> getReferences()
+ {
+ return this.references;
+ }
+
@Override
public synchronized void incRef()
{
@@ -184,6 +203,7 @@ public class ReferenceCountingReadOnlyIndexReaderFactory
throw new AlfrescoRuntimeException("Failed to mark index as open ..", e);
}
}
+ references.add(new Exception(this.refCount + ": " + in.toString()));
}
private synchronized void decrementReferenceCount() throws IOException
@@ -198,6 +218,7 @@ public class ReferenceCountingReadOnlyIndexReaderFactory
{
s_logger.error("Invalid reference count for Reader " + id + " is " + refCount + " ... " + super.toString());
}
+ references.add(new Exception(this.refCount + ": " + in.toString()));
}
private void closeIfRequired() throws IOException
@@ -274,6 +295,15 @@ public class ReferenceCountingReadOnlyIndexReaderFactory
super.decRef();
}
}
+
+ /**
+ * We want to avoid setting the closed flag on our wrapped reader; instead we simply pass the decRef on to it.
+ **/
+ @Override
+ protected void doClose() throws IOException
+ {
+ in.decRef();
+ }
@Override
protected void doDelete(int n) throws IOException
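The ReferenceCounting additions are purely diagnostic: every incRef/decRef records an Exception tagged with the current count, and IndexInfo dumps those captured stack traces at TRACE level for readers still open two minutes after creation, which makes reader leaks traceable. Seeing that output presumably only requires TRACE to be enabled for the index package in the usual log4j configuration, for example (assumed, not part of the patch):

    log4j.logger.org.alfresco.repo.search.impl.lucene.index=trace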
diff --git a/source/java/org/alfresco/repo/security/SecurityTestSuite.java b/source/java/org/alfresco/repo/security/SecurityTestSuite.java
index 55474eee8b..f1b2c1f155 100644
--- a/source/java/org/alfresco/repo/security/SecurityTestSuite.java
+++ b/source/java/org/alfresco/repo/security/SecurityTestSuite.java
@@ -22,7 +22,6 @@ import junit.framework.JUnit4TestAdapter;
import junit.framework.Test;
import junit.framework.TestSuite;
-import org.alfresco.repo.audit.access.AccessAuditorTest;
import org.alfresco.repo.ownable.impl.OwnableServiceTest;
import org.alfresco.repo.security.authentication.AuthenticationBootstrapTest;
import org.alfresco.repo.security.authentication.AuthenticationTest;
diff --git a/source/java/org/alfresco/repo/security/person/AbstractHomeFolderProvider.java b/source/java/org/alfresco/repo/security/person/AbstractHomeFolderProvider.java
index d4793860e2..100913d3b5 100644
--- a/source/java/org/alfresco/repo/security/person/AbstractHomeFolderProvider.java
+++ b/source/java/org/alfresco/repo/security/person/AbstractHomeFolderProvider.java
@@ -48,7 +48,7 @@ public abstract class AbstractHomeFolderProvider implements HomeFolderProvider,
/**
* The home folder manager
*/
- private HomeFolderManager homeFolderManager;
+ private PortableHomeFolderManager homeFolderManager;
/**
* The store ref in which to conduct searches
@@ -97,7 +97,7 @@ public abstract class AbstractHomeFolderProvider implements HomeFolderProvider,
/**
* Get the home folder manager.
*/
- protected HomeFolderManager getHomeFolderManager()
+ protected PortableHomeFolderManager getHomeFolderManager()
{
return homeFolderManager;
}
@@ -106,7 +106,7 @@ public abstract class AbstractHomeFolderProvider implements HomeFolderProvider,
* Set the home folder manager.
* @param homeFolderManager
*/
- public void setHomeFolderManager(HomeFolderManager homeFolderManager)
+ public void setHomeFolderManager(PortableHomeFolderManager homeFolderManager)
{
this.homeFolderManager = homeFolderManager;
}
diff --git a/source/java/org/alfresco/repo/security/person/AbstractHomeFolderProvider2.java b/source/java/org/alfresco/repo/security/person/AbstractHomeFolderProvider2.java
index 09c458c7a8..e78cccc1c1 100644
--- a/source/java/org/alfresco/repo/security/person/AbstractHomeFolderProvider2.java
+++ b/source/java/org/alfresco/repo/security/person/AbstractHomeFolderProvider2.java
@@ -27,7 +27,7 @@ import org.springframework.beans.factory.InitializingBean;
/**
* Abstract class that implements {@link HomeFolderProvider2} which
- * works with the {@link HomeFolderManager} (which performs most of
+ * works with the {@link PortableHomeFolderManager} (which performs most of
* the work) to create home folders in custom locations.
*
* @author Alan Davis
@@ -43,7 +43,7 @@ public abstract class AbstractHomeFolderProvider2 implements
/**
* The home folder manager
*/
- private HomeFolderManager homeFolderManager;
+ private PortableHomeFolderManager homeFolderManager;
/**
* The store URL.
@@ -83,7 +83,7 @@ public abstract class AbstractHomeFolderProvider2 implements
/**
* Get the home folder manager.
*/
- protected HomeFolderManager getHomeFolderManager()
+ protected PortableHomeFolderManager getHomeFolderManager()
{
return homeFolderManager;
}
@@ -92,7 +92,7 @@ public abstract class AbstractHomeFolderProvider2 implements
* Set the home folder manager.
* @param homeFolderManager
*/
- public void setHomeFolderManager(HomeFolderManager homeFolderManager)
+ public void setHomeFolderManager(PortableHomeFolderManager homeFolderManager)
{
this.homeFolderManager = homeFolderManager;
}
diff --git a/source/java/org/alfresco/repo/security/person/HomeFolderManager.java b/source/java/org/alfresco/repo/security/person/HomeFolderManager.java
index 1dc7da544f..eef5c2632c 100644
--- a/source/java/org/alfresco/repo/security/person/HomeFolderManager.java
+++ b/source/java/org/alfresco/repo/security/person/HomeFolderManager.java
@@ -18,480 +18,19 @@
*/
package org.alfresco.repo.security.person;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.alfresco.model.ContentModel;
-import org.alfresco.repo.node.NodeServicePolicies;
-import org.alfresco.repo.policy.JavaBehaviour;
-import org.alfresco.repo.policy.PolicyComponent;
-import org.alfresco.repo.security.authentication.AuthenticationUtil;
-import org.alfresco.repo.tenant.TenantService;
-import org.alfresco.service.ServiceRegistry;
-import org.alfresco.service.cmr.model.FileFolderService;
-import org.alfresco.service.cmr.model.FileFolderUtil;
-import org.alfresco.service.cmr.model.FileInfo;
-import org.alfresco.service.cmr.model.FileNotFoundException;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
-import org.alfresco.service.cmr.repository.NodeRef;
-import org.alfresco.service.cmr.repository.NodeService;
-import org.alfresco.service.cmr.repository.StoreRef;
-import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
-import org.alfresco.service.namespace.NamespaceService;
-import org.alfresco.service.namespace.QName;
/**
- * Manage home folder creation by binding to events from the cm:person type.
- *
- * @author Andy Hind,
- * Alan Davis (support v1 and v2 HomeFolderProviders - code from
- * v1 HomeFolderProviders moved into HomeFolderManager).
+ * An object responsible for creating or validating the home folder for the person node referenced by a given child association.
*/
-public class HomeFolderManager implements NodeServicePolicies.OnCreateNodePolicy
+public interface HomeFolderManager
{
- private PolicyComponent policyComponent;
-
- private NodeService nodeService;
-
- private boolean enableHomeFolderCreationAsPeopleAreCreated = false;
-
- private ServiceRegistry serviceRegistry;
-
- private TenantService tenantService;
-
/**
- * A default provider
- */
- private HomeFolderProvider2 defaultProvider;
-
- /**
- * Original Providers (now deprecated) that have registered and are looked up by bean name.
- */
- @SuppressWarnings("deprecation")
- private Map<String, HomeFolderProvider> v1Providers = new HashMap<String, HomeFolderProvider>();
-
- /**
- * Providers that have registered and are looked up by bean name.
- */
- private Map<String, HomeFolderProvider2> v2Providers = new HashMap<String, HomeFolderProvider2>();
-
- /**
- * Cache the result of the path look up.
- */
- private Map> rootPathNodeRefMaps =
- new ConcurrentHashMap>();
-
- /**
- * Bind the class behaviour to this implementation
- */
- public void init() throws Exception
- {
- if (enableHomeFolderCreationAsPeopleAreCreated)
- {
- policyComponent.bindClassBehaviour(QName.createQName(NamespaceService.ALFRESCO_URI, "onCreateNode"), ContentModel.TYPE_PERSON, new JavaBehaviour(this, "onCreateNode"));
- }
- }
-
- public void setEnableHomeFolderCreationAsPeopleAreCreated(boolean enableHomeFolderCreationAsPeopleAreCreated)
- {
- this.enableHomeFolderCreationAsPeopleAreCreated = enableHomeFolderCreationAsPeopleAreCreated;
- }
-
- /**
- * Set the policy component.
+ * Create / move the home folder if required.
*
- * @param policyComponent
+ * @param childAssocRef
+ * the primary child association pointing to the person node.
*/
- public void setPolicyComponent(PolicyComponent policyComponent)
- {
- this.policyComponent = policyComponent;
- }
+ public void makeHomeFolder(ChildAssociationRef childAssocRef);
- /**
- * Set the node service.
- * @param nodeService
- */
- public void setNodeService(NodeService nodeService)
- {
- this.nodeService = nodeService;
- }
-
- /**
- * Set the service registry.
- */
- public void setServiceRegistry(ServiceRegistry serviceRegistry)
- {
- this.serviceRegistry = serviceRegistry;
- }
-
- /**
- * Set the tenant service
- */
- public void setTenantService(TenantService tenantService)
- {
- this.tenantService = tenantService;
- }
-
- /**
- * Register a home folder provider.
- *
- * @param provider
- */
- @SuppressWarnings("deprecation")
- public void addProvider(HomeFolderProvider provider)
- {
- v1Providers.put(provider.getName(), provider);
- }
-
- /**
- * Register a home folder provider.
- *
- * @param provider
- */
- public void addProvider(HomeFolderProvider2 provider)
- {
- v2Providers.put(provider.getName(), provider);
- }
-
- /**
- * Returns the version 1 HomeFolderProvider with the given name.
- */
- @SuppressWarnings("deprecation")
- public HomeFolderProvider getHomeFolderProvider1(String providerName)
- {
- return v1Providers.get(providerName);
- }
-
- /**
- * Returns the version 2 HomeFolderProvider2 with the given name.
- */
- public HomeFolderProvider2 getHomeFolderProvider2(String providerName)
- {
- return v2Providers.get(providerName);
- }
-
- /**
- * Set the default home folder provider (used when none is specified or when one is not found)
- * @param defaultProvider
- */
- public void setDefaultProvider(HomeFolderProvider2 defaultProvider)
- {
- this.defaultProvider = defaultProvider;
- }
-
- /**
- * Find the provider and call if eager home folder creation is enabled.
- */
- public void onCreateNode(ChildAssociationRef childAssocRef)
- {
- if (enableHomeFolderCreationAsPeopleAreCreated)
- {
- makeHomeFolder(childAssocRef);
- }
- }
-
- /**
- * Find the provider and call.
- */
- @SuppressWarnings("deprecation")
- public void makeHomeFolder(ChildAssociationRef childAssocRef)
- {
- HomeFolderProvider2 v2Provider = defaultProvider;
- HomeFolderProvider v1Provider = null;
- String providerName = DefaultTypeConverter.INSTANCE.convert(
- String.class, nodeService.getProperty(childAssocRef
- .getChildRef(), ContentModel.PROP_HOME_FOLDER_PROVIDER));
- if (providerName != null)
- {
- v2Provider = getHomeFolderProvider2(providerName);
- if (v2Provider == null)
- {
- v1Provider = getHomeFolderProvider1(providerName);
- if (v1Provider == null)
- {
- v2Provider = defaultProvider;
- }
- }
- }
- else
- {
- providerName = defaultProvider.getName();
- nodeService.setProperty(childAssocRef.getChildRef(),
- ContentModel.PROP_HOME_FOLDER_PROVIDER, providerName);
- }
- if (v2Provider != null)
- {
- // If a V2Adaptor we still must call onCreateNode just like a
- // v1 HomeFolderProvider in case it has been overridden
- if (v2Provider instanceof AbstractHomeFolderProvider.V2Adaptor)
- {
- ((AbstractHomeFolderProvider.V2Adaptor)v2Provider).onCreateNode(childAssocRef);
- }
- else
- {
- homeFolderCreateAndSetPermissions(v2Provider, childAssocRef.getChildRef());
- }
- }
- else if (v1Provider != null)
- {
- v1Provider.onCreateNode(childAssocRef);
- }
- }
-
- void homeFolderCreateAndSetPermissions(HomeFolderProvider2 provider, NodeRef personNodeRef)
- {
- AuthenticationUtil.RunAsWork<NodeRef> action =
- new RunAsCreateAndSetPermissions(provider, personNodeRef);
- AuthenticationUtil.runAs(action, AuthenticationUtil.getSystemUserName());
- }
-
- /**
- * Helper class to encapsulate the creation and setting permissions etc
- */
- private class RunAsCreateAndSetPermissions implements AuthenticationUtil.RunAsWork<NodeRef>
- {
- NodeRef personNodeRef;
- HomeFolderProvider2 provider;
-
- RunAsCreateAndSetPermissions(HomeFolderProvider2 provider, NodeRef personNodeRef)
- {
- this.personNodeRef = personNodeRef;
- this.provider = provider;
- }
-
- public NodeRef doWork() throws Exception
- {
- // Get home folder
- HomeSpaceNodeRef homeFolder = provider.getHomeFolder(personNodeRef);
-
- // If it exists
- if (homeFolder.getNodeRef() != null)
- {
- // Get uid and keep
- String uid = DefaultTypeConverter.INSTANCE.convert(String.class,
- serviceRegistry.getNodeService().getProperty(
- personNodeRef, ContentModel.PROP_USERNAME));
-
- // If created or found then set (other wise it was already set correctly)
- if (homeFolder.getStatus() != HomeSpaceNodeRef.Status.VALID)
- {
- serviceRegistry.getNodeService().setProperty(
- personNodeRef, ContentModel.PROP_HOMEFOLDER, homeFolder.getNodeRef());
- }
-
- final String providerSuppliedOwner = provider.getOwner();
- String owner = (providerSuppliedOwner == null) ? uid : providerSuppliedOwner;
- // If created..
- if (homeFolder.getStatus() == HomeSpaceNodeRef.Status.CREATED)
- {
- PermissionsManager onCreatePermissionsManager =
- provider.getOnCreatePermissionsManager();
- if (onCreatePermissionsManager != null)
- {
- onCreatePermissionsManager.setPermissions(
- homeFolder.getNodeRef(), owner, uid);
- }
- }
- else
- {
- PermissionsManager onReferencePermissionsManager =
- provider.getOnReferencePermissionsManager();
- if (onReferencePermissionsManager != null)
- {
- onReferencePermissionsManager.setPermissions(
- homeFolder.getNodeRef(), owner, uid);
- }
- }
- }
- return homeFolder.getNodeRef();
- }
- }
-
- private StoreRef getStoreRef(HomeFolderProvider2 provider)
- {
- // Could check to see if provider is a V2Adaptor to avoid
- // object creation, but there is little point.
- return new StoreRef(provider.getStoreUrl());
- }
-
- /**
- * Helper method for {@link HomeFolderProvider2.getHomeFolder} (so that it
- * does not need its own NodeService) that returns a person property value.
- */
- public String getPersonProperty(NodeRef person, QName name)
- {
- String value = DefaultTypeConverter.INSTANCE.convert(String.class, nodeService.getProperty(person, name));
-
- if(value == null || value.length() == 0)
- {
- throw new PersonException("Can not create a home folder when the "+name+" property is null or empty");
- }
- return value;
- }
-
- void clearCaches(HomeFolderProvider2 provider)
- {
- getRootPathNodeRefMap(provider).clear();
- }
-
- NodeRef getRootPathNodeRef(HomeFolderProvider2 provider)
- {
- String rootPath = provider.getRootPath();
- String tenantDomain = (tenantService != null ? tenantService.getCurrentUserDomain() : TenantService.DEFAULT_DOMAIN);
- Map<String, NodeRef> rootPathNodeRefMap = getRootPathNodeRefMap(provider);
- NodeRef rootPathNodeRef = rootPathNodeRefMap.get(tenantDomain);
- if (rootPathNodeRef == null)
- {
- // ok with race condition for initial construction
- rootPathNodeRef = resolvePath(provider, rootPath);
- rootPathNodeRefMap.put(tenantDomain, rootPathNodeRef);
- }
- return rootPathNodeRef;
- }
-
- private Map<String, NodeRef> getRootPathNodeRefMap(HomeFolderProvider2 provider)
- {
- String name = provider.getName();
- Map<String, NodeRef> rootPathNodeRefMap = rootPathNodeRefMaps.get(name);
- if (rootPathNodeRefMap == null)
- {
- // ok with race condition for initial construction
- rootPathNodeRefMap = new ConcurrentHashMap<String, NodeRef>();
- rootPathNodeRefMaps.put(name, rootPathNodeRefMap);
- }
- return rootPathNodeRefMap;
- }
-
- /**
- * Utility method to resolve paths to nodes.
- */
- NodeRef resolvePath(HomeFolderProvider2 provider, String pathToResolve)
- {
- List<NodeRef> refs = serviceRegistry.getSearchService().selectNodes(
- serviceRegistry.getNodeService().getRootNode(getStoreRef(provider)),
- pathToResolve, null,
- serviceRegistry.getNamespaceService(), false);
- if (refs.size() != 1)
- {
- throw new IllegalStateException("Non-unique path: found : " +
- pathToResolve + " " + refs.size());
- }
- return refs.get(0);
- }
-
- /**
- * Helper method for {@link HomeFolderProvider2.getHomeFolder(NodeRef)}
- * implementations to return a {@link HomeSpaceNodeRef}
- * @param referenceRootNode indicates that a reference to the root node
- * should be returned if the home folder property on the person
- * has not yet been set.
- */
- public HomeSpaceNodeRef getHomeFolder(HomeFolderProvider2 provider, NodeRef person, boolean referenceRootNode)
- {
- HomeSpaceNodeRef homeSpaceNodeRef = null;
- NodeRef existingHomeFolder = DefaultTypeConverter.INSTANCE.convert(
- NodeRef.class, serviceRegistry.getNodeService().getProperty(
- person, ContentModel.PROP_HOMEFOLDER));
- if (existingHomeFolder != null)
- {
- homeSpaceNodeRef = new HomeSpaceNodeRef(existingHomeFolder,
- HomeSpaceNodeRef.Status.VALID);
- }
- else if (referenceRootNode)
- {
- homeSpaceNodeRef = new HomeSpaceNodeRef(getRootPathNodeRef(provider),
- HomeSpaceNodeRef.Status.REFERENCED);
- }
- else
- {
- FileFolderService fileFolderService = serviceRegistry.getFileFolderService();
- List<String> homeFolderPath = provider.getHomeFolderPath(person);
-
- FileInfo fileInfo;
-
- // Test if it already exists
- NodeRef existing = getExisting(provider, fileFolderService, homeFolderPath);
- if (existing != null)
- {
- fileInfo = fileFolderService.getFileInfo(existing);
- }
- else
- {
- fileInfo = createTree(provider, getRootPathNodeRef(provider), homeFolderPath,
- provider.getTemplateNodeRef(), fileFolderService);
- }
- NodeRef homeFolderNodeRef = fileInfo.getNodeRef();
- return new HomeSpaceNodeRef(homeFolderNodeRef, HomeSpaceNodeRef.Status.CREATED);
- }
- return homeSpaceNodeRef;
- }
-
- private NodeRef getExisting(HomeFolderProvider2 provider, FileFolderService fileFolderService,
- List<String> homeFolderPath)
- {
- NodeRef existing;
- try
- {
- FileInfo existingFileInfo = fileFolderService.resolveNamePath(getRootPathNodeRef(provider), homeFolderPath);
- existing = existingFileInfo.getNodeRef();
- }
- catch (FileNotFoundException fnfe)
- {
- existing = null;// home folder noderef doesn't exist yet
- }
- return existing;
- }
-
- /**
- * creates a tree of folder nodes based on the path elements provided.
- */
- private FileInfo createTree(HomeFolderProvider2 provider, NodeRef root,
- List<String> homeFolderPath, NodeRef templateNodeRef,
- FileFolderService fileFolderService)
- {
- NodeRef newParent = createNewParentIfRequired(root, homeFolderPath, fileFolderService);
- String homeFolderName = homeFolderPath.get(homeFolderPath.size()-1);
- FileInfo fileInfo;
- if (templateNodeRef == null)
- {
- fileInfo = fileFolderService.create(
- newParent,
- homeFolderName,
- ContentModel.TYPE_FOLDER);
- }
- else
- {
- try
- {
- fileInfo = fileFolderService.copy(
- templateNodeRef,
- newParent,
- homeFolderName);
- }
- catch (FileNotFoundException e)
- {
- throw new PersonException("Invalid template to create home space");
- }
- }
- return fileInfo;
- }
-
- private NodeRef createNewParentIfRequired(NodeRef root,
- List<String> homeFolderPath, FileFolderService fileFolderService)
- {
- if (homeFolderPath.size() > 1)
- {
- List<String> parentPath = new ArrayList<String>(homeFolderPath);
- parentPath.remove(parentPath.size()-1);
- return FileFolderUtil.makeFolders(fileFolderService, root,
- parentPath, ContentModel.TYPE_FOLDER).getNodeRef();
- }
- else
- {
- return root;
- }
- }
-}
+}
\ No newline at end of file
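The class above is collapsed into a single callback method, so callers now depend only on this interface while PortableHomeFolderManager carries the old implementation. As a hedged illustration of what the split allows -- the class below is hypothetical and not part of this change -- an alternative implementation could be wired into the same Spring injection point, for example to suppress home folder handling in a test context:

    package org.alfresco.repo.security.person;

    import org.alfresco.service.cmr.repository.ChildAssociationRef;

    // Illustrative only: a do-nothing HomeFolderManager satisfying the new interface.
    public class NoopHomeFolderManager implements HomeFolderManager
    {
        public void makeHomeFolder(ChildAssociationRef childAssocRef)
        {
            // Intentionally empty: neither creates nor moves a home folder.
        }
    }
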
diff --git a/source/java/org/alfresco/repo/security/person/HomeFolderProvider2.java b/source/java/org/alfresco/repo/security/person/HomeFolderProvider2.java
index 36900ccf3d..d31981ca48 100644
--- a/source/java/org/alfresco/repo/security/person/HomeFolderProvider2.java
+++ b/source/java/org/alfresco/repo/security/person/HomeFolderProvider2.java
@@ -24,7 +24,7 @@ import org.alfresco.service.cmr.repository.NodeRef;
/**
* Interface for home folder providers. Instances work with the
- * {@link HomeFolderManager} (which performs most of the work)
+ * {@link PortableHomeFolderManager} (which performs most of the work)
* to allow it to create home folders in custom locations.
*
* The home folder may be a simple structure where all users share a root folder (See
@@ -94,8 +94,8 @@ public interface HomeFolderProvider2
PermissionsManager getOnReferencePermissionsManager();
/**
- * Callback from {@link HomeFolderManager} to locate or create a home folder.
- * Implementations normally call {@link HomeFolderManager.getHomeFolder}.
+ * Callback from {@link PortableHomeFolderManager} to locate or create a home folder.
+ * Implementations normally call {@link PortableHomeFolderManager#getHomeFolder}.
*/
HomeSpaceNodeRef getHomeFolder(NodeRef person);
}
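A provider's getHomeFolder callback usually delegates straight back to the manager. A minimal sketch of that delegation, assuming PortableHomeFolderManager keeps the getHomeFolder(provider, person, referenceRootNode) helper that HomeFolderManager used to expose and that the provider has it injected as homeFolderManager:

    // Provider-side callback sketch; 'homeFolderManager' is an assumed injected
    // PortableHomeFolderManager and 'this' is the HomeFolderProvider2 instance.
    public HomeSpaceNodeRef getHomeFolder(NodeRef person)
    {
        // 'false': if the person has no home folder property yet, create the folder
        // rather than returning a reference to the provider's root node.
        return homeFolderManager.getHomeFolder(this, person, false);
    }
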
diff --git a/source/java/org/alfresco/repo/security/person/HomeFolderProviderSynchronizer.java b/source/java/org/alfresco/repo/security/person/HomeFolderProviderSynchronizer.java
index 9d7dfd6e07..c5cff08223 100644
--- a/source/java/org/alfresco/repo/security/person/HomeFolderProviderSynchronizer.java
+++ b/source/java/org/alfresco/repo/security/person/HomeFolderProviderSynchronizer.java
@@ -26,7 +26,6 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import java.util.Properties;
import java.util.Set;
import org.alfresco.model.ContentModel;
@@ -102,30 +101,28 @@ public class HomeFolderProviderSynchronizer extends AbstractLifecycleBean
{
private static final Log logger = LogFactory.getLog(HomeFolderProviderSynchronizer.class);
- private static final String ENABLED_PROPERTY_NAME = "home_folder_provider_synchronizer.enabled";
- private static final String OVERRIDE_PROPERTY_NAME = "home_folder_provider_synchronizer.override_provider";
- private static final String KEEP_EMPTY_PARENTS_PROPERTY_NAME = "home_folder_provider_synchronizer.keep_empty_parents";
-
private static final String GUEST_HOME_FOLDER_PROVIDER = "guestHomeFolderProvider";
private static final String BOOTSTRAP_HOME_FOLDER_PROVIDER = "bootstrapHomeFolderProvider";
- private final Properties properties;
private final TransactionService transactionService;
private final AuthorityService authorityService;
private final PersonService personService;
private final FileFolderService fileFolderService;
private final NodeService nodeService;
- private final HomeFolderManager homeFolderManager;
+ private final PortableHomeFolderManager homeFolderManager;
private final TenantAdminService tenantAdminService;
+
+ private boolean enabled;
+ private String overrideHomeFolderProviderName;
+ private boolean keepEmptyParents;
- public HomeFolderProviderSynchronizer(Properties properties,
+ public HomeFolderProviderSynchronizer(
TransactionService transactionService,
AuthorityService authorityService, PersonService personService,
FileFolderService fileFolderService, NodeService nodeService,
- HomeFolderManager homeFolderManager,
+ PortableHomeFolderManager homeFolderManager,
TenantAdminService tenantAdminService)
{
- this.properties = properties;
this.transactionService = transactionService;
this.authorityService = authorityService;
this.personService = personService;
@@ -135,19 +132,34 @@ public class HomeFolderProviderSynchronizer extends AbstractLifecycleBean
this.tenantAdminService = tenantAdminService;
}
+ public void setEnabled(String enabled)
+ {
+ this.enabled = "true".equalsIgnoreCase(enabled);
+ }
+
private boolean enabled()
{
- return "true".equalsIgnoreCase(properties.getProperty(ENABLED_PROPERTY_NAME));
+ return enabled;
+ }
+
+ public void setOverrideHomeFolderProviderName(String overrideHomeFolderProviderName)
+ {
+ this.overrideHomeFolderProviderName = overrideHomeFolderProviderName;
}
private String getOverrideHomeFolderProviderName()
{
- return properties.getProperty(OVERRIDE_PROPERTY_NAME);
+ return overrideHomeFolderProviderName;
+ }
+
+ public void setKeepEmptyParents(String keepEmptyParents)
+ {
+ this.keepEmptyParents = "true".equalsIgnoreCase(keepEmptyParents);
}
private boolean keepEmptyParents()
{
- return "true".equalsIgnoreCase(properties.getProperty(KEEP_EMPTY_PARENTS_PROPERTY_NAME));
+ return keepEmptyParents;
}
@Override
@@ -865,7 +877,10 @@ public class HomeFolderProviderSynchronizer extends AbstractLifecycleBean
public HomeFolderHandler(NodeRef person, String overrideProviderName)
{
this.person = person;
- this.overrideProviderName = overrideProviderName;
+ this.overrideProviderName =
+ (overrideProviderName == null || overrideProviderName.trim().isEmpty())
+ ? null
+ : overrideProviderName;
}
public void doWork()
diff --git a/source/java/org/alfresco/repo/security/person/HomeFolderProviderSynchronizerTest.java b/source/java/org/alfresco/repo/security/person/HomeFolderProviderSynchronizerTest.java
index 5d54406543..498295271d 100644
--- a/source/java/org/alfresco/repo/security/person/HomeFolderProviderSynchronizerTest.java
+++ b/source/java/org/alfresco/repo/security/person/HomeFolderProviderSynchronizerTest.java
@@ -77,14 +77,13 @@ public class HomeFolderProviderSynchronizerTest
private static ServiceRegistry serviceRegistry;
private static TransactionService transactionService;
private static FileFolderService fileFolderService;
- private static PersonService personService;
+ private static PersonServiceImpl personService;
private static NodeService nodeService;
private static ContentService contentService;
private static AuthorityService authorityService;
private static TenantAdminService tenantAdminService;
private static TenantService tenantService;
- private static HomeFolderManager homeFolderManager;
- private static Properties properties;
+ private static PortableHomeFolderManager homeFolderManager;
private static RegexHomeFolderProvider largeHomeFolderProvider;
private static String largeHomeFolderProviderName;
private static RegexHomeFolderProvider testHomeFolderProvider;
@@ -104,18 +103,17 @@ public class HomeFolderProviderSynchronizerTest
serviceRegistry = (ServiceRegistry) applicationContext.getBean("ServiceRegistry");
transactionService = (TransactionService) applicationContext.getBean("transactionService");
fileFolderService = (FileFolderService) applicationContext.getBean("fileFolderService");
- personService = (PersonService) applicationContext.getBean("personService");
+ personService = (PersonServiceImpl) applicationContext.getBean("personService");
nodeService = (NodeService) applicationContext.getBean("nodeService");
contentService = (ContentService) applicationContext.getBean("contentService");
authorityService = (AuthorityService) applicationContext.getBean("authorityService");
tenantAdminService = (TenantAdminService) applicationContext.getBean("tenantAdminService");
tenantService = (TenantService) applicationContext.getBean("tenantService");
- homeFolderManager = (HomeFolderManager) applicationContext.getBean("homeFolderManager");
+ homeFolderManager = (PortableHomeFolderManager) applicationContext.getBean("homeFolderManager");
largeHomeFolderProvider = (RegexHomeFolderProvider) applicationContext.getBean("largeHomeFolderProvider");
largeHomeFolderProviderName = largeHomeFolderProvider.getName();
storeUrl = largeHomeFolderProvider.getStoreUrl();
origRootPath = largeHomeFolderProvider.getRootPath();
- properties = (Properties) applicationContext.getBean("global-properties");
personService.setCreateMissingPeople(true);
@@ -144,7 +142,7 @@ public class HomeFolderProviderSynchronizerTest
homeFolderManager.addProvider(testHomeFolderProvider);
homeFolderProviderSynchronizer = new HomeFolderProviderSynchronizer(
- properties, transactionService, authorityService,
+ transactionService, authorityService,
personService, fileFolderService, nodeService,
homeFolderManager, tenantAdminService);
}
@@ -152,9 +150,9 @@ public class HomeFolderProviderSynchronizerTest
@Before
public void setUp() throws Exception
{
- properties.setProperty("home_folder_provider_synchronizer.enabled", "true");
- properties.remove("home_folder_provider_synchronizer.override_provider");
- properties.remove("home_folder_provider_synchronizer.keep_empty_parents");
+ homeFolderProviderSynchronizer.setEnabled("true");
+ homeFolderProviderSynchronizer.setOverrideHomeFolderProviderName(null);
+ homeFolderProviderSynchronizer.setKeepEmptyParents("false");
largeHomeFolderProvider.setPattern("^(..)");
testHomeFolderProvider.setRootPath(origRootPath);
@@ -323,7 +321,7 @@ public class HomeFolderProviderSynchronizerTest
properties.put(ContentModel.PROP_EMAIL, emailAddress);
properties.put(ContentModel.PROP_HOME_FOLDER_PROVIDER, testHomeFolderProviderName);
properties.put(PROP_PARENT_PATH, parentPath);
- homeFolderManager.setEnableHomeFolderCreationAsPeopleAreCreated(createHomeDirectory);
+ personService.setHomeFolderCreationEager(createHomeDirectory);
NodeRef person = personService.createPerson(properties);
assertNotNull("The person nodeRef for "+domainUsername+" should have been created", person);
NodeRef homeFolder = DefaultTypeConverter.INSTANCE.convert(
@@ -598,7 +596,7 @@ public class HomeFolderProviderSynchronizerTest
public void testNotEnabled() throws Exception
{
createUser("", "fred");
- properties.remove("home_folder_provider_synchronizer.enabled");
+ homeFolderProviderSynchronizer.setEnabled("false");
moveUserHomeFolders();
@@ -670,7 +668,7 @@ public class HomeFolderProviderSynchronizerTest
public void testKeepEmptyParents() throws Exception
{
createUser("a/bb/ccc", "peter");
- properties.put("home_folder_provider_synchronizer.keep_empty_parents", "true");
+ homeFolderProviderSynchronizer.setKeepEmptyParents("true");
moveUserHomeFolders();
@@ -746,7 +744,7 @@ public class HomeFolderProviderSynchronizerTest
createFolder("Temporary3");
// Don't delete the temporary folder
- properties.put("home_folder_provider_synchronizer.keep_empty_parents", "true");
+ homeFolderProviderSynchronizer.setKeepEmptyParents("true");
moveUserHomeFolders();
@@ -837,7 +835,7 @@ public class HomeFolderProviderSynchronizerTest
moveUserHomeFolders();
assertHomeFolderLocation("fred", "fr/fred");
- properties.put("home_folder_provider_synchronizer.override_provider",
+ homeFolderProviderSynchronizer.setOverrideHomeFolderProviderName(
testHomeFolderProviderName);
moveUserHomeFolders();
@@ -851,7 +849,7 @@ public class HomeFolderProviderSynchronizerTest
public void testNoOriginalProvider() throws Exception
{
createUser("a/b/c", "fred", null, true);
- properties.put("home_folder_provider_synchronizer.override_provider",
+ homeFolderProviderSynchronizer.setOverrideHomeFolderProviderName(
largeHomeFolderProviderName);
moveUserHomeFolders();
@@ -885,7 +883,7 @@ public class HomeFolderProviderSynchronizerTest
createUser("a/b/c", "fred");
- properties.put("home_folder_provider_synchronizer.override_provider", name);
+ homeFolderProviderSynchronizer.setOverrideHomeFolderProviderName(name);
moveUserHomeFolders();
assertHomeFolderLocation("fred", "a/b/c/fred");
@@ -911,7 +909,7 @@ public class HomeFolderProviderSynchronizerTest
createUser("a/b/c", "fred");
- properties.put("home_folder_provider_synchronizer.override_provider", name);
+ homeFolderProviderSynchronizer.setOverrideHomeFolderProviderName(name);
moveUserHomeFolders();
assertHomeFolderLocation("fred", "fred");
diff --git a/source/java/org/alfresco/repo/security/person/PersonServiceImpl.java b/source/java/org/alfresco/repo/security/person/PersonServiceImpl.java
index 105681900f..21989ac568 100644
--- a/source/java/org/alfresco/repo/security/person/PersonServiceImpl.java
+++ b/source/java/org/alfresco/repo/security/person/PersonServiceImpl.java
@@ -49,7 +49,6 @@ import org.alfresco.repo.node.getchildren.FilterPropString;
import org.alfresco.repo.node.getchildren.GetChildrenCannedQuery;
import org.alfresco.repo.node.getchildren.GetChildrenCannedQueryFactory;
import org.alfresco.repo.node.getchildren.FilterPropString.FilterTypeString;
-import org.alfresco.repo.policy.BehaviourFilter;
import org.alfresco.repo.policy.JavaBehaviour;
import org.alfresco.repo.policy.PolicyComponent;
import org.alfresco.repo.search.SearcherException;
@@ -135,7 +134,6 @@ public class PersonServiceImpl extends TransactionListenerAdapter implements Per
private NamespacePrefixResolver namespacePrefixResolver;
private HomeFolderManager homeFolderManager;
private PolicyComponent policyComponent;
- private BehaviourFilter policyBehaviourFilter;
private AclDAO aclDao;
private PermissionsManager permissionsManager;
private RepoAdminService repoAdminService;
@@ -161,7 +159,9 @@ public class PersonServiceImpl extends TransactionListenerAdapter implements Per
private JavaBehaviour beforeCreateNodeValidationBehaviour;
private JavaBehaviour beforeDeleteNodeValidationBehaviour;
-
+
+ private boolean homeFolderCreationEager;
+
static
{
Set<QName> props = new HashSet<QName>();
@@ -305,11 +305,6 @@ public class PersonServiceImpl extends TransactionListenerAdapter implements Per
this.policyComponent = policyComponent;
}
- public void setPolicyBehaviourFilter(BehaviourFilter policyBehaviourFilter)
- {
- this.policyBehaviourFilter = policyBehaviourFilter;
- }
-
public void setStoreUrl(String storeUrl)
{
this.storeRef = new StoreRef(storeUrl);
@@ -350,6 +345,15 @@ public class PersonServiceImpl extends TransactionListenerAdapter implements Per
this.homeFolderManager = homeFolderManager;
}
+ /**
+ * Indicates whether home folders should be created eagerly when the person
+ * is created, or lazily when first accessed.
+ */
+ public void setHomeFolderCreationEager(boolean homeFolderCreationEager)
+ {
+ this.homeFolderCreationEager = homeFolderCreationEager;
+ }
+
public void setAclDAO(AclDAO aclDao)
{
this.aclDao = aclDao;
@@ -809,16 +813,27 @@ public class PersonServiceImpl extends TransactionListenerAdapter implements Per
{
public Object execute() throws Throwable
{
- homeFolderManager.makeHomeFolder(ref);
+ makeHomeFolderAsSystem(ref);
return null;
}
}, transactionService.isReadOnly(), transactionService.isReadOnly() ? false : AlfrescoTransactionSupport.getTransactionReadState() == TxnReadState.TXN_READ_ONLY);
- //homeFolder = DefaultTypeConverter.INSTANCE.convert(NodeRef.class, nodeService.getProperty(person, ContentModel.PROP_HOMEFOLDER));
- //assert(homeFolder != null);
}
}
}
+ private void makeHomeFolderAsSystem(final ChildAssociationRef childAssocRef)
+ {
+ AuthenticationUtil.runAs(new AuthenticationUtil.RunAsWork