From fdc8f6f3314fb84c799b4800fe2b6682ed74fa44 Mon Sep 17 00:00:00 2001 From: Derek Hulley Date: Fri, 12 Mar 2010 19:11:12 +0000 Subject: [PATCH] Merged BRANCHES/V3.2 to HEAD: 18363: WCM clustering - ETHREEOH-3962 (duplicate root node entry) 19091: Fix Part 1 ALF-726: v3.1.x Content Cleaner Job needs to be ported to v3.2 19159: Fixed ALF-726: Migrate pre-3.2 content URLs to new format and pick up tag existing orphaned content 19169: Fix fallout from 19159 for ALF-726: Migrate pre-3.2 content URLs to new format and pick up tag existing orphaned content 19262: ALF-726 Multithreading for content URL conversion git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@19267 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261 --- config/alfresco/content-services-context.xml | 1 - config/alfresco/dao/dao-context.xml | 1 + .../alfresco/ibatis/alfresco-SqlMapConfig.xml | 1 + .../content-common-SqlMap.xml | 14 +- .../patch-common-SqlMap.xml | 106 ++ .../messages/patch-service.properties | 23 +- config/alfresco/model/systemModel.xml | 4 +- .../alfresco/patch/patch-services-context.xml | 42 +- config/alfresco/repository.properties | 6 + config/alfresco/scheduled-jobs-context.xml | 24 + config/alfresco/version.properties | 2 +- .../repo/admin/patch/AbstractPatch.java | 1083 +++++++++-------- .../patch/impl/AuthorityMigrationPatch.java | 52 +- .../patch/impl/ContentUrlConverterPatch.java | 687 +++++++++++ .../patch/impl/FixNameCrcValuesPatch.java | 47 +- .../repo/admin/registry/RegistryKey.java | 41 + .../org/alfresco/repo/avm/OrphanReaper.java | 18 +- .../alfresco/repo/avm/PlainFileNodeImpl.java | 35 +- .../alfresco/repo/batch/BatchProcessor.java | 211 ++-- .../content/cleanup/ContentStoreCleaner.java | 21 +- .../RepositoryDescriptorDAOImpl.java | 21 +- .../alfresco/repo/domain/ContentDataId.java | 57 + .../repo/domain/NodePropertyValue.java | 54 +- .../AbstractContentDataDAOImpl.java | 649 +++++----- .../domain/contentdata/ContentDataDAO.java | 138 ++- 
.../ibatis/ContentDataDAOImpl.java | 58 +- .../domain/patch/AbstractPatchDAOImpl.java | 110 +- .../alfresco/repo/domain/patch/PatchDAO.java | 21 +- .../domain/patch/ibatis/PatchDAOImpl.java | 74 ++ .../HibernateNodeDaoServiceImpl.java | 25 +- .../index/AVMFullIndexRecoveryComponent.java | 12 +- .../ChainingUserRegistrySynchronizer.java | 123 +- .../alfresco/util/TraceableThreadFactory.java | 3 + 33 files changed, 2589 insertions(+), 1175 deletions(-) create mode 100644 config/alfresco/ibatis/org.hibernate.dialect.Dialect/patch-common-SqlMap.xml create mode 100644 source/java/org/alfresco/repo/admin/patch/impl/ContentUrlConverterPatch.java create mode 100644 source/java/org/alfresco/repo/domain/ContentDataId.java diff --git a/config/alfresco/content-services-context.xml b/config/alfresco/content-services-context.xml index 6224f002c9..905158d40a 100644 --- a/config/alfresco/content-services-context.xml +++ b/config/alfresco/content-services-context.xml @@ -37,7 +37,6 @@ - diff --git a/config/alfresco/dao/dao-context.xml b/config/alfresco/dao/dao-context.xml index d32e63fb5e..387785b335 100644 --- a/config/alfresco/dao/dao-context.xml +++ b/config/alfresco/dao/dao-context.xml @@ -17,6 +17,7 @@ + diff --git a/config/alfresco/ibatis/alfresco-SqlMapConfig.xml b/config/alfresco/ibatis/alfresco-SqlMapConfig.xml index 16f7a4e0e4..bb727c18c7 100644 --- a/config/alfresco/ibatis/alfresco-SqlMapConfig.xml +++ b/config/alfresco/ibatis/alfresco-SqlMapConfig.xml @@ -11,6 +11,7 @@ + diff --git a/config/alfresco/ibatis/org.hibernate.dialect.Dialect/content-common-SqlMap.xml b/config/alfresco/ibatis/org.hibernate.dialect.Dialect/content-common-SqlMap.xml index 18a322781b..b7a5bd041d 100644 --- a/config/alfresco/ibatis/org.hibernate.dialect.Dialect/content-common-SqlMap.xml +++ b/config/alfresco/ibatis/org.hibernate.dialect.Dialect/content-common-SqlMap.xml @@ -15,6 +15,7 @@ + @@ -191,7 +192,7 @@ from alf_content_url cu where - cu.orphan_time <= #orphanTime# + cu.orphan_time <= 
#orphanTime# and cu.orphan_time is not null ]]> @@ -224,7 +225,7 @@ from alf_content_url where - orphan_time <= #orphanTime# + orphan_time <= #orphanTime# and orphan_time is not null ]]> @@ -247,16 +248,15 @@ - select np.long_value as id from alf_node_properties np where - np.node_id = ? and - np.qname_id = ? and - np.actual_type_n = 3 and - np.persisted_type_n = 3 + np.node_id = #idOne# and + np.qname_id in #ids[]# and + (np.actual_type_n = 3 or np.actual_type_n = 21) diff --git a/config/alfresco/ibatis/org.hibernate.dialect.Dialect/patch-common-SqlMap.xml b/config/alfresco/ibatis/org.hibernate.dialect.Dialect/patch-common-SqlMap.xml new file mode 100644 index 0000000000..7833154f97 --- /dev/null +++ b/config/alfresco/ibatis/org.hibernate.dialect.Dialect/patch-common-SqlMap.xml @@ -0,0 +1,106 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + update + alf_node_properties + set + actual_type_n = 21, + persisted_type_n = 3, + long_value = ?, + string_value = null + where + node_id = ? + and qname_id = ? + and list_index = ? + and locale_id = ? + + + \ No newline at end of file diff --git a/config/alfresco/messages/patch-service.properties b/config/alfresco/messages/patch-service.properties index 3757e92f38..5bfc5c4c94 100644 --- a/config/alfresco/messages/patch-service.properties +++ b/config/alfresco/messages/patch-service.properties @@ -3,7 +3,7 @@ patch.service.preceeded_by_alternative=Preceeded by alternative patch ''{0}''. patch.service.not_relevant=Not relevant to schema {0} patch.executer.checking=Checking for patches to apply ... patch.service.applying_patch=\tApplying patch ''{0}'' ({1}). -patch.progress=\t\tPatch {0}% complete, estimated complete at {1}. +patch.progress=\t\tPatch {0} {1}% complete, estimated complete at {2}. patch.validation.failed=Validation of patch ''{0}'' failed. 
Patch is applicable to a newer schema than the schema of this build ({1}).\nfixesToSchema: {2}\ntargetSchema: {3}.\nIf this patch should always be run once on every installation, please ensure that the ''fixesToSchema'' value is set to '''${version.schema}'''. patch.executer.no_patches_required=No patches were required. patch.executer.system_readonly=Patches cannot be applied to a read-only system. Possible incompatibilities may exist between the application code and the existing data. @@ -11,6 +11,7 @@ patch.executer.not_executed =\n=== Recorded patch (not executed) === \nID: {0}\n patch.executer.executed =\n=== Applied patch === \nID: {0}\nRESULT: \n{1}\n===================================== patch.executer.failed =\n=== Failed to apply patch === \nID: {0}\nRESULT: \n{1}\n===================================== patch.noLongerSupportedPatch.err.use_incremental_upgrade = \nPatch ''{0}'' was last supported on version {1}.\n Please follow an incremental upgrade using version {2}. + # General patch messages patch.genericBootstrap.result.exists=Bootstrap location already exists: {0} @@ -19,6 +20,8 @@ patch.genericBootstrap.err.multiple_found=Multiple nodes found: {0} patch.general.property_not_set=Patch property ''{0}'' has not been set on this patch: {1} +patch.QNamePatch.result=Successfully updated the ''{0}'' QName to ''{1}''. + # Individual patch messages patch.noOpPatch.description=A placeholder patch; usually marks a superceded patch. @@ -301,4 +304,20 @@ patch.redeployNominatedInvitationProcessWithPropsForShare.description=Redeploy n patch.redeployNominatedInvitationProcessWithPropsForShare.result=Nominated invitation workflow redeployed patch.thumbnailsAssocQName.description=Update the 'cm:thumbnails' association QName to 'rn:rendition'. -patch.QNamePatch.result=Successfully updated the ''{0}'' QName to ''{1}''. + +patch.convertContentUrls.description=Converts pre-3.2 content URLs to use the alf_content_data table. 
The conversion work can also be done on a schedule; please contact Alfresco Support for further details. +patch.convertContentUrls.bypassingPatch=Content URL conversion was NOT performed by this patch. Activate the scheduled job 'contentUrlConverterTrigger'. +patch.convertContentUrls.start=Content URL conversion progress: +patch.convertContentUrls.inProgress=Content URL conversion increment completed. Awaiting next scheduled call... +patch.convertContentUrls.done=Content URL conversion completed. +patch.convertContentUrls.adm.start=\tProcessing ADM Content URLs. +patch.convertContentUrls.adm.done=\tFinished processing ADM nodes up to ID {0}. +patch.convertContentUrls.avm.start=\tProcessing AVM Content URLs. +patch.convertContentUrls.avm.done=\tFinished processing AVM nodes up to ID {0}. +patch.convertContentUrls.store.start=\tReading content URLs from store {0}. +patch.convertContentUrls.store.readOnly=\tNo content URLs will be marked for deletion. The content store is read-only. +patch.convertContentUrls.store.pending=\tContent URLs will be marked for deletion once the URL conversion process is complete. +patch.convertContentUrls.store.noSupport=\tNo content URLs will be marked for deletion. The store does not support URL enumeration. +patch.convertContentUrls.store.progress=\t\tProcessed {0} content URLs from store. +patch.convertContentUrls.store.scheduled=\tScheduled {0} content URLs for deletion from store: {1} +patch.convertContentUrls.store.done=This job is complete. Deactivate the scheduled job 'contentUrlConverterTrigger'. 
diff --git a/config/alfresco/model/systemModel.xml b/config/alfresco/model/systemModel.xml index 3e8b6b89d6..41f99a7d6c 100644 --- a/config/alfresco/model/systemModel.xml +++ b/config/alfresco/model/systemModel.xml @@ -56,8 +56,8 @@ 0 - d:any - true + d:content + false d:content diff --git a/config/alfresco/patch/patch-services-context.xml b/config/alfresco/patch/patch-services-context.xml index be14ae1154..63c770e875 100644 --- a/config/alfresco/patch/patch-services-context.xml +++ b/config/alfresco/patch/patch-services-context.xml @@ -1957,6 +1957,7 @@ 0 3006 3007 + false @@ -1970,6 +1971,9 @@ + + + @@ -2100,5 +2104,41 @@ - + + + patch.convertContentUrls + patch.convertContentUrls.description + 0 + 3499 + 3500 + false + + + + + + + + + + + + + + + + + + + + ${system.content.contentUrlConverter.threadCount} + + + ${system.content.contentUrlConverter.batchSize} + + + ${system.content.contentUrlConverter.runAsScheduledJob} + + + \ No newline at end of file diff --git a/config/alfresco/repository.properties b/config/alfresco/repository.properties index 2b8e5d53fe..3a25f24dd6 100644 --- a/config/alfresco/repository.properties +++ b/config/alfresco/repository.properties @@ -142,6 +142,12 @@ system.content.eagerOrphanCleanup=false system.content.orphanProtectDays=14 # The CRON expression to trigger the deletion of resources associated with orphaned content. system.content.orphanCleanup.cronExpression=0 0 4 * * ? +# The CRON expression to trigger content URL conversion. This process is not intesive and can +# be triggered on a live system. Similarly, it can be triggered using JMX on a dedicated machine. +system.content.contentUrlConverter.cronExpression=* * * * * ? 
2099 +system.content.contentUrlConverter.threadCount=2 +system.content.contentUrlConverter.batchSize=500 +system.content.contentUrlConverter.runAsScheduledJob=false # #################### # # Lucene configuration # diff --git a/config/alfresco/scheduled-jobs-context.xml b/config/alfresco/scheduled-jobs-context.xml index 64283fa76a..0477f672c5 100644 --- a/config/alfresco/scheduled-jobs-context.xml +++ b/config/alfresco/scheduled-jobs-context.xml @@ -109,6 +109,30 @@ + + + org.alfresco.repo.admin.patch.impl.ContentUrlConverterPatch$ContentUrlConverterJob + + + + + + + + + + + + + + + + + + ${system.content.contentUrlConverter.cronExpression} + + + org.alfresco.repo.node.cleanup.NodeCleanupJob diff --git a/config/alfresco/version.properties b/config/alfresco/version.properties index b548957d3a..1ada6cea2b 100644 --- a/config/alfresco/version.properties +++ b/config/alfresco/version.properties @@ -19,4 +19,4 @@ version.build=@build-number@ # Schema number -version.schema=4005 +version.schema=4006 diff --git a/source/java/org/alfresco/repo/admin/patch/AbstractPatch.java b/source/java/org/alfresco/repo/admin/patch/AbstractPatch.java index 29318d55ee..520a7880ad 100644 --- a/source/java/org/alfresco/repo/admin/patch/AbstractPatch.java +++ b/source/java/org/alfresco/repo/admin/patch/AbstractPatch.java @@ -1,4 +1,4 @@ -/* +/* * Copyright (C) 2005-2010 Alfresco Software Limited. * * This file is part of Alfresco @@ -14,245 +14,256 @@ * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . 
- */ -package org.alfresco.repo.admin.patch; - -import java.io.PrintWriter; -import java.io.StringWriter; -import java.util.Collections; -import java.util.Date; -import java.util.List; - -import org.alfresco.error.AlfrescoRuntimeException; -import org.springframework.extensions.surf.util.I18NUtil; -import org.alfresco.repo.node.integrity.IntegrityChecker; -import org.alfresco.repo.security.authentication.AuthenticationContext; -import org.alfresco.repo.security.authentication.AuthenticationUtil; -import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork; -import org.alfresco.repo.tenant.Tenant; -import org.alfresco.repo.tenant.TenantAdminService; -import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; -import org.alfresco.service.cmr.admin.PatchException; -import org.alfresco.service.cmr.repository.NodeService; -import org.alfresco.service.cmr.search.SearchService; -import org.alfresco.service.namespace.NamespaceService; -import org.alfresco.service.transaction.TransactionService; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -/** - * Base implementation of the patch. This class ensures that the patch is thread- and transaction-safe. - * - * @author Derek Hulley - */ -public abstract class AbstractPatch implements Patch -{ - /** - * I18N message when properties not set. - *
    - *
  • {0} = property name
  • - *
  • {1} = patch instance
  • - *
- */ - public static final String ERR_PROPERTY_NOT_SET = "patch.general.property_not_set"; - private static final String MSG_PROGRESS = "patch.progress"; - - private static final long RANGE_10 = 1000 * 60 * 90; - private static final long RANGE_5 = 1000 * 60 * 60 * 4; - private static final long RANGE_2 = 1000 * 60 * 90 * 10; - - private static Log logger = LogFactory.getLog(AbstractPatch.class); - private static Log progress_logger = LogFactory.getLog(PatchExecuter.class); - - private String id; - private int fixesFromSchema; - private int fixesToSchema; - private int targetSchema; + * along with Alfresco. If not, see . + */ +package org.alfresco.repo.admin.patch; + +import java.io.PrintWriter; +import java.io.StringWriter; +import java.util.Collections; +import java.util.Date; +import java.util.List; + +import org.alfresco.error.AlfrescoRuntimeException; +import org.springframework.extensions.surf.util.I18NUtil; +import org.alfresco.repo.node.integrity.IntegrityChecker; +import org.alfresco.repo.security.authentication.AuthenticationContext; +import org.alfresco.repo.security.authentication.AuthenticationUtil; +import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork; +import org.alfresco.repo.tenant.Tenant; +import org.alfresco.repo.tenant.TenantAdminService; +import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.service.cmr.admin.PatchException; +import org.alfresco.service.cmr.repository.NodeService; +import org.alfresco.service.cmr.search.SearchService; +import org.alfresco.service.namespace.NamespaceService; +import org.alfresco.service.transaction.TransactionService; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.ApplicationEventPublisherAware; + +/** + * Base implementation of the patch. 
This class ensures that the patch is thread- and transaction-safe. + * + * @author Derek Hulley + */ +public abstract class AbstractPatch implements Patch, ApplicationEventPublisherAware +{ + /** + * I18N message when properties not set. + *
    + *
  • {0} = property name
  • + *
  • {1} = patch instance
  • + *
+ */ + public static final String ERR_PROPERTY_NOT_SET = "patch.general.property_not_set"; + private static final String MSG_PROGRESS = "patch.progress"; + + private static final long RANGE_10 = 1000 * 60 * 90; + private static final long RANGE_5 = 1000 * 60 * 60 * 4; + private static final long RANGE_2 = 1000 * 60 * 90 * 10; + + private static Log logger = LogFactory.getLog(AbstractPatch.class); + private static Log progress_logger = LogFactory.getLog(PatchExecuter.class); + + private String id; + private int fixesFromSchema; + private int fixesToSchema; + private int targetSchema; private boolean force; - private String description; - /** a list of patches that this one depends on */ - private List dependsOn; - /** a list of patches that, if already present, mean that this one should be ignored */ - private List alternatives; - /** flag indicating if the patch was successfully applied */ - private boolean applied; - private boolean applyToTenants; - /** track completion * */ - int percentComplete = 0; - /** start time * */ - long startTime; - - /** the service to register ourselves with */ - private PatchService patchService; - /** used to ensure a unique transaction per execution */ - protected TransactionService transactionService; - protected NamespaceService namespaceService; - protected NodeService nodeService; - protected SearchService searchService; - protected AuthenticationContext authenticationContext; - protected TenantAdminService tenantAdminService; - - - public AbstractPatch() - { - this.fixesFromSchema = -1; - this.fixesToSchema = -1; - this.targetSchema = -1; + private String description; + /** a list of patches that this one depends on */ + private List dependsOn; + /** a list of patches that, if already present, mean that this one should be ignored */ + private List alternatives; + /** flag indicating if the patch was successfully applied */ + private boolean applied; + private boolean applyToTenants; + /** track completion * */ + int 
percentComplete = 0; + /** start time * */ + long startTime; + + /** the service to register ourselves with */ + private PatchService patchService; + /** used to ensure a unique transaction per execution */ + protected TransactionService transactionService; + protected NamespaceService namespaceService; + protected NodeService nodeService; + protected SearchService searchService; + protected AuthenticationContext authenticationContext; + protected TenantAdminService tenantAdminService; + /** Publishes batch event notifications for JMX viewing */ + protected ApplicationEventPublisher applicationEventPublisher; + + public AbstractPatch() + { + this.fixesFromSchema = -1; + this.fixesToSchema = -1; + this.targetSchema = -1; this.force = false; - this.applied = false; - this.applyToTenants = true; // by default, apply to each tenant, if tenant service is enabled - this.dependsOn = Collections.emptyList(); - this.alternatives = Collections.emptyList(); - } - - @Override - public String toString() - { - StringBuilder sb = new StringBuilder(256); - sb.append("Patch") - .append("[ id=").append(id) - .append(", description=").append(description) - .append(", fixesFromSchema=").append(fixesFromSchema) - .append(", fixesToSchema=").append(fixesToSchema) - .append(", targetSchema=").append(targetSchema) - .append("]"); - return sb.toString(); - } - - /** - * Set the service that this patch will register with for execution. 
- */ - public void setPatchService(PatchService patchService) - { - this.patchService = patchService; - } - - /** - * Set the transaction provider so that each execution can be performed within a transaction - */ - public void setTransactionService(TransactionService transactionService) - { - this.transactionService = transactionService; - } - - public void setNamespaceService(NamespaceService namespaceService) - { - this.namespaceService = namespaceService; - } - - public void setNodeService(NodeService nodeService) - { - this.nodeService = nodeService; - } - - public void setSearchService(SearchService searchService) - { - this.searchService = searchService; - } - - public void setAuthenticationContext(AuthenticationContext authenticationContext) - { - this.authenticationContext = authenticationContext; - } - - public void setTenantAdminService(TenantAdminService tenantAdminService) - { - this.tenantAdminService = tenantAdminService; - } - - /** - * This ensures that this bean gets registered with the appropriate {@link PatchService service}. - */ - public void init() - { - if (patchService == null) - { - throw new AlfrescoRuntimeException("Mandatory property not set: patchService"); - } - patchService.registerPatch(this); - } - - public String getId() - { - return id; - } - - /** - * @param id - * the unique ID of the patch. This dictates the order in which patches are applied. - */ - public void setId(String id) - { - this.id = id; - } - - public int getFixesFromSchema() - { - return fixesFromSchema; - } - - /** - * Set the smallest schema number that this patch may be applied to. 
- * - * @param version - * a schema number not smaller than 0 - */ - public void setFixesFromSchema(int version) - { - if (version < 0) - { - throw new IllegalArgumentException("The 'fixesFromSchema' property may not be less than 0"); - } - this.fixesFromSchema = version; - // auto-adjust the to version - if (fixesToSchema < fixesFromSchema) - { - setFixesToSchema(this.fixesFromSchema); - } - } - - public int getFixesToSchema() - { - return fixesToSchema; - } - - /** - * Set the largest schema version number that this patch may be applied to. - * - * @param version - * a schema version number not smaller than the {@link #setFixesFromSchema(int) from version} number. - */ - public void setFixesToSchema(int version) - { - if (version < fixesFromSchema) - { - throw new IllegalArgumentException("'fixesToSchema' must be greater than or equal to 'fixesFromSchema'"); - } - this.fixesToSchema = version; - } - - public int getTargetSchema() - { - return targetSchema; - } - - /** - * Set the schema version that this patch attempts to take the existing schema to. This is for informational - * purposes only, acting as an indicator of intention rather than having any specific effect. 
- * - * @param version - * a schema version number that must be greater than the {@link #fixesToSchema max fix schema number} - */ - public void setTargetSchema(int version) - { - if (version <= fixesToSchema) - { - throw new IllegalArgumentException("'targetSchema' must be greater than 'fixesToSchema'"); - } - this.targetSchema = version; - } - + this.applied = false; + this.applyToTenants = true; // by default, apply to each tenant, if tenant service is enabled + this.dependsOn = Collections.emptyList(); + this.alternatives = Collections.emptyList(); + } + + @Override + public String toString() + { + StringBuilder sb = new StringBuilder(256); + sb.append("Patch") + .append("[ id=").append(id) + .append(", description=").append(description) + .append(", fixesFromSchema=").append(fixesFromSchema) + .append(", fixesToSchema=").append(fixesToSchema) + .append(", targetSchema=").append(targetSchema) + .append("]"); + return sb.toString(); + } + + /** + * Set the service that this patch will register with for execution. 
+ */ + public void setPatchService(PatchService patchService) + { + this.patchService = patchService; + } + + /** + * Set the transaction provider so that each execution can be performed within a transaction + */ + public void setTransactionService(TransactionService transactionService) + { + this.transactionService = transactionService; + } + + public void setNamespaceService(NamespaceService namespaceService) + { + this.namespaceService = namespaceService; + } + + public void setNodeService(NodeService nodeService) + { + this.nodeService = nodeService; + } + + public void setSearchService(SearchService searchService) + { + this.searchService = searchService; + } + + public void setAuthenticationContext(AuthenticationContext authenticationContext) + { + this.authenticationContext = authenticationContext; + } + + public void setTenantAdminService(TenantAdminService tenantAdminService) + { + this.tenantAdminService = tenantAdminService; + } + + /** + * Set automatically + */ + public void setApplicationEventPublisher(ApplicationEventPublisher applicationEventPublisher) + { + this.applicationEventPublisher = applicationEventPublisher; + } + + /** + * This ensures that this bean gets registered with the appropriate {@link PatchService service}. + */ + public void init() + { + if (patchService == null) + { + throw new AlfrescoRuntimeException("Mandatory property not set: patchService"); + } + patchService.registerPatch(this); + } + + public String getId() + { + return id; + } + + /** + * @param id + * the unique ID of the patch. This dictates the order in which patches are applied. + */ + public void setId(String id) + { + this.id = id; + } + + public int getFixesFromSchema() + { + return fixesFromSchema; + } + + /** + * Set the smallest schema number that this patch may be applied to. 
+ * + * @param version + * a schema number not smaller than 0 + */ + public void setFixesFromSchema(int version) + { + if (version < 0) + { + throw new IllegalArgumentException("The 'fixesFromSchema' property may not be less than 0"); + } + this.fixesFromSchema = version; + // auto-adjust the to version + if (fixesToSchema < fixesFromSchema) + { + setFixesToSchema(this.fixesFromSchema); + } + } + + public int getFixesToSchema() + { + return fixesToSchema; + } + + /** + * Set the largest schema version number that this patch may be applied to. + * + * @param version + * a schema version number not smaller than the {@link #setFixesFromSchema(int) from version} number. + */ + public void setFixesToSchema(int version) + { + if (version < fixesFromSchema) + { + throw new IllegalArgumentException("'fixesToSchema' must be greater than or equal to 'fixesFromSchema'"); + } + this.fixesToSchema = version; + } + + public int getTargetSchema() + { + return targetSchema; + } + + /** + * Set the schema version that this patch attempts to take the existing schema to. This is for informational + * purposes only, acting as an indicator of intention rather than having any specific effect. 
+ * + * @param version + * a schema version number that must be greater than the {@link #fixesToSchema max fix schema number} + */ + public void setTargetSchema(int version) + { + if (version <= fixesToSchema) + { + throw new IllegalArgumentException("'targetSchema' must be greater than 'fixesToSchema'"); + } + this.targetSchema = version; + } + /** * {@inheritDoc} */ @@ -272,302 +283,304 @@ public abstract class AbstractPatch implements Patch this.force = force; } - public String getDescription() - { - return description; - } - - /** - * @param description - * a thorough description of the patch - */ - public void setDescription(String description) - { - this.description = description; - } - - public List getDependsOn() - { - return this.dependsOn; - } - - /** - * Set all the dependencies for this patch. It should not be executed before all the dependencies have been applied. - * - * @param dependsOn - * a list of dependencies - */ - public void setDependsOn(List dependsOn) - { - this.dependsOn = dependsOn; - } - - public List getAlternatives() - { - return alternatives; - } - - /** - * Set all anti-dependencies. If any of the patches in the list have already been executed, then - * this one need not be. - * - * @param alternatives a list of alternative patches - */ - public void setAlternatives(List alternatives) - { - this.alternatives = alternatives; - } - - public boolean applies(int version) - { - return ((this.fixesFromSchema <= version) && (version <= fixesToSchema)); - } - - /** - * Performs a null check on the supplied value. 
- * - * @param value - * value to check - * @param name - * name of the property to report - */ - protected final void checkPropertyNotNull(Object value, String name) - { - if (value == null) - { - throw new PatchException(ERR_PROPERTY_NOT_SET, name, this); - } - } - - public void setApplyToTenants(boolean applyToTenants) - { - this.applyToTenants = applyToTenants; - } - - /** - * Check that the schema version properties have been set appropriately. Derived classes can override this method to - * perform their own validation provided that this method is called by the derived class. - */ - protected void checkProperties() - { - // check that the necessary properties have been set - checkPropertyNotNull(id, "id"); - checkPropertyNotNull(description, "description"); - checkPropertyNotNull(transactionService, "transactionService"); - checkPropertyNotNull(namespaceService, "namespaceService"); - checkPropertyNotNull(nodeService, "nodeService"); - checkPropertyNotNull(searchService, "searchService"); - checkPropertyNotNull(authenticationContext, "authenticationContext"); - if (fixesFromSchema == -1 || fixesToSchema == -1 || targetSchema == -1) - { - throw new AlfrescoRuntimeException( - "Patch properties 'fixesFromSchema', 'fixesToSchema' and 'targetSchema' " + - "have not all been set on this patch: \n" - + " patch: " + this); - } - } - - /** - * Sets up the transaction and ensures thread-safety. 
- * - * @see #applyInternal() - */ - public synchronized String apply() throws PatchException - { - // ensure that this has not been executed already - if (applied) - { - throw new AlfrescoRuntimeException("The patch has already been executed: \n" + " patch: " + this); - } - // check properties - checkProperties(); - // execute in a transaction - try - { - if (logger.isDebugEnabled()) - { - logger.debug("\n" + "Patch will be applied: \n" + " patch: " + this); - } - AuthenticationUtil.RunAsWork authorisedPathWork = new AuthenticationUtil.RunAsWork() - { - public String doWork() throws Exception - { - RetryingTransactionCallback patchWork = new RetryingTransactionCallback() - { - public String execute() throws Exception - { - - // downgrade integrity checking - IntegrityChecker.setWarnInTransaction(); - - String report = applyInternal(); - - if ((tenantAdminService != null) && tenantAdminService.isEnabled() && applyToTenants) - { - List tenants = tenantAdminService.getAllTenants(); - for (Tenant tenant : tenants) - { - String tenantDomain = tenant.getTenantDomain(); - String tenantReport = AuthenticationUtil.runAs(new RunAsWork() - { - public String doWork() throws Exception - { - return applyInternal(); - } - }, tenantAdminService.getDomainUser(AuthenticationUtil.getSystemUserName(), tenantDomain)); - - report = report + "\n" + tenantReport + " (for tenant: " + tenantDomain + ")"; - } - - return report; - } - - // done - return report; - } - }; - return transactionService.getRetryingTransactionHelper().doInTransaction(patchWork); - } - }; - startTime = System.currentTimeMillis(); - String report = AuthenticationUtil.runAs(authorisedPathWork, AuthenticationUtil.getSystemUserName()); - // the patch was successfully applied - applied = true; - // done - if (logger.isDebugEnabled()) - { - logger.debug("\n" + "Patch successfully applied: \n" + " patch: " + this + "\n" + " report: " + report); - } - return report; - } - catch (PatchException e) - { - // no need to extract 
the exception - throw e; - } - catch (Throwable e) - { - // check whether there is an embedded patch exception - Throwable cause = e.getCause(); - if (cause != null && cause instanceof PatchException) - { - throw (PatchException) cause; - } - // need to generate a message from the exception - String report = makeReport(e); - // generate the correct exception - throw new PatchException(report); - } - } - - /** - * Dumps the error's full message and trace to the String - * - * @param e - * the throwable - * @return Returns a String representative of the printStackTrace method - */ - private String makeReport(Throwable e) - { - StringWriter stringWriter = new StringWriter(1024); - PrintWriter printWriter = new PrintWriter(stringWriter, true); - try - { - e.printStackTrace(printWriter); - return stringWriter.toString(); - } - finally - { - printWriter.close(); - } - } - - /** - * This method does the work. All transactions and thread-safety will be taken care of by this class. Any exception - * will result in the transaction being rolled back. Integrity checks are downgraded for the duration of the - * transaction. - * - * @return Returns the report (only success messages). - * @see #apply() - * @throws Exception - * anything can be thrown. This must be used for all failures. - */ - protected abstract String applyInternal() throws Exception; - - /** - * Support to report patch completion and estimated completion time. - * - * @param estimatedTotal - * @param currentInteration - */ - protected void reportProgress(long estimatedTotal, long currentInteration) - { - if (progress_logger.isDebugEnabled()) - { - progress_logger.debug(currentInteration + "/" + estimatedTotal); - } - if (currentInteration == 0) - { - // No point reporting the start - we have already done that elsewhere .... 
- percentComplete = 0; - } - else if (currentInteration * 100l / estimatedTotal > percentComplete) - { - int previous = percentComplete; - percentComplete = (int) (currentInteration * 100l / estimatedTotal); - - if (percentComplete < 100) - { - // conditional report - - long currentTime = System.currentTimeMillis(); - long timeSoFar = currentTime - startTime; - long timeRemaining = timeSoFar * (100 - percentComplete) / percentComplete; - - int report = -1; - - if (timeRemaining > 60000) - { + public String getDescription() + { + return description; + } + + /** + * @param description + * a thorough description of the patch + */ + public void setDescription(String description) + { + this.description = description; + } + + public List getDependsOn() + { + return this.dependsOn; + } + + /** + * Set all the dependencies for this patch. It should not be executed before all the dependencies have been applied. + * + * @param dependsOn + * a list of dependencies + */ + public void setDependsOn(List dependsOn) + { + this.dependsOn = dependsOn; + } + + public List getAlternatives() + { + return alternatives; + } + + /** + * Set all anti-dependencies. If any of the patches in the list have already been executed, then + * this one need not be. + * + * @param alternatives a list of alternative patches + */ + public void setAlternatives(List alternatives) + { + this.alternatives = alternatives; + } + + public boolean applies(int version) + { + return ((this.fixesFromSchema <= version) && (version <= fixesToSchema)); + } + + /** + * Performs a null check on the supplied value. 
+ * + * @param value + * value to check + * @param name + * name of the property to report + */ + protected final void checkPropertyNotNull(Object value, String name) + { + if (value == null) + { + throw new PatchException(ERR_PROPERTY_NOT_SET, name, this); + } + } + + public void setApplyToTenants(boolean applyToTenants) + { + this.applyToTenants = applyToTenants; + } + + /** + * Check that the schema version properties have been set appropriately. Derived classes can override this method to + * perform their own validation provided that this method is called by the derived class. + */ + protected void checkProperties() + { + // check that the necessary properties have been set + checkPropertyNotNull(id, "id"); + checkPropertyNotNull(description, "description"); + checkPropertyNotNull(transactionService, "transactionService"); + checkPropertyNotNull(namespaceService, "namespaceService"); + checkPropertyNotNull(nodeService, "nodeService"); + checkPropertyNotNull(searchService, "searchService"); + checkPropertyNotNull(authenticationContext, "authenticationContext"); + checkPropertyNotNull(tenantAdminService, "tenantAdminService"); + checkPropertyNotNull(applicationEventPublisher, "applicationEventPublisher"); + if (fixesFromSchema == -1 || fixesToSchema == -1 || targetSchema == -1) + { + throw new AlfrescoRuntimeException( + "Patch properties 'fixesFromSchema', 'fixesToSchema' and 'targetSchema' " + + "have not all been set on this patch: \n" + + " patch: " + this); + } + } + + /** + * Sets up the transaction and ensures thread-safety. 
+ * + * @see #applyInternal() + */ + public synchronized String apply() throws PatchException + { + // ensure that this has not been executed already + if (applied) + { + throw new AlfrescoRuntimeException("The patch has already been executed: \n" + " patch: " + this); + } + // check properties + checkProperties(); + // execute in a transaction + try + { + if (logger.isDebugEnabled()) + { + logger.debug("\n" + "Patch will be applied: \n" + " patch: " + this); + } + AuthenticationUtil.RunAsWork authorisedPathWork = new AuthenticationUtil.RunAsWork() + { + public String doWork() throws Exception + { + RetryingTransactionCallback patchWork = new RetryingTransactionCallback() + { + public String execute() throws Exception + { + + // downgrade integrity checking + IntegrityChecker.setWarnInTransaction(); + + String report = applyInternal(); + + if ((tenantAdminService != null) && tenantAdminService.isEnabled() && applyToTenants) + { + List tenants = tenantAdminService.getAllTenants(); + for (Tenant tenant : tenants) + { + String tenantDomain = tenant.getTenantDomain(); + String tenantReport = AuthenticationUtil.runAs(new RunAsWork() + { + public String doWork() throws Exception + { + return applyInternal(); + } + }, tenantAdminService.getDomainUser(AuthenticationUtil.getSystemUserName(), tenantDomain)); + + report = report + "\n" + tenantReport + " (for tenant: " + tenantDomain + ")"; + } + + return report; + } + + // done + return report; + } + }; + return transactionService.getRetryingTransactionHelper().doInTransaction(patchWork); + } + }; + startTime = System.currentTimeMillis(); + String report = AuthenticationUtil.runAs(authorisedPathWork, AuthenticationUtil.getSystemUserName()); + // the patch was successfully applied + applied = true; + // done + if (logger.isDebugEnabled()) + { + logger.debug("\n" + "Patch successfully applied: \n" + " patch: " + this + "\n" + " report: " + report); + } + return report; + } + catch (PatchException e) + { + // no need to extract 
the exception + throw e; + } + catch (Throwable e) + { + // check whether there is an embedded patch exception + Throwable cause = e.getCause(); + if (cause != null && cause instanceof PatchException) + { + throw (PatchException) cause; + } + // need to generate a message from the exception + String report = makeReport(e); + // generate the correct exception + throw new PatchException(report); + } + } + + /** + * Dumps the error's full message and trace to the String + * + * @param e + * the throwable + * @return Returns a String representative of the printStackTrace method + */ + private String makeReport(Throwable e) + { + StringWriter stringWriter = new StringWriter(1024); + PrintWriter printWriter = new PrintWriter(stringWriter, true); + try + { + e.printStackTrace(printWriter); + return stringWriter.toString(); + } + finally + { + printWriter.close(); + } + } + + /** + * This method does the work. All transactions and thread-safety will be taken care of by this class. Any exception + * will result in the transaction being rolled back. Integrity checks are downgraded for the duration of the + * transaction. + * + * @return Returns the report (only success messages). + * @see #apply() + * @throws Exception + * anything can be thrown. This must be used for all failures. + */ + protected abstract String applyInternal() throws Exception; + + /** + * Support to report patch completion and estimated completion time. + * + * @param estimatedTotal + * @param currentInteration + */ + protected void reportProgress(long estimatedTotal, long currentInteration) + { + if (progress_logger.isDebugEnabled()) + { + progress_logger.debug(currentInteration + "/" + estimatedTotal); + } + if (currentInteration == 0) + { + // No point reporting the start - we have already done that elsewhere .... 
+ percentComplete = 0; + } + else if (currentInteration * 100l / estimatedTotal > percentComplete) + { + int previous = percentComplete; + percentComplete = (int) (currentInteration * 100l / estimatedTotal); + + if (percentComplete < 100) + { + // conditional report + + long currentTime = System.currentTimeMillis(); + long timeSoFar = currentTime - startTime; + long timeRemaining = timeSoFar * (100 - percentComplete) / percentComplete; + + int report = -1; + + if (timeRemaining > 60000) + { int reportInterval = getReportingInterval(timeSoFar, timeRemaining); - - for (int i = previous + 1; i <= percentComplete; i++) - { - if (i % reportInterval == 0) - { - report = i; - } - } - if (report > 0) - { - Date end = new Date(currentTime + timeRemaining); - - String msg = I18NUtil.getMessage(MSG_PROGRESS, report, end); - progress_logger.info(msg); - } - } - } - } - } - + + for (int i = previous + 1; i <= percentComplete; i++) + { + if (i % reportInterval == 0) + { + report = i; + } + } + if (report > 0) + { + Date end = new Date(currentTime + timeRemaining); + + String msg = I18NUtil.getMessage(MSG_PROGRESS, getId(), report, end); + progress_logger.info(msg); + } + } + } + } + } + private int getReportingInterval(long soFar, long toGo) - { - long total = soFar + toGo; - if (total < RANGE_10) - { - return 10; - } - else if (total < RANGE_5) - { - return 5; - } - else if (total < RANGE_2) - { - return 2; - } - else - { - return 1; - } - - } -} + { + long total = soFar + toGo; + if (total < RANGE_10) + { + return 10; + } + else if (total < RANGE_5) + { + return 5; + } + else if (total < RANGE_2) + { + return 2; + } + else + { + return 1; + } + + } +} diff --git a/source/java/org/alfresco/repo/admin/patch/impl/AuthorityMigrationPatch.java b/source/java/org/alfresco/repo/admin/patch/impl/AuthorityMigrationPatch.java index 2b280f9352..50879efed6 100644 --- a/source/java/org/alfresco/repo/admin/patch/impl/AuthorityMigrationPatch.java +++ 
b/source/java/org/alfresco/repo/admin/patch/impl/AuthorityMigrationPatch.java @@ -49,8 +49,6 @@ import org.alfresco.service.namespace.QName; import org.alfresco.service.namespace.RegexQNamePattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.springframework.context.ApplicationEventPublisher; -import org.springframework.context.ApplicationEventPublisherAware; /** * Migrates authority information previously stored in the user store to the spaces store, using the new structure used @@ -58,7 +56,7 @@ import org.springframework.context.ApplicationEventPublisherAware; * * @author dward */ -public class AuthorityMigrationPatch extends AbstractPatch implements ApplicationEventPublisherAware +public class AuthorityMigrationPatch extends AbstractPatch { /** The title we give to the batch process in progress messages / JMX. */ private static final String MSG_PROCESS_NAME = "patch.authorityMigration.process.name"; @@ -91,9 +89,6 @@ public class AuthorityMigrationPatch extends AbstractPatch implements Applicatio /** The user bootstrap. */ private ImporterBootstrap userBootstrap; - /** The application event publisher. */ - private ApplicationEventPublisher applicationEventPublisher; - /** * Sets the authority service. * @@ -127,17 +122,6 @@ public class AuthorityMigrationPatch extends AbstractPatch implements Applicatio this.userBootstrap = userBootstrap; } - /** - * Sets the application event publisher. - * - * @param applicationEventPublisher - * the application event publisher - */ - public void setApplicationEventPublisher(ApplicationEventPublisher applicationEventPublisher) - { - this.applicationEventPublisher = applicationEventPublisher; - } - /** * Recursively retrieves the authorities under the given node and their associations. 
* @@ -238,14 +222,33 @@ public class AuthorityMigrationPatch extends AbstractPatch implements Applicatio */ private void migrateAuthorities(final Map authoritiesToCreate, Map> parentAssocs) { - BatchProcessor.Worker>> worker = new BatchProcessor.Worker>>() - { + final String tenantDomain = tenantAdminService.getCurrentUserDomain(); + BatchProcessor.BatchProcessWorker>> worker = new BatchProcessor.BatchProcessWorker>>() + { public String getIdentifier(Entry> entry) { return entry.getKey(); } + public void beforeProcess() throws Throwable + { + // Disable rules + ruleService.disableRules(); + // Authentication + String systemUser = AuthenticationUtil.getSystemUserName(); + systemUser = tenantAdminService.getDomainUser(systemUser, tenantDomain); + AuthenticationUtil.setRunAsUser(systemUser); + } + + public void afterProcess() throws Throwable + { + // Enable rules + ruleService.enableRules(); + // Clear authentication + AuthenticationUtil.clearCurrentSecurityContext(); + } + public void process(Entry> authority) throws Throwable { String authorityName = authority.getKey(); @@ -290,10 +293,13 @@ public class AuthorityMigrationPatch extends AbstractPatch implements Applicatio } }; // Migrate using 2 threads, 20 authorities per transaction. Log every 100 entries. 
- new BatchProcessor>>(AuthorityMigrationPatch.progress_logger, - this.transactionService.getRetryingTransactionHelper(), this.ruleService, this.tenantAdminService, - this.applicationEventPublisher, parentAssocs.entrySet(), I18NUtil - .getMessage(AuthorityMigrationPatch.MSG_PROCESS_NAME), 100, 2, 20).process(worker, true); + new BatchProcessor>>( + I18NUtil.getMessage(AuthorityMigrationPatch.MSG_PROCESS_NAME), + this.transactionService.getRetryingTransactionHelper(), + parentAssocs.entrySet(), + 2, 20, + AuthorityMigrationPatch.this.applicationEventPublisher, + AuthorityMigrationPatch.progress_logger, 100).process(worker, true); } /** diff --git a/source/java/org/alfresco/repo/admin/patch/impl/ContentUrlConverterPatch.java b/source/java/org/alfresco/repo/admin/patch/impl/ContentUrlConverterPatch.java new file mode 100644 index 0000000000..4b30f33483 --- /dev/null +++ b/source/java/org/alfresco/repo/admin/patch/impl/ContentUrlConverterPatch.java @@ -0,0 +1,687 @@ +/* +* Copyright (C) 2005-2010 Alfresco Software Limited. +* +* This file is part of Alfresco +* +* Alfresco is free software: you can redistribute it and/or modify +* it under the terms of the GNU Lesser General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* +* Alfresco is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU Lesser General Public License for more details. +* +* You should have received a copy of the GNU Lesser General Public License +* along with Alfresco. If not, see . 
+*/ +package org.alfresco.repo.admin.patch.impl; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.alfresco.error.AlfrescoRuntimeException; +import org.alfresco.repo.admin.patch.AbstractPatch; +import org.alfresco.repo.admin.patch.PatchExecuter; +import org.alfresco.repo.admin.registry.RegistryKey; +import org.alfresco.repo.admin.registry.RegistryService; +import org.alfresco.repo.avm.AVMDAOs; +import org.alfresco.repo.avm.PlainFileNode; +import org.alfresco.repo.batch.BatchProcessor; +import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorkerAdaptor; +import org.alfresco.repo.content.ContentStore; +import org.alfresco.repo.content.ContentStore.ContentUrlHandler; +import org.alfresco.repo.domain.contentdata.ContentDataDAO; +import org.alfresco.repo.domain.patch.PatchDAO; +import org.alfresco.repo.lock.JobLockService; +import org.alfresco.repo.lock.LockAcquisitionException; +import org.alfresco.repo.node.db.NodeDaoService; +import org.alfresco.repo.security.authentication.AuthenticationUtil; +import org.alfresco.repo.transaction.AlfrescoTransactionSupport; +import org.alfresco.repo.transaction.AlfrescoTransactionSupport.TxnReadState; +import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.service.ServiceRegistry; +import org.alfresco.service.cmr.repository.ContentData; +import org.alfresco.service.cmr.repository.ContentReader; +import org.alfresco.service.namespace.NamespaceService; +import org.alfresco.service.namespace.QName; +import org.alfresco.util.VmShutdownListener; +import org.alfresco.util.VmShutdownListener.VmShutdownException; +import org.apache.commons.lang.mutable.MutableInt; +import org.apache.commons.lang.mutable.MutableLong; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.quartz.Job; +import org.quartz.JobDataMap; +import org.quartz.JobExecutionContext; +import 
org.quartz.JobExecutionException; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.extensions.surf.util.I18NUtil; +import org.springframework.extensions.surf.util.Pair; +import org.springframework.extensions.surf.util.PropertyCheck; + +/** + * Component to migrate old-style content URL storage (contentUrl=store://...|mimetype=...) + * to the newer alf_content_url storage. + *

+ * The {@link ServiceRegistry} is used to record progress. The component picks up ranges of node IDs + * (DM and AVM) and records the progress. Since new nodes will not need converting, the converter + * will stop once it hits the largest node ID that it found upon first initiation. Once completed, + * the content store reader will start to pick up orphaned content and schedule it for deletion. + *

+ * A cluster-wide lock is set so that a single instance of this job will be running per Alfresco + * installation. + * + * @author Derek Hulley + * @since 3.2.1 + */ +public class ContentUrlConverterPatch extends AbstractPatch +{ + // Registry keys + private static final RegistryKey KEY_ADM_MAX_ID = new RegistryKey( + NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "adm", "max-id"); + private static final RegistryKey KEY_ADM_RANGE_START_ID = new RegistryKey( + NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "adm", "range-start-id"); + private static final RegistryKey KEY_ADM_DONE = new RegistryKey( + NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "adm", "done"); + private static final RegistryKey KEY_AVM_MAX_ID = new RegistryKey( + NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "avm", "max-id"); + private static final RegistryKey KEY_AVM_RANGE_START_ID = new RegistryKey( + NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "avm", "range-start-id"); + private static final RegistryKey KEY_AVM_DONE = new RegistryKey( + NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "avm", "done"); + private static final RegistryKey KEY_STORE_DONE = new RegistryKey( + NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "store", "done"); + + // Lock key + private static final QName LOCK = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter"); + + // Lock as per patching + private static Log logger = LogFactory.getLog(PatchExecuter.class); + private static VmShutdownListener shutdownListener = new VmShutdownListener("ContentUrlConverterPatch"); + + private RegistryService registryService; + private JobLockService jobLockService; + private NodeDaoService nodeDaoService; + private PatchDAO patchDAO; + private ContentStore contentStore; + private ContentDataDAO contentDataDAO; + private int threadCount; + private int batchSize; + private boolean runAsScheduledJob; + + 
private ThreadLocal runningAsJob = new ThreadLocal(); + + /** + * Default constructor + */ + public ContentUrlConverterPatch() + { + runningAsJob.set(Boolean.FALSE); + threadCount = 2; + batchSize=500; + } + + /** + * Service to record progress for later pick-up + */ + public void setRegistryService(RegistryService registryService) + { + this.registryService = registryService; + } + + /** + * Service to prevent concurrent execution + */ + public void setJobLockService(JobLockService jobLockService) + { + this.jobLockService = jobLockService; + } + + /** + * Provides low-level access to do the property transformation + */ + public void setNodeDaoService(NodeDaoService nodeDaoService) + { + this.nodeDaoService = nodeDaoService; + } + + /** + * Component that provides low-level queries and updates to support this patch + */ + public void setPatchDAO(PatchDAO patchDAO) + { + this.patchDAO = patchDAO; + } + + /** + * Set the store containing the content URLs to lift for potential cleaning. + * + * @param contentStore the store containing the system's content URLs + */ + public void setContentStore(ContentStore contentStore) + { + this.contentStore = contentStore; + } + + /** + * Set the component that will write URLs coming from the + * {@link ContentStore#getUrls(ContentUrlHandler) content store}. + * + * @param contentDataDAO the DAO to write the URLs + */ + public void setContentDataDAO(ContentDataDAO contentDataDAO) + { + this.contentDataDAO = contentDataDAO; + } + + /** + * Set the number of threads that will be used process the required work. + * + * @param threadCount the number of threads + */ + public void setThreadCount(int threadCount) + { + this.threadCount = threadCount; + } + + /** + * Set the number of URLs that are processed per job pass; this property is ignored + * when this component is run as a patch. Keep the number low (500) when running + * at short intervals on a on a live machine. 
+ * + * @param batchSize the number of nodes to process per batch when running on a schedule + */ + public void setBatchSize(int batchSize) + { + this.batchSize = batchSize; + } + + /** + * Set whether the patch execution should just bypass any actual work i.e. the admin has + * chosen to manually trigger the work. + * + * @param runAsScheduledJob true to leave all work up to the scheduled job + */ + public void setRunAsScheduledJob(boolean runAsScheduledJob) + { + this.runAsScheduledJob = runAsScheduledJob; + } + + @Override + protected void checkProperties() + { + PropertyCheck.mandatory(this, "registryService", registryService); + PropertyCheck.mandatory(this, "jobLockService", jobLockService); + PropertyCheck.mandatory(this, "nodeDaoService", nodeDaoService); + PropertyCheck.mandatory(this, "patchDAO", patchDAO); + super.checkProperties(); + } + + /** + * Method called when executed as a scheduled job. + */ + private void executeViaJob() + { + AuthenticationUtil.RunAsWork patchRunAs = new AuthenticationUtil.RunAsWork() + { + public String doWork() throws Exception + { + RetryingTransactionCallback patchTxn = new RetryingTransactionCallback() + { + public String execute() throws Exception + { + try + { + runningAsJob.set(Boolean.TRUE); + String report = applyInternal(); + // done + return report; + } + finally + { + runningAsJob.set(Boolean.FALSE); // Back to default + } + } + }; + return transactionService.getRetryingTransactionHelper().doInTransaction(patchTxn); + } + }; + String report = AuthenticationUtil.runAs(patchRunAs, AuthenticationUtil.getSystemUserName()); + if (report != null) + { + logger.info(report); + } + } + + /** + * Gets a set of work to do and executes it within this transaction. If kicked off via a job, + * the task will exit before completion, on the assumption that it will be kicked off at regular + * intervals. When called as a patch, it will run to completion with full progress logging. 
+ */ + @Override + protected String applyInternal() throws Exception + { + if (AlfrescoTransactionSupport.getTransactionReadState() != TxnReadState.TXN_READ_WRITE) + { + // Nothing to do + return null; + } + + boolean isRunningAsJob = runningAsJob.get().booleanValue(); + + // Do we bug out of patch execution + if (runAsScheduledJob && !isRunningAsJob) + { + return I18NUtil.getMessage("patch.convertContentUrls.bypassingPatch"); + } + + boolean completed = false; + // Lock in proportion to the batch size (0.1s per node or 0.8 min per 500) + String lockToken = getLock(batchSize*100L); + if (lockToken == null) + { + // Some other process is busy + if (isRunningAsJob) + { + // Fine, we're doing batches + return null; + } + else + { + throw new RuntimeException("Unable to get job lock during patch execution. Only one server should perform the upgrade."); + } + } + try + { + logger.info(I18NUtil.getMessage("patch.convertContentUrls.start")); + + logger.info(I18NUtil.getMessage("patch.convertContentUrls.adm.start")); + boolean admCompleted = applyADM(lockToken); + logger.info(I18NUtil.getMessage("patch.convertContentUrls.avm.start")); + boolean avmCompleted = applyAVM(lockToken); + logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.start", contentStore)); + boolean urlLiftingCompleted = applyUrlLifting(lockToken); + + completed = admCompleted && avmCompleted && urlLiftingCompleted; + } + finally + { + jobLockService.releaseLock(lockToken, LOCK); + } + + if (completed) + { + return I18NUtil.getMessage("patch.convertContentUrls.done"); + } + else + { + return I18NUtil.getMessage("patch.convertContentUrls.inProgress"); + } + } + + /** + * Attempts to get the lock. If the lock couldn't be taken, then null is returned. 
+ * + * @return Returns the lock token or null + */ + private String getLock(long time) + { + try + { + return jobLockService.getLock(LOCK, time); + } + catch (LockAcquisitionException e) + { + return null; + } + } + + /** + * Attempts to get the lock. If it fails, the current transaction is marked for rollback. + * + * @return Returns the lock token + */ + private void refreshLock(String lockToken, long time) + { + if (lockToken == null) + { + throw new IllegalArgumentException("Must provide existing lockToken"); + } + jobLockService.refreshLock(lockToken, LOCK, time); + } + + private boolean applyADM(final String lockToken) + { + RetryingTransactionCallback callback = new RetryingTransactionCallback() + { + public Boolean execute() throws Throwable + { + return applyADM(); + } + }; + boolean done = false; + while (true && !shutdownListener.isVmShuttingDown()) + { + refreshLock(lockToken, batchSize*100L); + + done = transactionService.getRetryingTransactionHelper().doInTransaction(callback, false, true); + if (done) + { + break; + } + } + return done; + } + + /** + * Do the DM conversion work + * @return Returns true if the work is done + */ + private boolean applyADM() throws Exception + { + Long maxId = (Long) registryService.getProperty(KEY_ADM_MAX_ID); + + // Must we run at all? + Boolean done = (Boolean) registryService.getProperty(KEY_ADM_DONE); + if (done != null && done.booleanValue()) + { + logger.info(I18NUtil.getMessage("patch.convertContentUrls.adm.done", maxId)); + return true; + } + + if (maxId == null) + { + maxId = patchDAO.getMaxAdmNodeID(); + registryService.addProperty(KEY_ADM_MAX_ID, maxId); + } + Long startId = (Long) registryService.getProperty(KEY_ADM_RANGE_START_ID); + if (startId == null) + { + startId = 1L; + registryService.addProperty(KEY_ADM_RANGE_START_ID, startId); + } + + // Each thread gets 10 executions i.e. 
we get ranges for threadCount*10 lots of work + Long endId = startId; + Collection> batchProcessorWork = new ArrayList>(2); + for (long i = 0; i < threadCount*10; i++) + { + endId = startId + (i+1L) * batchSize; + Pair batchEntry = new Pair( + startId + i * batchSize, + endId); + batchProcessorWork.add(batchEntry); + } + BatchProcessWorkerAdaptor> batchProcessorWorker = new BatchProcessWorkerAdaptor>() + { + public void process(Pair range) throws Throwable + { + Long startId = range.getFirst(); + Long endId = range.getSecond(); + // Bulk-update the old content properties + patchDAO.updateAdmV31ContentProperties(startId, endId); + } + }; + BatchProcessor> batchProcessor = new BatchProcessor>( + "ContentUrlConverter.ADM (" + maxId + ")", + transactionService.getRetryingTransactionHelper(), + batchProcessorWork, threadCount, 1, + applicationEventPublisher, null, 1); + batchProcessor.process(batchProcessorWorker, true); + + // Advance + startId = endId; + // Have we + if (startId > maxId) + { + startId = maxId + 1; + // We're past the max ID that we're interested in + done = Boolean.TRUE; + registryService.addProperty(KEY_ADM_DONE, done); + logger.info(I18NUtil.getMessage("patch.convertContentUrls.adm.done", maxId)); + return true; + } + // Progress + super.reportProgress(maxId, startId); + + // Move the start ID on + registryService.addProperty(KEY_ADM_RANGE_START_ID, startId); + + // More to do + return false; + } + + private boolean applyAVM(final String lockToken) + { + RetryingTransactionCallback callback = new RetryingTransactionCallback() + { + public Boolean execute() throws Throwable + { + return applyAVM(); + } + }; + boolean done = false; + while (true && !shutdownListener.isVmShuttingDown()) + { + refreshLock(lockToken, batchSize*100L); + + done = transactionService.getRetryingTransactionHelper().doInTransaction(callback, false, true); + if (done) + { + break; + } + } + return done; + } + + /** + * Do the AVM conversion work + */ + private boolean 
applyAVM() throws Exception + { + Long maxId = (Long) registryService.getProperty(KEY_AVM_MAX_ID); + + // Must we run at all? + Boolean done = (Boolean) registryService.getProperty(KEY_AVM_DONE); + if (done != null && done.booleanValue()) + { + logger.info(I18NUtil.getMessage("patch.convertContentUrls.avm.done", maxId)); + return true; + } + + if (maxId == null) + { + maxId = patchDAO.getMaxAvmNodeID(); + registryService.addProperty(KEY_AVM_MAX_ID, maxId); + } + Long startId = (Long) registryService.getProperty(KEY_AVM_RANGE_START_ID); + if (startId == null) + { + startId = 1L; + registryService.addProperty(KEY_AVM_RANGE_START_ID, startId); + } + Long endId = startId + (batchSize * (long) threadCount * 10L); + + final List nodeIds = patchDAO.getAvmNodesWithOldContentProperties(startId, endId); + BatchProcessWorkerAdaptor batchProcessorWorker = new BatchProcessWorkerAdaptor() + { + public void process(Long nodeId) throws Throwable + { + // Convert it + PlainFileNode node = (PlainFileNode) AVMDAOs.Instance().fAVMNodeDAO.getByID(nodeId); + ContentData contentData = node.getContentData(); + node.setContentData(contentData); + AVMDAOs.Instance().fAVMNodeDAO.update(node); + } + }; + BatchProcessor batchProcessor = new BatchProcessor( + "ContentUrlConverter.AVM (" + maxId + ")", + transactionService.getRetryingTransactionHelper(), + nodeIds, threadCount, batchSize, + applicationEventPublisher, null, 1); + batchProcessor.process(batchProcessorWorker, true); + + // Advance + startId = endId; + // Have we + if (startId > maxId) + { + startId = maxId + 1; + // We're past the max ID that we're interested in + done = Boolean.TRUE; + registryService.addProperty(KEY_AVM_DONE, done); + logger.info(I18NUtil.getMessage("patch.convertContentUrls.avm.done", maxId)); + return true; + } + // Progress + super.reportProgress(maxId, startId); + + // Move the start ID on + registryService.addProperty(KEY_AVM_RANGE_START_ID, startId); + + // More to do + return false; + } + + private boolean 
applyUrlLifting(final String lockToken) throws Exception + { + RetryingTransactionCallback callback = new RetryingTransactionCallback() + { + public Boolean execute() throws Throwable + { + return applyUrlLiftingInTxn(lockToken); + } + }; + return transactionService.getRetryingTransactionHelper().doInTransaction(callback, false, true); + } + + private boolean applyUrlLiftingInTxn(final String lockToken) throws Exception + { + // Check the store + if (!contentStore.isWriteSupported()) + { + logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.readOnly")); + return true; + } + + Boolean admDone = (Boolean) registryService.getProperty(KEY_ADM_DONE); + Boolean avmDone = (Boolean) registryService.getProperty(KEY_AVM_DONE); + + if ((admDone == null || !admDone.booleanValue()) || (avmDone == null || !avmDone.booleanValue())) + { + logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.pending")); + return false; + } + + // Must we run at all? + Boolean done = (Boolean) registryService.getProperty(KEY_STORE_DONE); + if (done != null && done.booleanValue()) + { + logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.done")); + return true; + } + + final long totalSize = contentStore.getTotalSize(); + final MutableLong currentSize = new MutableLong(0L); + + final MutableInt count = new MutableInt(); + count.setValue(0); + ContentUrlHandler handler = new ContentUrlHandler() + { + private int allCount = 0; + public void handle(String contentUrl) + { + if (shutdownListener.isVmShuttingDown()) + { + throw new VmShutdownListener.VmShutdownException(); + } + + ContentReader reader = contentStore.getReader(contentUrl); + if (!reader.exists()) + { + // Not there any more + return; + } + currentSize.setValue(currentSize.longValue() + reader.getSize()); + try + { + contentDataDAO.createContentUrlOrphaned(contentUrl); + count.setValue(count.intValue()+1); + } + catch (DataIntegrityViolationException e) + { + // That's OK, the URL was already managed + } + 
allCount++; + if (allCount % batchSize == 0) + { + // Update our lock + refreshLock(lockToken, batchSize*100L); + if (totalSize < 0) + { + // Report + logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.progress", allCount)); + } + else + { + ContentUrlConverterPatch.super.reportProgress(totalSize, currentSize.longValue()); + } + } + } + }; + try + { + contentStore.getUrls(handler); + } + catch (UnsupportedOperationException e) + { + logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.noSupport")); + } + catch (VmShutdownException e) + { + // We didn't manage to complete + return false; + } + // Record the completion + done = Boolean.TRUE; + registryService.addProperty(KEY_STORE_DONE, done); + + // Done + logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.scheduled", count.intValue(), contentStore)); + + return true; + } + + /** + * Job to initiate the {@link ContentUrlConverterPatch} + * + * @author Derek Hulley + * @since 3.2.1 + */ + public static class ContentUrlConverterJob implements Job + { + public ContentUrlConverterJob() + { + } + + /** + * Calls the cleaner to do its work + */ + public void execute(JobExecutionContext context) throws JobExecutionException + { + JobDataMap jobData = context.getJobDetail().getJobDataMap(); + // extract the content cleaner to use + Object contentUrlConverterObj = jobData.get("contentUrlConverter"); + if (contentUrlConverterObj == null || !(contentUrlConverterObj instanceof ContentUrlConverterPatch)) + { + throw new AlfrescoRuntimeException( + "'contentUrlConverter' data must contain valid 'ContentUrlConverter' reference"); + } + ContentUrlConverterPatch contentUrlConverter = (ContentUrlConverterPatch) contentUrlConverterObj; + contentUrlConverter.executeViaJob(); + } + } +} diff --git a/source/java/org/alfresco/repo/admin/patch/impl/FixNameCrcValuesPatch.java b/source/java/org/alfresco/repo/admin/patch/impl/FixNameCrcValuesPatch.java index 61712741ab..66c6147569 100644 --- 
a/source/java/org/alfresco/repo/admin/patch/impl/FixNameCrcValuesPatch.java +++ b/source/java/org/alfresco/repo/admin/patch/impl/FixNameCrcValuesPatch.java @@ -32,12 +32,13 @@ import java.util.zip.CRC32; import org.alfresco.model.ContentModel; import org.alfresco.repo.admin.patch.AbstractPatch; import org.alfresco.repo.batch.BatchProcessor; -import org.alfresco.repo.batch.BatchProcessor.Worker; +import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorker; import org.alfresco.repo.domain.ChildAssoc; import org.alfresco.repo.domain.Node; import org.alfresco.repo.domain.hibernate.ChildAssocImpl; import org.alfresco.repo.domain.qname.QNameDAO; import org.alfresco.repo.node.db.NodeDaoService; +import org.alfresco.repo.security.authentication.AuthenticationUtil; import org.alfresco.service.cmr.admin.PatchException; import org.alfresco.service.cmr.rule.RuleService; import org.alfresco.service.namespace.QName; @@ -48,8 +49,6 @@ import org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.type.LongType; import org.hibernate.type.StringType; -import org.springframework.context.ApplicationEventPublisher; -import org.springframework.context.ApplicationEventPublisherAware; import org.springframework.extensions.surf.util.I18NUtil; import org.springframework.orm.hibernate3.HibernateCallback; import org.springframework.orm.hibernate3.support.HibernateDaoSupport; @@ -61,7 +60,7 @@ import org.springframework.orm.hibernate3.support.HibernateDaoSupport; * @author Derek Hulley * @since V2.2SP4 */ -public class FixNameCrcValuesPatch extends AbstractPatch implements ApplicationEventPublisherAware +public class FixNameCrcValuesPatch extends AbstractPatch { private static final String MSG_SUCCESS = "patch.fixNameCrcValues.result"; private static final String MSG_REWRITTEN = "patch.fixNameCrcValues.fixed"; @@ -71,7 +70,6 @@ public class FixNameCrcValuesPatch extends AbstractPatch implements ApplicationE private NodeDaoService nodeDaoService; private QNameDAO 
qnameDAO; private RuleService ruleService; - private ApplicationEventPublisher applicationEventPublisher; public FixNameCrcValuesPatch() { @@ -106,14 +104,6 @@ public class FixNameCrcValuesPatch extends AbstractPatch implements ApplicationE this.ruleService = ruleService; } - /* (non-Javadoc) - * @see org.springframework.context.ApplicationEventPublisherAware#setApplicationEventPublisher(org.springframework.context.ApplicationEventPublisher) - */ - public void setApplicationEventPublisher(ApplicationEventPublisher applicationEventPublisher) - { - this.applicationEventPublisher = applicationEventPublisher; - } - @Override protected void checkProperties() { @@ -180,20 +170,33 @@ public class FixNameCrcValuesPatch extends AbstractPatch implements ApplicationE public String fixCrcValues() throws Exception { // get the association types to check - BatchProcessor batchProcessor = new BatchProcessor(logger, transactionService - .getRetryingTransactionHelper(), ruleService, tenantAdminService, applicationEventPublisher, findMismatchedCrcs(), - "FixNameCrcValuesPatch", 1000, 2, 20); + BatchProcessor batchProcessor = new BatchProcessor( + "FixNameCrcValuesPatch", + transactionService.getRetryingTransactionHelper(), + findMismatchedCrcs(), + 2, 20, + applicationEventPublisher, + logger, 1000); // Precautionary flush and clear so that we have an empty session getSession().flush(); getSession().clear(); - int updated = batchProcessor.process(new Worker(){ - + int updated = batchProcessor.process(new BatchProcessWorker() + { public String getIdentifier(Long entry) { return entry.toString(); } + + public void beforeProcess() throws Throwable + { + // Switch rules off + ruleService.disableRules(); + // Authenticate as system + String systemUsername = AuthenticationUtil.getSystemUserName(); + AuthenticationUtil.setFullyAuthenticatedUser(systemUsername); + } public void process(Long childAssocId) throws Throwable { @@ -247,7 +250,13 @@ public class FixNameCrcValuesPatch extends 
AbstractPatch implements ApplicationE // Record writeLine(I18NUtil.getMessage(MSG_REWRITTEN, childNode.getId(), childName, oldChildCrc, childCrc, qname, oldQNameCrc, qnameCrc)); - }}, true); + } + + public void afterProcess() throws Throwable + { + ruleService.enableRules(); + } + }, true); String msg = I18NUtil.getMessage(MSG_SUCCESS, updated, logFile); diff --git a/source/java/org/alfresco/repo/admin/registry/RegistryKey.java b/source/java/org/alfresco/repo/admin/registry/RegistryKey.java index 3a9f6f82af..31f6953698 100644 --- a/source/java/org/alfresco/repo/admin/registry/RegistryKey.java +++ b/source/java/org/alfresco/repo/admin/registry/RegistryKey.java @@ -19,6 +19,7 @@ package org.alfresco.repo.admin.registry; import java.io.Serializable; +import java.util.Arrays; /** * Key for looking up registry metadata. @@ -115,6 +116,46 @@ public class RegistryKey implements Serializable return sb.toString(); } + @Override + public boolean equals(Object obj) + { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + RegistryKey other = (RegistryKey) obj; + if (namespaceUri == null) + { + if (other.namespaceUri != null) + return false; + } + else if (!namespaceUri.equals(other.namespaceUri)) + return false; + if (!Arrays.equals(path, other.path)) + return false; + if (property == null) + { + if (other.property != null) + return false; + } + else if (!property.equals(other.property)) + return false; + return true; + } + + @Override + public int hashCode() + { + final int prime = 31; + int result = 1; + result = prime * result + ((namespaceUri == null) ? 0 : namespaceUri.hashCode()); + result = prime * result + Arrays.hashCode(path); + result = prime * result + ((property == null) ? 
0 : property.hashCode()); + return result; + } + public String getNamespaceUri() { return namespaceUri; diff --git a/source/java/org/alfresco/repo/avm/OrphanReaper.java b/source/java/org/alfresco/repo/avm/OrphanReaper.java index f0f046867b..682a54cc5a 100644 --- a/source/java/org/alfresco/repo/avm/OrphanReaper.java +++ b/source/java/org/alfresco/repo/avm/OrphanReaper.java @@ -23,6 +23,7 @@ import java.util.List; import org.alfresco.repo.domain.DbAccessControlList; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; +import org.alfresco.service.cmr.repository.ContentData; import org.alfresco.service.transaction.TransactionService; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -333,14 +334,17 @@ public class OrphanReaper else if (node.getType() == AVMNodeType.PLAIN_FILE) { PlainFileNode file = (PlainFileNode)node; - if (!file.isLegacyContentData()) + if (file.isLegacyContentData()) { - Long contentDataId = file.getContentDataId(); - if (contentDataId != null) - { - // The ContentDataDAO will take care of dereferencing and cleanup - AVMDAOs.Instance().contentDataDAO.deleteContentData(contentDataId); - } + // We quickly convert the old ContentData to the new storage + ContentData contentData = file.getContentData(); + file.setContentData(contentData); + } + Long contentDataId = file.getContentDataId(); + if (contentDataId != null) + { + // The ContentDataDAO will take care of dereferencing and cleanup + AVMDAOs.Instance().contentDataDAO.deleteContentData(contentDataId); } } // Finally, delete it diff --git a/source/java/org/alfresco/repo/avm/PlainFileNodeImpl.java b/source/java/org/alfresco/repo/avm/PlainFileNodeImpl.java index 5a30eea95c..e450c61541 100644 --- a/source/java/org/alfresco/repo/avm/PlainFileNodeImpl.java +++ b/source/java/org/alfresco/repo/avm/PlainFileNodeImpl.java @@ -28,7 +28,7 @@ import org.alfresco.repo.domain.contentdata.ContentDataDAO; import 
org.alfresco.repo.security.permissions.ACLCopyMode; import org.alfresco.service.cmr.avm.AVMNodeDescriptor; import org.alfresco.service.cmr.repository.ContentData; -import org.alfresco.service.namespace.QName; +import org.alfresco.service.namespace.QName; /** * A plain old file. Contains a Content object. @@ -67,7 +67,7 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode public PlainFileNodeImpl() { } - + /** * Make one from just an AVMStore. * This is the constructor used when a brand new plain file is being made. @@ -76,7 +76,7 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode public PlainFileNodeImpl(AVMStore store) { super(store); - setVersionID(1); + setVersionID(1); } /** @@ -92,10 +92,10 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode // layered files. setContentData(other.getContentData(null)); setVersionID(other.getVersionID() + 1); - - copyACLs(other, parentAcl, mode); - copyCreationAndOwnerBasicAttributes(other); - + + copyACLs(other, parentAcl, mode); + copyCreationAndOwnerBasicAttributes(other); + AVMDAOs.Instance().fAVMNodeDAO.save(this); copyProperties(other); @@ -114,8 +114,8 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode public PlainFileNodeImpl(AVMStore store, BasicAttributes attrs, ContentData content, - Map props, - Set aspects, + Map props, + Set aspects, DbAccessControlList acl, int versionID, Long parentAcl, ACLCopyMode mode) { @@ -129,10 +129,10 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode setAcl(acl.getCopy(parentAcl, mode)); } - AVMDAOs.Instance().fAVMNodeDAO.save(this); - - addProperties(props); - setAspects(new HashSet(aspects)); + AVMDAOs.Instance().fAVMNodeDAO.save(this); + + addProperties(props); + setAspects(new HashSet(aspects)); } /** @@ -414,7 +414,14 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode Long contentDataId = getContentDataId(); try { 
- return AVMDAOs.Instance().contentDataDAO.getContentData(contentDataId).getSecond(); + if (contentDataId == null) + { + return new ContentData(null, null, 0L, null); + } + else + { + return AVMDAOs.Instance().contentDataDAO.getContentData(contentDataId).getSecond(); + } } catch (Throwable e) { diff --git a/source/java/org/alfresco/repo/batch/BatchProcessor.java b/source/java/org/alfresco/repo/batch/BatchProcessor.java index 461512dde5..3d4bf7930e 100644 --- a/source/java/org/alfresco/repo/batch/BatchProcessor.java +++ b/source/java/org/alfresco/repo/batch/BatchProcessor.java @@ -34,15 +34,12 @@ import java.util.concurrent.TimeUnit; import org.alfresco.error.AlfrescoRuntimeException; import org.alfresco.repo.transaction.AlfrescoTransactionSupport; -import org.alfresco.repo.security.authentication.AuthenticationUtil; -import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork; -import org.alfresco.repo.tenant.TenantService; -import org.alfresco.repo.tenant.TenantUserService; import org.alfresco.repo.transaction.RetryingTransactionHelper; import org.alfresco.repo.transaction.TransactionListenerAdapter; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; -import org.alfresco.service.cmr.rule.RuleService; +import org.alfresco.util.TraceableThreadFactory; import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.springframework.context.ApplicationEventPublisher; /** @@ -59,20 +56,15 @@ import org.springframework.context.ApplicationEventPublisher; */ public class BatchProcessor implements BatchMonitor { + /** The factory for all new threads */ + private TraceableThreadFactory threadFactory; + /** The logger to use. */ private final Log logger; /** The retrying transaction helper. */ private final RetryingTransactionHelper retryingTransactionHelper; - /** The rule service. */ - private final RuleService ruleService; - - /** The tenant user service. 
*/ - private final TenantUserService tenantUserService; - - private final String tenantDomain; - /** The collection. */ private final Collection collection; @@ -113,84 +105,78 @@ public class BatchProcessor implements BatchMonitor private Date endTime; /** - * Instantiates a new batch processor. + * Instantiates a new batch processor using a the default logger, which references + * this class as the log category. * - * @param logger - * the logger to use - * @param retryingTransactionHelper - * the retrying transaction helper - * @param ruleService - * the rule service - * @param collection - * the collection - * @param processName - * the process name - * @param loggingInterval - * the number of entries to process before reporting progress - * @param applicationEventPublisher - * the application event publisher - * @param workerThreads - * the number of worker threads - * @param batchSize - * the number of entries we process at a time in a transaction + * @see #BatchProcessor(String, RetryingTransactionHelper, Collection, int, int, ApplicationEventPublisher, Log, int) */ - public BatchProcessor(Log logger, RetryingTransactionHelper retryingTransactionHelper, RuleService ruleService, - ApplicationEventPublisher applicationEventPublisher, Collection collection, String processName, - int loggingInterval, int workerThreads, int batchSize) + public BatchProcessor( + String processName, + RetryingTransactionHelper retryingTransactionHelper, + Collection collection, + int workerThreads, int batchSize) { - this(logger, retryingTransactionHelper, ruleService, null, applicationEventPublisher, collection, processName, - loggingInterval, workerThreads, batchSize); + this( + processName, + retryingTransactionHelper, + collection, + workerThreads, + batchSize, null, null, 1); } - + /** * Instantiates a new batch processor. 
* - * @param logger - * the logger to use - * @param retryingTransactionHelper - * the retrying transaction helper - * @param ruleService - * the rule service - * @param tenantUserService - * the tenant user service - * @param collection - * the collection * @param processName * the process name - * @param loggingInterval - * the number of entries to process before reporting progress - * @param applicationEventPublisher - * the application event publisher + * @param retryingTransactionHelper + * the retrying transaction helper + * @param collection + * the collection * @param workerThreads * the number of worker threads * @param batchSize * the number of entries we process at a time in a transaction + * @param applicationEventPublisher + * the application event publisher (may be null) + * @param logger + * the logger to use (may be null) + * @param loggingInterval + * the number of entries to process before reporting progress */ - public BatchProcessor(Log logger, RetryingTransactionHelper retryingTransactionHelper, RuleService ruleService, - TenantUserService tenantUserService, ApplicationEventPublisher applicationEventPublisher, Collection collection, String processName, - int loggingInterval, int workerThreads, int batchSize) + public BatchProcessor( + String processName, + RetryingTransactionHelper retryingTransactionHelper, + Collection collection, + int workerThreads, int batchSize, + ApplicationEventPublisher applicationEventPublisher, + Log logger, + int loggingInterval) { - this.logger = logger; - this.retryingTransactionHelper = retryingTransactionHelper; - this.ruleService = ruleService; - this.tenantUserService = tenantUserService; - this.collection = collection; + this.threadFactory = new TraceableThreadFactory(); + this.threadFactory.setNamePrefix(processName); + this.threadFactory.setThreadDaemon(true); + this.processName = processName; - this.loggingInterval = loggingInterval; + this.retryingTransactionHelper = retryingTransactionHelper; + 
this.collection = collection; this.workerThreads = workerThreads; this.batchSize = batchSize; - - if (tenantUserService != null) + if (logger == null) { - this.tenantDomain = tenantUserService.getUserDomain(AuthenticationUtil.getRunAsUser()); + this.logger = LogFactory.getLog(this.getClass()); } else { - this.tenantDomain = TenantService.DEFAULT_DOMAIN; + this.logger = logger; } + this.loggingInterval = loggingInterval; // Let the (enterprise) monitoring side know of our presence - applicationEventPublisher.publishEvent(new BatchMonitorEvent(this)); + if (applicationEventPublisher != null) + { + applicationEventPublisher.publishEvent(new BatchMonitorEvent(this)); + } } /* @@ -302,14 +288,14 @@ public class BatchProcessor implements BatchMonitor * the worker * @param splitTxns * Can the modifications to Alfresco be split across multiple transactions for maximum performance? If - * true, worker invocations are isolated in separate transactions in batches of 10 for + * true, worker invocations are isolated in separate transactions in batches for * increased performance. If false, all invocations are performed in the current * transaction. This is required if calling synchronously (e.g. in response to an authentication event in * the same transaction). * @return the number of invocations */ @SuppressWarnings("serial") - public int process(final Worker worker, final boolean splitTxns) + public int process(final BatchProcessWorker worker, final boolean splitTxns) { int count = this.collection.size(); synchronized (this) @@ -330,9 +316,10 @@ public class BatchProcessor implements BatchMonitor } // Create a thread pool executor with the specified number of threads and a finite blocking queue of jobs - ExecutorService executorService = splitTxns && this.workerThreads > 1 ? 
new ThreadPoolExecutor( - this.workerThreads, this.workerThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue( - this.workerThreads * this.batchSize * 10) + ExecutorService executorService = splitTxns && this.workerThreads > 1 ? + new ThreadPoolExecutor( + this.workerThreads, this.workerThreads, 0L, TimeUnit.MILLISECONDS, + new ArrayBlockingQueue(this.workerThreads * this.batchSize * 10) { // Add blocking behaviour to work queue @Override @@ -349,7 +336,8 @@ public class BatchProcessor implements BatchMonitor return true; } - }) : null; + }, + threadFactory) : null; try { Iterator iterator = this.collection.iterator(); @@ -452,9 +440,8 @@ public class BatchProcessor implements BatchMonitor /** * An interface for workers to be invoked by the {@link BatchProcessor}. */ - public interface Worker + public interface BatchProcessWorker { - /** * Gets an identifier for the given entry (for monitoring / logging purposes). * @@ -464,6 +451,14 @@ public class BatchProcessor implements BatchMonitor */ public String getIdentifier(T entry); + /** + * Callback to allow thread initialization before the work entries are + * {@link #process(Object) processed}. Typically, this will include authenticating + * as a valid user and disbling or enabling any system flags that might affect the + * entry processing. + */ + public void beforeProcess() throws Throwable; + /** * Processes the given entry. * @@ -473,6 +468,38 @@ public class BatchProcessor implements BatchMonitor * on any error */ public void process(T entry) throws Throwable; + + /** + * Callback to allow thread cleanup after the work entries have been + * {@link #process(Object) processed}. + * Typically, this will involve cleanup of authentication and resetting any + * system flags previously set. + *

+ * This call is made regardless of the outcome of the entry processing. + */ + public void afterProcess() throws Throwable; + } + + /** + * Adaptor that allows implementations to only implement {@link #process(Object)} + */ + public static abstract class BatchProcessWorkerAdaptor implements BatchProcessWorker + { + /** + * @return Returns the toString() of the entry + */ + public String getIdentifier(TT entry) + { + return entry.toString(); + } + /** No-op */ + public void beforeProcess() throws Throwable + { + } + /** No-op */ + public void afterProcess() throws Throwable + { + } } /** @@ -491,7 +518,7 @@ public class BatchProcessor implements BatchMonitor * @param splitTxns * If true, the worker invocation is made in a new transaction. */ - public TxnCallback(Worker worker, List batch, boolean splitTxns) + public TxnCallback(BatchProcessWorker worker, List batch, boolean splitTxns) { this.worker = worker; this.batch = batch; @@ -499,7 +526,7 @@ public class BatchProcessor implements BatchMonitor } /** The worker. */ - private final Worker worker; + private final BatchProcessWorker worker; /** The batch. 
*/ private final List batch; @@ -602,26 +629,21 @@ public class BatchProcessor implements BatchMonitor */ public void run() { - // Disable rules for this thread - BatchProcessor.this.ruleService.disableRules(); + try + { + } + catch (Throwable e) + { + BatchProcessor.this.logger.error("Failed to cleanup Worker after processing.", e); + } + final BatchProcessor.TxnCallback callback = this; try { - String systemUser = AuthenticationUtil.getSystemUserName(); - if (tenantUserService != null) - { - systemUser = tenantUserService.getDomainUser(AuthenticationUtil.getSystemUserName(), tenantDomain); - } - - AuthenticationUtil.runAs(new RunAsWork() - { - public Void doWork() throws Exception - { - BatchProcessor.this.retryingTransactionHelper.doInTransaction(callback, false, splitTxns); - return null; - } - }, systemUser); + worker.beforeProcess(); + BatchProcessor.this.retryingTransactionHelper.doInTransaction(callback, false, splitTxns); + worker.afterProcess(); } catch (Throwable t) { @@ -651,11 +673,6 @@ public class BatchProcessor implements BatchMonitor throw new AlfrescoRuntimeException("Transactional error during " + getProcessName(), t); } } - finally - { - // Re-enable rules - BatchProcessor.this.ruleService.enableRules(); - } commitProgress(); } diff --git a/source/java/org/alfresco/repo/content/cleanup/ContentStoreCleaner.java b/source/java/org/alfresco/repo/content/cleanup/ContentStoreCleaner.java index bae3a9220f..7757998d7f 100644 --- a/source/java/org/alfresco/repo/content/cleanup/ContentStoreCleaner.java +++ b/source/java/org/alfresco/repo/content/cleanup/ContentStoreCleaner.java @@ -34,6 +34,7 @@ import org.alfresco.service.namespace.NamespaceService; import org.alfresco.service.namespace.QName; import org.alfresco.service.transaction.TransactionService; import org.alfresco.util.VmShutdownListener; +import org.alfresco.util.VmShutdownListener.VmShutdownException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import 
org.springframework.extensions.surf.util.Pair; @@ -41,8 +42,6 @@ import org.springframework.extensions.surf.util.PropertyCheck; /** * This component is responsible cleaning up orphaned content. - *

- * TODO: Fix up new comments * * Clean-up happens at two levels.

* Eager cleanup: (since 3.2)

@@ -53,10 +52,9 @@ import org.springframework.extensions.surf.util.PropertyCheck; * procedures should be plugged in as listeners if this is required. *

* Lazy cleanup:

- * This is triggered by means of a {@link ContentStoreCleanupJob Quartz job}. This is - * a heavy-weight process that effectively compares the database metadata with the - * content URLs controlled by the various stores. Once again, the listeners are called - * appropriately. + * This is triggered by means of a {@link ContentStoreCleanupJob Quartz job}. This process + * gets content URLs that have been marked as orphaned and cleans up the various stores. + * Once again, the listeners are called appropriately. *

* How backup policies are affected:

* When restoring the system from a backup, the type of restore required is dictated by @@ -352,15 +350,4 @@ public class ContentStoreCleaner // Done return size; } - - /** - * Message carrier to break out of loops using the callback. - * - * @author Derek Hulley - * @since 2.1.3 - */ - private class VmShutdownException extends RuntimeException - { - private static final long serialVersionUID = -5876107469054587072L; - } } diff --git a/source/java/org/alfresco/repo/descriptor/RepositoryDescriptorDAOImpl.java b/source/java/org/alfresco/repo/descriptor/RepositoryDescriptorDAOImpl.java index 1640712a7a..e37051f637 100644 --- a/source/java/org/alfresco/repo/descriptor/RepositoryDescriptorDAOImpl.java +++ b/source/java/org/alfresco/repo/descriptor/RepositoryDescriptorDAOImpl.java @@ -22,7 +22,6 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.io.Serializable; -import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; @@ -33,6 +32,7 @@ import org.alfresco.model.ContentModel; import org.alfresco.repo.content.MimetypeMap; import org.alfresco.repo.descriptor.DescriptorServiceImpl.BaseDescriptor; import org.alfresco.repo.importer.ImporterBootstrap; +import org.alfresco.service.cmr.repository.ContentData; import org.alfresco.service.cmr.repository.ContentReader; import org.alfresco.service.cmr.repository.ContentService; import org.alfresco.service.cmr.repository.ContentWriter; @@ -201,17 +201,18 @@ public class RepositoryDescriptorDAOImpl implements DescriptorDAO props.put(ContentModel.PROP_SYS_VERSION_SCHEMA, serverDescriptor.getSchema()); this.nodeService.addProperties(currentDescriptorNodeRef, props); - // The version edition property may already have been overwritten with a license, so only set the property - // if it doesn't already contain ContentData - final Serializable value = this.nodeService.getProperty(currentDescriptorNodeRef, + // ALF-726: v3.1.x 
Content Cleaner Job needs to be ported to v3.2 + // In order to migrate properly, this property needs to be d:content. We will rewrite the property with the + // license update code. There is no point attempting to rewrite the property here. + final Serializable value = this.nodeService.getProperty( + currentDescriptorNodeRef, ContentModel.PROP_SYS_VERSION_EDITION); - if (!(value instanceof Collection) || ((Collection) value).isEmpty() - || ((Collection) value).iterator().next() instanceof String) + if (value == null) { - final Collection editions = new ArrayList(); - editions.add(serverDescriptor.getEdition()); - this.nodeService.setProperty(currentDescriptorNodeRef, ContentModel.PROP_SYS_VERSION_EDITION, - (Serializable) editions); + this.nodeService.setProperty( + currentDescriptorNodeRef, + ContentModel.PROP_SYS_VERSION_EDITION, + new ContentData(null, null, 0L, null)); } // done diff --git a/source/java/org/alfresco/repo/domain/ContentDataId.java b/source/java/org/alfresco/repo/domain/ContentDataId.java new file mode 100644 index 0000000000..98373ef78d --- /dev/null +++ b/source/java/org/alfresco/repo/domain/ContentDataId.java @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2005-2008 Alfresco Software Limited. + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ + * As a special exception to the terms and conditions of version 2.0 of + * the GPL, you may redistribute this Program in connection with Free/Libre + * and Open Source Software ("FLOSS") applications as described in Alfresco's + * FLOSS exception. You should have recieved a copy of the text describing + * the FLOSS exception, and it is also available here: + * http://www.alfresco.com/legal/licensing" + */ +package org.alfresco.repo.domain; + +import java.io.Serializable; + +/** + * Data type carrying the ID of a ContentData reference. + * + * @author Derek Hulley + * @since 3.2.1 + */ +public class ContentDataId implements Serializable +{ + private static final long serialVersionUID = -4980820192507809266L; + + private final Long id; + + public ContentDataId(Long id) + { + super(); + this.id = id; + } + + @Override + public String toString() + { + return "ContentDataId [id=" + id + "]"; + } + + public Long getId() + { + return id; + } +} diff --git a/source/java/org/alfresco/repo/domain/NodePropertyValue.java b/source/java/org/alfresco/repo/domain/NodePropertyValue.java index 4515c16b66..bd8be643f3 100644 --- a/source/java/org/alfresco/repo/domain/NodePropertyValue.java +++ b/source/java/org/alfresco/repo/domain/NodePropertyValue.java @@ -127,7 +127,18 @@ public class NodePropertyValue implements Cloneable, Serializable @Override Serializable convert(Serializable value) { - return DefaultTypeConverter.INSTANCE.convert(Long.class, value); + if (value == null) + { + return null; + } + else if (value instanceof ContentDataId) + { + return ((ContentDataId)value).getId(); + } + else + { + return DefaultTypeConverter.INSTANCE.convert(Long.class, value); + } } }, FLOAT @@ -467,6 +478,41 @@ public class NodePropertyValue implements Cloneable, Serializable { return DefaultTypeConverter.INSTANCE.convert(Period.class, value); } + }, + CONTENT_DATA_ID + { + @Override + public Integer getOrdinalNumber() + { + return Integer.valueOf(21); + } + + @Override + protected 
ValueType getPersistedType(Serializable value) + { + return ValueType.LONG; + } + + @Override + Serializable convert(Serializable value) + { + if (value == null) + { + return null; + } + else if (value instanceof Long) + { + return value; + } + else if (value instanceof ContentDataId) + { + return ((ContentDataId)value).getId(); + } + else + { + return DefaultTypeConverter.INSTANCE.convert(ContentData.class, value); + } + } } ; @@ -566,6 +612,10 @@ public class NodePropertyValue implements Cloneable, Serializable { return ValueType.PERIOD; } + else if (value instanceof ContentDataId) + { + return ValueType.CONTENT_DATA_ID; + } else { // type is not recognised as belonging to any particular slot @@ -592,7 +642,7 @@ public class NodePropertyValue implements Cloneable, Serializable valueTypesByPropertyType.put(DataTypeDefinition.DATE, ValueType.DATE); valueTypesByPropertyType.put(DataTypeDefinition.DATETIME, ValueType.DATE); valueTypesByPropertyType.put(DataTypeDefinition.CATEGORY, ValueType.NODEREF); - valueTypesByPropertyType.put(DataTypeDefinition.CONTENT, ValueType.CONTENT); + valueTypesByPropertyType.put(DataTypeDefinition.CONTENT, ValueType.CONTENT_DATA_ID); valueTypesByPropertyType.put(DataTypeDefinition.TEXT, ValueType.STRING); valueTypesByPropertyType.put(DataTypeDefinition.MLTEXT, ValueType.MLTEXT); valueTypesByPropertyType.put(DataTypeDefinition.NODE_REF, ValueType.NODEREF); diff --git a/source/java/org/alfresco/repo/domain/contentdata/AbstractContentDataDAOImpl.java b/source/java/org/alfresco/repo/domain/contentdata/AbstractContentDataDAOImpl.java index a6fe734b85..afb20c5f96 100644 --- a/source/java/org/alfresco/repo/domain/contentdata/AbstractContentDataDAOImpl.java +++ b/source/java/org/alfresco/repo/domain/contentdata/AbstractContentDataDAOImpl.java @@ -1,4 +1,4 @@ -/* +/* * Copyright (C) 2005-2010 Alfresco Software Limited. * * This file is part of Alfresco @@ -14,60 +14,60 @@ * GNU Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.contentdata; - -import java.io.Serializable; -import java.util.Locale; -import java.util.Set; - -import org.alfresco.repo.cache.SimpleCache; -import org.alfresco.repo.cache.lookup.EntityLookupCache; -import org.alfresco.repo.cache.lookup.EntityLookupCache.EntityLookupCallbackDAOAdaptor; -import org.alfresco.repo.content.cleanup.EagerContentStoreCleaner; -import org.alfresco.repo.domain.LocaleDAO; -import org.alfresco.repo.domain.encoding.EncodingDAO; -import org.alfresco.repo.domain.mimetype.MimetypeDAO; -import org.alfresco.repo.transaction.AlfrescoTransactionSupport; -import org.alfresco.repo.transaction.TransactionListenerAdapter; -import org.alfresco.repo.transaction.TransactionalResourceHelper; -import org.alfresco.service.cmr.repository.ContentData; -import org.alfresco.util.EqualsHelper; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.dao.ConcurrencyFailureException; -import org.springframework.dao.DataIntegrityViolationException; -import org.springframework.extensions.surf.util.Pair; - -/** - * Abstract implementation for ContentData DAO. - *

- * This provides basic services such as caching, but defers to the underlying implementation - * for CRUD operations. - *

- * The DAO deals in {@link ContentData} instances. The cache is primarily present to decode - * IDs into ContentData instances. - * - * @author Derek Hulley - * @since 3.2 - */ -public abstract class AbstractContentDataDAOImpl implements ContentDataDAO -{ + * along with Alfresco. If not, see . + */ +package org.alfresco.repo.domain.contentdata; + +import java.io.Serializable; +import java.util.Locale; +import java.util.Set; + +import org.alfresco.repo.cache.SimpleCache; +import org.alfresco.repo.cache.lookup.EntityLookupCache; +import org.alfresco.repo.cache.lookup.EntityLookupCache.EntityLookupCallbackDAOAdaptor; +import org.alfresco.repo.content.cleanup.EagerContentStoreCleaner; +import org.alfresco.repo.domain.LocaleDAO; +import org.alfresco.repo.domain.encoding.EncodingDAO; +import org.alfresco.repo.domain.mimetype.MimetypeDAO; +import org.alfresco.repo.transaction.AlfrescoTransactionSupport; +import org.alfresco.repo.transaction.TransactionListenerAdapter; +import org.alfresco.repo.transaction.TransactionalResourceHelper; +import org.alfresco.service.cmr.repository.ContentData; +import org.alfresco.util.EqualsHelper; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.dao.ConcurrencyFailureException; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.extensions.surf.util.Pair; + +/** + * Abstract implementation for ContentData DAO. + *

+ * This provides basic services such as caching, but defers to the underlying implementation + * for CRUD operations. + *

+ * The DAO deals in {@link ContentData} instances. The cache is primarily present to decode + * IDs into ContentData instances. + * + * @author Derek Hulley + * @since 3.2 + */ +public abstract class AbstractContentDataDAOImpl implements ContentDataDAO +{ private static final String CACHE_REGION_CONTENT_DATA = "ContentData"; - /** - * Content URL IDs to delete before final commit. - */ - private static final String KEY_PRE_COMMIT_CONTENT_URL_DELETIONS = "AbstractContentDataDAOImpl.PreCommitContentUrlDeletions"; - - private static Log logger = LogFactory.getLog(AbstractContentDataDAOImpl.class); - + /** + * Content URL IDs to delete before final commit. + */ + private static final String KEY_PRE_COMMIT_CONTENT_URL_DELETIONS = "AbstractContentDataDAOImpl.PreCommitContentUrlDeletions"; + + private static Log logger = LogFactory.getLog(AbstractContentDataDAOImpl.class); + private final ContentDataCallbackDAO contentDataCallbackDAO; - private MimetypeDAO mimetypeDAO; - private EncodingDAO encodingDAO; - private LocaleDAO localeDAO; - private EagerContentStoreCleaner contentStoreCleaner; - + private MimetypeDAO mimetypeDAO; + private EncodingDAO encodingDAO; + private LocaleDAO localeDAO; + private EagerContentStoreCleaner contentStoreCleaner; + /** * Cache for the ContentData class:
* KEY: ID
@@ -85,98 +85,98 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO this.contentDataCache = new EntityLookupCache(contentDataCallbackDAO); } - public void setMimetypeDAO(MimetypeDAO mimetypeDAO) - { - this.mimetypeDAO = mimetypeDAO; - } - - public void setEncodingDAO(EncodingDAO encodingDAO) - { - this.encodingDAO = encodingDAO; - } - - public void setLocaleDAO(LocaleDAO localeDAO) - { - this.localeDAO = localeDAO; - } - - /** - * Set this property to enable eager cleanup of orphaned content. - * - * @param contentStoreCleaner an eager cleaner (may be null) - */ - public void setContentStoreCleaner(EagerContentStoreCleaner contentStoreCleaner) - { - this.contentStoreCleaner = contentStoreCleaner; - } - - /** - * @param contentDataCache the cache of IDs to ContentData and vice versa - */ + public void setMimetypeDAO(MimetypeDAO mimetypeDAO) + { + this.mimetypeDAO = mimetypeDAO; + } + + public void setEncodingDAO(EncodingDAO encodingDAO) + { + this.encodingDAO = encodingDAO; + } + + public void setLocaleDAO(LocaleDAO localeDAO) + { + this.localeDAO = localeDAO; + } + + /** + * Set this property to enable eager cleanup of orphaned content. + * + * @param contentStoreCleaner an eager cleaner (may be null) + */ + public void setContentStoreCleaner(EagerContentStoreCleaner contentStoreCleaner) + { + this.contentStoreCleaner = contentStoreCleaner; + } + + /** + * @param contentDataCache the cache of IDs to ContentData and vice versa + */ public void setContentDataCache(SimpleCache contentDataCache) - { + { this.contentDataCache = new EntityLookupCache( contentDataCache, CACHE_REGION_CONTENT_DATA, contentDataCallbackDAO); - } - - /** - * Register new content for post-rollback handling - */ - protected void registerNewContentUrl(String contentUrl) - { - contentStoreCleaner.registerNewContentUrl(contentUrl); - } - - /** - * A content_url entity was dereferenced. 
This makes no assumptions about the - * current references - dereference deletion is handled in the commit phase. - */ + } + + /** + * Register new content for post-rollback handling + */ + protected void registerNewContentUrl(String contentUrl) + { + contentStoreCleaner.registerNewContentUrl(contentUrl); + } + + /** + * A content_url entity was dereferenced. This makes no assumptions about the + * current references - dereference deletion is handled in the commit phase. + */ protected void registerDereferencedContentUrl(String contentUrl) - { - Set contentUrls = TransactionalResourceHelper.getSet(KEY_PRE_COMMIT_CONTENT_URL_DELETIONS); - if (contentUrls.size() == 0) - { - ContentUrlDeleteTransactionListener listener = new ContentUrlDeleteTransactionListener(); - AlfrescoTransactionSupport.bindListener(listener); - } - contentUrls.add(contentUrl); - } - - /** - * {@inheritDoc} - */ - public Pair createContentData(ContentData contentData) - { + { + Set contentUrls = TransactionalResourceHelper.getSet(KEY_PRE_COMMIT_CONTENT_URL_DELETIONS); + if (contentUrls.size() == 0) + { + ContentUrlDeleteTransactionListener listener = new ContentUrlDeleteTransactionListener(); + AlfrescoTransactionSupport.bindListener(listener); + } + contentUrls.add(contentUrl); + } + + /** + * {@inheritDoc} + */ + public Pair createContentData(ContentData contentData) + { if (contentData == null) { throw new IllegalArgumentException("ContentData values cannot be null"); } Pair entityPair = contentDataCache.getOrCreateByValue(contentData); return entityPair; - } - - /** - * {@inheritDoc} - */ - public Pair getContentData(Long id) - { + } + + /** + * {@inheritDoc} + */ + public Pair getContentData(Long id) + { if (id == null) - { + { throw new IllegalArgumentException("Cannot look up ContentData by null ID."); - } + } Pair entityPair = contentDataCache.getByKey(id); if (entityPair == null) { throw new DataIntegrityViolationException("No ContentData value exists for ID " + id); } return entityPair; 
- } - - /** - * {@inheritDoc} - */ + } + + /** + * {@inheritDoc} + */ public void updateContentData(Long id, ContentData contentData) { if (id == null) @@ -197,21 +197,21 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO /** * {@inheritDoc} */ - public void deleteContentData(Long id) - { + public void deleteContentData(Long id) + { if (id == null) { throw new IllegalArgumentException("Cannot delete ContentData by null ID."); } int deleted = contentDataCache.deleteByKey(id); - if (deleted < 1) - { + if (deleted < 1) + { throw new ConcurrencyFailureException("ContentData with ID " + id + " no longer exists"); - } - return; - } - - /** + } + return; + } + + /** * Callback for alf_content_data DAO. */ private class ContentDataCallbackDAO extends EntityLookupCallbackDAOAdaptor @@ -254,83 +254,83 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO } /** - * Translates this instance into an externally-usable ContentData instance. - */ - private ContentData makeContentData(ContentDataEntity contentDataEntity) - { - // Decode content URL - String contentUrl = contentDataEntity.getContentUrl(); - long size = contentDataEntity.getSize() == null ? 
0L : contentDataEntity.getSize().longValue(); - // Decode mimetype - Long mimetypeId = contentDataEntity.getMimetypeId(); - String mimetype = null; - if (mimetypeId != null) - { - mimetype = mimetypeDAO.getMimetype(mimetypeId).getSecond(); - } - // Decode encoding - Long encodingId = contentDataEntity.getEncodingId(); - String encoding = null; - if (encodingId != null) - { - encoding = encodingDAO.getEncoding(encodingId).getSecond(); - } - // Decode locale - Long localeId = contentDataEntity.getLocaleId(); - Locale locale = null; - if (localeId != null) - { - locale = localeDAO.getLocalePair(localeId).getSecond(); - } - // Build the ContentData - ContentData contentData = new ContentData(contentUrl, mimetype, size, encoding, locale); - // Done - return contentData; - } - - /** - * Translates the {@link ContentData} into persistable values using the helper DAOs - */ - private ContentDataEntity createContentDataEntity(ContentData contentData) - { - // Resolve the content URL - Long contentUrlId = null; - String contentUrl = contentData.getContentUrl(); - long size = contentData.getSize(); - if (contentUrl != null) - { - // We must find or create the ContentUrlEntity - contentUrlId = getOrCreateContentUrlEntity(contentUrl, size).getId(); - } - // Resolve the mimetype - Long mimetypeId = null; - String mimetype = contentData.getMimetype(); - if (mimetype != null) - { - mimetypeId = mimetypeDAO.getOrCreateMimetype(mimetype).getFirst(); - } - // Resolve the encoding - Long encodingId = null; - String encoding = contentData.getEncoding(); - if (encoding != null) - { - encodingId = encodingDAO.getOrCreateEncoding(encoding).getFirst(); - } - // Resolve the locale - Long localeId = null; - Locale locale = contentData.getLocale(); - if (locale != null) - { - localeId = localeDAO.getOrCreateLocalePair(locale).getFirst(); - } - - // Create ContentDataEntity - ContentDataEntity contentDataEntity = createContentDataEntity(contentUrlId, mimetypeId, encodingId, localeId); - // Done 
- return contentDataEntity; - } - - /** + * Translates this instance into an externally-usable ContentData instance. + */ + private ContentData makeContentData(ContentDataEntity contentDataEntity) + { + // Decode content URL + String contentUrl = contentDataEntity.getContentUrl(); + long size = contentDataEntity.getSize() == null ? 0L : contentDataEntity.getSize().longValue(); + // Decode mimetype + Long mimetypeId = contentDataEntity.getMimetypeId(); + String mimetype = null; + if (mimetypeId != null) + { + mimetype = mimetypeDAO.getMimetype(mimetypeId).getSecond(); + } + // Decode encoding + Long encodingId = contentDataEntity.getEncodingId(); + String encoding = null; + if (encodingId != null) + { + encoding = encodingDAO.getEncoding(encodingId).getSecond(); + } + // Decode locale + Long localeId = contentDataEntity.getLocaleId(); + Locale locale = null; + if (localeId != null) + { + locale = localeDAO.getLocalePair(localeId).getSecond(); + } + // Build the ContentData + ContentData contentData = new ContentData(contentUrl, mimetype, size, encoding, locale); + // Done + return contentData; + } + + /** + * Translates the {@link ContentData} into persistable values using the helper DAOs + */ + private ContentDataEntity createContentDataEntity(ContentData contentData) + { + // Resolve the content URL + Long contentUrlId = null; + String contentUrl = contentData.getContentUrl(); + long size = contentData.getSize(); + if (contentUrl != null) + { + // We must find or create the ContentUrlEntity + contentUrlId = getOrCreateContentUrlEntity(contentUrl, size).getId(); + } + // Resolve the mimetype + Long mimetypeId = null; + String mimetype = contentData.getMimetype(); + if (mimetype != null) + { + mimetypeId = mimetypeDAO.getOrCreateMimetype(mimetype).getFirst(); + } + // Resolve the encoding + Long encodingId = null; + String encoding = contentData.getEncoding(); + if (encoding != null) + { + encodingId = encodingDAO.getOrCreateEncoding(encoding).getFirst(); + } + // 
Resolve the locale + Long localeId = null; + Locale locale = contentData.getLocale(); + if (locale != null) + { + localeId = localeDAO.getOrCreateLocalePair(locale).getFirst(); + } + + // Create ContentDataEntity + ContentDataEntity contentDataEntity = createContentDataEntity(contentUrlId, mimetypeId, encodingId, localeId); + // Done + return contentDataEntity; + } + + /** * Translates the {@link ContentData} into persistable values using the helper DAOs */ private int updateContentDataEntity(ContentDataEntity contentDataEntity, ContentData contentData) @@ -387,83 +387,90 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO } /** - * Caching method that creates an entity for content_url_entity. - */ - private ContentUrlEntity getOrCreateContentUrlEntity(String contentUrl, long size) - { - // Create the content URL entity - ContentUrlEntity contentUrlEntity = getContentUrlEntity(contentUrl); - // If it exists, then we can just re-use it, but check that the size is consistent - if (contentUrlEntity != null) - { - // Reuse it - long existingSize = contentUrlEntity.getSize(); - if (size != existingSize) - { - logger.warn( - "Re-using Content URL, but size is mismatched: \n" + - " Inbound: " + contentUrl + "\n" + - " Existing: " + contentUrlEntity); - } - } - else - { - // Create it - contentUrlEntity = createContentUrlEntity(contentUrl, size); - } - // Done - return contentUrlEntity; - } - - /** - * @param contentUrl the content URL to create or search for - */ - protected abstract ContentUrlEntity createContentUrlEntity(String contentUrl, long size); - - /** - * @param id the ID of the content url entity - * @return Return the entity or null if it doesn't exist - */ - protected abstract ContentUrlEntity getContentUrlEntity(Long id); - - /** - * @param contentUrl the URL of the content url entity - * @return Return the entity or null if it doesn't exist - */ - protected abstract ContentUrlEntity getContentUrlEntity(String contentUrl); - - /** - 
* @param contentUrl the URL of the content url entity - * @return Return the entity or null if it doesn't exist or is still - * referenced by a content_data entity - */ - protected abstract ContentUrlEntity getContentUrlEntityUnreferenced(String contentUrl); - - /** + * Method to create (or get an existing) content URL. The URL will be unorphaned + * whether it has been created or is being re-used. + */ + private ContentUrlEntity getOrCreateContentUrlEntity(String contentUrl, long size) + { + // Create the content URL entity + ContentUrlEntity contentUrlEntity = getContentUrlEntity(contentUrl); + // If it exists, then we can just re-use it, but check that the size is consistent + if (contentUrlEntity != null) + { + // Reuse it + long existingSize = contentUrlEntity.getSize(); + if (size != existingSize) + { + logger.warn( + "Re-using Content URL, but size is mismatched: \n" + + " Inbound: " + contentUrl + "\n" + + " Existing: " + contentUrlEntity); + } + // Check orphan state + if (contentUrlEntity.getOrphanTime() != null) + { + Long id = contentUrlEntity.getId(); + updateContentUrlOrphanTime(id, null); + } + } + else + { + // Create it + contentUrlEntity = createContentUrlEntity(contentUrl, size); + } + // Done + return contentUrlEntity; + } + + /** + * @param contentUrl the content URL to create or search for + */ + protected abstract ContentUrlEntity createContentUrlEntity(String contentUrl, long size); + + /** + * @param id the ID of the content url entity + * @return Return the entity or null if it doesn't exist + */ + protected abstract ContentUrlEntity getContentUrlEntity(Long id); + + /** + * @param contentUrl the URL of the content url entity + * @return Return the entity or null if it doesn't exist + */ + protected abstract ContentUrlEntity getContentUrlEntity(String contentUrl); + + /** + * @param contentUrl the URL of the content url entity + * @return Return the entity or null if it doesn't exist or is still + * referenced by a content_data entity + */ 
+ protected abstract ContentUrlEntity getContentUrlEntityUnreferenced(String contentUrl); + + /** * Update a content URL with the given orphan time * * @param id the unique ID of the entity * @param orphanTime the time (ms since epoch) that the entity was orphaned * @return Returns the number of rows updated - */ - protected abstract int updateContentUrlOrphanTime(Long id, long orphanTime); - - /** - * Create the row for the alf_content_data - */ - protected abstract ContentDataEntity createContentDataEntity( - Long contentUrlId, - Long mimetypeId, - Long encodingId, - Long localeId); - - /** - * @param id the entity ID - * @return Returns the entity or null if it doesn't exist - */ - protected abstract ContentDataEntity getContentDataEntity(Long id); - - /** + */ + protected abstract int updateContentUrlOrphanTime(Long id, Long orphanTime); + + /** + * Create the row for the alf_content_data + */ + protected abstract ContentDataEntity createContentDataEntity( + Long contentUrlId, + Long mimetypeId, + Long encodingId, + Long localeId); + + /** + * @param id the entity ID + * @return Returns the entity or null if it doesn't exist + */ + protected abstract ContentDataEntity getContentDataEntity(Long id); + + /** * Update an existing alf_content_data entity * * @param entity the existing entity that will be updated @@ -472,44 +479,44 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO protected abstract int updateContentDataEntity(ContentDataEntity entity); /** - * Delete the entity with the given ID - * - * @return Returns the number of rows deleted - */ - protected abstract int deleteContentDataEntity(Long id); - - /** - * Transactional listener that deletes unreferenced content_url entities. 
- * - * @author Derek Hulley - */ - public class ContentUrlDeleteTransactionListener extends TransactionListenerAdapter - { - @Override - public void beforeCommit(boolean readOnly) - { - // Ignore read-only - if (readOnly) - { - return; - } - Set contentUrls = TransactionalResourceHelper.getSet(KEY_PRE_COMMIT_CONTENT_URL_DELETIONS); + * Delete the entity with the given ID + * + * @return Returns the number of rows deleted + */ + protected abstract int deleteContentDataEntity(Long id); + + /** + * Transactional listener that deletes unreferenced content_url entities. + * + * @author Derek Hulley + */ + public class ContentUrlDeleteTransactionListener extends TransactionListenerAdapter + { + @Override + public void beforeCommit(boolean readOnly) + { + // Ignore read-only + if (readOnly) + { + return; + } + Set contentUrls = TransactionalResourceHelper.getSet(KEY_PRE_COMMIT_CONTENT_URL_DELETIONS); long orphanTime = System.currentTimeMillis(); - for (String contentUrl : contentUrls) - { - ContentUrlEntity contentUrlEntity = getContentUrlEntityUnreferenced(contentUrl); - if (contentUrlEntity == null) - { - // It is still referenced, so ignore it - continue; - } + for (String contentUrl : contentUrls) + { + ContentUrlEntity contentUrlEntity = getContentUrlEntityUnreferenced(contentUrl); + if (contentUrlEntity == null) + { + // It is still referenced, so ignore it + continue; + } // We mark the URL as orphaned. 
- Long contentUrlId = contentUrlEntity.getId(); + Long contentUrlId = contentUrlEntity.getId(); updateContentUrlOrphanTime(contentUrlId, orphanTime); - // Pop this in the queue for deletion from the content store - contentStoreCleaner.registerOrphanedContentUrl(contentUrl); - } - contentUrls.clear(); - } - } -} + // Pop this in the queue for deletion from the content store + contentStoreCleaner.registerOrphanedContentUrl(contentUrl); + } + contentUrls.clear(); + } + } +} diff --git a/source/java/org/alfresco/repo/domain/contentdata/ContentDataDAO.java b/source/java/org/alfresco/repo/domain/contentdata/ContentDataDAO.java index fb900fed22..4d553b1df6 100644 --- a/source/java/org/alfresco/repo/domain/contentdata/ContentDataDAO.java +++ b/source/java/org/alfresco/repo/domain/contentdata/ContentDataDAO.java @@ -1,4 +1,4 @@ -/* +/* * Copyright (C) 2005-2010 Alfresco Software Limited. * * This file is part of Alfresco @@ -14,33 +14,34 @@ * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.contentdata; - + * along with Alfresco. If not, see . + */ +package org.alfresco.repo.domain.contentdata; + import java.util.List; -import java.util.Set; - -import org.alfresco.error.AlfrescoRuntimeException; -import org.alfresco.service.cmr.repository.ContentData; -import org.springframework.extensions.surf.util.Pair; -import org.springframework.dao.ConcurrencyFailureException; - -/** - * DAO services for alf_content_data table - * - * @author Derek Hulley - * @since 3.2 - */ -public interface ContentDataDAO -{ - /** - * Create a new ContentData instance. 
- * - * @param contentData the ContentData details - * @return the ContentData pair (id, ContentData) (never null) - */ - Pair createContentData(ContentData contentData); +import java.util.Set; + +import org.alfresco.error.AlfrescoRuntimeException; +import org.alfresco.service.cmr.repository.ContentData; +import org.springframework.extensions.surf.util.Pair; +import org.springframework.dao.ConcurrencyFailureException; +import org.springframework.dao.DataIntegrityViolationException; + +/** + * DAO services for alf_content_data table + * + * @author Derek Hulley + * @since 3.2 + */ +public interface ContentDataDAO +{ + /** + * Create a new ContentData instance. + * + * @param contentData the ContentData details + * @return the ContentData pair (id, ContentData) (never null) + */ + Pair createContentData(ContentData contentData); /** * Update a content data instance @@ -49,46 +50,55 @@ public interface ContentDataDAO * @param contentData the new data */ void updateContentData(Long id, ContentData contentData); - - /** - * @param id the unique ID of the entity - * @return the ContentData pair (id, ContentData) or null if it doesn't exist - * @throws AlfrescoRuntimeException if the ID provided is invalid - */ - Pair getContentData(Long id); - - /** - * Delete an instance of content. 
- * @param id the unique ID of the entity - * @throws ConcurrencyFailureException if the ID does not exist - */ - void deleteContentData(Long id); - - /** - * Deletes all alf_content_data rows that are referenced by the given node - * - * @param nodeId the node ID - * @param qnameIds the content properties to target - */ - void deleteContentDataForNode(Long nodeId, Set qnameIds); - - /** - * Interface for callbacks during content URL enumeration - * - * @author Derek Hulley - * @since 3.2 - */ - public static interface ContentUrlHandler - { + + /** + * Creates an immediately-orphaned content URL, if possible + * + * @param contentUrl the URL to create if it doesn't exist + * @return Returns the ID-URL pair + * @throws DataIntegrityViolationException if the URL already exists + */ + Pair createContentUrlOrphaned(String contentUrl); + + /** + * @param id the unique ID of the entity + * @return the ContentData pair (id, ContentData) or null if it doesn't exist + * @throws AlfrescoRuntimeException if the ID provided is invalid + */ + Pair getContentData(Long id); + + /** + * Delete an instance of content. 
+ * @param id the unique ID of the entity + * @throws ConcurrencyFailureException if the ID does not exist + */ + void deleteContentData(Long id); + + /** + * Deletes all alf_content_data rows that are referenced by the given node + * + * @param nodeId the node ID + * @param qnameIds the content properties to target + */ + void deleteContentDataForNode(Long nodeId, Set qnameIds); + + /** + * Interface for callbacks during content URL enumeration + * + * @author Derek Hulley + * @since 3.2 + */ + public static interface ContentUrlHandler + { void handle(Long id, String contentUrl, Long orphanTime); - } - - /** + } + + /** * Enumerate all available content URLs that were orphaned on or before the given time - * + * * @param contentUrlHandler the callback object to process the rows * @param maxOrphanTime the maximum orphan time - */ + */ void getContentUrlsOrphaned(ContentUrlHandler contentUrlHandler, long maxOrphanTime); /** @@ -104,4 +114,4 @@ public interface ContentDataDAO * Delete a batch of content URL entities. 
*/ int deleteContentUrls(List ids); -} +} diff --git a/source/java/org/alfresco/repo/domain/contentdata/ibatis/ContentDataDAOImpl.java b/source/java/org/alfresco/repo/domain/contentdata/ibatis/ContentDataDAOImpl.java index 54d1d636e7..123760c9c2 100644 --- a/source/java/org/alfresco/repo/domain/contentdata/ibatis/ContentDataDAOImpl.java +++ b/source/java/org/alfresco/repo/domain/contentdata/ibatis/ContentDataDAOImpl.java @@ -18,16 +18,21 @@ */ package org.alfresco.repo.domain.contentdata.ibatis; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import org.alfresco.error.AlfrescoRuntimeException; +import org.alfresco.ibatis.IdsEntity; import org.alfresco.repo.domain.contentdata.AbstractContentDataDAOImpl; import org.alfresco.repo.domain.contentdata.ContentDataEntity; import org.alfresco.repo.domain.contentdata.ContentUrlEntity; import org.alfresco.service.cmr.repository.ContentData; +import org.springframework.dao.ConcurrencyFailureException; import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.extensions.surf.util.Pair; import org.springframework.orm.ibatis.SqlMapClientTemplate; import com.ibatis.sqlmap.client.event.RowHandler; @@ -60,6 +65,17 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl this.template = sqlMapClientTemplate; } + public Pair createContentUrlOrphaned(String contentUrl) + { + ContentUrlEntity contentUrlEntity = new ContentUrlEntity(); + contentUrlEntity.setContentUrl(contentUrl); + contentUrlEntity.setSize(0L); + contentUrlEntity.setOrphanTime(System.currentTimeMillis()); + Long id = (Long) template.insert(INSERT_CONTENT_URL, contentUrlEntity); + // Done + return new Pair(id, contentUrl); + } + @Override protected ContentUrlEntity createContentUrlEntity(String contentUrl, long size) { @@ -135,7 +151,7 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl } } - public int 
updateContentUrlOrphanTime(Long id, long orphanTime) + public int updateContentUrlOrphanTime(Long id, Long orphanTime) { ContentUrlEntity contentUrlEntity = new ContentUrlEntity(); contentUrlEntity.setId(id); @@ -178,7 +194,14 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl contentDataEntity.setMimetypeId(mimetypeId); contentDataEntity.setEncodingId(encodingId); contentDataEntity.setLocaleId(localeId); - template.insert(INSERT_CONTENT_DATA, contentDataEntity); + try + { + template.insert(INSERT_CONTENT_DATA, contentDataEntity); + } + catch (Throwable e) + { + throw new AlfrescoRuntimeException("Failed to insert ContentData: " + contentDataEntity, e); + } // Done return contentDataEntity; } @@ -226,23 +249,30 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl public void deleteContentDataForNode(Long nodeId, Set qnameIds) { - /* - * TODO: use IN clause in parameters - */ - for (Long qnameId : qnameIds) + if (qnameIds.size() == 0) { - // Get the ContentData that matches (may be multiple due to collection properties) - Map params = new HashMap(11); - params.put("nodeId", nodeId); - params.put("qnameId", qnameId); - @SuppressWarnings("unchecked") - List ids = (List) template.queryForList(SELECT_CONTENT_DATA_BY_NODE_AND_QNAME, params); - // Delete each one - for (Long id : ids) + // There will be no results + return; + } + IdsEntity idsEntity = new IdsEntity(); + idsEntity.setIdOne(nodeId); + idsEntity.setIds(new ArrayList(qnameIds)); + @SuppressWarnings("unchecked") + List ids = (List) template.queryForList(SELECT_CONTENT_DATA_BY_NODE_AND_QNAME, idsEntity); + // Delete each one + for (Long id : ids) + { + try { // Delete the ContentData entity deleteContentData(id); } + catch (ConcurrencyFailureException e) + { + // The DB may return results even though the row has just been + // deleted. Since we are deleting the row, it doesn't matter + // if it is deleted here or not. 
+ } } } } diff --git a/source/java/org/alfresco/repo/domain/patch/AbstractPatchDAOImpl.java b/source/java/org/alfresco/repo/domain/patch/AbstractPatchDAOImpl.java index 8c9343066f..1c6c0d89de 100644 --- a/source/java/org/alfresco/repo/domain/patch/AbstractPatchDAOImpl.java +++ b/source/java/org/alfresco/repo/domain/patch/AbstractPatchDAOImpl.java @@ -19,8 +19,12 @@ package org.alfresco.repo.domain.patch; import java.util.List; +import java.util.Map; +import org.alfresco.ibatis.BatchingDAO; import org.alfresco.repo.domain.avm.AVMNodeEntity; +import org.alfresco.repo.domain.contentdata.ContentDataDAO; +import org.alfresco.service.cmr.repository.ContentData; /** @@ -31,8 +35,25 @@ import org.alfresco.repo.domain.avm.AVMNodeEntity; * @author janv * @since 3.2 */ -public abstract class AbstractPatchDAOImpl implements PatchDAO +public abstract class AbstractPatchDAOImpl implements PatchDAO, BatchingDAO { + private ContentDataDAO contentDataDAO; + + protected AbstractPatchDAOImpl() + { + } + + /** + * Set the DAO that supplies {@link ContentData} IDs + */ + public void setContentDataDAO(ContentDataDAO contentDataDAO) + { + this.contentDataDAO = contentDataDAO; + } + + /** + * {@inheritDoc} + */ public Long getAVMNodesCountWhereNewInStore() { return getAVMNodeEntitiesCountWhereNewInStore(); @@ -63,4 +84,91 @@ public abstract class AbstractPatchDAOImpl implements PatchDAO protected abstract List getNullVersionLayeredDirectoryNodeEntities(); protected abstract List getNullVersionLayeredFileNodeEntities(); + + /** + * {@inheritDoc} + *

+ * @see #getAdmOldContentProperties(Long, Long) + */ + public void updateAdmV31ContentProperties(Long minNodeId, Long maxNodeId) + { + List> props = getAdmOldContentProperties(minNodeId, maxNodeId); + + // Do a first pass to create the ContentData IDs + for (Map prop : props) + { + String stringValue = (String) prop.get("stringValue"); + + try + { + ContentData contentData = ContentData.createContentProperty(stringValue); + Long contentDataId = contentDataDAO.createContentData(contentData).getFirst(); + prop.put("contentDataId", contentDataId); + } + catch (Throwable e) + { + // We don't care about this too much as it'll just leak a binary + } + } + + // Now do the updates in the context of a batch + try + { + // Run using a batch + startBatch(); + + for (Map prop : props) + { + Long nodeId = (Long) prop.get("nodeId"); + Long qnameId = (Long) prop.get("qnameId"); + Integer listIndex = (Integer) prop.get("listIndex"); + Long localeId = (Long) prop.get("localeId"); + Long contentDataId = (Long) prop.get("contentDataId"); + if (contentDataId == null) + { + // There was a problem with this + continue; + } + // Update + updateAdmOldContentProperty(nodeId, qnameId, listIndex, localeId, contentDataId); + } + } + finally + { + executeBatch(); + } + } + + /** + * Results are of the form: + *

+     *      nodeId: java.lang.Long
+     *      qnameId: java.lang.Long
+     *      listIndex: java.lang.Integer
+     *      localeId: java.lang.Long
+     *      stringValue: java.lang.String
+     * 
+ * + * + * @param minNodeId inclusive lower bound for Node ID + * @param maxNodeId exclusive upper bound for Node ID + * @return Returns a map of query results + */ + protected abstract List> getAdmOldContentProperties(Long minNodeId, Long maxNodeId); + + /** + * + * @param nodeId part of the unique key + * @param qnameId part of the unique key + * @param listIndex part of the unique key + * @param localeId part of the unique key + * @param longValue the new ContentData ID + * @return Returns the row update count + */ + protected abstract void updateAdmOldContentProperty( + Long nodeId, + Long qnameId, + Integer listIndex, + Long localeId, + Long longValue); } diff --git a/source/java/org/alfresco/repo/domain/patch/PatchDAO.java b/source/java/org/alfresco/repo/domain/patch/PatchDAO.java index 604b751039..d9e8f86545 100644 --- a/source/java/org/alfresco/repo/domain/patch/PatchDAO.java +++ b/source/java/org/alfresco/repo/domain/patch/PatchDAO.java @@ -21,12 +21,14 @@ package org.alfresco.repo.domain.patch; import java.util.List; import org.alfresco.repo.domain.avm.AVMNodeEntity; - +import org.alfresco.repo.domain.contentdata.ContentDataDAO; +import org.alfresco.service.cmr.repository.ContentData; /** * Additional DAO services for patches * * @author janv + * @author Derek Hulley * @since 3.2 */ public interface PatchDAO @@ -40,4 +42,21 @@ public interface PatchDAO public List getNullVersionLayeredDirectories(int count); public List getNullVersionLayeredFiles(int count); + + public Long getMaxAvmNodeID(); + + public List getAvmNodesWithOldContentProperties(Long minNodeId, Long maxNodeId); + + // DM-related + + public Long getMaxAdmNodeID(); + + /** + * Migrates DM content properties from the old V3.1 format (String-based {@link ContentData#toString()}) + * to the new V3.2 format (ID based storage using {@link ContentDataDAO}). 
+ * + * @param minNodeId the inclusive node ID to limit the updates to + * @param maxNodeId the exclusive node ID to limit the updates to + */ + public void updateAdmV31ContentProperties(Long minNodeId, Long maxNodeId); } diff --git a/source/java/org/alfresco/repo/domain/patch/ibatis/PatchDAOImpl.java b/source/java/org/alfresco/repo/domain/patch/ibatis/PatchDAOImpl.java index fd5ef55d41..aa4d6d1c68 100644 --- a/source/java/org/alfresco/repo/domain/patch/ibatis/PatchDAOImpl.java +++ b/source/java/org/alfresco/repo/domain/patch/ibatis/PatchDAOImpl.java @@ -18,8 +18,12 @@ */ package org.alfresco.repo.domain.patch.ibatis; +import java.sql.SQLException; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import org.alfresco.ibatis.IdsEntity; import org.alfresco.repo.domain.avm.AVMNodeEntity; import org.alfresco.repo.domain.patch.AbstractPatchDAOImpl; import org.springframework.orm.ibatis.SqlMapClientTemplate; @@ -36,6 +40,11 @@ public class PatchDAOImpl extends AbstractPatchDAOImpl private static final String SELECT_AVM_NODE_ENTITIES_WITH_EMPTY_GUID = "alfresco.avm.select_AVMNodesWithEmptyGUID"; private static final String SELECT_AVM_LD_NODE_ENTITIES_NULL_VERSION = "alfresco.avm.select_AVMNodes_nullVersionLayeredDirectories"; private static final String SELECT_AVM_LF_NODE_ENTITIES_NULL_VERSION = "alfresco.avm.select_AVMNodes_nullVersionLayeredFiles"; + private static final String SELECT_AVM_MAX_NODE_ID = "alfresco.patch.select_avmMaxNodeId"; + private static final String SELECT_ADM_MAX_NODE_ID = "alfresco.patch.select_admMaxNodeId"; + private static final String SELECT_AVM_NODES_WITH_OLD_CONTENT_PROPERTIES = "alfresco.patch.select_avmNodesWithOldContentProperties"; + private static final String SELECT_ADM_OLD_CONTENT_PROPERTIES = "alfresco.patch.select_admOldContentProperties"; + private static final String UPDATE_ADM_OLD_CONTENT_PROPERTY = "alfresco.patch.update_admOldContentProperty"; private SqlMapClientTemplate template; @@ -44,6 +53,30 @@ 
public class PatchDAOImpl extends AbstractPatchDAOImpl this.template = sqlMapClientTemplate; } + public void startBatch() + { + try + { + template.getSqlMapClient().startBatch(); + } + catch (SQLException e) + { + throw new RuntimeException("Failed to start batch", e); + } + } + + public void executeBatch() + { + try + { + template.getSqlMapClient().executeBatch(); + } + catch (SQLException e) + { + throw new RuntimeException("Failed to execute batch", e); + } + } + @Override protected Long getAVMNodeEntitiesCountWhereNewInStore() { @@ -70,4 +103,45 @@ public class PatchDAOImpl extends AbstractPatchDAOImpl { return (List) template.queryForList(SELECT_AVM_LF_NODE_ENTITIES_NULL_VERSION); } + + public Long getMaxAvmNodeID() + { + return (Long) template.queryForObject(SELECT_AVM_MAX_NODE_ID); + } + + @SuppressWarnings("unchecked") + public List getAvmNodesWithOldContentProperties(Long minNodeId, Long maxNodeId) + { + IdsEntity ids = new IdsEntity(); + ids.setIdOne(minNodeId); + ids.setIdTwo(maxNodeId); + return (List) template.queryForList(SELECT_AVM_NODES_WITH_OLD_CONTENT_PROPERTIES, ids); + } + + public Long getMaxAdmNodeID() + { + return (Long) template.queryForObject(SELECT_ADM_MAX_NODE_ID); + } + + @SuppressWarnings("unchecked") + @Override + protected List> getAdmOldContentProperties(Long minNodeId, Long maxNodeId) + { + IdsEntity ids = new IdsEntity(); + ids.setIdOne(minNodeId); + ids.setIdTwo(maxNodeId); + return (List>) template.queryForList(SELECT_ADM_OLD_CONTENT_PROPERTIES, ids); + } + + @Override + protected void updateAdmOldContentProperty(Long nodeId, Long qnameId, Integer listIndex, Long localeId, Long longValue) + { + Map params = new HashMap(11); + params.put("nodeId", nodeId); + params.put("qnameId", qnameId); + params.put("listIndex", listIndex); + params.put("localeId", localeId); + params.put("longValue", longValue); + template.update(UPDATE_ADM_OLD_CONTENT_PROPERTY, params); + } } diff --git 
a/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java b/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java index 4619feb749..af8b16aba5 100644 --- a/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java +++ b/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java @@ -46,6 +46,7 @@ import org.alfresco.model.ContentModel; import org.alfresco.repo.cache.SimpleCache; import org.alfresco.repo.domain.AuditableProperties; import org.alfresco.repo.domain.ChildAssoc; +import org.alfresco.repo.domain.ContentDataId; import org.alfresco.repo.domain.DbAccessControlList; import org.alfresco.repo.domain.LocaleDAO; import org.alfresco.repo.domain.Node; @@ -4986,11 +4987,13 @@ public class HibernateNodeDaoServiceImpl " Value: " + value); } // Handle ContentData - if (value instanceof ContentData && propertyTypeQName.equals(DataTypeDefinition.CONTENT)) + // We used to check the property type, but we now handle d:any ContentData as well + if (value instanceof ContentData) { // Needs converting to an ID ContentData contentData = (ContentData) value; - value = contentDataDAO.createContentData(contentData).getFirst(); + Long contentDataId = contentDataDAO.createContentData(contentData).getFirst(); + value = new ContentDataId(contentDataId); } // Handle MLText if (value instanceof MLText) @@ -5374,8 +5377,24 @@ public class HibernateNodeDaoServiceImpl { Serializable value = propertyValue.getValue(propertyTypeQName); // Handle conversions to and from ContentData - if (propertyTypeQName.equals(DataTypeDefinition.CONTENT) && (value instanceof Long)) + if (value instanceof ContentDataId) { + // ContentData used to be persisted + Long contentDataId = ((ContentDataId) value).getId(); + Pair contentDataPair = contentDataDAO.getContentData(contentDataId); + if (contentDataPair == null) + { + // It is invalid + value = null; + } + else + { + value = contentDataPair.getSecond(); + } + } 
+ else if (propertyTypeQName.equals(DataTypeDefinition.CONTENT) && (value instanceof Long)) + { + // ContentData used to be persisted Pair contentDataPair = contentDataDAO.getContentData((Long)value); if (contentDataPair == null) { diff --git a/source/java/org/alfresco/repo/node/index/AVMFullIndexRecoveryComponent.java b/source/java/org/alfresco/repo/node/index/AVMFullIndexRecoveryComponent.java index 6805bd3e31..af884ccc77 100644 --- a/source/java/org/alfresco/repo/node/index/AVMFullIndexRecoveryComponent.java +++ b/source/java/org/alfresco/repo/node/index/AVMFullIndexRecoveryComponent.java @@ -289,24 +289,22 @@ public class AVMFullIndexRecoveryComponent extends AbstractReindexComponent logger.info(" Rebuilding index for " + store); } - if (!avmSnapShotTriggeredIndexingMethodInterceptor.hasIndexBeenCreated(store)) - { - avmSnapShotTriggeredIndexingMethodInterceptor.createIndex(store); - } - final int latest = avmService.getLatestSnapshotID(store); if (latest <= 0) { + if (!avmSnapShotTriggeredIndexingMethodInterceptor.hasIndexBeenCreated(store)) + { + avmSnapShotTriggeredIndexingMethodInterceptor.createIndex(store); + } return; } - + final int latestIndexed = avmSnapShotTriggeredIndexingMethodInterceptor.getLastIndexedSnapshot(store); RetryingTransactionCallback reindexWork = new RetryingTransactionCallback() { public Object execute() throws Exception { - if (mode == RecoveryMode.AUTO) { logger.info(" Rebuilding index for snapshots " + latestIndexed +" to "+latest); diff --git a/source/java/org/alfresco/repo/security/sync/ChainingUserRegistrySynchronizer.java b/source/java/org/alfresco/repo/security/sync/ChainingUserRegistrySynchronizer.java index 59bb492bf8..a8af8b841e 100644 --- a/source/java/org/alfresco/repo/security/sync/ChainingUserRegistrySynchronizer.java +++ b/source/java/org/alfresco/repo/security/sync/ChainingUserRegistrySynchronizer.java @@ -38,7 +38,7 @@ import org.alfresco.repo.attributes.Attribute; import 
org.alfresco.repo.attributes.LongAttributeValue; import org.alfresco.repo.attributes.MapAttributeValue; import org.alfresco.repo.batch.BatchProcessor; -import org.alfresco.repo.batch.BatchProcessor.Worker; +import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorker; import org.alfresco.repo.lock.JobLockService; import org.alfresco.repo.lock.LockAcquisitionException; import org.alfresco.repo.management.subsystems.ActivateableBean; @@ -538,10 +538,14 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl // First, analyze the group structure. Create maps of authorities to their parents for associations to create // and delete. Also deal with 'overlaps' with other zones in the authentication chain. final BatchProcessor groupProcessor = new BatchProcessor( - ChainingUserRegistrySynchronizer.logger, this.retryingTransactionHelper, this.ruleService, - this.applicationEventPublisher, userRegistry.getGroups(lastModified), zone + " Group Analysis", - this.loggingInterval, this.workerThreads, 20); - class Analyzer implements Worker + zone + " Group Analysis", + this.retryingTransactionHelper, + userRegistry.getGroups(lastModified), + this.workerThreads, 20, + this.applicationEventPublisher, + ChainingUserRegistrySynchronizer.logger, + this.loggingInterval); + class Analyzer implements BatchProcessWorker { private final Set allZoneAuthorities = new TreeSet(); private final Map groupsToCreate = new TreeMap(); @@ -579,19 +583,27 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl return this.groupAssocsToDelete; } - /* - * (non-Javadoc) - * @see org.alfresco.repo.security.sync.BatchProcessor.Worker#getIdentifier(java.lang.Object) - */ public String getIdentifier(NodeDescription entry) { return entry.getSourceId(); } - /* - * (non-Javadoc) - * @see org.alfresco.repo.security.sync.BatchProcessor.Worker#process(java.lang.Object) - */ + public void beforeProcess() throws Throwable + { + // Disable rules + 
ruleService.disableRules(); + // Authentication + AuthenticationUtil.setRunAsUser(AuthenticationUtil.getSystemUserName()); + } + + public void afterProcess() throws Throwable + { + // Enable rules + ruleService.enableRules(); + // Clear authentication + AuthenticationUtil.clearCurrentSecurityContext(); + } + public void process(NodeDescription group) throws Throwable { PropertyMap groupProperties = group.getProperties(); @@ -801,17 +813,36 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl // Add the groups and their parent associations in depth-first order final Map groupsToCreate = groupAnalyzer.getGroupsToCreate(); BatchProcessor>> groupCreator = new BatchProcessor>>( - ChainingUserRegistrySynchronizer.logger, this.retryingTransactionHelper, this.ruleService, - this.applicationEventPublisher, sortedGroupAssociations.entrySet(), zone - + " Group Creation and Association", this.loggingInterval, this.workerThreads, 20); - groupCreator.process(new Worker>>() + zone + " Group Creation and Association", + this.retryingTransactionHelper, + sortedGroupAssociations.entrySet(), + this.workerThreads, 20, + this.applicationEventPublisher, + ChainingUserRegistrySynchronizer.logger, + this.loggingInterval); + groupCreator.process(new BatchProcessWorker>>() { - public String getIdentifier(Map.Entry> entry) { return entry.getKey() + " " + entry.getValue(); } + public void beforeProcess() throws Throwable + { + // Disable rules + ruleService.disableRules(); + // Authentication + AuthenticationUtil.setRunAsUser(AuthenticationUtil.getSystemUserName()); + } + + public void afterProcess() throws Throwable + { + // Enable rules + ruleService.enableRules(); + // Clear authentication + AuthenticationUtil.clearCurrentSecurityContext(); + } + public void process(Map.Entry> entry) throws Throwable { Set parents = entry.getValue(); @@ -896,10 +927,14 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl } } final BatchProcessor 
personProcessor = new BatchProcessor( - ChainingUserRegistrySynchronizer.logger, this.retryingTransactionHelper, this.ruleService, - this.applicationEventPublisher, userRegistry.getPersons(lastModified), zone - + " User Creation and Association", this.loggingInterval, this.workerThreads, 10); - class PersonWorker implements Worker + zone + " User Creation and Association", + this.retryingTransactionHelper, + userRegistry.getPersons(lastModified), + this.workerThreads, 10, + this.applicationEventPublisher, + ChainingUserRegistrySynchronizer.logger, + this.loggingInterval); + class PersonWorker implements BatchProcessWorker { private long latestTime; @@ -918,6 +953,22 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl return entry.getSourceId(); } + public void beforeProcess() throws Throwable + { + // Disable rules + ruleService.disableRules(); + // Authentication + AuthenticationUtil.setRunAsUser(AuthenticationUtil.getSystemUserName()); + } + + public void afterProcess() throws Throwable + { + // Enable rules + ruleService.enableRules(); + // Clear authentication + AuthenticationUtil.clearCurrentSecurityContext(); + } + public void process(NodeDescription person) throws Throwable { PropertyMap personProperties = person.getProperties(); @@ -1055,10 +1106,14 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl if (allowDeletions) { BatchProcessor authorityDeletionProcessor = new BatchProcessor( - ChainingUserRegistrySynchronizer.logger, this.retryingTransactionHelper, this.ruleService, - this.applicationEventPublisher, deletionCandidates, zone + " Authority Deletion", - this.loggingInterval, this.workerThreads, 10); - class AuthorityDeleter implements Worker + zone + " Authority Deletion", + this.retryingTransactionHelper, + deletionCandidates, + this.workerThreads, 10, + this.applicationEventPublisher, + ChainingUserRegistrySynchronizer.logger, + this.loggingInterval); + class AuthorityDeleter 
implements BatchProcessWorker { private int personProcessedCount; private int groupProcessedCount; @@ -1078,6 +1133,22 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl return entry; } + public void beforeProcess() throws Throwable + { + // Disable rules + ruleService.disableRules(); + // Authentication + AuthenticationUtil.setRunAsUser(AuthenticationUtil.getSystemUserName()); + } + + public void afterProcess() throws Throwable + { + // Enable rules + ruleService.enableRules(); + // Clear authentication + AuthenticationUtil.clearCurrentSecurityContext(); + } + public void process(String authority) throws Throwable { if (AuthorityType.getAuthorityType(authority) == AuthorityType.USER) diff --git a/source/java/org/alfresco/util/TraceableThreadFactory.java b/source/java/org/alfresco/util/TraceableThreadFactory.java index d0e0d8b3d9..f0fde3372b 100644 --- a/source/java/org/alfresco/util/TraceableThreadFactory.java +++ b/source/java/org/alfresco/util/TraceableThreadFactory.java @@ -61,6 +61,9 @@ public class TraceableThreadFactory implements ThreadFactory this.namePrefix = "TraceableThread-" + factoryNumber.getAndIncrement() + "-thread-"; this.threadNumber = new AtomicInteger(1); + + this.threadDaemon = true; + this.threadPriority = Thread.NORM_PRIORITY; } /**