Merged BRANCHES/V3.2 to HEAD:

18363: WCM clustering - ETHREEOH-3962 (duplicate root node entry)
   19091: Fix Part 1 ALF-726: v3.1.x Content Cleaner Job needs to be ported to v3.2
   19159: Fixed ALF-726: Migrate pre-3.2 content URLs to new format and pick up and tag existing orphaned content
   19169: Fix fallout from 19159 for ALF-726: Migrate pre-3.2 content URLs to new format and pick up and tag existing orphaned content
   19262: ALF-726 Multithreading for content URL conversion



git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@19267 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Derek Hulley
2010-03-12 19:11:12 +00:00
parent a2c2e215a8
commit fdc8f6f331
33 changed files with 2589 additions and 1175 deletions

View File

@@ -37,7 +37,6 @@
         </constructor-arg>
     </bean>
 
-    <!-- Abstract bean definition defining base definition for content store cleaner -->
     <!-- Performs the content cleanup -->
     <bean id="contentStoreCleaner" class="org.alfresco.repo.content.cleanup.ContentStoreCleaner" init-method="init">
         <property name="protectDays" >

View File

@@ -17,6 +17,7 @@
     <bean id="patchDAO" class="org.alfresco.repo.domain.patch.ibatis.PatchDAOImpl">
         <property name="sqlMapClientTemplate" ref="repoSqlMapClientTemplate"/>
+        <property name="contentDataDAO" ref="contentDataDAO"/>
     </bean>
 
     <bean id="appliedPatchDAO" class="org.alfresco.repo.domain.patch.ibatis.AppliedPatchDAOImpl">

View File

@@ -11,6 +11,7 @@
     <typeHandler javaType="java.io.Serializable" jdbcType="BLOB" callback="org.alfresco.ibatis.SerializableTypeHandlerCallback"/>
 
     <sqlMap resource="alfresco/ibatis/#resource.dialect#/appliedpatch-common-SqlMap.xml"/>
+    <sqlMap resource="alfresco/ibatis/#resource.dialect#/patch-common-SqlMap.xml"/>
     <sqlMap resource="alfresco/ibatis/#resource.dialect#/qname-common-SqlMap.xml"/>
     <sqlMap resource="alfresco/ibatis/#resource.dialect#/qname-insert-SqlMap.xml"/>
     <sqlMap resource="alfresco/ibatis/#resource.dialect#/locks-common-SqlMap.xml"/>

View File

@@ -15,6 +15,7 @@
     <typeAlias alias="ContentUrl" type="org.alfresco.repo.domain.contentdata.ContentUrlEntity"/>
     <typeAlias alias="ContentData" type="org.alfresco.repo.domain.contentdata.ContentDataEntity"/>
     <typeAlias alias="ContentClean" type="org.alfresco.repo.domain.contentclean.ContentCleanEntity"/>
+    <typeAlias alias="Ids" type="org.alfresco.ibatis.IdsEntity"/>
 
     <!--  -->
     <!-- Result Maps -->
@@ -191,7 +192,7 @@
         from
             alf_content_url cu
         where
-            cu.orphan_time <= #orphanTime#
+            cu.orphan_time <= #orphanTime# and cu.orphan_time is not null
         ]]>
     </select>
@@ -224,7 +225,7 @@
         from
             alf_content_url
         where
-            orphan_time <= #orphanTime#
+            orphan_time <= #orphanTime# and orphan_time is not null
         ]]>
     </delete>
@@ -247,16 +248,15 @@
     </select>
 
     <!-- Get the ContentData entity by Node and property QName -->
-    <select id="select_ContentDataByNodeAndQName" parameterMap="parameter_NodeAndQNameMap" resultClass="long">
+    <select id="select_ContentDataByNodeAndQName" parameterClass="Ids" resultClass="long">
         select
             np.long_value as id
         from
            alf_node_properties np
         where
-            np.node_id = ? and
-            np.qname_id = ? and
-            np.actual_type_n = 3 and
-            np.persisted_type_n = 3
+            np.node_id = #idOne# and
+            np.qname_id in <iterate property="ids" open="(" close=")" conjunction=",">#ids[]#</iterate> and
+            (np.actual_type_n = 3 or np.actual_type_n = 21)
     </select>
 
     <update id="update_ContentData" parameterClass="ContentData">
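
The reworked select_ContentDataByNodeAndQName statement now takes a single Ids parameter object in place of the old positional parameter map. As a rough illustration of the calling convention, a DAO holding a Spring SqlMapClientTemplate might drive it as below; the IdsEntity accessor names and the unqualified statement name are assumptions inferred from the #idOne#/#ids[]# properties above, not confirmed by this commit.

import java.util.List;

import org.alfresco.ibatis.IdsEntity;
import org.springframework.orm.ibatis.SqlMapClientTemplate;

public class ContentDataQuerySketch
{
    private SqlMapClientTemplate template; // injected, as for the other iBATIS DAOs

    @SuppressWarnings("unchecked")
    public List<Long> getContentDataIds(Long nodeId, List<Long> contentQNameIds)
    {
        IdsEntity ids = new IdsEntity();
        ids.setIdOne(nodeId);        // bound to #idOne# (the node ID)
        ids.setIds(contentQNameIds); // iterated as #ids[]# (the candidate property QName IDs)
        return (List<Long>) template.queryForList("select_ContentDataByNodeAndQName", ids);
    }
}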

View File

@@ -0,0 +1,106 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE sqlMap
PUBLIC "-//ibatis.apache.org//DTD SQL Map 2.0//EN"
"http://ibatis.apache.org/dtd/sql-map-2.dtd">
<sqlMap namespace="alfresco.patch">
<!-- -->
<!-- Type Defs -->
<!-- -->
<typeAlias alias="Ids" type="org.alfresco.ibatis.IdsEntity"/>
<!-- -->
<!-- Result Maps -->
<!-- -->
<resultMap class="java.util.HashMap" id="result_admOldContentProp">
<result property="nodeId" column="node_id" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="qnameId" column="qname_id" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="listIndex" column="list_index" jdbcType="INTEGER" javaType="java.lang.Integer"/>
<result property="localeId" column="locale_id" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="stringValue" column="string_value" jdbcType="VARCHAR" javaType="java.lang.String"/>
</resultMap>
<!-- -->
<!-- Parameter Maps -->
<!-- -->
<parameterMap class="map" id="parameter_admNewContentProp">
<parameter property="longValue" jdbcType="BIGINT" javaType="java.lang.Long"/>
<parameter property="nodeId" jdbcType="BIGINT" javaType="java.lang.Long"/>
<parameter property="qnameId" jdbcType="BIGINT" javaType="java.lang.Long"/>
<parameter property="listIndex" jdbcType="INTEGER" javaType="java.lang.Integer"/>
<parameter property="localeId" jdbcType="BIGINT" javaType="java.lang.Long"/>
</parameterMap>
<!-- -->
<!-- Selects -->
<!-- -->
<select id="select_avmMaxNodeId" resultClass="java.lang.Long">
select max(id) from avm_nodes
</select>
<select id="select_admMaxNodeId" resultClass="java.lang.Long">
select max(id) from alf_node
</select>
<select id="select_avmNodesWithOldContentProperties" parameterClass="Ids" resultClass="java.lang.Long">
<![CDATA[
select
id
from
avm_nodes
where
id >= #idOne#
and id < #idTwo#
and class_type = 'plainfile'
and content_url is not null
and content_url not like 'id:%'
order by
id ASC
]]>
</select>
<select id="select_admOldContentProperties" parameterClass="Ids" resultMap="result_admOldContentProp">
<![CDATA[
select
node_id,
qname_id,
list_index,
locale_id,
string_value
from
alf_node_properties
where
node_id >= #idOne#
and node_id < #idTwo#
and actual_type_n = 11
order by
node_id ASC
]]>
</select>
<!-- -->
<!-- Updates -->
<!-- -->
<update id="update_admOldContentProperty" parameterMap="parameter_admNewContentProp">
update
alf_node_properties
set
actual_type_n = 21,
persisted_type_n = 3,
long_value = ?,
string_value = null
where
node_id = ?
and qname_id = ?
and list_index = ?
and locale_id = ?
</update>
</sqlMap>
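
For orientation, these new statements are consumed by PatchDAOImpl (wired above with the contentDataDAO reference). A minimal sketch of how the positional update might be driven: the five "?" placeholders bind in the order declared by parameter_admNewContentProp, i.e. longValue, nodeId, qnameId, listIndex, localeId. The class, method, and variable names here are illustrative assumptions, not from this commit.

import java.util.HashMap;
import java.util.Map;

import org.springframework.orm.ibatis.SqlMapClientTemplate;

public class OldContentPropUpdateSketch
{
    private SqlMapClientTemplate template; // injected

    public int rewriteToContentDataId(Long contentDataId, Long nodeId, Long qnameId, Integer listIndex, Long localeId)
    {
        Map<String, Object> params = new HashMap<String, Object>();
        params.put("longValue", contentDataId); // new alf_content_data ID replaces the old string value
        params.put("nodeId", nodeId);
        params.put("qnameId", qnameId);
        params.put("listIndex", listIndex);
        params.put("localeId", localeId);
        return template.update("update_admOldContentProperty", params);
    }
}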

View File

@@ -3,7 +3,7 @@ patch.service.preceeded_by_alternative=Preceeded by alternative patch ''{0}''.
 patch.service.not_relevant=Not relevant to schema {0}
 patch.executer.checking=Checking for patches to apply ...
 patch.service.applying_patch=\tApplying patch ''{0}'' ({1}).
-patch.progress=\t\tPatch {0}% complete, estimated complete at {1}.
+patch.progress=\t\tPatch {0} {1}% complete, estimated complete at {2}.
 patch.validation.failed=Validation of patch ''{0}'' failed. Patch is applicable to a newer schema than the schema of this build ({1}).\nfixesToSchema: {2}\ntargetSchema: {3}.\nIf this patch should always be run once on every installation, please ensure that the ''fixesToSchema'' value is set to '''${version.schema}'''.
 patch.executer.no_patches_required=No patches were required.
 patch.executer.system_readonly=Patches cannot be applied to a read-only system. Possible incompatibilities may exist between the application code and the existing data.
@@ -11,6 +11,7 @@ patch.executer.not_executed =\n=== Recorded patch (not executed) === \nID: {0}\n
 patch.executer.executed =\n=== Applied patch === \nID: {0}\nRESULT: \n{1}\n=====================================
 patch.executer.failed =\n=== Failed to apply patch === \nID: {0}\nRESULT: \n{1}\n=====================================
 patch.noLongerSupportedPatch.err.use_incremental_upgrade = \nPatch ''{0}'' was last supported on version {1}.\n Please follow an incremental upgrade using version {2}.
 
 # General patch messages
 patch.genericBootstrap.result.exists=Bootstrap location already exists: {0}
@@ -19,6 +20,8 @@ patch.genericBootstrap.err.multiple_found=Multiple nodes found: {0}
 patch.general.property_not_set=Patch property ''{0}'' has not been set on this patch: {1}
+patch.QNamePatch.result=Successfully updated the ''{0}'' QName to ''{1}''.
 
 # Individual patch messages
 patch.noOpPatch.description=A placeholder patch; usually marks a superceded patch.
@@ -301,4 +304,20 @@ patch.redeployNominatedInvitationProcessWithPropsForShare.description=Redeploy n
 patch.redeployNominatedInvitationProcessWithPropsForShare.result=Nominated invitation workflow redeployed
 patch.thumbnailsAssocQName.description=Update the 'cm:thumbnails' association QName to 'rn:rendition'.
-patch.QNamePatch.result=Successfully updated the ''{0}'' QName to ''{1}''.
+patch.convertContentUrls.description=Converts pre-3.2 content URLs to use the alf_content_data table. The conversion work can also be done on a schedule; please contact Alfresco Support for further details.
+patch.convertContentUrls.bypassingPatch=Content URL conversion was NOT performed by this patch. Activate the scheduled job 'contentUrlConverterTrigger'.
+patch.convertContentUrls.start=Content URL conversion progress:
+patch.convertContentUrls.inProgress=Content URL conversion increment completed. Awaiting next scheduled call...
+patch.convertContentUrls.done=Content URL conversion completed.
+patch.convertContentUrls.adm.start=\tProcessing ADM Content URLs.
+patch.convertContentUrls.adm.done=\tFinished processing ADM nodes up to ID {0}.
+patch.convertContentUrls.avm.start=\tProcessing AVM Content URLs.
+patch.convertContentUrls.avm.done=\tFinished processing AVM nodes up to ID {0}.
+patch.convertContentUrls.store.start=\tReading content URLs from store {0}.
+patch.convertContentUrls.store.readOnly=\tNo content URLs will be marked for deletion. The content store is read-only.
+patch.convertContentUrls.store.pending=\tContent URLs will be marked for deletion once the URL conversion process is complete.
+patch.convertContentUrls.store.noSupport=\tNo content URLs will be marked for deletion. The store does not support URL enumeration.
+patch.convertContentUrls.store.progress=\t\tProcessed {0} content URLs from store.
+patch.convertContentUrls.store.scheduled=\tScheduled {0} content URLs for deletion from store: {1}
+patch.convertContentUrls.store.done=This job is complete. Deactivate the scheduled job 'contentUrlConverterTrigger'.
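
These messages are resolved through I18NUtil, as in the patch classes shown later in this commit. Note that patch.progress gained a leading argument, presumably the patch name, so callers now pass it first. A hedged one-liner showing the new argument order (the values are illustrative only):

import org.springframework.extensions.surf.util.I18NUtil;

public class PatchProgressMessageSketch
{
    public static void main(String[] args)
    {
        // {0} = patch name (new), {1} = percent complete, {2} = estimated completion time
        System.out.println(I18NUtil.getMessage("patch.progress", "patch.convertContentUrls", 42, "19:30"));
    }
}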

View File

@@ -56,8 +56,8 @@
             <default>0</default>
         </property>
         <property name="sys:versionEdition">
-            <type>d:any</type>
-            <multiple>true</multiple>
+            <type>d:content</type>
+            <multiple>false</multiple>
         </property>
         <property name="sys:versionProperties">
             <type>d:content</type>

View File

@@ -1957,6 +1957,7 @@
             <property name="fixesFromSchema"><value>0</value></property>
             <property name="fixesToSchema"><value>3006</value></property>
             <property name="targetSchema"><value>3007</value></property>
+            <property name="applyToTenants"><value>false</value></property>
             <property name="dependsOn" >
                 <list>
                     <ref bean="patch.uniqueChildName" />
@@ -1970,6 +1971,9 @@
             <property name="nodeDaoService">
                 <ref bean="nodeDaoService" />
             </property>
+            <property name="qnameDAO">
+                <ref bean="qnameDAO" />
+            </property>
             <property name="ruleService">
                 <ref bean="ruleService" />
             </property>
@@ -2100,5 +2104,41 @@
             </props>
         </property>
     </bean>
 
+    <bean id="patch.convertContentUrls" class="org.alfresco.repo.admin.patch.impl.ContentUrlConverterPatch" parent="basePatch">
+        <property name="id"><value>patch.convertContentUrls</value></property>
+        <property name="description"><value>patch.convertContentUrls.description</value></property>
+        <property name="fixesFromSchema"><value>0</value></property>
+        <property name="fixesToSchema"><value>3499</value></property>
+        <property name="targetSchema"><value>3500</value></property>
+        <property name="applyToTenants"><value>false</value></property>
+        <property name="registryService">
+            <ref bean="registryService"/>
+        </property>
+        <property name="jobLockService">
+            <ref bean="jobLockService"/>
+        </property>
+        <property name="nodeDaoService">
+            <ref bean="nodeDaoService"/>
+        </property>
+        <property name="patchDAO">
+            <ref bean="patchDAO"/>
+        </property>
+        <property name="contentDataDAO">
+            <ref bean="contentDataDAO"/>
+        </property>
+        <property name="contentStore">
+            <ref bean="fileContentStore"/>
+        </property>
+        <property name="threadCount">
+            <value>${system.content.contentUrlConverter.threadCount}</value>
+        </property>
+        <property name="batchSize">
+            <value>${system.content.contentUrlConverter.batchSize}</value>
+        </property>
+        <property name="runAsScheduledJob">
+            <value>${system.content.contentUrlConverter.runAsScheduledJob}</value>
+        </property>
+    </bean>
 </beans>

View File

@@ -142,6 +142,12 @@ system.content.eagerOrphanCleanup=false
 system.content.orphanProtectDays=14
 # The CRON expression to trigger the deletion of resources associated with orphaned content.
 system.content.orphanCleanup.cronExpression=0 0 4 * * ?
+# The CRON expression to trigger content URL conversion. This process is not intensive and can
+# be triggered on a live system. Similarly, it can be triggered using JMX on a dedicated machine.
+system.content.contentUrlConverter.cronExpression=* * * * * ? 2099
+system.content.contentUrlConverter.threadCount=2
+system.content.contentUrlConverter.batchSize=500
+system.content.contentUrlConverter.runAsScheduledJob=false
 
 # #################### #
 # Lucene configuration #
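
With these defaults the trigger's cron expression only fires in the year 2099, i.e. effectively never, and the patch does the conversion in one pass at upgrade time. A deployment that prefers the incremental background conversion might override the values, for example in a property override file such as alfresco-global.properties (the override location is an assumption, not part of this commit):

system.content.contentUrlConverter.runAsScheduledJob=true
# Fire every 30 seconds during a quiet window
system.content.contentUrlConverter.cronExpression=0/30 * 2-4 * * ?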

View File

@@ -109,6 +109,30 @@
         </property>
     </bean>
 
+    <bean id="contentUrlConverterJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
+        <property name="jobClass">
+            <value>org.alfresco.repo.admin.patch.impl.ContentUrlConverterPatch$ContentUrlConverterJob</value>
+        </property>
+        <property name="jobDataAsMap">
+            <map>
+                <entry key="contentUrlConverter">
+                    <ref bean="patch.convertContentUrls" />
+                </entry>
+            </map>
+        </property>
+    </bean>
+    <bean id="contentUrlConverterTrigger" class="org.alfresco.util.CronTriggerBean">
+        <property name="jobDetail">
+            <ref bean="contentUrlConverterJobDetail" />
+        </property>
+        <property name="scheduler">
+            <ref bean="schedulerFactory" />
+        </property>
+        <property name="cronExpression">
+            <value>${system.content.contentUrlConverter.cronExpression}</value>
+        </property>
+    </bean>
     <bean id="nodeServiceCleanupJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
         <property name="jobClass">
             <value>org.alfresco.repo.node.cleanup.NodeCleanupJob</value>

View File

@@ -19,4 +19,4 @@ version.build=@build-number@
 # Schema number
-version.schema=4005
+version.schema=4006

File diff suppressed because it is too large

View File

@@ -49,8 +49,6 @@ import org.alfresco.service.namespace.QName;
 import org.alfresco.service.namespace.RegexQNamePattern;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.springframework.context.ApplicationEventPublisher;
-import org.springframework.context.ApplicationEventPublisherAware;
 
 /**
  * Migrates authority information previously stored in the user store to the spaces store, using the new structure used
@@ -58,7 +56,7 @@ import org.springframework.context.ApplicationEventPublisherAware;
  *
  * @author dward
  */
-public class AuthorityMigrationPatch extends AbstractPatch implements ApplicationEventPublisherAware
+public class AuthorityMigrationPatch extends AbstractPatch
 {
     /** The title we give to the batch process in progress messages / JMX. */
     private static final String MSG_PROCESS_NAME = "patch.authorityMigration.process.name";
@@ -91,9 +89,6 @@ public class AuthorityMigrationPatch extends AbstractPatch implements Applicatio
     /** The user bootstrap. */
     private ImporterBootstrap userBootstrap;
 
-    /** The application event publisher. */
-    private ApplicationEventPublisher applicationEventPublisher;
-
     /**
      * Sets the authority service.
      *
@@ -127,17 +122,6 @@ public class AuthorityMigrationPatch extends AbstractPatch implements Applicatio
         this.userBootstrap = userBootstrap;
     }
 
-    /**
-     * Sets the application event publisher.
-     *
-     * @param applicationEventPublisher
-     *            the application event publisher
-     */
-    public void setApplicationEventPublisher(ApplicationEventPublisher applicationEventPublisher)
-    {
-        this.applicationEventPublisher = applicationEventPublisher;
-    }
-
     /**
      * Recursively retrieves the authorities under the given node and their associations.
      *
@@ -238,14 +222,33 @@ public class AuthorityMigrationPatch extends AbstractPatch implements Applicatio
      */
     private void migrateAuthorities(final Map<String, String> authoritiesToCreate, Map<String, Set<String>> parentAssocs)
     {
-        BatchProcessor.Worker<Map.Entry<String, Set<String>>> worker = new BatchProcessor.Worker<Map.Entry<String, Set<String>>>()
-        {
+        final String tenantDomain = tenantAdminService.getCurrentUserDomain();
+        BatchProcessor.BatchProcessWorker<Map.Entry<String, Set<String>>> worker = new BatchProcessor.BatchProcessWorker<Map.Entry<String, Set<String>>>()
+        {
             public String getIdentifier(Entry<String, Set<String>> entry)
             {
                 return entry.getKey();
             }
 
+            public void beforeProcess() throws Throwable
+            {
+                // Disable rules
+                ruleService.disableRules();
+                // Authentication
+                String systemUser = AuthenticationUtil.getSystemUserName();
+                systemUser = tenantAdminService.getDomainUser(systemUser, tenantDomain);
+                AuthenticationUtil.setRunAsUser(systemUser);
+            }
+
+            public void afterProcess() throws Throwable
+            {
+                // Enable rules
+                ruleService.enableRules();
+                // Clear authentication
+                AuthenticationUtil.clearCurrentSecurityContext();
+            }
+
             public void process(Entry<String, Set<String>> authority) throws Throwable
             {
                 String authorityName = authority.getKey();
@@ -290,10 +293,13 @@ public class AuthorityMigrationPatch extends AbstractPatch implements Applicatio
             }
         };
         // Migrate using 2 threads, 20 authorities per transaction. Log every 100 entries.
-        new BatchProcessor<Map.Entry<String, Set<String>>>(AuthorityMigrationPatch.progress_logger,
-                this.transactionService.getRetryingTransactionHelper(), this.ruleService, this.tenantAdminService,
-                this.applicationEventPublisher, parentAssocs.entrySet(), I18NUtil
-                        .getMessage(AuthorityMigrationPatch.MSG_PROCESS_NAME), 100, 2, 20).process(worker, true);
+        new BatchProcessor<Map.Entry<String, Set<String>>>(
+                I18NUtil.getMessage(AuthorityMigrationPatch.MSG_PROCESS_NAME),
+                this.transactionService.getRetryingTransactionHelper(),
+                parentAssocs.entrySet(),
+                2, 20,
+                AuthorityMigrationPatch.this.applicationEventPublisher,
+                AuthorityMigrationPatch.progress_logger, 100).process(worker, true);
     }
 
     /**
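
Both this patch and FixNameCrcValuesPatch (later in this commit) now use the reworked BatchProcessor API: the constructor takes (processName, retryingTransactionHelper, collection, workerThreads, batchSize, applicationEventPublisher, logger, loggingInterval), and workers gain beforeProcess()/afterProcess() callbacks. A minimal sketch of the calling pattern, with illustrative names only:

import java.util.Arrays;
import java.util.List;

import org.alfresco.repo.batch.BatchProcessor;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.context.ApplicationEventPublisher;

public class BatchProcessorUsageSketch
{
    private static final Log logger = LogFactory.getLog(BatchProcessorUsageSketch.class);

    private RetryingTransactionHelper txnHelper;                 // injected
    private ApplicationEventPublisher applicationEventPublisher; // injected; may be null

    public void run()
    {
        List<String> work = Arrays.asList("GROUP_a", "GROUP_b");
        BatchProcessor<String> processor = new BatchProcessor<String>(
                "ExampleProcess",          // name used for progress reporting / JMX
                txnHelper,
                work,
                2, 20,                     // 2 worker threads, 20 entries per transaction
                applicationEventPublisher,
                logger, 100);              // log progress every 100 entries
        // BatchProcessWorkerAdaptor supplies no-op beforeProcess()/afterProcess()
        processor.process(new BatchProcessor.BatchProcessWorkerAdaptor<String>()
        {
            public void process(String entry) throws Throwable
            {
                // per-entry work; runs inside a retrying transaction
            }
        }, true);                          // true: execute the batches in transactions
    }
}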

View File

@@ -0,0 +1,687 @@
/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.admin.patch.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.admin.patch.PatchExecuter;
import org.alfresco.repo.admin.registry.RegistryKey;
import org.alfresco.repo.admin.registry.RegistryService;
import org.alfresco.repo.avm.AVMDAOs;
import org.alfresco.repo.avm.PlainFileNode;
import org.alfresco.repo.batch.BatchProcessor;
import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorkerAdaptor;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.repo.content.ContentStore.ContentUrlHandler;
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
import org.alfresco.repo.domain.patch.PatchDAO;
import org.alfresco.repo.lock.JobLockService;
import org.alfresco.repo.lock.LockAcquisitionException;
import org.alfresco.repo.node.db.NodeDaoService;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport.TxnReadState;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.VmShutdownListener;
import org.alfresco.util.VmShutdownListener.VmShutdownException;
import org.apache.commons.lang.mutable.MutableInt;
import org.apache.commons.lang.mutable.MutableLong;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.extensions.surf.util.I18NUtil;
import org.springframework.extensions.surf.util.Pair;
import org.springframework.extensions.surf.util.PropertyCheck;
/**
* Component to migrate old-style content URL storage (<tt>contentUrl=store://...|mimetype=...</tt>)
* to the newer <b>alf_content_url</b> storage.
* <p/>
* The {@link ServiceRegistry} is used to record progress. The component picks up ranges of node IDs
* (DM and AVM) and records the progress. Since new nodes will not need converting, the converter
* will stop once it hits the largest node ID that it found upon first initiation. Once completed,
* the content store reader will start to pick up orphaned content and schedule it for deletion.
* <p/>
* A cluster-wide lock is set so that a single instance of this job will be running per Alfresco
* installation.
*
* @author Derek Hulley
* @since 3.2.1
*/
public class ContentUrlConverterPatch extends AbstractPatch
{
// Registry keys
private static final RegistryKey KEY_ADM_MAX_ID = new RegistryKey(
NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "adm", "max-id");
private static final RegistryKey KEY_ADM_RANGE_START_ID = new RegistryKey(
NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "adm", "range-start-id");
private static final RegistryKey KEY_ADM_DONE = new RegistryKey(
NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "adm", "done");
private static final RegistryKey KEY_AVM_MAX_ID = new RegistryKey(
NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "avm", "max-id");
private static final RegistryKey KEY_AVM_RANGE_START_ID = new RegistryKey(
NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "avm", "range-start-id");
private static final RegistryKey KEY_AVM_DONE = new RegistryKey(
NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "avm", "done");
private static final RegistryKey KEY_STORE_DONE = new RegistryKey(
NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "store", "done");
// Lock key
private static final QName LOCK = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter");
// Log as per patching
private static Log logger = LogFactory.getLog(PatchExecuter.class);
private static VmShutdownListener shutdownListener = new VmShutdownListener("ContentUrlConverterPatch");
private RegistryService registryService;
private JobLockService jobLockService;
private NodeDaoService nodeDaoService;
private PatchDAO patchDAO;
private ContentStore contentStore;
private ContentDataDAO contentDataDAO;
private int threadCount;
private int batchSize;
private boolean runAsScheduledJob;
private ThreadLocal<Boolean> runningAsJob = new ThreadLocal<Boolean>();
/**
* Default constructor
*/
public ContentUrlConverterPatch()
{
runningAsJob.set(Boolean.FALSE);
threadCount = 2;
batchSize = 500;
}
/**
* Service to record progress for later pick-up
*/
public void setRegistryService(RegistryService registryService)
{
this.registryService = registryService;
}
/**
* Service to prevent concurrent execution
*/
public void setJobLockService(JobLockService jobLockService)
{
this.jobLockService = jobLockService;
}
/**
* Provides low-level access to do the property transformation
*/
public void setNodeDaoService(NodeDaoService nodeDaoService)
{
this.nodeDaoService = nodeDaoService;
}
/**
* Component that provides low-level queries and updates to support this patch
*/
public void setPatchDAO(PatchDAO patchDAO)
{
this.patchDAO = patchDAO;
}
/**
* Set the store containing the content URLs to lift for potential cleaning.
*
* @param contentStore the store containing the system's content URLs
*/
public void setContentStore(ContentStore contentStore)
{
this.contentStore = contentStore;
}
/**
* Set the component that will write URLs coming from the
* {@link ContentStore#getUrls(ContentUrlHandler) content store}.
*
* @param contentDataDAO the DAO to write the URLs
*/
public void setContentDataDAO(ContentDataDAO contentDataDAO)
{
this.contentDataDAO = contentDataDAO;
}
/**
* Set the number of threads that will be used to process the required work.
*
* @param threadCount the number of threads
*/
public void setThreadCount(int threadCount)
{
this.threadCount = threadCount;
}
/**
* Set the number of URLs that are processed per job pass; this property is ignored
* when this component is run as a patch. Keep the number low (500) when running
* at short intervals on a live machine.
*
* @param batchSize the number of nodes to process per batch when running on a schedule
*/
public void setBatchSize(int batchSize)
{
this.batchSize = batchSize;
}
/**
* Set whether the patch execution should just bypass any actual work i.e. the admin has
* chosen to manually trigger the work.
*
* @param runAsScheduledJob <tt>true</tt> to leave all work up to the scheduled job
*/
public void setRunAsScheduledJob(boolean runAsScheduledJob)
{
this.runAsScheduledJob = runAsScheduledJob;
}
@Override
protected void checkProperties()
{
PropertyCheck.mandatory(this, "registryService", registryService);
PropertyCheck.mandatory(this, "jobLockService", jobLockService);
PropertyCheck.mandatory(this, "nodeDaoService", nodeDaoService);
PropertyCheck.mandatory(this, "patchDAO", patchDAO);
super.checkProperties();
}
/**
* Method called when executed as a scheduled job.
*/
private void executeViaJob()
{
AuthenticationUtil.RunAsWork<String> patchRunAs = new AuthenticationUtil.RunAsWork<String>()
{
public String doWork() throws Exception
{
RetryingTransactionCallback<String> patchTxn = new RetryingTransactionCallback<String>()
{
public String execute() throws Exception
{
try
{
runningAsJob.set(Boolean.TRUE);
String report = applyInternal();
// done
return report;
}
finally
{
runningAsJob.set(Boolean.FALSE); // Back to default
}
}
};
return transactionService.getRetryingTransactionHelper().doInTransaction(patchTxn);
}
};
String report = AuthenticationUtil.runAs(patchRunAs, AuthenticationUtil.getSystemUserName());
if (report != null)
{
logger.info(report);
}
}
/**
* Gets a set of work to do and executes it within this transaction. If kicked off via a job,
* the task will exit before completion, on the assumption that it will be kicked off at regular
* intervals. When called as a patch, it will run to completion with full progress logging.
*/
@Override
protected String applyInternal() throws Exception
{
if (AlfrescoTransactionSupport.getTransactionReadState() != TxnReadState.TXN_READ_WRITE)
{
// Nothing to do
return null;
}
boolean isRunningAsJob = runningAsJob.get().booleanValue();
// Do we bug out of patch execution?
if (runAsScheduledJob && !isRunningAsJob)
{
return I18NUtil.getMessage("patch.convertContentUrls.bypassingPatch");
}
boolean completed = false;
// Lock in proportion to the batch size (0.1s per node or 0.8 min per 500)
String lockToken = getLock(batchSize*100L);
if (lockToken == null)
{
// Some other process is busy
if (isRunningAsJob)
{
// Fine, we're doing batches
return null;
}
else
{
throw new RuntimeException("Unable to get job lock during patch execution. Only one server should perform the upgrade.");
}
}
try
{
logger.info(I18NUtil.getMessage("patch.convertContentUrls.start"));
logger.info(I18NUtil.getMessage("patch.convertContentUrls.adm.start"));
boolean admCompleted = applyADM(lockToken);
logger.info(I18NUtil.getMessage("patch.convertContentUrls.avm.start"));
boolean avmCompleted = applyAVM(lockToken);
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.start", contentStore));
boolean urlLiftingCompleted = applyUrlLifting(lockToken);
completed = admCompleted && avmCompleted && urlLiftingCompleted;
}
finally
{
jobLockService.releaseLock(lockToken, LOCK);
}
if (completed)
{
return I18NUtil.getMessage("patch.convertContentUrls.done");
}
else
{
return I18NUtil.getMessage("patch.convertContentUrls.inProgress");
}
}
/**
* Attempts to get the lock. If the lock couldn't be taken, then <tt>null</tt> is returned.
*
* @return Returns the lock token or <tt>null</tt>
*/
private String getLock(long time)
{
try
{
return jobLockService.getLock(LOCK, time);
}
catch (LockAcquisitionException e)
{
return null;
}
}
/**
* Attempts to refresh the lock. If it fails, the current transaction is marked for rollback.
*/
private void refreshLock(String lockToken, long time)
{
if (lockToken == null)
{
throw new IllegalArgumentException("Must provide existing lockToken");
}
jobLockService.refreshLock(lockToken, LOCK, time);
}
private boolean applyADM(final String lockToken)
{
RetryingTransactionCallback<Boolean> callback = new RetryingTransactionCallback<Boolean>()
{
public Boolean execute() throws Throwable
{
return applyADM();
}
};
boolean done = false;
while (true && !shutdownListener.isVmShuttingDown())
{
refreshLock(lockToken, batchSize*100L);
done = transactionService.getRetryingTransactionHelper().doInTransaction(callback, false, true);
if (done)
{
break;
}
}
return done;
}
/**
* Do the DM conversion work
* @return Returns <tt>true</tt> if the work is done
*/
private boolean applyADM() throws Exception
{
Long maxId = (Long) registryService.getProperty(KEY_ADM_MAX_ID);
// Must we run at all?
Boolean done = (Boolean) registryService.getProperty(KEY_ADM_DONE);
if (done != null && done.booleanValue())
{
logger.info(I18NUtil.getMessage("patch.convertContentUrls.adm.done", maxId));
return true;
}
if (maxId == null)
{
maxId = patchDAO.getMaxAdmNodeID();
registryService.addProperty(KEY_ADM_MAX_ID, maxId);
}
Long startId = (Long) registryService.getProperty(KEY_ADM_RANGE_START_ID);
if (startId == null)
{
startId = 1L;
registryService.addProperty(KEY_ADM_RANGE_START_ID, startId);
}
// Each thread gets 10 executions, i.e. we build ranges for threadCount*10 batches of work
Long endId = startId;
Collection<Pair<Long, Long>> batchProcessorWork = new ArrayList<Pair<Long,Long>>(2);
for (long i = 0; i < threadCount*10; i++)
{
endId = startId + (i+1L) * batchSize;
Pair<Long, Long> batchEntry = new Pair<Long, Long>(
startId + i * batchSize,
endId);
batchProcessorWork.add(batchEntry);
}
BatchProcessWorkerAdaptor<Pair<Long, Long>> batchProcessorWorker = new BatchProcessWorkerAdaptor<Pair<Long, Long>>()
{
public void process(Pair<Long, Long> range) throws Throwable
{
Long startId = range.getFirst();
Long endId = range.getSecond();
// Bulk-update the old content properties
patchDAO.updateAdmV31ContentProperties(startId, endId);
}
};
BatchProcessor<Pair<Long, Long>> batchProcessor = new BatchProcessor<Pair<Long, Long>>(
"ContentUrlConverter.ADM (" + maxId + ")",
transactionService.getRetryingTransactionHelper(),
batchProcessorWork, threadCount, 1,
applicationEventPublisher, null, 1);
batchProcessor.process(batchProcessorWorker, true);
// Advance
startId = endId;
// Have we gone past the max ID?
if (startId > maxId)
{
startId = maxId + 1;
// We're past the max ID that we're interested in
done = Boolean.TRUE;
registryService.addProperty(KEY_ADM_DONE, done);
logger.info(I18NUtil.getMessage("patch.convertContentUrls.adm.done", maxId));
return true;
}
// Progress
super.reportProgress(maxId, startId);
// Move the start ID on
registryService.addProperty(KEY_ADM_RANGE_START_ID, startId);
// More to do
return false;
}
private boolean applyAVM(final String lockToken)
{
RetryingTransactionCallback<Boolean> callback = new RetryingTransactionCallback<Boolean>()
{
public Boolean execute() throws Throwable
{
return applyAVM();
}
};
boolean done = false;
while (true && !shutdownListener.isVmShuttingDown())
{
refreshLock(lockToken, batchSize*100L);
done = transactionService.getRetryingTransactionHelper().doInTransaction(callback, false, true);
if (done)
{
break;
}
}
return done;
}
/**
* Do the AVM conversion work
*/
private boolean applyAVM() throws Exception
{
Long maxId = (Long) registryService.getProperty(KEY_AVM_MAX_ID);
// Must we run at all?
Boolean done = (Boolean) registryService.getProperty(KEY_AVM_DONE);
if (done != null && done.booleanValue())
{
logger.info(I18NUtil.getMessage("patch.convertContentUrls.avm.done", maxId));
return true;
}
if (maxId == null)
{
maxId = patchDAO.getMaxAvmNodeID();
registryService.addProperty(KEY_AVM_MAX_ID, maxId);
}
Long startId = (Long) registryService.getProperty(KEY_AVM_RANGE_START_ID);
if (startId == null)
{
startId = 1L;
registryService.addProperty(KEY_AVM_RANGE_START_ID, startId);
}
Long endId = startId + (batchSize * (long) threadCount * 10L);
final List<Long> nodeIds = patchDAO.getAvmNodesWithOldContentProperties(startId, endId);
BatchProcessWorkerAdaptor<Long> batchProcessorWorker = new BatchProcessWorkerAdaptor<Long>()
{
public void process(Long nodeId) throws Throwable
{
// Convert it
PlainFileNode node = (PlainFileNode) AVMDAOs.Instance().fAVMNodeDAO.getByID(nodeId);
ContentData contentData = node.getContentData();
node.setContentData(contentData);
AVMDAOs.Instance().fAVMNodeDAO.update(node);
}
};
BatchProcessor<Long> batchProcessor = new BatchProcessor<Long>(
"ContentUrlConverter.AVM (" + maxId + ")",
transactionService.getRetryingTransactionHelper(),
nodeIds, threadCount, batchSize,
applicationEventPublisher, null, 1);
batchProcessor.process(batchProcessorWorker, true);
// Advance
startId = endId;
// Have we gone past the max ID?
if (startId > maxId)
{
startId = maxId + 1;
// We're past the max ID that we're interested in
done = Boolean.TRUE;
registryService.addProperty(KEY_AVM_DONE, done);
logger.info(I18NUtil.getMessage("patch.convertContentUrls.avm.done", maxId));
return true;
}
// Progress
super.reportProgress(maxId, startId);
// Move the start ID on
registryService.addProperty(KEY_AVM_RANGE_START_ID, startId);
// More to do
return false;
}
private boolean applyUrlLifting(final String lockToken) throws Exception
{
RetryingTransactionCallback<Boolean> callback = new RetryingTransactionCallback<Boolean>()
{
public Boolean execute() throws Throwable
{
return applyUrlLiftingInTxn(lockToken);
}
};
return transactionService.getRetryingTransactionHelper().doInTransaction(callback, false, true);
}
private boolean applyUrlLiftingInTxn(final String lockToken) throws Exception
{
// Check the store
if (!contentStore.isWriteSupported())
{
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.readOnly"));
return true;
}
Boolean admDone = (Boolean) registryService.getProperty(KEY_ADM_DONE);
Boolean avmDone = (Boolean) registryService.getProperty(KEY_AVM_DONE);
if ((admDone == null || !admDone.booleanValue()) || (avmDone == null || !avmDone.booleanValue()))
{
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.pending"));
return false;
}
// Must we run at all?
Boolean done = (Boolean) registryService.getProperty(KEY_STORE_DONE);
if (done != null && done.booleanValue())
{
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.done"));
return true;
}
final long totalSize = contentStore.getTotalSize();
final MutableLong currentSize = new MutableLong(0L);
final MutableInt count = new MutableInt();
count.setValue(0);
ContentUrlHandler handler = new ContentUrlHandler()
{
private int allCount = 0;
public void handle(String contentUrl)
{
if (shutdownListener.isVmShuttingDown())
{
throw new VmShutdownListener.VmShutdownException();
}
ContentReader reader = contentStore.getReader(contentUrl);
if (!reader.exists())
{
// Not there any more
return;
}
currentSize.setValue(currentSize.longValue() + reader.getSize());
try
{
contentDataDAO.createContentUrlOrphaned(contentUrl);
count.setValue(count.intValue()+1);
}
catch (DataIntegrityViolationException e)
{
// That's OK, the URL was already managed
}
allCount++;
if (allCount % batchSize == 0)
{
// Update our lock
refreshLock(lockToken, batchSize*100L);
if (totalSize < 0)
{
// Report
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.progress", allCount));
}
else
{
ContentUrlConverterPatch.super.reportProgress(totalSize, currentSize.longValue());
}
}
}
};
try
{
contentStore.getUrls(handler);
}
catch (UnsupportedOperationException e)
{
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.noSupport"));
}
catch (VmShutdownException e)
{
// We didn't manage to complete
return false;
}
// Record the completion
done = Boolean.TRUE;
registryService.addProperty(KEY_STORE_DONE, done);
// Done
logger.info(I18NUtil.getMessage("patch.convertContentUrls.store.scheduled", count.intValue(), contentStore));
return true;
}
/**
* Job to initiate the {@link ContentUrlConverterPatch}
*
* @author Derek Hulley
* @since 3.2.1
*/
public static class ContentUrlConverterJob implements Job
{
public ContentUrlConverterJob()
{
}
/**
* Calls the cleaner to do its work
*/
public void execute(JobExecutionContext context) throws JobExecutionException
{
JobDataMap jobData = context.getJobDetail().getJobDataMap();
// extract the content cleaner to use
Object contentUrlConverterObj = jobData.get("contentUrlConverter");
if (contentUrlConverterObj == null || !(contentUrlConverterObj instanceof ContentUrlConverterPatch))
{
throw new AlfrescoRuntimeException(
"'contentUrlConverter' data must contain valid 'ContentUrlConverter' reference");
}
ContentUrlConverterPatch contentUrlConverter = (ContentUrlConverterPatch) contentUrlConverterObj;
contentUrlConverter.executeViaJob();
}
}
}
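
The converter serialises its work across the cluster through JobLockService, using the getLock/refreshLock/releaseLock calls seen above. A stripped-down sketch of that locking pattern (the lock name and timings are illustrative):

import org.alfresco.repo.lock.JobLockService;
import org.alfresco.repo.lock.LockAcquisitionException;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;

public class JobLockUsageSketch
{
    private static final QName LOCK = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "ExampleLock");

    private JobLockService jobLockService; // injected

    public void runExclusively()
    {
        String lockToken;
        try
        {
            lockToken = jobLockService.getLock(LOCK, 60000L); // hold the lock for 60s
        }
        catch (LockAcquisitionException e)
        {
            return; // another node holds the lock; try again on the next trigger
        }
        try
        {
            // ... do a chunk of work, renewing the lock between chunks ...
            jobLockService.refreshLock(lockToken, LOCK, 60000L);
        }
        finally
        {
            jobLockService.releaseLock(lockToken, LOCK);
        }
    }
}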

View File

@@ -32,12 +32,13 @@ import java.util.zip.CRC32;
 import org.alfresco.model.ContentModel;
 import org.alfresco.repo.admin.patch.AbstractPatch;
 import org.alfresco.repo.batch.BatchProcessor;
-import org.alfresco.repo.batch.BatchProcessor.Worker;
+import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorker;
 import org.alfresco.repo.domain.ChildAssoc;
 import org.alfresco.repo.domain.Node;
 import org.alfresco.repo.domain.hibernate.ChildAssocImpl;
 import org.alfresco.repo.domain.qname.QNameDAO;
 import org.alfresco.repo.node.db.NodeDaoService;
+import org.alfresco.repo.security.authentication.AuthenticationUtil;
 import org.alfresco.service.cmr.admin.PatchException;
 import org.alfresco.service.cmr.rule.RuleService;
 import org.alfresco.service.namespace.QName;
@@ -48,8 +49,6 @@ import org.hibernate.Session;
 import org.hibernate.SessionFactory;
 import org.hibernate.type.LongType;
 import org.hibernate.type.StringType;
-import org.springframework.context.ApplicationEventPublisher;
-import org.springframework.context.ApplicationEventPublisherAware;
 import org.springframework.extensions.surf.util.I18NUtil;
 import org.springframework.orm.hibernate3.HibernateCallback;
 import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
@@ -61,7 +60,7 @@ import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
  * @author Derek Hulley
  * @since V2.2SP4
  */
-public class FixNameCrcValuesPatch extends AbstractPatch implements ApplicationEventPublisherAware
+public class FixNameCrcValuesPatch extends AbstractPatch
 {
     private static final String MSG_SUCCESS = "patch.fixNameCrcValues.result";
     private static final String MSG_REWRITTEN = "patch.fixNameCrcValues.fixed";
@@ -71,7 +70,6 @@ public class FixNameCrcValuesPatch extends AbstractPatch implements ApplicationE
     private NodeDaoService nodeDaoService;
     private QNameDAO qnameDAO;
     private RuleService ruleService;
-    private ApplicationEventPublisher applicationEventPublisher;
 
     public FixNameCrcValuesPatch()
     {
@@ -106,14 +104,6 @@ public class FixNameCrcValuesPatch extends AbstractPatch implements ApplicationE
         this.ruleService = ruleService;
     }
 
-    /* (non-Javadoc)
-     * @see org.springframework.context.ApplicationEventPublisherAware#setApplicationEventPublisher(org.springframework.context.ApplicationEventPublisher)
-     */
-    public void setApplicationEventPublisher(ApplicationEventPublisher applicationEventPublisher)
-    {
-        this.applicationEventPublisher = applicationEventPublisher;
-    }
-
     @Override
     protected void checkProperties()
     {
@@ -180,20 +170,33 @@ public class FixNameCrcValuesPatch extends AbstractPatch implements ApplicationE
         public String fixCrcValues() throws Exception
         {
             // get the association types to check
-            BatchProcessor<Long> batchProcessor = new BatchProcessor<Long>(logger, transactionService
-                    .getRetryingTransactionHelper(), ruleService, tenantAdminService, applicationEventPublisher, findMismatchedCrcs(),
-                    "FixNameCrcValuesPatch", 1000, 2, 20);
+            BatchProcessor<Long> batchProcessor = new BatchProcessor<Long>(
+                    "FixNameCrcValuesPatch",
+                    transactionService.getRetryingTransactionHelper(),
+                    findMismatchedCrcs(),
+                    2, 20,
+                    applicationEventPublisher,
+                    logger, 1000);
 
             // Precautionary flush and clear so that we have an empty session
             getSession().flush();
             getSession().clear();
 
-            int updated = batchProcessor.process(new Worker<Long>(){
+            int updated = batchProcessor.process(new BatchProcessWorker<Long>()
+            {
                 public String getIdentifier(Long entry)
                 {
                     return entry.toString();
                 }
 
+                public void beforeProcess() throws Throwable
+                {
+                    // Switch rules off
+                    ruleService.disableRules();
+                    // Authenticate as system
+                    String systemUsername = AuthenticationUtil.getSystemUserName();
+                    AuthenticationUtil.setFullyAuthenticatedUser(systemUsername);
+                }
+
                 public void process(Long childAssocId) throws Throwable
                 {
@@ -247,7 +250,13 @@ public class FixNameCrcValuesPatch extends AbstractPatch implements ApplicationE
                     // Record
                     writeLine(I18NUtil.getMessage(MSG_REWRITTEN, childNode.getId(), childName, oldChildCrc, childCrc,
                             qname, oldQNameCrc, qnameCrc));
-                }}, true);
+                }
+
+                public void afterProcess() throws Throwable
+                {
+                    ruleService.enableRules();
+                }
+            }, true);
 
             String msg = I18NUtil.getMessage(MSG_SUCCESS, updated, logFile);

View File

@@ -19,6 +19,7 @@
 package org.alfresco.repo.admin.registry;
 
 import java.io.Serializable;
+import java.util.Arrays;
 
 /**
  * Key for looking up registry metadata.
@@ -115,6 +116,46 @@ public class RegistryKey implements Serializable
         return sb.toString();
     }
 
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (this == obj)
+            return true;
+        if (obj == null)
+            return false;
+        if (getClass() != obj.getClass())
+            return false;
+        RegistryKey other = (RegistryKey) obj;
+        if (namespaceUri == null)
+        {
+            if (other.namespaceUri != null)
+                return false;
+        }
+        else if (!namespaceUri.equals(other.namespaceUri))
+            return false;
+        if (!Arrays.equals(path, other.path))
+            return false;
+        if (property == null)
+        {
+            if (other.property != null)
+                return false;
+        }
+        else if (!property.equals(other.property))
+            return false;
+        return true;
+    }
+
+    @Override
+    public int hashCode()
+    {
+        final int prime = 31;
+        int result = 1;
+        result = prime * result + ((namespaceUri == null) ? 0 : namespaceUri.hashCode());
+        result = prime * result + Arrays.hashCode(path);
+        result = prime * result + ((property == null) ? 0 : property.hashCode());
+        return result;
+    }
+
     public String getNamespaceUri()
     {
         return namespaceUri;
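
The new equals()/hashCode() pair means two RegistryKey instances built from the same components are now interchangeable, which matters when keys such as the ContentUrlConverter progress markers are recreated on each run. A small sketch (the constructor usage mirrors ContentUrlConverterPatch above; the arity is inferred from that usage):

import org.alfresco.repo.admin.registry.RegistryKey;
import org.alfresco.service.namespace.NamespaceService;

public class RegistryKeyEqualitySketch
{
    public static void main(String[] args)
    {
        RegistryKey a = new RegistryKey(NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "adm", "max-id");
        RegistryKey b = new RegistryKey(NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentUrlConverter", "adm", "max-id");
        System.out.println(a.equals(b));                  // true: same namespace, path and property
        System.out.println(a.hashCode() == b.hashCode()); // true: consistent with equals
    }
}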

View File

@@ -23,6 +23,7 @@ import java.util.List;
 import org.alfresco.repo.domain.DbAccessControlList;
 import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
+import org.alfresco.service.cmr.repository.ContentData;
 import org.alfresco.service.transaction.TransactionService;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -333,14 +334,17 @@ public class OrphanReaper
             else if (node.getType() == AVMNodeType.PLAIN_FILE)
             {
                 PlainFileNode file = (PlainFileNode)node;
-                if (!file.isLegacyContentData())
+                if (file.isLegacyContentData())
                 {
-                    Long contentDataId = file.getContentDataId();
-                    if (contentDataId != null)
-                    {
-                        // The ContentDataDAO will take care of dereferencing and cleanup
-                        AVMDAOs.Instance().contentDataDAO.deleteContentData(contentDataId);
-                    }
+                    // We quickly convert the old ContentData to the new storage
+                    ContentData contentData = file.getContentData();
+                    file.setContentData(contentData);
+                }
+                Long contentDataId = file.getContentDataId();
+                if (contentDataId != null)
+                {
+                    // The ContentDataDAO will take care of dereferencing and cleanup
+                    AVMDAOs.Instance().contentDataDAO.deleteContentData(contentDataId);
                 }
             }
             // Finally, delete it

View File

@@ -28,7 +28,7 @@ import org.alfresco.repo.domain.contentdata.ContentDataDAO;
 import org.alfresco.repo.security.permissions.ACLCopyMode;
 import org.alfresco.service.cmr.avm.AVMNodeDescriptor;
 import org.alfresco.service.cmr.repository.ContentData;
 import org.alfresco.service.namespace.QName;
 
 /**
  * A plain old file. Contains a Content object.
@@ -67,7 +67,7 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
     public PlainFileNodeImpl()
     {
     }
 
     /**
      * Make one from just an AVMStore.
      * This is the constructor used when a brand new plain file is being made.
@@ -76,7 +76,7 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
     public PlainFileNodeImpl(AVMStore store)
     {
         super(store);
         setVersionID(1);
     }
 
     /**
@@ -92,10 +92,10 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
         // layered files.
         setContentData(other.getContentData(null));
         setVersionID(other.getVersionID() + 1);
         copyACLs(other, parentAcl, mode);
         copyCreationAndOwnerBasicAttributes(other);
         AVMDAOs.Instance().fAVMNodeDAO.save(this);
         copyProperties(other);
@@ -114,8 +114,8 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
     public PlainFileNodeImpl(AVMStore store,
                              BasicAttributes attrs,
                              ContentData content,
                              Map<QName, PropertyValue> props,
                              Set<QName> aspects,
                              DbAccessControlList acl,
                              int versionID, Long parentAcl, ACLCopyMode mode)
     {
@@ -129,10 +129,10 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
             setAcl(acl.getCopy(parentAcl, mode));
         }
         AVMDAOs.Instance().fAVMNodeDAO.save(this);
         addProperties(props);
         setAspects(new HashSet<QName>(aspects));
     }
 
     /**
@@ -414,7 +414,14 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
         Long contentDataId = getContentDataId();
         try
         {
-            return AVMDAOs.Instance().contentDataDAO.getContentData(contentDataId).getSecond();
+            if (contentDataId == null)
+            {
+                return new ContentData(null, null, 0L, null);
+            }
+            else
+            {
+                return AVMDAOs.Instance().contentDataDAO.getContentData(contentDataId).getSecond();
+            }
         }
         catch (Throwable e)
         {

View File

@@ -34,15 +34,12 @@ import java.util.concurrent.TimeUnit;
import org.alfresco.error.AlfrescoRuntimeException; import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport; import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.repo.tenant.TenantService;
import org.alfresco.repo.tenant.TenantUserService;
import org.alfresco.repo.transaction.RetryingTransactionHelper; import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.TransactionListenerAdapter; import org.alfresco.repo.transaction.TransactionListenerAdapter;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.rule.RuleService; import org.alfresco.util.TraceableThreadFactory;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.context.ApplicationEventPublisher; import org.springframework.context.ApplicationEventPublisher;
/** /**
@@ -59,20 +56,15 @@ import org.springframework.context.ApplicationEventPublisher;
  */
 public class BatchProcessor<T> implements BatchMonitor
 {
+    /** The factory for all new threads */
+    private TraceableThreadFactory threadFactory;
+
     /** The logger to use. */
     private final Log logger;

     /** The retrying transaction helper. */
     private final RetryingTransactionHelper retryingTransactionHelper;

-    /** The rule service. */
-    private final RuleService ruleService;
-
-    /** The tenant user service. */
-    private final TenantUserService tenantUserService;
-
-    private final String tenantDomain;
-
     /** The collection. */
     private final Collection<T> collection;
@@ -113,84 +105,78 @@ public class BatchProcessor<T> implements BatchMonitor
     private Date endTime;

     /**
-     * Instantiates a new batch processor.
+     * Instantiates a new batch processor using the default logger, which references
+     * this class as the log category.
      *
-     * @param logger
-     *            the logger to use
-     * @param retryingTransactionHelper
-     *            the retrying transaction helper
-     * @param ruleService
-     *            the rule service
-     * @param collection
-     *            the collection
-     * @param processName
-     *            the process name
-     * @param loggingInterval
-     *            the number of entries to process before reporting progress
-     * @param applicationEventPublisher
-     *            the application event publisher
-     * @param workerThreads
-     *            the number of worker threads
-     * @param batchSize
-     *            the number of entries we process at a time in a transaction
+     * @see #BatchProcessor(String, RetryingTransactionHelper, Collection, int, int, ApplicationEventPublisher, Log, int)
      */
-    public BatchProcessor(Log logger, RetryingTransactionHelper retryingTransactionHelper, RuleService ruleService,
-            ApplicationEventPublisher applicationEventPublisher, Collection<T> collection, String processName,
-            int loggingInterval, int workerThreads, int batchSize)
+    public BatchProcessor(
+            String processName,
+            RetryingTransactionHelper retryingTransactionHelper,
+            Collection<T> collection,
+            int workerThreads, int batchSize)
     {
-        this(logger, retryingTransactionHelper, ruleService, null, applicationEventPublisher, collection, processName,
-                loggingInterval, workerThreads, batchSize);
+        this(
+                processName,
+                retryingTransactionHelper,
+                collection,
+                workerThreads,
+                batchSize, null, null, 1);
     }

     /**
      * Instantiates a new batch processor.
      *
-     * @param logger
-     *            the logger to use
-     * @param retryingTransactionHelper
-     *            the retrying transaction helper
-     * @param ruleService
-     *            the rule service
-     * @param tenantUserService
-     *            the tenant user service
-     * @param collection
-     *            the collection
      * @param processName
      *            the process name
-     * @param loggingInterval
-     *            the number of entries to process before reporting progress
-     * @param applicationEventPublisher
-     *            the application event publisher
+     * @param retryingTransactionHelper
+     *            the retrying transaction helper
+     * @param collection
+     *            the collection
      * @param workerThreads
      *            the number of worker threads
      * @param batchSize
      *            the number of entries we process at a time in a transaction
+     * @param applicationEventPublisher
+     *            the application event publisher (may be <tt>null</tt>)
+     * @param logger
+     *            the logger to use (may be <tt>null</tt>)
+     * @param loggingInterval
+     *            the number of entries to process before reporting progress
      */
-    public BatchProcessor(Log logger, RetryingTransactionHelper retryingTransactionHelper, RuleService ruleService,
-            TenantUserService tenantUserService, ApplicationEventPublisher applicationEventPublisher, Collection<T> collection, String processName,
-            int loggingInterval, int workerThreads, int batchSize)
+    public BatchProcessor(
+            String processName,
+            RetryingTransactionHelper retryingTransactionHelper,
+            Collection<T> collection,
+            int workerThreads, int batchSize,
+            ApplicationEventPublisher applicationEventPublisher,
+            Log logger,
+            int loggingInterval)
     {
-        this.logger = logger;
-        this.retryingTransactionHelper = retryingTransactionHelper;
-        this.ruleService = ruleService;
-        this.tenantUserService = tenantUserService;
-        this.collection = collection;
+        this.threadFactory = new TraceableThreadFactory();
+        this.threadFactory.setNamePrefix(processName);
+        this.threadFactory.setThreadDaemon(true);
+
         this.processName = processName;
-        this.loggingInterval = loggingInterval;
+        this.retryingTransactionHelper = retryingTransactionHelper;
+        this.collection = collection;
         this.workerThreads = workerThreads;
         this.batchSize = batchSize;
-        if (tenantUserService != null)
+        if (logger == null)
         {
-            this.tenantDomain = tenantUserService.getUserDomain(AuthenticationUtil.getRunAsUser());
+            this.logger = LogFactory.getLog(this.getClass());
         }
         else
         {
-            this.tenantDomain = TenantService.DEFAULT_DOMAIN;
+            this.logger = logger;
         }
+        this.loggingInterval = loggingInterval;
         // Let the (enterprise) monitoring side know of our presence
-        applicationEventPublisher.publishEvent(new BatchMonitorEvent(this));
+        if (applicationEventPublisher != null)
+        {
+            applicationEventPublisher.publishEvent(new BatchMonitorEvent(this));
+        }
     }

     /*
@@ -302,14 +288,14 @@ public class BatchProcessor<T> implements BatchMonitor
      *            the worker
      * @param splitTxns
      *            Can the modifications to Alfresco be split across multiple transactions for maximum performance? If
-     *            <code>true</code>, worker invocations are isolated in separate transactions in batches of 10 for
+     *            <code>true</code>, worker invocations are isolated in separate transactions in batches for
      *            increased performance. If <code>false</code>, all invocations are performed in the current
      *            transaction. This is required if calling synchronously (e.g. in response to an authentication event in
      *            the same transaction).
      * @return the number of invocations
      */
     @SuppressWarnings("serial")
-    public int process(final Worker<T> worker, final boolean splitTxns)
+    public int process(final BatchProcessWorker<T> worker, final boolean splitTxns)
     {
         int count = this.collection.size();
         synchronized (this)
@@ -330,9 +316,10 @@ public class BatchProcessor<T> implements BatchMonitor
         }

         // Create a thread pool executor with the specified number of threads and a finite blocking queue of jobs
-        ExecutorService executorService = splitTxns && this.workerThreads > 1 ? new ThreadPoolExecutor(
-                this.workerThreads, this.workerThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue<Runnable>(
-                        this.workerThreads * this.batchSize * 10)
+        ExecutorService executorService = splitTxns && this.workerThreads > 1 ?
+                new ThreadPoolExecutor(
+                        this.workerThreads, this.workerThreads, 0L, TimeUnit.MILLISECONDS,
+                        new ArrayBlockingQueue<Runnable>(this.workerThreads * this.batchSize * 10)
                 {
                     // Add blocking behaviour to work queue
                     @Override
@@ -349,7 +336,8 @@ public class BatchProcessor<T> implements BatchMonitor
                         return true;
                     }
-                }) : null;
+                },
+                threadFactory) : null;
         try
         {
             Iterator<T> iterator = this.collection.iterator();
@@ -452,9 +440,8 @@ public class BatchProcessor<T> implements BatchMonitor
     /**
      * An interface for workers to be invoked by the {@link BatchProcessor}.
      */
-    public interface Worker<T>
+    public interface BatchProcessWorker<T>
     {
         /**
          * Gets an identifier for the given entry (for monitoring / logging purposes).
          *
@@ -464,6 +451,14 @@ public class BatchProcessor<T> implements BatchMonitor
          */
         public String getIdentifier(T entry);

+        /**
+         * Callback to allow thread initialization before the work entries are
+         * {@link #process(Object) processed}.  Typically, this will include authenticating
+         * as a valid user and disabling or enabling any system flags that might affect the
+         * entry processing.
+         */
+        public void beforeProcess() throws Throwable;
+
         /**
          * Processes the given entry.
          *
@@ -473,6 +468,38 @@ public class BatchProcessor<T> implements BatchMonitor
          *             on any error
          */
         public void process(T entry) throws Throwable;

+        /**
+         * Callback to allow thread cleanup after the work entries have been
+         * {@link #process(Object) processed}.
+         * Typically, this will involve cleanup of authentication and resetting any
+         * system flags previously set.
+         * <p/>
+         * This call is made regardless of the outcome of the entry processing.
+         */
+        public void afterProcess() throws Throwable;
+    }
+
+    /**
+     * Adaptor that allows implementations to only implement {@link #process(Object)}
+     */
+    public static abstract class BatchProcessWorkerAdaptor<TT> implements BatchProcessWorker<TT>
+    {
+        /**
+         * @return Returns the <code>toString()</code> of the entry
+         */
+        public String getIdentifier(TT entry)
+        {
+            return entry.toString();
+        }
+        /** No-op */
+        public void beforeProcess() throws Throwable
+        {
+        }
+        /** No-op */
+        public void afterProcess() throws Throwable
+        {
+        }
     }
     /**
@@ -491,7 +518,7 @@ public class BatchProcessor<T> implements BatchMonitor
          * @param splitTxns
          *            If <code>true</code>, the worker invocation is made in a new transaction.
          */
-        public TxnCallback(Worker<T> worker, List<T> batch, boolean splitTxns)
+        public TxnCallback(BatchProcessWorker<T> worker, List<T> batch, boolean splitTxns)
         {
             this.worker = worker;
             this.batch = batch;
@@ -499,7 +526,7 @@ public class BatchProcessor<T> implements BatchMonitor
         }

         /** The worker. */
-        private final Worker<T> worker;
+        private final BatchProcessWorker<T> worker;

         /** The batch. */
         private final List<T> batch;
@@ -602,26 +629,21 @@ public class BatchProcessor<T> implements BatchMonitor
          */
         public void run()
         {
-            // Disable rules for this thread
-            BatchProcessor.this.ruleService.disableRules();
+            try
+            {
+            }
+            catch (Throwable e)
+            {
+                BatchProcessor.this.logger.error("Failed to cleanup Worker after processing.", e);
+            }
             final BatchProcessor<T>.TxnCallback callback = this;
             try
             {
-                String systemUser = AuthenticationUtil.getSystemUserName();
-                if (tenantUserService != null)
-                {
-                    systemUser = tenantUserService.getDomainUser(AuthenticationUtil.getSystemUserName(), tenantDomain);
-                }
-                AuthenticationUtil.runAs(new RunAsWork<Void>()
-                {
-                    public Void doWork() throws Exception
-                    {
-                        BatchProcessor.this.retryingTransactionHelper.doInTransaction(callback, false, splitTxns);
-                        return null;
-                    }
-                }, systemUser);
+                worker.beforeProcess();
+                BatchProcessor.this.retryingTransactionHelper.doInTransaction(callback, false, splitTxns);
+                worker.afterProcess();
             }
             catch (Throwable t)
             {
@@ -651,11 +673,6 @@ public class BatchProcessor<T> implements BatchMonitor
                 throw new AlfrescoRuntimeException("Transactional error during " + getProcessName(), t);
                 }
             }
-            finally
-            {
-                // Re-enable rules
-                BatchProcessor.this.ruleService.enableRules();
-            }
             commitProgress();
         }
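
For illustration only (not part of the commit): a usage sketch of the reworked API, processing a collection in two threads with twenty entries per transaction and relying on the adaptor for the before/after callbacks. The collection and helper variables here are hypothetical.

    // Hypothetical caller of the new constructor and worker contract
    BatchProcessor<NodeRef> processor = new BatchProcessor<NodeRef>(
            "Example Process",            // processName, also used as the thread name prefix
            retryingTransactionHelper,    // assumed available to the caller
            nodesToProcess,               // Collection<NodeRef>, hypothetical
            2, 20,                        // workerThreads, batchSize
            null,                         // no ApplicationEventPublisher: no BatchMonitorEvent
            null,                         // null logger: falls back to LogFactory.getLog(...)
            100);                         // loggingInterval
    processor.process(new BatchProcessor.BatchProcessWorkerAdaptor<NodeRef>()
    {
        public void process(NodeRef entry) throws Throwable
        {
            // Per-entry work; runs inside a RetryingTransactionHelper transaction
        }
    }, true);                             // splitTxns: isolate batches in their own transactions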

View File

@@ -34,6 +34,7 @@ import org.alfresco.service.namespace.NamespaceService;
 import org.alfresco.service.namespace.QName;
 import org.alfresco.service.transaction.TransactionService;
 import org.alfresco.util.VmShutdownListener;
+import org.alfresco.util.VmShutdownListener.VmShutdownException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.springframework.extensions.surf.util.Pair;
@@ -41,8 +42,6 @@ import org.springframework.extensions.surf.util.PropertyCheck;
 /**
  * This component is responsible for cleaning up orphaned content.
- * <p/>
- * <b>TODO: Fix up new comments</b>
  *
  * Clean-up happens at two levels.<p/>
  * <u><b>Eager cleanup:</b></u> (since 3.2)<p/>
@@ -53,10 +52,9 @@ import org.springframework.extensions.surf.util.PropertyCheck;
  * procedures should be plugged in as listeners if this is required.
  * <p/>
  * <u><b>Lazy cleanup:</b></u><p/>
- * This is triggered by means of a {@link ContentStoreCleanupJob Quartz job}.  This is
- * a heavy-weight process that effectively compares the database metadata with the
- * content URLs controlled by the various stores.  Once again, the listeners are called
- * appropriately.
+ * This is triggered by means of a {@link ContentStoreCleanupJob Quartz job}.  This process
+ * gets content URLs that have been marked as orphaned and cleans up the various stores.
+ * Once again, the listeners are called appropriately.
  * <p/>
  * <u><b>How backup policies are affected:</b></u><p/>
  * When restoring the system from a backup, the type of restore required is dictated by
@@ -352,15 +350,4 @@ public class ContentStoreCleaner
         // Done
         return size;
     }
-
-    /**
-     * Message carrier to break out of loops using the callback.
-     *
-     * @author Derek Hulley
-     * @since 2.1.3
-     */
-    private class VmShutdownException extends RuntimeException
-    {
-        private static final long serialVersionUID = -5876107469054587072L;
-    }
 }
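
To make the lazy pass concrete, a sketch of how the cleaner might select eligible orphans; the protectDays arithmetic mirrors the bean property of the same name, and the DAO variable is assumed, so this is illustrative rather than the actual job body.

    // Orphaned URLs become eligible once they fall outside the protection window
    long maxOrphanTime = System.currentTimeMillis() - protectDays * 24L * 60L * 60L * 1000L;
    contentDataDAO.getContentUrlsOrphaned(new ContentDataDAO.ContentUrlHandler()
    {
        public void handle(Long id, String contentUrl, Long orphanTime)
        {
            // Delete the binary from the store(s), notify listeners,
            // then queue the content URL row for deletion
        }
    }, maxOrphanTime);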

View File

@@ -22,7 +22,6 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.InputStream;
 import java.io.Serializable;
-import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
@@ -33,6 +32,7 @@ import org.alfresco.model.ContentModel;
 import org.alfresco.repo.content.MimetypeMap;
 import org.alfresco.repo.descriptor.DescriptorServiceImpl.BaseDescriptor;
 import org.alfresco.repo.importer.ImporterBootstrap;
+import org.alfresco.service.cmr.repository.ContentData;
 import org.alfresco.service.cmr.repository.ContentReader;
 import org.alfresco.service.cmr.repository.ContentService;
 import org.alfresco.service.cmr.repository.ContentWriter;
@@ -201,17 +201,18 @@ public class RepositoryDescriptorDAOImpl implements DescriptorDAO
             props.put(ContentModel.PROP_SYS_VERSION_SCHEMA, serverDescriptor.getSchema());
             this.nodeService.addProperties(currentDescriptorNodeRef, props);

-            // The version edition property may already have been overwritten with a license, so only set the property
-            // if it doesn't already contain ContentData
-            final Serializable value = this.nodeService.getProperty(currentDescriptorNodeRef,
+            // ALF-726: v3.1.x Content Cleaner Job needs to be ported to v3.2
+            // In order to migrate properly, this property needs to be d:content.  We will rewrite the property with the
+            // license update code.  There is no point attempting to rewrite the property here.
+            final Serializable value = this.nodeService.getProperty(
+                    currentDescriptorNodeRef,
                     ContentModel.PROP_SYS_VERSION_EDITION);
-            if (!(value instanceof Collection) || ((Collection<?>) value).isEmpty()
-                    || ((Collection<?>) value).iterator().next() instanceof String)
+            if (value == null)
             {
-                final Collection<String> editions = new ArrayList<String>();
-                editions.add(serverDescriptor.getEdition());
-                this.nodeService.setProperty(currentDescriptorNodeRef, ContentModel.PROP_SYS_VERSION_EDITION,
-                        (Serializable) editions);
+                this.nodeService.setProperty(
+                        currentDescriptorNodeRef,
+                        ContentModel.PROP_SYS_VERSION_EDITION,
+                        new ContentData(null, null, 0L, null));
             }

             // done
View File

@@ -0,0 +1,57 @@
/*
* Copyright (C) 2005-2008 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
*/
package org.alfresco.repo.domain;
import java.io.Serializable;
/**
* Data type carrying the ID of a <code>ContentData</code> reference.
*
* @author Derek Hulley
* @since 3.2.1
*/
public class ContentDataId implements Serializable
{
private static final long serialVersionUID = -4980820192507809266L;
private final Long id;
public ContentDataId(Long id)
{
super();
this.id = id;
}
@Override
public String toString()
{
return "ContentDataId [id=" + id + "]";
}
public Long getId()
{
return id;
}
}

View File

@@ -127,7 +127,18 @@ public class NodePropertyValue implements Cloneable, Serializable
         @Override
         Serializable convert(Serializable value)
         {
-            return DefaultTypeConverter.INSTANCE.convert(Long.class, value);
+            if (value == null)
+            {
+                return null;
+            }
+            else if (value instanceof ContentDataId)
+            {
+                return ((ContentDataId)value).getId();
+            }
+            else
+            {
+                return DefaultTypeConverter.INSTANCE.convert(Long.class, value);
+            }
         }
     },
     FLOAT
@@ -467,6 +478,41 @@ public class NodePropertyValue implements Cloneable, Serializable
         {
             return DefaultTypeConverter.INSTANCE.convert(Period.class, value);
         }
-    }
+    },
+    CONTENT_DATA_ID
+    {
+        @Override
+        public Integer getOrdinalNumber()
+        {
+            return Integer.valueOf(21);
+        }
+
+        @Override
+        protected ValueType getPersistedType(Serializable value)
+        {
+            return ValueType.LONG;
+        }
+
+        @Override
+        Serializable convert(Serializable value)
+        {
+            if (value == null)
+            {
+                return null;
+            }
+            else if (value instanceof Long)
+            {
+                return value;
+            }
+            else if (value instanceof ContentDataId)
+            {
+                return ((ContentDataId)value).getId();
+            }
+            else
+            {
+                return DefaultTypeConverter.INSTANCE.convert(ContentData.class, value);
+            }
+        }
+    }
     ;
@@ -566,6 +612,10 @@ public class NodePropertyValue implements Cloneable, Serializable
         {
             return ValueType.PERIOD;
         }
+        else if (value instanceof ContentDataId)
+        {
+            return ValueType.CONTENT_DATA_ID;
+        }
         else
         {
             // type is not recognised as belonging to any particular slot
@@ -592,7 +642,7 @@ public class NodePropertyValue implements Cloneable, Serializable
         valueTypesByPropertyType.put(DataTypeDefinition.DATE, ValueType.DATE);
         valueTypesByPropertyType.put(DataTypeDefinition.DATETIME, ValueType.DATE);
         valueTypesByPropertyType.put(DataTypeDefinition.CATEGORY, ValueType.NODEREF);
-        valueTypesByPropertyType.put(DataTypeDefinition.CONTENT, ValueType.CONTENT);
+        valueTypesByPropertyType.put(DataTypeDefinition.CONTENT, ValueType.CONTENT_DATA_ID);
         valueTypesByPropertyType.put(DataTypeDefinition.TEXT, ValueType.STRING);
         valueTypesByPropertyType.put(DataTypeDefinition.MLTEXT, ValueType.MLTEXT);
         valueTypesByPropertyType.put(DataTypeDefinition.NODE_REF, ValueType.NODEREF);
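
A small sketch of what the new value type means in practice (the ID is hypothetical): a d:content property is now persisted as the alf_content_data row ID rather than as a serialized ContentData string, and both the LONG and CONTENT_DATA_ID converters unwrap the wrapper to that Long.

    // The property value carries only the alf_content_data reference
    Serializable propertyValue = new ContentDataId(42L);
    Long persistedValue = ((ContentDataId) propertyValue).getId();  // 42 goes into the long column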

View File

@@ -1,4 +1,4 @@
/*
 * Copyright (C) 2005-2010 Alfresco Software Limited.
 *
 * This file is part of Alfresco
@@ -14,60 +14,60 @@
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 */
package org.alfresco.repo.domain.contentdata;

import java.io.Serializable;
import java.util.Locale;
import java.util.Set;

import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.cache.lookup.EntityLookupCache;
import org.alfresco.repo.cache.lookup.EntityLookupCache.EntityLookupCallbackDAOAdaptor;
import org.alfresco.repo.content.cleanup.EagerContentStoreCleaner;
import org.alfresco.repo.domain.LocaleDAO;
import org.alfresco.repo.domain.encoding.EncodingDAO;
import org.alfresco.repo.domain.mimetype.MimetypeDAO;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.transaction.TransactionListenerAdapter;
import org.alfresco.repo.transaction.TransactionalResourceHelper;
import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.util.EqualsHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.extensions.surf.util.Pair;

/**
 * Abstract implementation for ContentData DAO.
 * <p>
 * This provides basic services such as caching, but defers to the underlying implementation
 * for CRUD operations.
 * <p>
 * The DAO deals in {@link ContentData} instances.  The cache is primarily present to decode
 * IDs into <code>ContentData</code> instances.
 *
 * @author Derek Hulley
 * @since 3.2
 */
public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
{
    private static final String CACHE_REGION_CONTENT_DATA = "ContentData";

    /**
     * Content URL IDs to delete before final commit.
     */
    private static final String KEY_PRE_COMMIT_CONTENT_URL_DELETIONS = "AbstractContentDataDAOImpl.PreCommitContentUrlDeletions";

    private static Log logger = LogFactory.getLog(AbstractContentDataDAOImpl.class);

    private final ContentDataCallbackDAO contentDataCallbackDAO;
    private MimetypeDAO mimetypeDAO;
    private EncodingDAO encodingDAO;
    private LocaleDAO localeDAO;
    private EagerContentStoreCleaner contentStoreCleaner;

    /**
     * Cache for the ContentData class:<br/>
     * KEY: ID<br/>
@@ -85,98 +85,98 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
        this.contentDataCache = new EntityLookupCache<Long, ContentData, Serializable>(contentDataCallbackDAO);
    }

    public void setMimetypeDAO(MimetypeDAO mimetypeDAO)
    {
        this.mimetypeDAO = mimetypeDAO;
    }

    public void setEncodingDAO(EncodingDAO encodingDAO)
    {
        this.encodingDAO = encodingDAO;
    }

    public void setLocaleDAO(LocaleDAO localeDAO)
    {
        this.localeDAO = localeDAO;
    }

    /**
     * Set this property to enable eager cleanup of orphaned content.
     *
     * @param contentStoreCleaner an eager cleaner (may be <tt>null</tt>)
     */
    public void setContentStoreCleaner(EagerContentStoreCleaner contentStoreCleaner)
    {
        this.contentStoreCleaner = contentStoreCleaner;
    }

    /**
     * @param contentDataCache the cache of IDs to ContentData and vice versa
     */
    public void setContentDataCache(SimpleCache<Long, ContentData> contentDataCache)
    {
        this.contentDataCache = new EntityLookupCache<Long, ContentData, Serializable>(
                contentDataCache,
                CACHE_REGION_CONTENT_DATA,
                contentDataCallbackDAO);
    }

    /**
     * Register new content for post-rollback handling
     */
    protected void registerNewContentUrl(String contentUrl)
    {
        contentStoreCleaner.registerNewContentUrl(contentUrl);
    }

    /**
     * A <b>content_url</b> entity was dereferenced.  This makes no assumptions about the
     * current references - dereference deletion is handled in the commit phase.
     */
    protected void registerDereferencedContentUrl(String contentUrl)
    {
        Set<String> contentUrls = TransactionalResourceHelper.getSet(KEY_PRE_COMMIT_CONTENT_URL_DELETIONS);
        if (contentUrls.size() == 0)
        {
            ContentUrlDeleteTransactionListener listener = new ContentUrlDeleteTransactionListener();
            AlfrescoTransactionSupport.bindListener(listener);
        }
        contentUrls.add(contentUrl);
    }

    /**
     * {@inheritDoc}
     */
    public Pair<Long, ContentData> createContentData(ContentData contentData)
    {
        if (contentData == null)
        {
            throw new IllegalArgumentException("ContentData values cannot be null");
        }
        Pair<Long, ContentData> entityPair = contentDataCache.getOrCreateByValue(contentData);
        return entityPair;
    }

    /**
     * {@inheritDoc}
     */
    public Pair<Long, ContentData> getContentData(Long id)
    {
        if (id == null)
        {
            throw new IllegalArgumentException("Cannot look up ContentData by null ID.");
        }
        Pair<Long, ContentData> entityPair = contentDataCache.getByKey(id);
        if (entityPair == null)
        {
            throw new DataIntegrityViolationException("No ContentData value exists for ID " + id);
        }
        return entityPair;
    }

    /**
     * {@inheritDoc}
     */
    public void updateContentData(Long id, ContentData contentData)
    {
        if (id == null)
@@ -197,21 +197,21 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
    /**
     * {@inheritDoc}
     */
    public void deleteContentData(Long id)
    {
        if (id == null)
        {
            throw new IllegalArgumentException("Cannot delete ContentData by null ID.");
        }
        int deleted = contentDataCache.deleteByKey(id);
        if (deleted < 1)
        {
            throw new ConcurrencyFailureException("ContentData with ID " + id + " no longer exists");
        }
        return;
    }

    /**
     * Callback for <b>alf_content_data</b> DAO.
     */
    private class ContentDataCallbackDAO extends EntityLookupCallbackDAOAdaptor<Long, ContentData, Serializable>
@@ -254,83 +254,83 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
    }

    /**
     * Translates this instance into an externally-usable <code>ContentData</code> instance.
     */
    private ContentData makeContentData(ContentDataEntity contentDataEntity)
    {
        // Decode content URL
        String contentUrl = contentDataEntity.getContentUrl();
        long size = contentDataEntity.getSize() == null ? 0L : contentDataEntity.getSize().longValue();
        // Decode mimetype
        Long mimetypeId = contentDataEntity.getMimetypeId();
        String mimetype = null;
        if (mimetypeId != null)
        {
            mimetype = mimetypeDAO.getMimetype(mimetypeId).getSecond();
        }
        // Decode encoding
        Long encodingId = contentDataEntity.getEncodingId();
        String encoding = null;
        if (encodingId != null)
        {
            encoding = encodingDAO.getEncoding(encodingId).getSecond();
        }
        // Decode locale
        Long localeId = contentDataEntity.getLocaleId();
        Locale locale = null;
        if (localeId != null)
        {
            locale = localeDAO.getLocalePair(localeId).getSecond();
        }
        // Build the ContentData
        ContentData contentData = new ContentData(contentUrl, mimetype, size, encoding, locale);
        // Done
        return contentData;
    }

    /**
     * Translates the {@link ContentData} into persistable values using the helper DAOs
     */
    private ContentDataEntity createContentDataEntity(ContentData contentData)
    {
        // Resolve the content URL
        Long contentUrlId = null;
        String contentUrl = contentData.getContentUrl();
        long size = contentData.getSize();
        if (contentUrl != null)
        {
            // We must find or create the ContentUrlEntity
            contentUrlId = getOrCreateContentUrlEntity(contentUrl, size).getId();
        }
        // Resolve the mimetype
        Long mimetypeId = null;
        String mimetype = contentData.getMimetype();
        if (mimetype != null)
        {
            mimetypeId = mimetypeDAO.getOrCreateMimetype(mimetype).getFirst();
        }
        // Resolve the encoding
        Long encodingId = null;
        String encoding = contentData.getEncoding();
        if (encoding != null)
        {
            encodingId = encodingDAO.getOrCreateEncoding(encoding).getFirst();
        }
        // Resolve the locale
        Long localeId = null;
        Locale locale = contentData.getLocale();
        if (locale != null)
        {
            localeId = localeDAO.getOrCreateLocalePair(locale).getFirst();
        }
        // Create ContentDataEntity
        ContentDataEntity contentDataEntity = createContentDataEntity(contentUrlId, mimetypeId, encodingId, localeId);
        // Done
        return contentDataEntity;
    }

    /**
     * Translates the {@link ContentData} into persistable values using the helper DAOs
     */
    private int updateContentDataEntity(ContentDataEntity contentDataEntity, ContentData contentData)
@@ -387,83 +387,90 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
     }

     /**
-     * Caching method that creates an entity for <b>content_url_entity</b>.
+     * Method to create (or get an existing) content URL.  The URL will be unorphaned
+     * whether it has been created or is being re-used.
      */
     private ContentUrlEntity getOrCreateContentUrlEntity(String contentUrl, long size)
     {
         // Create the content URL entity
         ContentUrlEntity contentUrlEntity = getContentUrlEntity(contentUrl);
         // If it exists, then we can just re-use it, but check that the size is consistent
         if (contentUrlEntity != null)
         {
             // Reuse it
             long existingSize = contentUrlEntity.getSize();
             if (size != existingSize)
             {
                 logger.warn(
                         "Re-using Content URL, but size is mismatched: \n" +
                         "   Inbound: " + contentUrl + "\n" +
                         "   Existing: " + contentUrlEntity);
             }
+            // Check orphan state
+            if (contentUrlEntity.getOrphanTime() != null)
+            {
+                Long id = contentUrlEntity.getId();
+                updateContentUrlOrphanTime(id, null);
+            }
         }
         else
         {
             // Create it
             contentUrlEntity = createContentUrlEntity(contentUrl, size);
         }
         // Done
         return contentUrlEntity;
     }

     /**
      * @param contentUrl the content URL to create or search for
      */
     protected abstract ContentUrlEntity createContentUrlEntity(String contentUrl, long size);

     /**
      * @param id the ID of the <b>content url</b> entity
      * @return Return the entity or <tt>null</tt> if it doesn't exist
      */
     protected abstract ContentUrlEntity getContentUrlEntity(Long id);

     /**
      * @param contentUrl the URL of the <b>content url</b> entity
      * @return Return the entity or <tt>null</tt> if it doesn't exist
      */
     protected abstract ContentUrlEntity getContentUrlEntity(String contentUrl);

     /**
      * @param contentUrl the URL of the <b>content url</b> entity
      * @return Return the entity or <tt>null</tt> if it doesn't exist or is still
      *         referenced by a <b>content_data</b> entity
      */
     protected abstract ContentUrlEntity getContentUrlEntityUnreferenced(String contentUrl);

     /**
      * Update a content URL with the given orphan time
      *
      * @param id the unique ID of the entity
      * @param orphanTime the time (ms since epoch) that the entity was orphaned
      * @return Returns the number of rows updated
      */
-    protected abstract int updateContentUrlOrphanTime(Long id, long orphanTime);
+    protected abstract int updateContentUrlOrphanTime(Long id, Long orphanTime);

     /**
      * Create the row for the <b>alf_content_data</b>
      */
     protected abstract ContentDataEntity createContentDataEntity(
             Long contentUrlId,
             Long mimetypeId,
             Long encodingId,
             Long localeId);

     /**
      * @param id the entity ID
      * @return Returns the entity or <tt>null</tt> if it doesn't exist
      */
     protected abstract ContentDataEntity getContentDataEntity(Long id);

     /**
      * Update an existing <b>alf_content_data</b> entity
      *
      * @param entity the existing entity that will be updated
@@ -472,44 +479,44 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
     protected abstract int updateContentDataEntity(ContentDataEntity entity);

     /**
      * Delete the entity with the given ID
      *
      * @return Returns the number of rows deleted
      */
     protected abstract int deleteContentDataEntity(Long id);

     /**
      * Transactional listener that deletes unreferenced <b>content_url</b> entities.
      *
      * @author Derek Hulley
      */
     public class ContentUrlDeleteTransactionListener extends TransactionListenerAdapter
     {
         @Override
         public void beforeCommit(boolean readOnly)
         {
             // Ignore read-only
             if (readOnly)
             {
                 return;
             }
             Set<String> contentUrls = TransactionalResourceHelper.getSet(KEY_PRE_COMMIT_CONTENT_URL_DELETIONS);
             long orphanTime = System.currentTimeMillis();
             for (String contentUrl : contentUrls)
             {
                 ContentUrlEntity contentUrlEntity = getContentUrlEntityUnreferenced(contentUrl);
                 if (contentUrlEntity == null)
                 {
                     // It is still referenced, so ignore it
                     continue;
                 }
                 // We mark the URL as orphaned.
                 Long contentUrlId = contentUrlEntity.getId();
                 updateContentUrlOrphanTime(contentUrlId, orphanTime);
                 // Pop this in the queue for deletion from the content store
                 contentStoreCleaner.registerOrphanedContentUrl(contentUrl);
             }
             contentUrls.clear();
         }
     }
 }
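
Putting the pieces above together, the orphan lifecycle within a transaction runs roughly as follows (a sketch using the method names declared in this class, not new behaviour):

    // 1. updateContentData(...)/deleteContentData(...) dereferences a content URL:
    //        registerDereferencedContentUrl(contentUrl);   // binds the listener on first use
    // 2. At beforeCommit, URLs that are still unreferenced get an orphan stamp:
    //        updateContentUrlOrphanTime(contentUrlId, System.currentTimeMillis());
    // 3. The eager cleaner is notified so the binary can be removed immediately
    //    if eager cleanup is enabled:
    //        contentStoreCleaner.registerOrphanedContentUrl(contentUrl);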

View File

@@ -1,4 +1,4 @@
/*
 * Copyright (C) 2005-2010 Alfresco Software Limited.
 *
 * This file is part of Alfresco
@@ -14,33 +14,34 @@
  * GNU Lesser General Public License for more details.
  *
  * You should have received a copy of the GNU Lesser General Public License
  * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
  */
 package org.alfresco.repo.domain.contentdata;

 import java.util.List;
 import java.util.Set;

 import org.alfresco.error.AlfrescoRuntimeException;
 import org.alfresco.service.cmr.repository.ContentData;
 import org.springframework.extensions.surf.util.Pair;
 import org.springframework.dao.ConcurrencyFailureException;
+import org.springframework.dao.DataIntegrityViolationException;

 /**
  * DAO services for <b>alf_content_data</b> table
  *
  * @author Derek Hulley
  * @since 3.2
  */
 public interface ContentDataDAO
 {
     /**
      * Create a new ContentData instance.
      *
      * @param contentData the ContentData details
      * @return the ContentData pair (id, ContentData) (never null)
      */
     Pair<Long, ContentData> createContentData(ContentData contentData);

     /**
      * Update a content data instance
@@ -49,46 +50,55 @@ public interface ContentDataDAO
      * @param contentData the new data
      */
     void updateContentData(Long id, ContentData contentData);

+    /**
+     * Creates an immediately-orphaned content URL, if possible
+     *
+     * @param contentUrl the URL to create if it doesn't exist
+     * @return Returns the ID-URL pair
+     * @throws DataIntegrityViolationException if the URL already exists
+     */
+    Pair<Long, String> createContentUrlOrphaned(String contentUrl);
+
     /**
      * @param id the unique ID of the entity
      * @return the ContentData pair (id, ContentData) or <tt>null</tt> if it doesn't exist
      * @throws AlfrescoRuntimeException if the ID provided is invalid
      */
     Pair<Long, ContentData> getContentData(Long id);

     /**
      * Delete an instance of content.
      * @param id the unique ID of the entity
      * @throws ConcurrencyFailureException if the ID does not exist
      */
     void deleteContentData(Long id);

     /**
      * Deletes all <b>alf_content_data</b> rows that are referenced by the given node
      *
      * @param nodeId the node ID
      * @param qnameIds the content properties to target
      */
     void deleteContentDataForNode(Long nodeId, Set<Long> qnameIds);

     /**
      * Interface for callbacks during content URL enumeration
      *
      * @author Derek Hulley
      * @since 3.2
      */
     public static interface ContentUrlHandler
     {
         void handle(Long id, String contentUrl, Long orphanTime);
     }

     /**
      * Enumerate all available content URLs that were orphaned on or before the given time
      *
      * @param contentUrlHandler the callback object to process the rows
      * @param maxOrphanTime the maximum orphan time
      */
     void getContentUrlsOrphaned(ContentUrlHandler contentUrlHandler, long maxOrphanTime);

     /**
@@ -104,4 +114,4 @@ public interface ContentDataDAO
     * Delete a batch of content URL entities.
     */
    int deleteContentUrls(List<Long> ids);
}
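
A brief usage sketch for the new method (the DAO reference and URL below are illustrative): it suits the ALF-726 migration case where a binary exists in a store but was never referenced by alf_content_data.

    // Record a URL as orphaned from the moment it is created, so the
    // lazy cleanup job can find it later via getContentUrlsOrphaned(...)
    Pair<Long, String> urlPair = contentDataDAO.createContentUrlOrphaned(
            "store://2010/3/12/0/0/example.bin");
    Long contentUrlId = urlPair.getFirst();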

View File

@@ -18,16 +18,21 @@
  */
 package org.alfresco.repo.domain.contentdata.ibatis;

+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;

+import org.alfresco.error.AlfrescoRuntimeException;
+import org.alfresco.ibatis.IdsEntity;
 import org.alfresco.repo.domain.contentdata.AbstractContentDataDAOImpl;
 import org.alfresco.repo.domain.contentdata.ContentDataEntity;
 import org.alfresco.repo.domain.contentdata.ContentUrlEntity;
 import org.alfresco.service.cmr.repository.ContentData;
+import org.springframework.dao.ConcurrencyFailureException;
 import org.springframework.dao.DataIntegrityViolationException;
+import org.springframework.extensions.surf.util.Pair;
 import org.springframework.orm.ibatis.SqlMapClientTemplate;

 import com.ibatis.sqlmap.client.event.RowHandler;
@@ -60,6 +65,17 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
         this.template = sqlMapClientTemplate;
     }

+    public Pair<Long, String> createContentUrlOrphaned(String contentUrl)
+    {
+        ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
+        contentUrlEntity.setContentUrl(contentUrl);
+        contentUrlEntity.setSize(0L);
+        contentUrlEntity.setOrphanTime(System.currentTimeMillis());
+        Long id = (Long) template.insert(INSERT_CONTENT_URL, contentUrlEntity);
+        // Done
+        return new Pair<Long, String>(id, contentUrl);
+    }
+
     @Override
     protected ContentUrlEntity createContentUrlEntity(String contentUrl, long size)
     {
@@ -135,7 +151,7 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
         }
     }

-    public int updateContentUrlOrphanTime(Long id, long orphanTime)
+    public int updateContentUrlOrphanTime(Long id, Long orphanTime)
     {
         ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
         contentUrlEntity.setId(id);
@@ -178,7 +194,14 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
         contentDataEntity.setMimetypeId(mimetypeId);
         contentDataEntity.setEncodingId(encodingId);
         contentDataEntity.setLocaleId(localeId);
-        template.insert(INSERT_CONTENT_DATA, contentDataEntity);
+        try
+        {
+            template.insert(INSERT_CONTENT_DATA, contentDataEntity);
+        }
+        catch (Throwable e)
+        {
+            throw new AlfrescoRuntimeException("Failed to insert ContentData: " + contentDataEntity, e);
+        }
         // Done
         return contentDataEntity;
     }
@@ -226,23 +249,30 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
     public void deleteContentDataForNode(Long nodeId, Set<Long> qnameIds)
     {
-        /*
-         * TODO: use IN clause in parameters
-         */
-        for (Long qnameId : qnameIds)
+        if (qnameIds.size() == 0)
+        {
+            // There will be no results
+            return;
+        }
+        IdsEntity idsEntity = new IdsEntity();
+        idsEntity.setIdOne(nodeId);
+        idsEntity.setIds(new ArrayList<Long>(qnameIds));
+        @SuppressWarnings("unchecked")
+        List<Long> ids = (List<Long>) template.queryForList(SELECT_CONTENT_DATA_BY_NODE_AND_QNAME, idsEntity);
+        // Delete each one
+        for (Long id : ids)
         {
-            // Get the ContentData that matches (may be multiple due to collection properties)
-            Map<String, Object> params = new HashMap<String, Object>(11);
-            params.put("nodeId", nodeId);
-            params.put("qnameId", qnameId);
-            @SuppressWarnings("unchecked")
-            List<Long> ids = (List<Long>) template.queryForList(SELECT_CONTENT_DATA_BY_NODE_AND_QNAME, params);
-            // Delete each one
-            for (Long id : ids)
+            try
             {
                 // Delete the ContentData entity
                 deleteContentData(id);
             }
+            catch (ConcurrencyFailureException e)
+            {
+                // The DB may return results even though the row has just been
+                // deleted.  Since we are deleting the row, it doesn't matter
+                // if it is deleted here or not.
+            }
         }
     }
 }
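
The shape of the IdsEntity parameter object that makes the single IN-clause query possible (the ID values are hypothetical; the setters are the ones used above):

    // One scalar (the node ID) plus one list (the QName IDs) in a single parameter
    IdsEntity params = new IdsEntity();
    params.setIdOne(12345L);                                        // node_id = ?
    params.setIds(new ArrayList<Long>(Arrays.asList(601L, 602L)));  // qname_id IN (?, ?)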

View File

@@ -19,8 +19,12 @@
 package org.alfresco.repo.domain.patch;

 import java.util.List;
+import java.util.Map;

+import org.alfresco.ibatis.BatchingDAO;
 import org.alfresco.repo.domain.avm.AVMNodeEntity;
+import org.alfresco.repo.domain.contentdata.ContentDataDAO;
+import org.alfresco.service.cmr.repository.ContentData;

 /**
@@ -31,8 +35,25 @@ import org.alfresco.repo.domain.avm.AVMNodeEntity;
  * @author janv
  * @since 3.2
  */
-public abstract class AbstractPatchDAOImpl implements PatchDAO
+public abstract class AbstractPatchDAOImpl implements PatchDAO, BatchingDAO
 {
+    private ContentDataDAO contentDataDAO;
+
+    protected AbstractPatchDAOImpl()
+    {
+    }
+
+    /**
+     * Set the DAO that supplies {@link ContentData} IDs
+     */
+    public void setContentDataDAO(ContentDataDAO contentDataDAO)
+    {
+        this.contentDataDAO = contentDataDAO;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
     public Long getAVMNodesCountWhereNewInStore()
     {
         return getAVMNodeEntitiesCountWhereNewInStore();
@@ -63,4 +84,91 @@ public abstract class AbstractPatchDAOImpl implements PatchDAO
     protected abstract List<AVMNodeEntity> getNullVersionLayeredDirectoryNodeEntities();

     protected abstract List<AVMNodeEntity> getNullVersionLayeredFileNodeEntities();
+
+    /**
+     * {@inheritDoc}
+     * <p>
+     * @see #getAdmOldContentProperties(Long, Long)
+     */
+    public void updateAdmV31ContentProperties(Long minNodeId, Long maxNodeId)
+    {
+        List<Map<String, Object>> props = getAdmOldContentProperties(minNodeId, maxNodeId);
+        // Do a first pass to create the ContentData IDs
+        for (Map<String, Object> prop : props)
+        {
+            String stringValue = (String) prop.get("stringValue");
+            try
+            {
+                ContentData contentData = ContentData.createContentProperty(stringValue);
+                Long contentDataId = contentDataDAO.createContentData(contentData).getFirst();
+                prop.put("contentDataId", contentDataId);
+            }
+            catch (Throwable e)
+            {
+                // We don't care about this too much as it'll just leak a binary
+            }
+        }
+        // Now do the updates in the context of a batch
+        try
+        {
+            // Run using a batch
+            startBatch();
+            for (Map<String, Object> prop : props)
+            {
+                Long nodeId = (Long) prop.get("nodeId");
+                Long qnameId = (Long) prop.get("qnameId");
+                Integer listIndex = (Integer) prop.get("listIndex");
+                Long localeId = (Long) prop.get("localeId");
+                Long contentDataId = (Long) prop.get("contentDataId");
+                if (contentDataId == null)
+                {
+                    // There was a problem with this
+                    continue;
+                }
+                // Update
+                updateAdmOldContentProperty(nodeId, qnameId, listIndex, localeId, contentDataId);
+            }
+        }
+        finally
+        {
+            executeBatch();
+        }
+    }
+
+    /**
+     * Results are of the form:
+     * <pre>
+     *     nodeId: java.lang.Long
+     *     qnameId: java.lang.Long
+     *     listIndex: java.lang.Integer
+     *     localeId: java.lang.Long
+     *     stringValue: java.lang.String
+     * </pre>
+     *
+     * @param minNodeId inclusive lower bound for Node ID
+     * @param maxNodeId exclusive upper bound for Node ID
+     * @return Returns a map of query results
+     */
+    protected abstract List<Map<String, Object>> getAdmOldContentProperties(Long minNodeId, Long maxNodeId);
+
+    /**
+     * @param nodeId part of the unique key
+     * @param qnameId part of the unique key
+     * @param listIndex part of the unique key
+     * @param localeId part of the unique key
+     * @param longValue the new ContentData ID
+     */
+    protected abstract void updateAdmOldContentProperty(
+            Long nodeId,
+            Long qnameId,
+            Integer listIndex,
+            Long localeId,
+            Long longValue);
 }

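The first pass above relies on ContentData.createContentProperty(String) to parse the pre-3.2 string form of a content property before a row is created for it via the ContentDataDAO. A hedged illustration of the kind of value being migrated; the content URL and attribute values below are made up for the example:

    import org.alfresco.service.cmr.repository.ContentData;

    public class ContentUrlMigrationExample
    {
        public static void main(String[] args)
        {
            // Hypothetical pre-3.2 value as stored in alf_node_properties.string_value;
            // the store URL, size and locale are illustrative only.
            String oldValue = "contentUrl=store://2009/1/1/12/0/abc.bin"
                    + "|mimetype=text/plain|size=1024|encoding=UTF-8|locale=en_GB_";
            ContentData contentData = ContentData.createContentProperty(oldValue);
            System.out.println(contentData.getMimetype());   // text/plain
        }
    }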

@@ -21,12 +21,14 @@ package org.alfresco.repo.domain.patch;
 import java.util.List;
 
 import org.alfresco.repo.domain.avm.AVMNodeEntity;
+import org.alfresco.repo.domain.contentdata.ContentDataDAO;
+import org.alfresco.service.cmr.repository.ContentData;
 
 /**
  * Additional DAO services for patches
  *
  * @author janv
+ * @author Derek Hulley
  * @since 3.2
  */
 public interface PatchDAO
@@ -40,4 +42,21 @@ public interface PatchDAO
     public List<AVMNodeEntity> getNullVersionLayeredDirectories(int count);
 
     public List<AVMNodeEntity> getNullVersionLayeredFiles(int count);
+
+    public Long getMaxAvmNodeID();
+
+    public List<Long> getAvmNodesWithOldContentProperties(Long minNodeId, Long maxNodeId);
+
+    // DM-related
+
+    public Long getMaxAdmNodeID();
+
+    /**
+     * Migrates DM content properties from the old V3.1 format (String-based {@link ContentData#toString()})
+     * to the new V3.2 format (ID-based storage using {@link ContentDataDAO}).
+     *
+     * @param minNodeId     the inclusive node ID to limit the updates to
+     * @param maxNodeId     the exclusive node ID to limit the updates to
+     */
+    public void updateAdmV31ContentProperties(Long minNodeId, Long maxNodeId);
 }

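Taken together with getMaxAdmNodeID(), this interface suggests the migration is driven in fixed-size node ID ranges, which is what makes the multithreading mentioned in the commit message (ALF-726) possible. A minimal single-threaded sketch of such a driver; the class name, batch size, and the absence of transaction and job-lock handling are simplifications for illustration:

    import org.alfresco.repo.domain.patch.PatchDAO;

    public class AdmContentMigrationDriverExample
    {
        // Hypothetical driver walking the node ID space in blocks.
        public static void migrate(PatchDAO patchDAO)
        {
            long maxNodeId = patchDAO.getMaxAdmNodeID();
            long batchSize = 1000L;    // illustrative value only
            for (long minNodeId = 0; minNodeId <= maxNodeId; minNodeId += batchSize)
            {
                // Bounds are [min inclusive, max exclusive), per the javadoc above
                patchDAO.updateAdmV31ContentProperties(minNodeId, minNodeId + batchSize);
            }
        }
    }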

@@ -18,8 +18,12 @@
  */
 package org.alfresco.repo.domain.patch.ibatis;
 
+import java.sql.SQLException;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
+import org.alfresco.ibatis.IdsEntity;
 import org.alfresco.repo.domain.avm.AVMNodeEntity;
 import org.alfresco.repo.domain.patch.AbstractPatchDAOImpl;
 import org.springframework.orm.ibatis.SqlMapClientTemplate;
@@ -36,6 +40,11 @@ public class PatchDAOImpl extends AbstractPatchDAOImpl
     private static final String SELECT_AVM_NODE_ENTITIES_WITH_EMPTY_GUID = "alfresco.avm.select_AVMNodesWithEmptyGUID";
     private static final String SELECT_AVM_LD_NODE_ENTITIES_NULL_VERSION = "alfresco.avm.select_AVMNodes_nullVersionLayeredDirectories";
     private static final String SELECT_AVM_LF_NODE_ENTITIES_NULL_VERSION = "alfresco.avm.select_AVMNodes_nullVersionLayeredFiles";
+    private static final String SELECT_AVM_MAX_NODE_ID = "alfresco.patch.select_avmMaxNodeId";
+    private static final String SELECT_ADM_MAX_NODE_ID = "alfresco.patch.select_admMaxNodeId";
+    private static final String SELECT_AVM_NODES_WITH_OLD_CONTENT_PROPERTIES = "alfresco.patch.select_avmNodesWithOldContentProperties";
+    private static final String SELECT_ADM_OLD_CONTENT_PROPERTIES = "alfresco.patch.select_admOldContentProperties";
+    private static final String UPDATE_ADM_OLD_CONTENT_PROPERTY = "alfresco.patch.update_admOldContentProperty";
 
     private SqlMapClientTemplate template;
@@ -44,6 +53,30 @@ public class PatchDAOImpl extends AbstractPatchDAOImpl
         this.template = sqlMapClientTemplate;
     }
 
+    public void startBatch()
+    {
+        try
+        {
+            template.getSqlMapClient().startBatch();
+        }
+        catch (SQLException e)
+        {
+            throw new RuntimeException("Failed to start batch", e);
+        }
+    }
+
+    public void executeBatch()
+    {
+        try
+        {
+            template.getSqlMapClient().executeBatch();
+        }
+        catch (SQLException e)
+        {
+            throw new RuntimeException("Failed to execute batch", e);
+        }
+    }
+
     @Override
     protected Long getAVMNodeEntitiesCountWhereNewInStore()
     {
@@ -70,4 +103,45 @@ public class PatchDAOImpl extends AbstractPatchDAOImpl
     {
         return (List<AVMNodeEntity>) template.queryForList(SELECT_AVM_LF_NODE_ENTITIES_NULL_VERSION);
     }
+
+    public Long getMaxAvmNodeID()
+    {
+        return (Long) template.queryForObject(SELECT_AVM_MAX_NODE_ID);
+    }
+
+    @SuppressWarnings("unchecked")
+    public List<Long> getAvmNodesWithOldContentProperties(Long minNodeId, Long maxNodeId)
+    {
+        IdsEntity ids = new IdsEntity();
+        ids.setIdOne(minNodeId);
+        ids.setIdTwo(maxNodeId);
+        return (List<Long>) template.queryForList(SELECT_AVM_NODES_WITH_OLD_CONTENT_PROPERTIES, ids);
+    }
+
+    public Long getMaxAdmNodeID()
+    {
+        return (Long) template.queryForObject(SELECT_ADM_MAX_NODE_ID);
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    protected List<Map<String, Object>> getAdmOldContentProperties(Long minNodeId, Long maxNodeId)
+    {
+        IdsEntity ids = new IdsEntity();
+        ids.setIdOne(minNodeId);
+        ids.setIdTwo(maxNodeId);
+        return (List<Map<String, Object>>) template.queryForList(SELECT_ADM_OLD_CONTENT_PROPERTIES, ids);
+    }
+
+    @Override
+    protected void updateAdmOldContentProperty(Long nodeId, Long qnameId, Integer listIndex, Long localeId, Long longValue)
+    {
+        Map<String, Object> params = new HashMap<String, Object>(11);
+        params.put("nodeId", nodeId);
+        params.put("qnameId", qnameId);
+        params.put("listIndex", listIndex);
+        params.put("localeId", localeId);
+        params.put("longValue", longValue);
+        template.update(UPDATE_ADM_OLD_CONTENT_PROPERTY, params);
+    }
 }

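startBatch() and executeBatch() above implement the BatchingDAO contract that AbstractPatchDAOImpl now declares. The interface source is not included in this diff; from the usage here it is presumably no more than the following sketch, inferred from this change set rather than taken from the actual file:

    package org.alfresco.ibatis;

    // Assumed shape of the batching contract.
    public interface BatchingDAO
    {
        void startBatch();      // open an iBatis statement batch
        void executeBatch();    // flush the batch; called from a finally block
    }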

@@ -46,6 +46,7 @@ import org.alfresco.model.ContentModel;
 import org.alfresco.repo.cache.SimpleCache;
 import org.alfresco.repo.domain.AuditableProperties;
 import org.alfresco.repo.domain.ChildAssoc;
+import org.alfresco.repo.domain.ContentDataId;
 import org.alfresco.repo.domain.DbAccessControlList;
 import org.alfresco.repo.domain.LocaleDAO;
 import org.alfresco.repo.domain.Node;
@@ -4986,11 +4987,13 @@ public class HibernateNodeDaoServiceImpl
                         "   Value: " + value);
             }
             // Handle ContentData
-            if (value instanceof ContentData && propertyTypeQName.equals(DataTypeDefinition.CONTENT))
+            // We used to check the property type, but we now handle d:any ContentData as well
+            if (value instanceof ContentData)
             {
                 // Needs converting to an ID
                 ContentData contentData = (ContentData) value;
-                value = contentDataDAO.createContentData(contentData).getFirst();
+                Long contentDataId = contentDataDAO.createContentData(contentData).getFirst();
+                value = new ContentDataId(contentDataId);
             }
             // Handle MLText
             if (value instanceof MLText)
@@ -5374,8 +5377,24 @@ public class HibernateNodeDaoServiceImpl
         {
             Serializable value = propertyValue.getValue(propertyTypeQName);
             // Handle conversions to and from ContentData
-            if (propertyTypeQName.equals(DataTypeDefinition.CONTENT) && (value instanceof Long))
+            if (value instanceof ContentDataId)
             {
+                // ContentData used to be persisted
+                Long contentDataId = ((ContentDataId) value).getId();
+                Pair<Long, ContentData> contentDataPair = contentDataDAO.getContentData(contentDataId);
+                if (contentDataPair == null)
+                {
+                    // It is invalid
+                    value = null;
+                }
+                else
+                {
+                    value = contentDataPair.getSecond();
+                }
+            }
+            else if (propertyTypeQName.equals(DataTypeDefinition.CONTENT) && (value instanceof Long))
+            {
                 // ContentData used to be persisted
                 Pair<Long, ContentData> contentDataPair = contentDataDAO.getContentData((Long)value);
                 if (contentDataPair == null)
                 {

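ContentDataId is the small wrapper that lets the persistence layer distinguish "a Long that references a row in the content data table" from a plain Long property value when converting in both directions above. The class itself is not in this diff; a minimal sketch of the assumed shape, inferred from the new ContentDataId(contentDataId) and ((ContentDataId) value).getId() call sites (it would also need to be Serializable to pass through property values):

    package org.alfresco.repo.domain;

    import java.io.Serializable;

    // Assumed minimal ID wrapper; not taken from the actual source file.
    public class ContentDataId implements Serializable
    {
        private static final long serialVersionUID = 1L;

        private final Long id;

        public ContentDataId(Long id)
        {
            this.id = id;
        }

        public Long getId()
        {
            return id;
        }
    }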

@@ -289,24 +289,22 @@ public class AVMFullIndexRecoveryComponent extends AbstractReindexComponent
             logger.info("    Rebuilding index for " + store);
         }
 
+        if (!avmSnapShotTriggeredIndexingMethodInterceptor.hasIndexBeenCreated(store))
+        {
+            avmSnapShotTriggeredIndexingMethodInterceptor.createIndex(store);
+        }
         final int latest = avmService.getLatestSnapshotID(store);
         if (latest <= 0)
         {
-            if (!avmSnapShotTriggeredIndexingMethodInterceptor.hasIndexBeenCreated(store))
-            {
-                avmSnapShotTriggeredIndexingMethodInterceptor.createIndex(store);
-            }
             return;
         }
 
         final int latestIndexed = avmSnapShotTriggeredIndexingMethodInterceptor.getLastIndexedSnapshot(store);
 
         RetryingTransactionCallback<Object> reindexWork = new RetryingTransactionCallback<Object>()
         {
             public Object execute() throws Exception
             {
                 if (mode == RecoveryMode.AUTO)
                 {
                     logger.info("    Rebuilding index for snapshots " + latestIndexed +" to "+latest);


@@ -38,7 +38,7 @@ import org.alfresco.repo.attributes.Attribute;
 import org.alfresco.repo.attributes.LongAttributeValue;
 import org.alfresco.repo.attributes.MapAttributeValue;
 import org.alfresco.repo.batch.BatchProcessor;
-import org.alfresco.repo.batch.BatchProcessor.Worker;
+import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorker;
 import org.alfresco.repo.lock.JobLockService;
 import org.alfresco.repo.lock.LockAcquisitionException;
 import org.alfresco.repo.management.subsystems.ActivateableBean;
@@ -538,10 +538,14 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl
         // First, analyze the group structure. Create maps of authorities to their parents for associations to create
         // and delete. Also deal with 'overlaps' with other zones in the authentication chain.
         final BatchProcessor<NodeDescription> groupProcessor = new BatchProcessor<NodeDescription>(
-                ChainingUserRegistrySynchronizer.logger, this.retryingTransactionHelper, this.ruleService,
-                this.applicationEventPublisher, userRegistry.getGroups(lastModified), zone + " Group Analysis",
-                this.loggingInterval, this.workerThreads, 20);
-        class Analyzer implements Worker<NodeDescription>
+                zone + " Group Analysis",
+                this.retryingTransactionHelper,
+                userRegistry.getGroups(lastModified),
+                this.workerThreads, 20,
+                this.applicationEventPublisher,
+                ChainingUserRegistrySynchronizer.logger,
+                this.loggingInterval);
+        class Analyzer implements BatchProcessWorker<NodeDescription>
         {
             private final Set<String> allZoneAuthorities = new TreeSet<String>();
             private final Map<String, String> groupsToCreate = new TreeMap<String, String>();
@@ -579,19 +583,27 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl
                 return this.groupAssocsToDelete;
             }
 
-            /*
-             * (non-Javadoc)
-             * @see org.alfresco.repo.security.sync.BatchProcessor.Worker#getIdentifier(java.lang.Object)
-             */
             public String getIdentifier(NodeDescription entry)
             {
                 return entry.getSourceId();
             }
 
-            /*
-             * (non-Javadoc)
-             * @see org.alfresco.repo.security.sync.BatchProcessor.Worker#process(java.lang.Object)
-             */
+            public void beforeProcess() throws Throwable
+            {
+                // Disable rules
+                ruleService.disableRules();
+                // Authentication
+                AuthenticationUtil.setRunAsUser(AuthenticationUtil.getSystemUserName());
+            }
+
+            public void afterProcess() throws Throwable
+            {
+                // Enable rules
+                ruleService.enableRules();
+                // Clear authentication
+                AuthenticationUtil.clearCurrentSecurityContext();
+            }
+
             public void process(NodeDescription group) throws Throwable
             {
                 PropertyMap groupProperties = group.getProperties();
@@ -801,17 +813,36 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl
         // Add the groups and their parent associations in depth-first order
         final Map<String, String> groupsToCreate = groupAnalyzer.getGroupsToCreate();
         BatchProcessor<Map.Entry<String, Set<String>>> groupCreator = new BatchProcessor<Map.Entry<String, Set<String>>>(
-                ChainingUserRegistrySynchronizer.logger, this.retryingTransactionHelper, this.ruleService,
-                this.applicationEventPublisher, sortedGroupAssociations.entrySet(), zone
-                        + " Group Creation and Association", this.loggingInterval, this.workerThreads, 20);
-        groupCreator.process(new Worker<Map.Entry<String, Set<String>>>()
+                zone + " Group Creation and Association",
+                this.retryingTransactionHelper,
+                sortedGroupAssociations.entrySet(),
+                this.workerThreads, 20,
+                this.applicationEventPublisher,
+                ChainingUserRegistrySynchronizer.logger,
+                this.loggingInterval);
+        groupCreator.process(new BatchProcessWorker<Map.Entry<String, Set<String>>>()
         {
             public String getIdentifier(Map.Entry<String, Set<String>> entry)
             {
                 return entry.getKey() + " " + entry.getValue();
             }
 
+            public void beforeProcess() throws Throwable
+            {
+                // Disable rules
+                ruleService.disableRules();
+                // Authentication
+                AuthenticationUtil.setRunAsUser(AuthenticationUtil.getSystemUserName());
+            }
+
+            public void afterProcess() throws Throwable
+            {
+                // Enable rules
+                ruleService.enableRules();
+                // Clear authentication
+                AuthenticationUtil.clearCurrentSecurityContext();
+            }
+
             public void process(Map.Entry<String, Set<String>> entry) throws Throwable
             {
                 Set<String> parents = entry.getValue();
@@ -896,10 +927,14 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl
             }
         }
 
         final BatchProcessor<NodeDescription> personProcessor = new BatchProcessor<NodeDescription>(
-                ChainingUserRegistrySynchronizer.logger, this.retryingTransactionHelper, this.ruleService,
-                this.applicationEventPublisher, userRegistry.getPersons(lastModified), zone
-                        + " User Creation and Association", this.loggingInterval, this.workerThreads, 10);
-        class PersonWorker implements Worker<NodeDescription>
+                zone + " User Creation and Association",
+                this.retryingTransactionHelper,
+                userRegistry.getPersons(lastModified),
+                this.workerThreads, 10,
+                this.applicationEventPublisher,
+                ChainingUserRegistrySynchronizer.logger,
+                this.loggingInterval);
+        class PersonWorker implements BatchProcessWorker<NodeDescription>
         {
             private long latestTime;
@@ -918,6 +953,22 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl
                 return entry.getSourceId();
             }
 
+            public void beforeProcess() throws Throwable
+            {
+                // Disable rules
+                ruleService.disableRules();
+                // Authentication
+                AuthenticationUtil.setRunAsUser(AuthenticationUtil.getSystemUserName());
+            }
+
+            public void afterProcess() throws Throwable
+            {
+                // Enable rules
+                ruleService.enableRules();
+                // Clear authentication
+                AuthenticationUtil.clearCurrentSecurityContext();
+            }
+
             public void process(NodeDescription person) throws Throwable
             {
                 PropertyMap personProperties = person.getProperties();
@@ -1055,10 +1106,14 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl
         if (allowDeletions)
         {
             BatchProcessor<String> authorityDeletionProcessor = new BatchProcessor<String>(
-                    ChainingUserRegistrySynchronizer.logger, this.retryingTransactionHelper, this.ruleService,
-                    this.applicationEventPublisher, deletionCandidates, zone + " Authority Deletion",
-                    this.loggingInterval, this.workerThreads, 10);
-            class AuthorityDeleter implements Worker<String>
+                    zone + " Authority Deletion",
+                    this.retryingTransactionHelper,
+                    deletionCandidates,
+                    this.workerThreads, 10,
+                    this.applicationEventPublisher,
+                    ChainingUserRegistrySynchronizer.logger,
+                    this.loggingInterval);
+            class AuthorityDeleter implements BatchProcessWorker<String>
             {
                 private int personProcessedCount;
                 private int groupProcessedCount;
@@ -1078,6 +1133,22 @@ public class ChainingUserRegistrySynchronizer extends AbstractLifecycleBean impl
                     return entry;
                 }
 
+                public void beforeProcess() throws Throwable
+                {
+                    // Disable rules
+                    ruleService.disableRules();
+                    // Authentication
+                    AuthenticationUtil.setRunAsUser(AuthenticationUtil.getSystemUserName());
+                }
+
+                public void afterProcess() throws Throwable
+                {
+                    // Enable rules
+                    ruleService.enableRules();
+                    // Clear authentication
+                    AuthenticationUtil.clearCurrentSecurityContext();
+                }
+
                 public void process(String authority) throws Throwable
                 {
                     if (AuthorityType.getAuthorityType(authority) == AuthorityType.USER)

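The same beforeProcess()/afterProcess() pair is repeated in all four workers above, so the renamed callback interface evidently gained those two per-batch hooks alongside the existing methods. The interface source is not in this diff; from the call sites it is presumably along these lines, inferred rather than copied from the actual BatchProcessor source:

    // Assumed shape of BatchProcessor.BatchProcessWorker, based on the
    // methods each worker in this change set implements.
    public interface BatchProcessWorker<T>
    {
        String getIdentifier(T entry);           // used for logging/error reporting
        void beforeProcess() throws Throwable;   // per-batch setup, e.g. disable rules, set run-as user
        void process(T entry) throws Throwable;  // handle one entry
        void afterProcess() throws Throwable;    // per-batch teardown
    }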

@@ -61,6 +61,9 @@ public class TraceableThreadFactory implements ThreadFactory
         this.namePrefix = "TraceableThread-" + factoryNumber.getAndIncrement() + "-thread-";
         this.threadNumber = new AtomicInteger(1);
+
+        this.threadDaemon = true;
+        this.threadPriority = Thread.NORM_PRIORITY;
     }
 
     /**