Merged V3.2 to HEAD
17574: Merged in DEV work for ContentStoreCleaner: ETHREEOH-2813
   17432: Build up for fix of ETHREEOH-2813: ContentStoreCleaner doesn't scale
   17546: ContentStoreCleaner fixes and further tests
   17524: Unit tests and bulk queries for orphaned content
   17506: W.I.P. for content cleaner for V3.2: ETHREEOH-2813
17575: Missed check-in (other DB create scripts look OK)
17577: Re-activated 'contentStoreCleanerTrigger'
   - Added system property: system.content.orphanCleanup.cronExpression=0 0 4 * * ?
   - Other useful properties:
        system.content.eagerOrphanCleanup=false
        system.content.orphanProtectDays=14
17578: Fixed MT test and sample contexts after recent content cleaner changes
17579: Fixed DB2 unique index creation for content URLs
17580: First pass at fix for ETHREEOH-3454: Port enterprise upgrade scripts for ContentStoreCleaner changes
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /alfresco/BRANCHES/V3.2:r17574-17575,17577-17580

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@18151 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
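The heart of the change described above is that content URLs are now stamped with an orphan time and reclaimed in bounded batches under a job lock, rather than by diffing every store against the metadata. The following is a minimal, self-contained Java sketch of that batch loop; all class and member names here are illustrative stand-ins for this description, not Alfresco APIs.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * Illustrative stand-in for the new batch-based cleanup: content URLs carry an
 * orphan timestamp, and anything orphaned for longer than 'protectDays' is
 * deleted in fixed-size batches until a batch comes back empty.
 * None of these names are Alfresco APIs.
 */
public class OrphanCleanupSketch
{
    private final Map<Long, Long> orphanTimeById = new LinkedHashMap<Long, Long>();
    private final long protectMillis;

    public OrphanCleanupSketch(int protectDays)
    {
        this.protectMillis = protectDays * 24L * 3600L * 1000L;
    }

    public void registerOrphan(long id, long orphanTime)
    {
        orphanTimeById.put(id, orphanTime);
    }

    /** One batch: collect up to batchSize sufficiently old orphans and delete them. */
    private int cleanBatch(int batchSize, long now)
    {
        long maxOrphanTime = now - protectMillis;
        List<Long> idsToDelete = new ArrayList<Long>(batchSize);
        for (Map.Entry<Long, Long> entry : orphanTimeById.entrySet())
        {
            if (entry.getValue() <= maxOrphanTime)
            {
                idsToDelete.add(entry.getKey());
                if (idsToDelete.size() == batchSize)
                {
                    break;
                }
            }
        }
        for (Long id : idsToDelete)
        {
            // The real cleaner also hands each URL to the eager cleaner for physical deletion
            orphanTimeById.remove(id);
        }
        return idsToDelete.size();
    }

    /** Mirrors the shape of the new executeInternal(): loop until a batch deletes nothing. */
    public void execute(int batchSize)
    {
        while (cleanBatch(batchSize, System.currentTimeMillis()) > 0)
        {
            // Each pass in the real cleaner refreshes the job lock and runs in its own transaction
        }
    }
}

A run with new OrphanCleanupSketch(14) and execute(1000) mirrors the shipped defaults of 14 protected days and 1000-row delete batches.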
@@ -43,6 +43,9 @@
|
||||
</property>
|
||||
<property name="newAvmNodeLinksDAO">
|
||||
<ref bean="newAvmNodeLinksDAO"/>
|
||||
</property>
|
||||
<property name="contentDataDAO">
|
||||
<ref bean="contentDataDAO"/>
|
||||
</property>
|
||||
<property name="avmStoreDAO">
|
||||
<ref bean="avmStoreDAO"/>
|
||||
|
@@ -103,6 +103,7 @@
|
||||
<ref bean="patch.db-V2.2-Person-3" />
|
||||
<ref bean="patch.db-V3.2-LockTables" />
|
||||
<ref bean="patch.db-V3.2-ContentTables" />
|
||||
<ref bean="patch.db-V3.2-ContentTables2" />
|
||||
<ref bean="patch.db-V3.2-PropertyValueTables" />
|
||||
<ref bean="patch.db-V3.2-AuditTables" />
|
||||
<ref bean="patch.db-V3.2-Child-Assoc-QName-CRC" />
|
||||
|
@@ -39,13 +39,16 @@
|
||||
|
||||
<!-- Abstract bean definition defining base definition for content store cleaner -->
|
||||
<!-- Performs the content cleanup -->
|
||||
<bean id="baseContentStoreCleaner" class="org.alfresco.repo.content.cleanup.ContentStoreCleaner" abstract="true">
|
||||
<bean id="contentStoreCleaner" class="org.alfresco.repo.content.cleanup.ContentStoreCleaner" init-method="init">
|
||||
<property name="protectDays" >
|
||||
<value>${system.content.orphanProtectDays}</value>
|
||||
</property>
|
||||
<property name="eagerContentStoreCleaner" >
|
||||
<ref bean="eagerContentStoreCleaner" />
|
||||
</property>
|
||||
<property name="jobLockService">
|
||||
<ref bean="jobLockService" />
|
||||
</property>
|
||||
<property name="contentCleanDAO">
|
||||
<ref bean="contentCleanDAO"/>
|
||||
</property>
|
||||
<property name="contentDataDAO">
|
||||
<ref bean="contentDataDAO"/>
|
||||
</property>
|
||||
@@ -64,20 +67,6 @@
|
||||
<property name="transactionService" >
|
||||
<ref bean="transactionService" />
|
||||
</property>
|
||||
<property name="protectDays" >
|
||||
<value>14</value>
|
||||
</property>
|
||||
<property name="listeners" >
|
||||
<ref bean="deletedContentBackupListeners" />
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<bean id="contentStoreCleaner" parent="baseContentStoreCleaner" init-method="init">
|
||||
<property name="stores" >
|
||||
<list>
|
||||
<ref bean="fileContentStore" />
|
||||
</list>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<bean id="eagerContentStoreCleaner" class="org.alfresco.repo.content.cleanup.EagerContentStoreCleaner" init-method="init">
|
||||
@@ -120,6 +109,9 @@
|
||||
<property name="imageMagickContentTransformer">
|
||||
<ref bean="transformer.ImageMagick" />
|
||||
</property>
|
||||
<property name="eagerContentStoreCleaner" >
|
||||
<ref bean="eagerContentStoreCleaner" />
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<bean id="contentService" parent="baseContentService">
|
||||
|
@@ -47,10 +47,6 @@
|
||||
<property name="contentStoreCleaner" ref="eagerContentStoreCleaner"/>
|
||||
</bean>
|
||||
|
||||
<bean id="contentCleanDAO" class="org.alfresco.repo.domain.contentclean.ibatis.ContentCleanDAOImpl">
|
||||
<property name="sqlMapClientTemplate" ref="contentSqlMapClientTemplate"/>
|
||||
</bean>
|
||||
|
||||
<bean id="propertyValueDAO" class="org.alfresco.repo.domain.propval.ibatis.PropertyValueDAOImpl">
|
||||
<property name="sqlMapClientTemplate" ref="propertyValueSqlMapClientTemplate"/>
|
||||
<property name="converter">
|
||||
|
@@ -31,12 +31,13 @@ DROP TABLE alf_content_url; --(optional)
|
||||
CREATE TABLE alf_content_url
|
||||
(
|
||||
id BIGINT NOT NULL AUTO_INCREMENT,
|
||||
version BIGINT NOT NULL,
|
||||
content_url VARCHAR(255) NOT NULL,
|
||||
content_url_short VARCHAR(12) NOT NULL,
|
||||
content_url_crc BIGINT NOT NULL,
|
||||
content_size BIGINT NOT NULL,
|
||||
UNIQUE INDEX idx_alf_cont_url_crc (content_url_short, content_url_crc),
|
||||
orphan_time BIGINT NULL,
|
||||
UNIQUE INDEX idx_alf_conturl_cr (content_url_short, content_url_crc),
|
||||
INDEX idx_alf_conturl_ot (orphan_time),
|
||||
PRIMARY KEY (id)
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
@@ -55,12 +56,6 @@ CREATE TABLE alf_content_data
|
||||
PRIMARY KEY (id)
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
CREATE TABLE alf_content_clean
|
||||
(
|
||||
content_url VARCHAR(255) NOT NULL,
|
||||
INDEX idx_alf_contentclean_url (content_url)
|
||||
) ENGINE=InnoDB;
|
||||
|
||||
--
|
||||
-- Record script finish
|
||||
--
|
||||
|
@@ -33,14 +33,15 @@ DROP TABLE alf_content_url; --(optional)
|
||||
CREATE TABLE alf_content_url
|
||||
(
|
||||
id INT8 NOT NULL,
|
||||
version INT8 NOT NULL,
|
||||
content_url VARCHAR(255) NOT NULL,
|
||||
content_url_short VARCHAR(12) NOT NULL,
|
||||
content_url_crc INT8 NOT NULL,
|
||||
content_size INT8 NOT NULL,
|
||||
orphan_time INT8 NULL,
|
||||
PRIMARY KEY (id)
|
||||
);
|
||||
CREATE INDEX idx_alf_cont_url_crc ON alf_content_url (content_url_short, content_url_crc);
|
||||
CREATE UNIQUE INDEX idx_alf_conturl_cr ON alf_content_url (content_url_short, content_url_crc);
|
||||
CREATE INDEX idx_alf_conturl_ot ON alf_content_url (orphan_time);
|
||||
CREATE SEQUENCE alf_content_url_seq START WITH 1 INCREMENT BY 1;
|
||||
|
||||
CREATE TABLE alf_content_data
|
||||
@@ -59,12 +60,6 @@ CREATE TABLE alf_content_data
|
||||
);
|
||||
CREATE SEQUENCE alf_content_data_seq START WITH 1 INCREMENT BY 1;
|
||||
|
||||
CREATE TABLE alf_content_clean
|
||||
(
|
||||
content_url VARCHAR(255) NOT NULL
|
||||
);
|
||||
CREATE INDEX idx_alf_contentclean_url ON alf_content_clean (content_url);
|
||||
|
||||
--
|
||||
-- Record script finish
|
||||
--
|
||||
|
@@ -0,0 +1,38 @@
--
-- Title: Update Content tables (pre 3.2 Enterprise Final)
-- Database: MySQL InnoDB
-- Since: V3.2 Schema 3009
-- Author: Derek Hulley
--
-- Please contact support@alfresco.com if you need assistance with the upgrade.
--
-- This update is required for installations that have run any of the early 3.2
-- codelines i.e. anything installed or upgraded to pre-3.2 Enterprise Final.

-- This is to (a) fix the naming convention and (b) to ensure that the index is UNIQUE
DROP INDEX idx_alf_cont_url_crc ON alf_content_url; --(optional)
DROP INDEX idx_alf_conturl_cr ON alf_content_url; --(optional)
CREATE UNIQUE INDEX idx_alf_conturl_cr ON alf_content_url (content_url_short, content_url_crc);

-- If this statement fails, it will be because the table already contains
-- the orphan column and index
ALTER TABLE alf_content_url
   DROP COLUMN version,
   ADD COLUMN orphan_time BIGINT NULL AFTER content_size,
   ADD INDEX idx_alf_conturl_ot (orphan_time)
; --(optional)

-- This table will not exist for upgrades from pre 3.2 to 3.2 Enterprise Final
DROP TABLE alf_content_clean; --(optional)

--
-- Record script finish
--
DELETE FROM alf_applied_patch WHERE id = 'patch.db-V3.2-ContentTables2';
INSERT INTO alf_applied_patch
   (id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
   VALUES
   (
      'patch.db-V3.2-ContentTables2', 'Manually executed script upgrade V3.2: Content Tables 2 (pre 3.2 Enterprise Final)',
      0, 3008, -1, 3009, null, 'UNKOWN', ${TRUE}, ${TRUE}, 'Script completed'
   );
@@ -0,0 +1,38 @@
--
-- Title: Update Content tables (pre 3.2 Enterprise Final)
-- Database: PostgreSQLDialect
-- Since: V3.2 Schema 3009
-- Author: Derek Hulley
--
-- Please contact support@alfresco.com if you need assistance with the upgrade.
--
-- This update is required for installations that have run any of the early 3.2
-- codelines i.e. anything installed or upgraded to pre-3.2 Enterprise Final.

-- This is to (a) fix the naming convention and (b) to ensure that the index is UNIQUE
DROP INDEX idx_alf_cont_url_crc; --(optional)
DROP INDEX idx_alf_conturl_cr; --(optional)
CREATE UNIQUE INDEX idx_alf_conturl_cr ON alf_content_url (content_url_short, content_url_crc);

-- If this statement fails, it will be because the table already contains the orphan column
ALTER TABLE alf_content_url
   DROP COLUMN version,
   ADD COLUMN orphan_time INT8 NULL
; --(optional)
CREATE INDEX idx_alf_conturl_ot ON alf_content_url (orphan_time)
; --(optional)

-- This table will not exist for upgrades from pre 3.2 to 3.2 Enterprise Final
DROP TABLE alf_content_clean; --(optional)

--
-- Record script finish
--
DELETE FROM alf_applied_patch WHERE id = 'patch.db-V3.2-ContentTables2';
INSERT INTO alf_applied_patch
   (id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
   VALUES
   (
      'patch.db-V3.2-ContentTables2', 'Manually executed script upgrade V3.2: Content Tables 2 (pre 3.2 Enterprise Final)',
      0, 3008, -1, 3009, null, 'UNKOWN', ${TRUE}, ${TRUE}, 'Script completed'
   );
@@ -38,12 +38,18 @@
|
||||
|
||||
<!-- override content store cleaner to use tenant routing file content store -->
|
||||
<!-- Performs the content cleanup -->
|
||||
<bean id="contentStoreCleaner" parent="baseContentStoreCleaner">
|
||||
<bean id="eagerContentStoreCleaner" class="org.alfresco.repo.content.cleanup.EagerContentStoreCleaner" init-method="init">
|
||||
<property name="eagerOrphanCleanup" >
|
||||
<value>${system.content.eagerOrphanCleanup}</value>
|
||||
</property>
|
||||
<property name="stores" >
|
||||
<list>
|
||||
<ref bean="tenantFileContentStore" />
|
||||
</list>
|
||||
</property>
|
||||
<property name="listeners" >
|
||||
<ref bean="deletedContentBackupListeners" />
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<!-- override content service to use tenant routing file content store -->
|
||||
|
@@ -34,11 +34,11 @@
|
||||
|
||||
<resultMap id="result_ContentUrl" class="ContentUrl">
|
||||
<result property="id" column="id" jdbcType="BIGINT" javaType="java.lang.Long"/>
|
||||
<result property="version" column="version" jdbcType="BIGINT" javaType="java.lang.Long"/>
|
||||
<result property="contentUrl" column="content_url" jdbcType="VARCHAR" javaType="java.lang.String"/>
|
||||
<result property="contentUrlShort" column="content_url_short" jdbcType="VARCHAR" javaType="java.lang.String"/>
|
||||
<result property="contentUrlCrc" column="content_url_crc" jdbcType="BIGINT" javaType="java.lang.Long"/>
|
||||
<result property="size" column="content_size" jdbcType="BIGINT" javaType="java.lang.Long"/>
|
||||
<result property="orphanTime" column="orphan_time" jdbcType="BIGINT" javaType="java.lang.Long"/>
|
||||
</resultMap>
|
||||
|
||||
<resultMap id="result_ContentData" class="ContentData">
|
||||
@@ -88,8 +88,8 @@
|
||||
</sql>
|
||||
|
||||
<sql id="insert_ContentUrl_AutoIncrement">
|
||||
insert into alf_content_url (version, content_url, content_url_short, content_url_crc, content_size)
|
||||
values (#version#, #contentUrl#, #contentUrlShort#, #contentUrlCrc#, #size#)
|
||||
insert into alf_content_url (content_url, content_url_short, content_url_crc, content_size, orphan_time)
|
||||
values (#contentUrl#, #contentUrlShort#, #contentUrlCrc#, #size#, #orphanTime#)
|
||||
</sql>
|
||||
|
||||
<sql id="insert_ContentData_AutoIncrement">
|
||||
@@ -175,13 +175,49 @@
|
||||
cd.id is null
|
||||
</select>
|
||||
|
||||
<!-- Get content URL entities that were orphaned before a given time -->
|
||||
<select id="select_ContentUrlByOrphanTime" parameterClass="ContentUrl" resultMap="result_ContentUrl">
|
||||
<![CDATA[
|
||||
select
|
||||
cu.*
|
||||
from
|
||||
alf_content_url cu
|
||||
where
|
||||
cu.orphan_time <= #orphanTime#
|
||||
]]>
|
||||
</select>
|
||||
|
||||
<!-- Mark a content URL entity as orphaned -->
|
||||
<update id="update_ContentUrlOrphanTime" parameterClass="ContentUrl">
|
||||
update
|
||||
alf_content_url
|
||||
set
|
||||
orphan_time = #orphanTime#
|
||||
where
|
||||
id = #id#
|
||||
</update>
|
||||
|
||||
<!-- Delete ContentUrl entity -->
|
||||
<delete id="delete_ContentUrl" parameterMap="parameter_IdMap">
|
||||
<delete id="delete_ContentUrls" parameterClass="list">
|
||||
delete
|
||||
from
|
||||
alf_content_url
|
||||
where
|
||||
id = ?
|
||||
id in
|
||||
<iterate open="(" close=")" conjunction=",">
|
||||
#[]#
|
||||
</iterate>
|
||||
</delete>
|
||||
|
||||
<!-- Delete ContentUrl entities orphaned before a given time -->
|
||||
<delete id="delete_ContentUrlByOrphanTime" parameterClass="ContentUrl">
|
||||
<![CDATA[
|
||||
delete
|
||||
from
|
||||
alf_content_url
|
||||
where
|
||||
orphan_time <= #orphanTime#
|
||||
]]>
|
||||
</delete>
|
||||
|
||||
<!-- Get the ContentData entity by ID -->
|
||||
@@ -215,6 +251,22 @@
|
||||
np.persisted_type_n = 3
|
||||
</select>
|
||||
|
||||
<update id="update_ContentData" parameterClass="ContentData">
|
||||
update
|
||||
alf_content_data
|
||||
set
|
||||
version = #version#,
|
||||
content_url_id = #contentUrlId#,
|
||||
content_mimetype_id = #mimetypeId#,
|
||||
content_encoding_id = #encodingId#,
|
||||
content_locale_id = #localeId#
|
||||
where
|
||||
id = #id#
|
||||
<isGreaterThan property="version" compareValue="1">
|
||||
and version = (#version#-1)
|
||||
</isGreaterThan>
|
||||
</update>
|
||||
|
||||
<!-- Delete ContentData entity -->
|
||||
<delete id="delete_ContentData" parameterMap="parameter_IdMap">
|
||||
delete
|
||||
@@ -224,41 +276,4 @@
|
||||
id = ?
|
||||
</delete>
|
||||
|
||||
<!-- Select all content URLs -->
|
||||
<select id="select_ContentUrls" resultClass="string">
|
||||
select
|
||||
content_url
|
||||
from
|
||||
alf_content_url
|
||||
</select>
|
||||
|
||||
<!-- Insert a Content Clean URL -->
|
||||
<insert id="insert_ContentCleanUrl" parameterClass="ContentClean" >
|
||||
insert into alf_content_clean (content_url) values (#contentUrl#)
|
||||
</insert>
|
||||
|
||||
<!-- Select all content clean URLs -->
|
||||
<select id="select_ContentCleanUrls" resultClass="string">
|
||||
select
|
||||
content_url
|
||||
from
|
||||
alf_content_clean
|
||||
</select>
|
||||
|
||||
<!-- Remove a Content Clean URL -->
|
||||
<delete id="delete_ContentCleanUrl" parameterClass="ContentClean" >
|
||||
delete
|
||||
from
|
||||
alf_content_clean
|
||||
where
|
||||
content_url = #contentUrl#
|
||||
</delete>
|
||||
|
||||
<!-- Remove all Content Clean URLs -->
|
||||
<delete id="delete_ContentCleanUrls" >
|
||||
delete
|
||||
from
|
||||
alf_content_clean
|
||||
</delete>
|
||||
|
||||
</sqlMap>
|
@@ -34,8 +34,8 @@
|
||||
select nextVal('alf_content_url_seq')
|
||||
</selectKey>
|
||||
|
||||
insert into alf_content_url (id, version, content_url, content_url_short, content_url_crc, content_size)
|
||||
values (#id#, #version#, #contentUrl#, #contentUrlShort#, #contentUrlCrc#, #size#)
|
||||
insert into alf_content_url (id, content_url, content_url_short, content_url_crc, content_size, orphan_time)
|
||||
values (#id#, #contentUrl#, #contentUrlShort#, #contentUrlCrc#, #size#, #orphanTime#)
|
||||
|
||||
</insert>
|
||||
|
||||
|
@@ -1962,10 +1962,6 @@
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<!-- -->
|
||||
<!-- Patch definitions -->
|
||||
<!-- -->
|
||||
|
||||
<bean id="patch.fixNameCrcValues-2" class="org.alfresco.repo.admin.patch.impl.FixNameCrcValuesPatch" parent="basePatch" >
|
||||
<property name="id"><value>patch.fixNameCrcValues-2</value></property>
|
||||
<property name="description"><value>patch.fixNameCrcValues.description</value></property>
|
||||
@@ -2009,4 +2005,21 @@
|
||||
</list>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<bean id="patch.db-V3.2-ContentTables2" class="org.alfresco.repo.admin.patch.impl.SchemaUpgradeScriptPatch" parent="basePatch">
|
||||
<property name="id"><value>patch.db-V3.2-ContentTables2</value></property>
|
||||
<property name="description"><value>patch.schemaUpgradeScript.description</value></property>
|
||||
<property name="fixesFromSchema"><value>0</value></property>
|
||||
<property name="fixesToSchema"><value>4001</value></property>
|
||||
<property name="targetSchema"><value>4002</value></property>
|
||||
<property name="scriptUrl">
|
||||
<!-- Share a create script -->
|
||||
<value>classpath:alfresco/dbscripts/upgrade/3.2/${db.script.dialect}/AlfrescoSchemaUpdate-3.2-ContentTables2.sql</value>
|
||||
</property>
|
||||
<property name="dependsOn" >
|
||||
<list>
|
||||
<ref bean="patch.db-V3.2-ContentTables" />
|
||||
</list>
|
||||
</property>
|
||||
</bean>
|
||||
</beans>
|
||||
|
@@ -137,6 +137,11 @@ system.enableTimestampPropagation=false
# Decide if content should be removed from the system immediately after being orphaned.
# Do not change this unless you have examined the impact it has on your backup procedures.
system.content.eagerOrphanCleanup=false
# The number of days to keep orphaned content in the content stores.
# This has no effect on the 'deleted' content stores, which are not automatically emptied.
system.content.orphanProtectDays=14
# The CRON expression to trigger the deletion of resources associated with orphaned content.
system.content.orphanCleanup.cronExpression=0 0 4 * * ?

# #################### #
# Lucene configuration #
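The cleanup schedule uses Quartz cron syntax (seconds, minutes, hours, day-of-month, month, day-of-week), so the default 0 0 4 * * ? fires at 04:00 every day. A small, hypothetical check of an overridden value for system.content.orphanCleanup.cronExpression — assuming the Quartz library that ships with the repository is on the classpath — might look like this; it is not part of the repository itself.

import java.text.ParseException;
import java.util.Date;

import org.quartz.CronExpression;

// Hypothetical helper: validates a candidate cron expression for the orphan
// cleanup trigger and prints when it would fire next.
public class CronCheck
{
    public static void main(String[] args) throws ParseException
    {
        String cron = args.length > 0 ? args[0] : "0 0 4 * * ?"; // the shipped default
        if (!CronExpression.isValidExpression(cron))
        {
            System.err.println("Not a valid Quartz cron expression: " + cron);
            return;
        }
        CronExpression expression = new CronExpression(cron);
        System.out.println("Next cleanup run after now: " + expression.getNextValidTimeAfter(new Date()));
    }
}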
@@ -102,7 +102,7 @@
|
||||
</bean>
|
||||
-->
|
||||
|
||||
<bean id="tempFileCleanerTrigger" class="org.alfresco.util.TriggerBean">
|
||||
<bean id="tempFileCleanerTrigger" class="org.alfresco.util.CronTriggerBean">
|
||||
<property name="jobDetail">
|
||||
<bean id="tempFileCleanerJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
|
||||
<property name="jobClass">
|
||||
@@ -120,17 +120,13 @@
|
||||
<property name="scheduler">
|
||||
<ref bean="schedulerFactory" />
|
||||
</property>
|
||||
<!-- start after half an hour and repeat hourly -->
|
||||
<property name="startDelayMinutes">
|
||||
<value>30</value>
|
||||
</property>
|
||||
<property name="repeatIntervalMinutes">
|
||||
<value>60</value>
|
||||
<!-- Repeat hourly on the half hour -->
|
||||
<property name="cronExpression">
|
||||
<value>0 30 * * * ?</value>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<!--
|
||||
<bean id="fileContentStoreCleanerJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
|
||||
<bean id="contentStoreCleanerJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
|
||||
<property name="jobClass">
|
||||
<value>org.alfresco.repo.content.cleanup.ContentStoreCleanupJob</value>
|
||||
</property>
|
||||
@@ -144,16 +140,15 @@
|
||||
</bean>
|
||||
<bean id="contentStoreCleanerTrigger" class="org.alfresco.util.CronTriggerBean">
|
||||
<property name="jobDetail">
|
||||
<ref bean="fileContentStoreCleanerJobDetail" />
|
||||
<ref bean="contentStoreCleanerJobDetail" />
|
||||
</property>
|
||||
<property name="scheduler">
|
||||
<ref bean="schedulerFactory" />
|
||||
</property>
|
||||
<property name="cronExpression">
|
||||
<value>0 0 4 * * ?</value>
|
||||
<value>${system.content.orphanCleanup.cronExpression}</value>
|
||||
</property>
|
||||
</bean>
|
||||
-->
|
||||
|
||||
<bean id="nodeServiceCleanupJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
|
||||
<property name="jobClass">
|
||||
|
@@ -19,4 +19,4 @@ version.build=@build-number@
|
||||
|
||||
# Schema number
|
||||
|
||||
version.schema=4001
|
||||
version.schema=4002
|
||||
|
@@ -27,6 +27,7 @@ import org.alfresco.repo.attributes.AttributeDAO;
|
||||
import org.alfresco.repo.attributes.GlobalAttributeEntryDAO;
|
||||
import org.alfresco.repo.attributes.ListEntryDAO;
|
||||
import org.alfresco.repo.attributes.MapEntryDAO;
|
||||
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
|
||||
|
||||
/**
|
||||
* This is the (shudder) global context for AVM. It is a rendezvous
|
||||
@@ -60,6 +61,7 @@ public class AVMDAOs
|
||||
|
||||
public org.alfresco.repo.domain.avm.AVMNodeDAO newAVMNodeDAO;
|
||||
public org.alfresco.repo.domain.avm.AVMNodeLinksDAO newAVMNodeLinksDAO;
|
||||
public ContentDataDAO contentDataDAO;
|
||||
|
||||
/**
|
||||
* The AVMStore DAO.
|
||||
@@ -123,6 +125,11 @@ public class AVMDAOs
|
||||
this.newAVMNodeLinksDAO = newAVMNodeLinksDAO;
|
||||
}
|
||||
|
||||
public void setContentDataDAO(ContentDataDAO contentDataDAO)
|
||||
{
|
||||
this.contentDataDAO = contentDataDAO;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param childEntryDAO the fChildEntryDAO to set
|
||||
*/
|
||||
|
@@ -1900,7 +1900,9 @@ public class AVMStoreImpl implements AVMStore
|
||||
throw new AccessDeniedException("Not allowed to write properties: " + path);
|
||||
}
|
||||
PlainFileNode file = (PlainFileNode)node;
|
||||
file.setEncoding(encoding);
|
||||
ContentData contentData = file.getContentData();
|
||||
contentData = ContentData.setEncoding(contentData, encoding);
|
||||
file.setContentData(contentData);
|
||||
|
||||
AVMDAOs.Instance().fAVMNodeDAO.update(file);
|
||||
}
|
||||
@@ -1925,7 +1927,9 @@ public class AVMStoreImpl implements AVMStore
|
||||
throw new AccessDeniedException("Not allowed to write properties: " + path);
|
||||
}
|
||||
PlainFileNode file = (PlainFileNode)node;
|
||||
file.setMimeType(mimeType);
|
||||
ContentData contentData = file.getContentData();
|
||||
contentData = ContentData.setMimetype(contentData, mimeType);
|
||||
file.setContentData(contentData);
|
||||
|
||||
AVMDAOs.Instance().fAVMNodeDAO.update(file);
|
||||
}
|
||||
|
@@ -329,16 +329,20 @@ public class OrphanReaper
|
||||
// First get rid of all child entries for the node.
|
||||
AVMDAOs.Instance().fChildEntryDAO.deleteByParent(node);
|
||||
}
|
||||
// This is not on, since content urls can be shared.
|
||||
// else if (node.getType() == AVMNodeType.PLAIN_FILE)
|
||||
// {
|
||||
// PlainFileNode file = (PlainFileNode)node;
|
||||
// String url = file.getContentData(null).getContentUrl();
|
||||
// if (url != null)
|
||||
// {
|
||||
// RawServices.Instance().getContentStore().delete(url);
|
||||
// }
|
||||
// }
|
||||
else if (node.getType() == AVMNodeType.PLAIN_FILE)
|
||||
{
|
||||
PlainFileNode file = (PlainFileNode)node;
|
||||
if (!file.isLegacyContentData())
|
||||
{
|
||||
Long contentDataId = file.getContentDataId();
|
||||
if (contentDataId != null)
|
||||
{
|
||||
// The ContentDataDAO will take care of dereferencing and cleanup
|
||||
AVMDAOs.Instance().contentDataDAO.deleteContentData(contentDataId);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Finally, delete it
|
||||
AVMDAOs.Instance().fAVMNodeDAO.delete(node);
|
||||
}
|
||||
return null;
|
||||
|
@@ -8,21 +8,26 @@ import org.alfresco.service.cmr.repository.ContentData;
|
||||
*/
|
||||
public interface PlainFileNode extends FileNode
|
||||
{
|
||||
/**
|
||||
* Set the encoding of this file.
|
||||
* @param encoding
|
||||
*/
|
||||
public void setEncoding(String encoding);
|
||||
|
||||
/**
|
||||
* Set the mime type of this file.
|
||||
* @param mimeType
|
||||
*/
|
||||
public void setMimeType(String mimeType);
|
||||
|
||||
/**
|
||||
* Special case.
|
||||
* @return
|
||||
*/
|
||||
public ContentData getContentData();
|
||||
public void setContentData(ContentData contentData);
|
||||
|
||||
public boolean isLegacyContentData();
|
||||
public Long getContentDataId();
|
||||
|
||||
/**
|
||||
* DAO accessor only. <b>DO NOT USE</b> in code.
|
||||
*/
|
||||
public String getContentURL();
|
||||
/**
|
||||
* DAO accessor only. <b>DO NOT USE</b> in code.
|
||||
*/
|
||||
public String getMimeType();
|
||||
/**
|
||||
* DAO accessor only. <b>DO NOT USE</b> in code.
|
||||
*/
|
||||
public String getEncoding();
|
||||
/**
|
||||
* DAO accessor only. <b>DO NOT USE</b> in code.
|
||||
*/
|
||||
public long getLength();
|
||||
}
|
||||
|
@@ -20,24 +20,21 @@
|
||||
* FLOSS exception. You should have received a copy of the text describing
|
||||
* the FLOSS exception, and it is also available here:
|
||||
* http://www.alfresco.com/legal/licensing" */
|
||||
|
||||
package org.alfresco.repo.avm;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.alfresco.repo.avm.util.RawServices;
|
||||
import org.alfresco.error.AlfrescoRuntimeException;
|
||||
import org.alfresco.repo.domain.DbAccessControlList;
|
||||
import org.alfresco.repo.domain.PropertyValue;
|
||||
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
|
||||
import org.alfresco.repo.security.permissions.ACLCopyMode;
|
||||
import org.alfresco.service.cmr.avm.AVMException;
|
||||
import org.alfresco.service.cmr.avm.AVMNodeDescriptor;
|
||||
import org.alfresco.service.cmr.repository.ContentData;
|
||||
import org.alfresco.service.cmr.repository.ContentReader;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
|
||||
|
||||
/**
|
||||
* A plain old file. Contains a Content object.
|
||||
* @author britt
|
||||
@@ -46,25 +43,28 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
|
||||
{
|
||||
static final long serialVersionUID = 8720376837929735294L;
|
||||
|
||||
private static final String PREFIX_CONTENT_DATA_ID = "id:";
|
||||
private static final String SUFFIX_CONTENT_DATA_NULL = "null";
|
||||
|
||||
/**
|
||||
* The Content URL.
|
||||
* The content URL <b>OR</b> the ID of the ContentData entity
|
||||
*/
|
||||
private String fContentURL;
|
||||
private String contentURL;
|
||||
|
||||
/**
|
||||
* The Mime type.
|
||||
*/
|
||||
private String fMimeType;
|
||||
private String mimeType;
|
||||
|
||||
/**
|
||||
* The character encoding.
|
||||
*/
|
||||
private String fEncoding;
|
||||
private String encoding;
|
||||
|
||||
/**
|
||||
* The length of the file.
|
||||
*/
|
||||
private long fLength;
|
||||
private long length;
|
||||
|
||||
/**
|
||||
* Default constructor.
|
||||
@@ -171,7 +171,6 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
|
||||
* @param lPath The Lookup.
|
||||
* @return A diagnostic String representation.
|
||||
*/
|
||||
// @Override
|
||||
public String toString(Lookup lPath)
|
||||
{
|
||||
return "[PF:" + getId() + "]";
|
||||
@@ -194,6 +193,7 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
|
||||
{
|
||||
path = path + "/" + name;
|
||||
}
|
||||
ContentData contentData = getContentData();
|
||||
return new AVMNodeDescriptor(path,
|
||||
name,
|
||||
AVMNodeType.PLAIN_FILE,
|
||||
@@ -211,7 +211,7 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
|
||||
false,
|
||||
-1,
|
||||
false,
|
||||
getLength(),
|
||||
contentData == null ? 0L : contentData.getSize(),
|
||||
-1);
|
||||
}
|
||||
|
||||
@@ -224,6 +224,7 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
|
||||
{
|
||||
BasicAttributes attrs = getBasicAttributes();
|
||||
String path = lPath.getRepresentedPath();
|
||||
ContentData contentData = getContentData();
|
||||
return new AVMNodeDescriptor(path,
|
||||
path.substring(path.lastIndexOf("/") + 1),
|
||||
AVMNodeType.PLAIN_FILE,
|
||||
@@ -241,7 +242,7 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
|
||||
false,
|
||||
-1,
|
||||
false,
|
||||
getFileLength(),
|
||||
contentData == null ? 0L : contentData.getSize(),
|
||||
-1);
|
||||
}
|
||||
|
||||
@@ -256,6 +257,7 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
|
||||
{
|
||||
BasicAttributes attrs = getBasicAttributes();
|
||||
String path = parentPath.endsWith("/") ? parentPath + name : parentPath + "/" + name;
|
||||
ContentData contentData = getContentData();
|
||||
return new AVMNodeDescriptor(path,
|
||||
name,
|
||||
AVMNodeType.PLAIN_FILE,
|
||||
@@ -273,94 +275,72 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
|
||||
false,
|
||||
-1,
|
||||
false,
|
||||
getFileLength(),
|
||||
contentData == null ? 0L : contentData.getSize(),
|
||||
-1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the Content URL.
|
||||
* @return The content URL.
|
||||
* DAO accessor only. <b>DO NOT USE</b> in code.
|
||||
*/
|
||||
public String getContentURL()
|
||||
{
|
||||
return fContentURL;
|
||||
return contentURL;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the Content URL.
|
||||
* @param contentURL
|
||||
* DAO accessor only. <b>DO NOT USE</b> in code.
|
||||
*/
|
||||
protected void setContentURL(String contentURL)
|
||||
public void setContentURL(String contentURL)
|
||||
{
|
||||
fContentURL = contentURL;
|
||||
this.contentURL = contentURL;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the character encoding.
|
||||
* @return The encoding.
|
||||
*/
|
||||
public String getEncoding()
|
||||
{
|
||||
return fEncoding;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the character encoding.
|
||||
* @param encoding The encoding to set.
|
||||
*/
|
||||
public void setEncoding(String encoding)
|
||||
{
|
||||
fEncoding = encoding;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the file length.
|
||||
* @return The file length or null if unknown.
|
||||
*/
|
||||
public long getLength()
|
||||
{
|
||||
return fLength;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the actual file length.
|
||||
* @return The actual file length;
|
||||
*/
|
||||
private long getFileLength()
|
||||
{
|
||||
if (getContentURL() == null)
|
||||
{
|
||||
return 0L;
|
||||
}
|
||||
ContentReader reader = RawServices.Instance().getContentStore().getReader(getContentURL());
|
||||
return reader.getSize();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the file length.
|
||||
* @param length The length of the file.
|
||||
*/
|
||||
protected void setLength(long length)
|
||||
{
|
||||
fLength = length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the mime type of the content.
|
||||
* @return The Mime Type of the content.
|
||||
* DAO accessor only. <b>DO NOT USE</b> in code.
|
||||
*/
|
||||
public String getMimeType()
|
||||
{
|
||||
return fMimeType;
|
||||
return mimeType;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the Mime Type of the content.
|
||||
* @param mimeType The Mime Type to set.
|
||||
* DAO accessor only. <b>DO NOT USE</b> in code.
|
||||
*/
|
||||
public void setMimeType(String mimeType)
|
||||
{
|
||||
fMimeType = mimeType;
|
||||
this.mimeType = mimeType;
|
||||
}
|
||||
|
||||
/**
|
||||
* DAO accessor only. <b>DO NOT USE</b> in code.
|
||||
*/
|
||||
public String getEncoding()
|
||||
{
|
||||
return encoding;
|
||||
}
|
||||
|
||||
/**
|
||||
* DAO accessor only. <b>DO NOT USE</b> in code.
|
||||
*/
|
||||
public void setEncoding(String encoding)
|
||||
{
|
||||
this.encoding = encoding;
|
||||
}
|
||||
|
||||
/**
|
||||
* DAO accessor only. <b>DO NOT USE</b> in code.
|
||||
*/
|
||||
public long getLength()
|
||||
{
|
||||
return length;
|
||||
}
|
||||
|
||||
/**
|
||||
* DAO accessor only. <b>DO NOT USE</b> in code.
|
||||
*/
|
||||
public void setLength(long length)
|
||||
{
|
||||
this.length = length;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -369,14 +349,51 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
|
||||
*/
|
||||
public void setContentData(ContentData contentData)
|
||||
{
|
||||
setContentURL(contentData.getContentUrl());
|
||||
setMimeType(contentData.getMimetype());
|
||||
if (getMimeType() == null)
|
||||
// Remove any legacy-stored attributes to avoid confusion
|
||||
if (isLegacyContentData())
|
||||
{
|
||||
throw new AVMException("Null mime type.");
|
||||
// Wipe over the old values
|
||||
contentURL = PREFIX_CONTENT_DATA_ID + SUFFIX_CONTENT_DATA_NULL;
|
||||
encoding = null;
|
||||
length = 0L;
|
||||
mimeType = null;
|
||||
}
|
||||
|
||||
Long oldContentDataId = getContentDataId();
|
||||
Long newContentDataId = null;
|
||||
if (oldContentDataId == null)
|
||||
{
|
||||
if (contentData != null)
|
||||
{
|
||||
// There was no reference before, so just create a new one
|
||||
newContentDataId = AVMDAOs.Instance().contentDataDAO.createContentData(contentData).getFirst();
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if (contentData != null)
|
||||
{
|
||||
// Update it. The ID will remain the same.
|
||||
AVMDAOs.Instance().contentDataDAO.updateContentData(oldContentDataId, contentData);
|
||||
newContentDataId = oldContentDataId;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Delete the old instance
|
||||
AVMDAOs.Instance().contentDataDAO.deleteContentData(oldContentDataId);
|
||||
newContentDataId = null;
|
||||
}
|
||||
}
|
||||
|
||||
// Set the pointer to the ContentData instance
|
||||
if (newContentDataId == null)
|
||||
{
|
||||
contentURL = PREFIX_CONTENT_DATA_ID + SUFFIX_CONTENT_DATA_NULL;
|
||||
}
|
||||
else
|
||||
{
|
||||
contentURL = PREFIX_CONTENT_DATA_ID + newContentDataId;
|
||||
}
|
||||
setEncoding(contentData.getEncoding());
|
||||
setLength(contentData.getSize());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -389,12 +406,66 @@ public class PlainFileNodeImpl extends FileNodeImpl implements PlainFileNode
|
||||
return getContentData();
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.alfresco.repo.avm.PlainFileNode#getContentData()
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
* <p/>
|
||||
* If the content URL contains the special prefix, <b>{@link PREFIX_CONTENT_DATA_ID}</b>,
|
||||
* then the data is pulled directly from the {@link ContentDataDAO}.
|
||||
*/
|
||||
public ContentData getContentData()
|
||||
{
|
||||
return new ContentData(getContentURL(), getMimeType(), getLength(), getEncoding());
|
||||
if (contentURL != null && contentURL.startsWith(PREFIX_CONTENT_DATA_ID))
|
||||
{
|
||||
Long contentDataId = getContentDataId();
|
||||
try
|
||||
{
|
||||
return AVMDAOs.Instance().contentDataDAO.getContentData(contentDataId).getSecond();
|
||||
}
|
||||
catch (Throwable e)
|
||||
{
|
||||
throw new AlfrescoRuntimeException(
|
||||
"AVM File node " + getId() + " has invalid ContentData id reference " + contentDataId,
|
||||
e);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// This deals with legacy data
|
||||
return new ContentData(contentURL, mimeType, length, encoding);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks the content URL: if it does <b>not</b> contain the {@link #PREFIX_CONTENT_DATA_ID prefix}
* indicating a new ContentData storage ID, returns <tt>true</tt>.
|
||||
*/
|
||||
public boolean isLegacyContentData()
|
||||
{
|
||||
return (contentURL == null || !contentURL.startsWith(PREFIX_CONTENT_DATA_ID));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the ID of the ContentData as given by the string in the ContentURL of
|
||||
* form <b>ID:12345</b>
|
||||
*/
|
||||
public Long getContentDataId()
|
||||
{
|
||||
String idStr = contentURL.substring(3);
|
||||
if (idStr.equals(SUFFIX_CONTENT_DATA_NULL))
|
||||
{
|
||||
// Nothing has been stored against this file
|
||||
return null;
|
||||
}
|
||||
try
|
||||
{
|
||||
return Long.parseLong(idStr);
|
||||
}
|
||||
catch (Throwable e)
|
||||
{
|
||||
throw new AlfrescoRuntimeException(
|
||||
"AVM File node " + getId() + " has malformed ContentData id reference " + idStr,
|
||||
e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -20,7 +20,6 @@
|
||||
* FLOSS exception. You should have received a copy of the text describing
|
||||
* the FLOSS exception, and it is also available here:
|
||||
* http://www.alfresco.com/legal/licensing" */
|
||||
|
||||
package org.alfresco.repo.avm.ibatis;
|
||||
|
||||
import java.util.ArrayList;
|
||||
@@ -56,7 +55,6 @@ import org.alfresco.repo.domain.PropertyValue;
|
||||
import org.alfresco.repo.domain.avm.AVMNodeEntity;
|
||||
import org.alfresco.repo.domain.avm.AVMVersionRootEntity;
|
||||
import org.alfresco.repo.domain.hibernate.DbAccessControlListImpl;
|
||||
import org.alfresco.service.cmr.repository.ContentData;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
|
||||
|
||||
@@ -68,7 +66,6 @@ import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
|
||||
*/
|
||||
class AVMNodeDAOIbatis extends HibernateDaoSupport implements AVMNodeDAO
|
||||
{
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.alfresco.repo.avm.AVMNodeDAO#save(org.alfresco.repo.avm.AVMNode)
|
||||
*/
|
||||
@@ -380,10 +377,10 @@ class AVMNodeDAOIbatis extends HibernateDaoSupport implements AVMNodeDAO
|
||||
if (node instanceof PlainFileNode)
|
||||
{
|
||||
PlainFileNode pfNode = (PlainFileNode)node;
|
||||
nodeEntity.setEncoding(pfNode.getContentData().getEncoding());
|
||||
nodeEntity.setMimetype(pfNode.getContentData().getMimetype());
|
||||
nodeEntity.setContentUrl(pfNode.getContentData().getContentUrl());
|
||||
nodeEntity.setLength(pfNode.getContentData().getSize());
|
||||
nodeEntity.setEncoding(pfNode.getEncoding());
|
||||
nodeEntity.setLength(pfNode.getLength());
|
||||
nodeEntity.setMimetype(pfNode.getMimeType());
|
||||
nodeEntity.setContentUrl(pfNode.getContentURL());
|
||||
}
|
||||
else if (node instanceof LayeredFileNode)
|
||||
{
|
||||
@@ -429,9 +426,11 @@ class AVMNodeDAOIbatis extends HibernateDaoSupport implements AVMNodeDAO
|
||||
if (nodeEntity.getType() == AVMNodeType.PLAIN_FILE)
|
||||
{
|
||||
node = new PlainFileNodeImpl();
|
||||
|
||||
ContentData cd = new ContentData(nodeEntity.getContentUrl(), nodeEntity.getMimetype(), nodeEntity.getLength(), nodeEntity.getEncoding());
|
||||
((PlainFileNodeImpl)node).setContentData(cd);
|
||||
PlainFileNodeImpl pfNode = (PlainFileNodeImpl) node;
|
||||
pfNode.setMimeType(nodeEntity.getMimetype());
|
||||
pfNode.setEncoding(nodeEntity.getEncoding());
|
||||
pfNode.setLength(nodeEntity.getLength());
|
||||
pfNode.setContentURL(nodeEntity.getContentUrl());
|
||||
}
|
||||
else if (nodeEntity.getType() == AVMNodeType.PLAIN_DIRECTORY)
|
||||
{
|
||||
|
@@ -37,6 +37,7 @@ import org.alfresco.repo.avm.AVMNodeConverter;
|
||||
import org.alfresco.repo.content.ContentServicePolicies.OnContentPropertyUpdatePolicy;
|
||||
import org.alfresco.repo.content.ContentServicePolicies.OnContentReadPolicy;
|
||||
import org.alfresco.repo.content.ContentServicePolicies.OnContentUpdatePolicy;
|
||||
import org.alfresco.repo.content.cleanup.EagerContentStoreCleaner;
|
||||
import org.alfresco.repo.content.filestore.FileContentStore;
|
||||
import org.alfresco.repo.content.transform.ContentTransformer;
|
||||
import org.alfresco.repo.content.transform.ContentTransformerRegistry;
|
||||
@@ -63,7 +64,6 @@ import org.alfresco.service.cmr.repository.TransformationOptions;
|
||||
import org.alfresco.service.cmr.usage.ContentQuotaException;
|
||||
import org.alfresco.service.namespace.NamespaceService;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.service.transaction.TransactionService;
|
||||
import org.alfresco.util.EqualsHelper;
|
||||
import org.springframework.extensions.surf.util.Pair;
|
||||
import org.alfresco.util.TempFileProvider;
|
||||
@@ -71,7 +71,6 @@ import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.context.ApplicationEventPublisher;
|
||||
|
||||
|
||||
/**
|
||||
* Service implementation acting as a level of indirection between the client
|
||||
* and the underlying content store.
|
||||
@@ -95,6 +94,8 @@ public class ContentServiceImpl implements ContentService
|
||||
|
||||
/** a registry of all available content transformers */
|
||||
private ContentTransformerRegistry transformerRegistry;
|
||||
/** The cleaner that will ensure that rollbacks clean up after themselves */
|
||||
private EagerContentStoreCleaner eagerContentStoreCleaner;
|
||||
/** the store to use. Any multi-store support is provided by the store implementation. */
|
||||
private ContentStore store;
|
||||
/** the store for all temporarily created content */
|
||||
@@ -113,14 +114,6 @@ public class ContentServiceImpl implements ContentService
|
||||
ClassPolicyDelegate<ContentServicePolicies.OnContentPropertyUpdatePolicy> onContentPropertyUpdateDelegate;
|
||||
ClassPolicyDelegate<ContentServicePolicies.OnContentReadPolicy> onContentReadDelegate;
|
||||
|
||||
/**
|
||||
* @deprecated Replaced by {@link #setRetryingTransactionHelper(RetryingTransactionHelper)}
|
||||
*/
|
||||
public void setTransactionService(TransactionService transactionService)
|
||||
{
|
||||
logger.warn("Property 'transactionService' has been replaced by 'retryingTransactionHelper'.");
|
||||
}
|
||||
|
||||
public void setRetryingTransactionHelper(RetryingTransactionHelper helper)
|
||||
{
|
||||
this.transactionHelper = helper;
|
||||
@@ -141,6 +134,11 @@ public class ContentServiceImpl implements ContentService
|
||||
this.transformerRegistry = transformerRegistry;
|
||||
}
|
||||
|
||||
public void setEagerContentStoreCleaner(EagerContentStoreCleaner eagerContentStoreCleaner)
|
||||
{
|
||||
this.eagerContentStoreCleaner = eagerContentStoreCleaner;
|
||||
}
|
||||
|
||||
public void setStore(ContentStore store)
|
||||
{
|
||||
this.store = store;
|
||||
@@ -428,6 +426,8 @@ public class ContentServiceImpl implements ContentService
|
||||
ContentContext ctx = new ContentContext(null, null);
|
||||
// for this case, we just give back a valid URL into the content store
|
||||
ContentWriter writer = store.getWriter(ctx);
|
||||
// Register the new URL for rollback cleanup
|
||||
eagerContentStoreCleaner.registerNewContentUrl(writer.getContentUrl());
|
||||
// done
|
||||
return writer;
|
||||
}
|
||||
@@ -439,6 +439,8 @@ public class ContentServiceImpl implements ContentService
|
||||
// can be wherever the store decides.
|
||||
ContentContext ctx = new NodeContentContext(existingContentReader, null, nodeRef, propertyQName);
|
||||
ContentWriter writer = store.getWriter(ctx);
|
||||
// Register the new URL for rollback cleanup
|
||||
eagerContentStoreCleaner.registerNewContentUrl(writer.getContentUrl());
|
||||
|
||||
// Special case for AVM repository.
|
||||
Serializable contentValue = null;
|
||||
|
@@ -24,39 +24,32 @@
|
||||
*/
|
||||
package org.alfresco.repo.content.cleanup;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import org.alfresco.error.AlfrescoRuntimeException;
|
||||
import org.alfresco.repo.domain.avm.AVMNodeDAO;
|
||||
import org.alfresco.repo.content.ContentStore;
|
||||
import org.alfresco.repo.domain.contentclean.ContentCleanDAO;
|
||||
import org.alfresco.repo.domain.contentclean.ContentCleanDAO.ContentUrlBatchProcessor;
|
||||
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
|
||||
import org.alfresco.repo.domain.contentdata.ContentDataDAO.ContentUrlHandler;
|
||||
import org.alfresco.repo.lock.JobLockService;
|
||||
import org.alfresco.repo.node.db.NodeDaoService;
|
||||
import org.alfresco.repo.node.db.NodeDaoService.NodePropertyHandler;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
|
||||
import org.alfresco.service.cmr.dictionary.DictionaryService;
|
||||
import org.alfresco.service.cmr.repository.ContentData;
|
||||
import org.alfresco.service.cmr.repository.ContentService;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
|
||||
import org.alfresco.service.namespace.NamespaceService;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.service.transaction.TransactionService;
|
||||
import org.springframework.extensions.surf.util.PropertyCheck;
|
||||
import org.alfresco.util.VmShutdownListener;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.extensions.surf.util.Pair;
|
||||
import org.springframework.extensions.surf.util.PropertyCheck;
|
||||
|
||||
/**
|
||||
* This component is responsible for cleaning up orphaned content.
|
||||
* <p/>
|
||||
* <b>TODO: Fix up new comments</b>
|
||||
*
|
||||
* Clean-up happens at two levels.<p/>
|
||||
* <u><b>Eager cleanup:</b></u> (since 3.2)<p/>
|
||||
* If {@link #setEagerOrphanCleanup(boolean) eager cleanup} is activated, then this
|
||||
@@ -88,30 +81,38 @@ import org.apache.commons.logging.LogFactory;
|
||||
*/
|
||||
public class ContentStoreCleaner
|
||||
{
|
||||
private static final QName LOCK_QNAME = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentStoreCleaner");
|
||||
private static final long LOCK_TTL = 30000L;
|
||||
private static ThreadLocal<Pair<Long, String>> lockThreadLocal = new ThreadLocal<Pair<Long, String>>();
|
||||
|
||||
private static Log logger = LogFactory.getLog(ContentStoreCleaner.class);
|
||||
|
||||
/** kept to notify the thread that it should quit */
|
||||
private static VmShutdownListener vmShutdownListener = new VmShutdownListener("ContentStoreCleaner");
|
||||
|
||||
private EagerContentStoreCleaner eagerContentStoreCleaner;
|
||||
private JobLockService jobLockService;
|
||||
private ContentCleanDAO contentCleanDAO;
|
||||
private ContentDataDAO contentDataDAO;
|
||||
private DictionaryService dictionaryService;
|
||||
private ContentService contentService;
|
||||
private NodeDaoService nodeDaoService;
|
||||
private AVMNodeDAO avmNodeDAO;
|
||||
private TransactionService transactionService;
|
||||
private List<ContentStore> stores;
|
||||
private List<ContentStoreCleanerListener> listeners;
|
||||
private int protectDays;
|
||||
|
||||
public ContentStoreCleaner()
|
||||
{
|
||||
this.stores = new ArrayList<ContentStore>(0);
|
||||
this.listeners = new ArrayList<ContentStoreCleanerListener>(0);
|
||||
this.protectDays = 7;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the component that will do the physical deleting
|
||||
*/
|
||||
public void setEagerContentStoreCleaner(EagerContentStoreCleaner eagerContentStoreCleaner)
|
||||
{
|
||||
this.eagerContentStoreCleaner = eagerContentStoreCleaner;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param jobLockService service used to ensure that cleanup runs are not duplicated
|
||||
*/
|
||||
@@ -120,14 +121,6 @@ public class ContentStoreCleaner
|
||||
this.jobLockService = jobLockService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param contentCleanDAO DAO used for manipulating content URLs
|
||||
*/
|
||||
public void setContentCleanDAO(ContentCleanDAO contentCleanDAO)
|
||||
{
|
||||
this.contentCleanDAO = contentCleanDAO;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param contentDataDAO DAO used for enumerating DM content URLs
|
||||
*/
|
||||
@@ -176,22 +169,6 @@ public class ContentStoreCleaner
|
||||
this.transactionService = transactionService;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param stores the content stores to clean
|
||||
*/
|
||||
public void setStores(List<ContentStore> stores)
|
||||
{
|
||||
this.stores = stores;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param listeners the listeners that can react to deletions
|
||||
*/
|
||||
public void setListeners(List<ContentStoreCleanerListener> listeners)
|
||||
{
|
||||
this.listeners = listeners;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the minimum number of days old that orphaned content must be
|
||||
* before deletion is possible. The default is 7 days.
|
||||
@@ -217,14 +194,13 @@ public class ContentStoreCleaner
|
||||
private void checkProperties()
|
||||
{
|
||||
PropertyCheck.mandatory(this, "jobLockService", jobLockService);
|
||||
PropertyCheck.mandatory(this, "contentCleanerDAO", contentCleanDAO);
|
||||
PropertyCheck.mandatory(this, "contentDataDAO", contentDataDAO);
|
||||
PropertyCheck.mandatory(this, "dictionaryService", dictionaryService);
|
||||
PropertyCheck.mandatory(this, "contentService", contentService);
|
||||
PropertyCheck.mandatory(this, "nodeDaoService", nodeDaoService);
|
||||
PropertyCheck.mandatory(this, "avmNodeDAO", avmNodeDAO);
|
||||
PropertyCheck.mandatory(this, "transactionService", transactionService);
|
||||
PropertyCheck.mandatory(this, "listeners", listeners);
|
||||
PropertyCheck.mandatory(this, "eagerContentStoreCleaner", eagerContentStoreCleaner);
|
||||
|
||||
// check the protect days
|
||||
if (protectDays < 0)
|
||||
@@ -235,156 +211,70 @@ public class ContentStoreCleaner
|
||||
{
|
||||
logger.warn(
|
||||
"Property 'protectDays' is set to 0. " +
|
||||
"It is possible that in-transaction content will be deleted.");
|
||||
"Please ensure that your backup strategy is appropriate for this setting.");
|
||||
}
|
||||
}
|
||||
|
||||
private void removeContentUrlsPresentInMetadata(final ContentUrlBatchProcessor urlRemover)
|
||||
/**
|
||||
* Lazily update the job lock
|
||||
*/
|
||||
private void refreshLock()
|
||||
{
|
||||
RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
|
||||
|
||||
// Remove all the Content URLs for the ADM repository
|
||||
// Handlers that record the URLs
|
||||
final ContentDataDAO.ContentUrlHandler contentUrlHandler = new ContentDataDAO.ContentUrlHandler()
|
||||
Pair<Long, String> lockPair = lockThreadLocal.get();
|
||||
if (lockPair == null)
|
||||
{
|
||||
long lastLock = 0L;
|
||||
public void handle(String contentUrl)
|
||||
{
|
||||
if (vmShutdownListener.isVmShuttingDown())
|
||||
{
|
||||
throw new VmShutdownException();
|
||||
String lockToken = jobLockService.getLock(LOCK_QNAME, LOCK_TTL);
|
||||
Long lastLock = new Long(System.currentTimeMillis());
|
||||
// We have not locked before
|
||||
lockPair = new Pair<Long, String>(lastLock, lockToken);
|
||||
lockThreadLocal.set(lockPair);
|
||||
}
|
||||
urlRemover.processContentUrl(contentUrl);
|
||||
// Check lock
|
||||
else
|
||||
{
|
||||
long now = System.currentTimeMillis();
|
||||
long lastLock = lockPair.getFirst().longValue();
|
||||
String lockToken = lockPair.getSecond();
|
||||
// Only refresh the lock if we are past a threshold
|
||||
if (now - lastLock > (long)(LOCK_TTL/2L))
|
||||
{
|
||||
jobLockService.getTransactionalLock(LOCK_QNAME, LOCK_TTL);
|
||||
lastLock = now;
|
||||
jobLockService.refreshLock(lockToken, LOCK_QNAME, LOCK_TTL);
|
||||
lastLock = System.currentTimeMillis();
|
||||
lockPair = new Pair<Long, String>(lastLock, lockToken);
|
||||
}
|
||||
}
|
||||
};
|
||||
final NodePropertyHandler nodePropertyHandler = new NodePropertyHandler()
|
||||
{
|
||||
long lastLock = 0L;
|
||||
public void handle(NodeRef nodeRef, QName nodeTypeQName, QName propertyQName, Serializable value)
|
||||
{
|
||||
if (vmShutdownListener.isVmShuttingDown())
|
||||
{
|
||||
throw new VmShutdownException();
|
||||
}
|
||||
// Convert the values to ContentData and extract the URLs
|
||||
ContentData contentData = DefaultTypeConverter.INSTANCE.convert(ContentData.class, value);
|
||||
String contentUrl = contentData.getContentUrl();
|
||||
if (contentUrl != null)
|
||||
{
|
||||
urlRemover.processContentUrl(contentUrl);
|
||||
}
|
||||
// Check lock
|
||||
long now = System.currentTimeMillis();
|
||||
if (now - lastLock > (long)(LOCK_TTL/2L))
|
||||
{
|
||||
jobLockService.getTransactionalLock(LOCK_QNAME, LOCK_TTL);
|
||||
lastLock = now;
|
||||
}
|
||||
}
|
||||
};
|
||||
final DataTypeDefinition contentDataType = dictionaryService.getDataType(DataTypeDefinition.CONTENT);
|
||||
// execute in READ-WRITE txn
|
||||
RetryingTransactionCallback<Void> getUrlsCallback = new RetryingTransactionCallback<Void>()
|
||||
{
|
||||
public Void execute() throws Exception
|
||||
{
|
||||
contentDataDAO.getAllContentUrls(contentUrlHandler);
|
||||
nodeDaoService.getPropertyValuesByActualType(contentDataType, nodePropertyHandler);
|
||||
return null;
|
||||
};
|
||||
};
|
||||
txnHelper.doInTransaction(getUrlsCallback);
|
||||
|
||||
// Do the same for the AVM repository.
|
||||
final AVMNodeDAO.ContentUrlHandler handler = new AVMNodeDAO.ContentUrlHandler()
|
||||
{
|
||||
long lastLock = 0L;
|
||||
public void handle(String contentUrl)
|
||||
{
|
||||
if (vmShutdownListener.isVmShuttingDown())
|
||||
{
|
||||
throw new VmShutdownException();
|
||||
}
|
||||
urlRemover.processContentUrl(contentUrl);
|
||||
// Check lock
|
||||
long now = System.currentTimeMillis();
|
||||
if (now - lastLock > (long)(LOCK_TTL/2L))
|
||||
{
|
||||
jobLockService.getTransactionalLock(LOCK_QNAME, LOCK_TTL);
|
||||
lastLock = now;
|
||||
}
|
||||
}
|
||||
};
|
||||
// execute in READ-WRITE txn
|
||||
RetryingTransactionCallback<Void> getAVMUrlsCallback = new RetryingTransactionCallback<Void>()
|
||||
{
|
||||
public Void execute() throws Exception
|
||||
{
|
||||
avmNodeDAO.getContentUrls(handler);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
txnHelper.doInTransaction(getAVMUrlsCallback);
|
||||
}
|
||||
|
||||
private void addContentUrlsPresentInStores(final ContentUrlBatchProcessor urlInserter)
|
||||
{
|
||||
org.alfresco.repo.content.ContentStore.ContentUrlHandler handler = new org.alfresco.repo.content.ContentStore.ContentUrlHandler()
|
||||
{
|
||||
long lastLock = 0L;
|
||||
public void handle(String contentUrl)
|
||||
{
|
||||
if (vmShutdownListener.isVmShuttingDown())
|
||||
{
|
||||
throw new VmShutdownException();
|
||||
}
|
||||
urlInserter.processContentUrl(contentUrl);
|
||||
// Check lock
|
||||
long now = System.currentTimeMillis();
|
||||
if (now - lastLock > (long)(LOCK_TTL/2L))
|
||||
{
|
||||
jobLockService.getTransactionalLock(LOCK_QNAME, LOCK_TTL);
|
||||
lastLock = now;
|
||||
}
|
||||
}
|
||||
};
|
||||
Date checkAllBeforeDate = new Date(System.currentTimeMillis() - (long) protectDays * 3600L * 1000L * 24L);
|
||||
for (ContentStore store : stores)
|
||||
{
|
||||
store.getUrls(null, checkAllBeforeDate, handler);
|
||||
}
|
||||
}
|
||||
|
||||
private static final QName LOCK_QNAME = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "ContentStoreCleaner");
|
||||
private static final long LOCK_TTL = 30000L;
/**
* Release the lock after the job completes
*/
private void releaseLock()
{
Pair<Long, String> lockPair = lockThreadLocal.get();
if (lockPair != null)
{
// We can't release without a token
try
{
jobLockService.releaseLock(lockPair.getSecond(), LOCK_QNAME);
}
finally
{
// Reset
lockThreadLocal.set(null);
}
}
// else: We can't release without a token
}

public void execute()
{
checkProperties();

RetryingTransactionCallback<Void> executeCallback = new RetryingTransactionCallback<Void>()
{
public Void execute() throws Exception
{
logger.debug("Content store cleanup started.");
// Get the lock without any waiting
// The lock will be refreshed, but the first lock starts the process
jobLockService.getTransactionalLock(LOCK_QNAME, LOCK_TTL);
executeInternal();
return null;
}
};
try
{
RetryingTransactionHelper txnHelper = transactionService.getRetryingTransactionHelper();
txnHelper.setMaxRetries(0);
txnHelper.doInTransaction(executeCallback);
logger.debug("Content store cleanup started.");
refreshLock();
executeInternal();
// Done
if (logger.isDebugEnabled())
{
@@ -399,103 +289,67 @@ public class ContentStoreCleaner
logger.debug("   Content store cleanup aborted.");
}
}
finally
{
releaseLock();
}
}

public void executeInternal()
private void executeInternal()
{
final ContentUrlBatchProcessor storeUrlDeleteHandler = new ContentUrlBatchProcessor()
// execute in READ-WRITE txn
RetryingTransactionCallback<Integer> getAndDeleteWork = new RetryingTransactionCallback<Integer>()
{
long lastLock = 0L;
public void start()
public Integer execute() throws Exception
{
}
public void processContentUrl(String contentUrl)
{
for (ContentStore store : stores)
return cleanBatch(1000);
};
};
while (true)
{
refreshLock();
Integer deleted = transactionService.getRetryingTransactionHelper().doInTransaction(getAndDeleteWork);
if (vmShutdownListener.isVmShuttingDown())
{
throw new VmShutdownException();
}
if (deleted.intValue() == 0)
{
// There is no more to process
break;
}
// There is still more to delete, so continue
if (logger.isDebugEnabled())
{
if (store.isWriteSupported())
{
logger.debug("   Deleting content URL: " + contentUrl);
logger.debug("   Removed " + deleted.intValue() + " orphaned content URLs.");
}
}
for (ContentStoreCleanerListener listener : listeners)
{
listener.beforeDelete(store, contentUrl);
}
// Delete
store.delete(contentUrl);
// Check lock
long now = System.currentTimeMillis();
if (now - lastLock > (long)(LOCK_TTL/2L))
{
jobLockService.getTransactionalLock(LOCK_QNAME, LOCK_TTL);
lastLock = now;
}
}
}
public void end()
{
}
};
// execute in READ-WRITE txn
RetryingTransactionCallback<Void> executeCallback = new RetryingTransactionCallback<Void>()
{
public Void execute() throws Exception
{
// Clean up
contentCleanDAO.cleanUp();
// Push all store URLs in
ContentUrlBatchProcessor urlInserter = contentCleanDAO.getUrlInserter();
try
{
urlInserter.start();
addContentUrlsPresentInStores(urlInserter);
}
finally
{
urlInserter.end();
}
// Delete all content URLs
ContentUrlBatchProcessor urlRemover = contentCleanDAO.getUrlRemover();
try
{
urlRemover.start();
removeContentUrlsPresentInMetadata(urlRemover);
}
finally
{
urlRemover.end();
}
// Any remaining URLs are URls present in the stores but not in the metadata
contentCleanDAO.listAllUrls(storeUrlDeleteHandler);
// Clean up
contentCleanDAO.cleanUp();
return null;
};
};
try
{
transactionService.getRetryingTransactionHelper().doInTransaction(executeCallback);
// Done
if (logger.isDebugEnabled())
}

private int cleanBatch(final int batchSize)
{
logger.debug("   Content store cleanup completed.");
}
}
catch (VmShutdownException e)
final List<Long> idsToDelete = new ArrayList<Long>(batchSize);
ContentUrlHandler contentUrlHandler = new ContentUrlHandler()
{
// Aborted
if (logger.isDebugEnabled())
public void handle(Long id, String contentUrl, Long orphanTime)
{
logger.debug("   Content store cleanup aborted.");
// Pass the content URL to the eager cleaner for post-commit handling
eagerContentStoreCleaner.registerOrphanedContentUrl(contentUrl, true);
idsToDelete.add(id);
}
};
final long maxOrphanTime = System.currentTimeMillis() - ((long) protectDays * 24L * 3600L * 1000L);
contentDataDAO.getContentUrlsOrphaned(contentUrlHandler, maxOrphanTime, batchSize);
// All the URLs have been passed off for eventual deletion.
// Just delete the DB data
int size = idsToDelete.size();
if (size > 0)
{
contentDataDAO.deleteContentUrls(idsToDelete);
}
// Done
return size;
}
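The method above is the whole sweep in miniature: mark-then-delete in fixed-size batches, each batch a bounded unit of work, with the shared job lock refreshed between batches. A self-contained sketch of that pattern, using hypothetical interfaces in place of the Alfresco DAO and lock service (the names OrphanDao, LockRefresher and BatchSweep and their methods are invented for illustration):

    import java.util.List;

    // Hypothetical stand-ins for the content-URL DAO and the job lock service.
    interface OrphanDao
    {
        List<Long> claimOrphans(long cutOffMs, int batchSize);   // rows orphaned before cutOffMs, already queued for store deletion
        void deleteRows(List<Long> ids);
    }
    interface LockRefresher
    {
        void refresh();                                          // re-take the shared lock so other nodes stay out
    }

    class BatchSweep
    {
        static void sweep(OrphanDao dao, LockRefresher lock, long cutOffMs, int batchSize)
        {
            while (true)
            {
                lock.refresh();                                  // keep the job lock alive between batches
                List<Long> ids = dao.claimOrphans(cutOffMs, batchSize);
                if (ids.isEmpty())
                {
                    break;                                       // nothing left to clean
                }
                dao.deleteRows(ids);                             // each batch is a small, bounded unit of work
            }
        }
    }

In the diff itself each batch additionally runs inside a RetryingTransactionCallback, which is what keeps the individual database transactions short.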

/**
@@ -30,6 +30,7 @@ import java.lang.reflect.Method;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.repo.content.AbstractContentStore;
|
||||
import org.alfresco.repo.content.ContentStore;
|
||||
import org.alfresco.repo.content.EmptyContentReader;
|
||||
@@ -48,11 +49,13 @@ import org.alfresco.service.cmr.repository.ContentIOException;
|
||||
import org.alfresco.service.cmr.repository.ContentReader;
|
||||
import org.alfresco.service.cmr.repository.ContentWriter;
|
||||
import org.alfresco.service.cmr.repository.NodeRef;
|
||||
import org.alfresco.service.cmr.repository.StoreRef;
|
||||
import org.alfresco.service.namespace.NamespaceService;
|
||||
import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.service.transaction.TransactionService;
|
||||
import org.alfresco.tools.Repository;
|
||||
import org.alfresco.tools.ToolException;
|
||||
import org.alfresco.util.GUID;
|
||||
import org.alfresco.util.TempFileProvider;
|
||||
import org.alfresco.util.VmShutdownListener;
|
||||
import org.apache.commons.lang.mutable.MutableInt;
|
||||
@@ -105,9 +108,9 @@ public class ContentStoreCleanerScalabilityRunner extends Repository
|
||||
nodeDaoService = (NodeDaoService) ctx.getBean("nodeDaoService");
|
||||
dictionaryService = (DictionaryService) ctx.getBean("dictionaryService");
|
||||
|
||||
int orphanCount = 100000;
|
||||
int orphanCount = 1000;
|
||||
|
||||
contentStore = new NullContentStore(orphanCount);
|
||||
contentStore = new NullContentStore(10000);
|
||||
|
||||
loadData(orphanCount);
|
||||
|
||||
@@ -220,11 +223,12 @@ public class ContentStoreCleanerScalabilityRunner extends Repository
|
||||
}
|
||||
}
|
||||
};
|
||||
// We use the default cleaner, but fix it up a bit
|
||||
// We use the default cleaners, but fix them up a bit
|
||||
EagerContentStoreCleaner eagerCleaner = (EagerContentStoreCleaner) ctx.getBean("eagerContentStoreCleaner");
|
||||
eagerCleaner.setListeners(Collections.singletonList(listener));
|
||||
eagerCleaner.setStores(Collections.singletonList(contentStore));
|
||||
cleaner = (ContentStoreCleaner) ctx.getBean("contentStoreCleaner");
|
||||
cleaner.setListeners(Collections.singletonList(listener));
|
||||
cleaner.setProtectDays(0);
|
||||
cleaner.setStores(Collections.singletonList(contentStore));
|
||||
|
||||
// The cleaner has its own txns
|
||||
cleaner.execute();
|
||||
@@ -301,22 +305,10 @@ public class ContentStoreCleanerScalabilityRunner extends Repository
|
||||
|
||||
private class HibernateHelper extends HibernateDaoSupport
|
||||
{
|
||||
private Method methodMakeNode;
|
||||
private QName dataTypeDefContent;
|
||||
private QName contentQName;
|
||||
|
||||
public HibernateHelper()
|
||||
{
|
||||
Class<HibernateHelper> clazz = HibernateHelper.class;
|
||||
try
|
||||
{
|
||||
methodMakeNode = clazz.getMethod("makeNode", new Class[] {ContentData.class});
|
||||
}
|
||||
catch (NoSuchMethodException e)
|
||||
{
|
||||
throw new RuntimeException("Failed to get methods");
|
||||
}
|
||||
dataTypeDefContent = DataTypeDefinition.CONTENT;
|
||||
contentQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "realContent");
|
||||
}
|
||||
/**
|
||||
@@ -324,17 +316,9 @@ public class ContentStoreCleanerScalabilityRunner extends Repository
|
||||
*/
|
||||
public void makeNode(ContentData contentData)
|
||||
{
|
||||
throw new UnsupportedOperationException("Fix this method up");
|
||||
// StoreRef storeRef = new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore");
|
||||
// Long nodeId = nodeDaoService.newNode(storeRef, GUID.generate(), ContentModel.TYPE_CONTENT).getFirst();
|
||||
// Node node = (Node) getHibernateTemplate().get(NodeImpl.class, nodeId);
|
||||
//
|
||||
// PropertyValue propertyValue = new PropertyValue(dataTypeDefContent, contentData);
|
||||
// node.getProperties().put(contentQName, propertyValue);
|
||||
// // persist the node
|
||||
// getHibernateTemplate().save(node);
|
||||
//
|
||||
// txnResourceInterceptor.performManualCheck(methodMakeNode, 10);
|
||||
StoreRef storeRef = new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore");
|
||||
Long nodeId = nodeDaoService.newNode(storeRef, GUID.generate(), ContentModel.TYPE_CONTENT).getFirst();
|
||||
nodeDaoService.addNodeProperty(nodeId, contentQName, contentData);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -34,11 +34,9 @@ import java.util.Map;
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.alfresco.model.ContentModel;
|
||||
import org.alfresco.repo.domain.avm.AVMNodeDAO;
|
||||
import org.alfresco.repo.content.ContentStore;
|
||||
import org.alfresco.repo.content.MimetypeMap;
|
||||
import org.alfresco.repo.content.filestore.FileContentStore;
|
||||
import org.alfresco.repo.domain.contentclean.ContentCleanDAO;
|
||||
import org.alfresco.repo.domain.avm.AVMNodeDAO;
|
||||
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
|
||||
import org.alfresco.repo.lock.JobLockService;
|
||||
import org.alfresco.repo.node.db.NodeDaoService;
|
||||
@@ -58,9 +56,7 @@ import org.alfresco.service.namespace.QName;
|
||||
import org.alfresco.service.transaction.TransactionService;
|
||||
import org.alfresco.util.ApplicationContextHelper;
|
||||
import org.alfresco.util.GUID;
|
||||
import org.alfresco.util.TempFileProvider;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationEventPublisher;
|
||||
|
||||
/**
|
||||
* @see org.alfresco.repo.content.cleanup.ContentStoreCleaner
|
||||
@@ -95,33 +91,30 @@ public class ContentStoreCleanerTest extends TestCase
|
||||
DictionaryService dictionaryService = serviceRegistry.getDictionaryService();
|
||||
NodeDaoService nodeDaoService = (NodeDaoService) ctx.getBean("nodeDaoService");
|
||||
AVMNodeDAO avmNodeDAO = (AVMNodeDAO) ctx.getBean("newAvmNodeDAO");
|
||||
ContentCleanDAO contentCleanDAO = (ContentCleanDAO) ctx.getBean("contentCleanDAO");
|
||||
ContentDataDAO contentDataDAO = (ContentDataDAO) ctx.getBean("contentDataDAO");
|
||||
ApplicationEventPublisher applicationEventPublisher = (ApplicationEventPublisher) ctx
|
||||
.getBean("applicationEventPublisher");
|
||||
|
||||
eagerCleaner = (EagerContentStoreCleaner) ctx.getBean("eagerContentStoreCleaner");
|
||||
eagerCleaner.setEagerOrphanCleanup(false);
|
||||
|
||||
// we need a store
|
||||
store = new FileContentStore(applicationEventPublisher, TempFileProvider.getTempDir().getAbsolutePath());
|
||||
store = (ContentStore) ctx.getBean("fileContentStore");
|
||||
// and a listener
|
||||
listener = new DummyCleanerListener();
|
||||
// initialise record of deleted URLs
|
||||
deletedUrls = new ArrayList<String>(5);
|
||||
|
||||
// construct the test cleaner
|
||||
// Construct the test cleaners
|
||||
eagerCleaner = (EagerContentStoreCleaner) ctx.getBean("eagerContentStoreCleaner");
|
||||
eagerCleaner.setEagerOrphanCleanup(false);
|
||||
eagerCleaner.setStores(Collections.singletonList(store));
|
||||
eagerCleaner.setListeners(Collections.singletonList(listener));
|
||||
|
||||
cleaner = new ContentStoreCleaner();
|
||||
cleaner.setEagerContentStoreCleaner(eagerCleaner);
|
||||
cleaner.setJobLockService(jobLockService);
|
||||
cleaner.setContentCleanDAO(contentCleanDAO);
|
||||
cleaner.setContentDataDAO(contentDataDAO);
|
||||
cleaner.setTransactionService(transactionService);
|
||||
cleaner.setDictionaryService(dictionaryService);
|
||||
cleaner.setContentService(contentService);
|
||||
cleaner.setNodeDaoService(nodeDaoService);
|
||||
cleaner.setAvmNodeDAO(avmNodeDAO);
|
||||
cleaner.setStores(Collections.singletonList(store));
|
||||
cleaner.setListeners(Collections.singletonList(listener));
|
||||
}
|
||||
|
||||
public void tearDown() throws Exception
|
||||
@@ -293,18 +286,49 @@ public class ContentStoreCleanerTest extends TestCase
|
||||
|
||||
public void testImmediateRemoval() throws Exception
|
||||
{
|
||||
cleaner.setProtectDays(0);
|
||||
// add some content to the store
|
||||
ContentWriter writer = store.getWriter(ContentStore.NEW_CONTENT_CONTEXT);
|
||||
writer.putContent("ABC");
|
||||
String contentUrl = writer.getContentUrl();
|
||||
eagerCleaner.setEagerOrphanCleanup(false);
|
||||
|
||||
final StoreRef storeRef = nodeService.createStore("test", getName() + "-" + GUID.generate());
|
||||
RetryingTransactionCallback<ContentData> testCallback = new RetryingTransactionCallback<ContentData>()
|
||||
{
|
||||
public ContentData execute() throws Throwable
|
||||
{
|
||||
// Create some content
|
||||
NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
|
||||
Map<QName, Serializable> properties = new HashMap<QName, Serializable>(13);
|
||||
properties.put(ContentModel.PROP_NAME, (Serializable)"test.txt");
|
||||
NodeRef contentNodeRef = nodeService.createNode(
|
||||
rootNodeRef,
|
||||
ContentModel.ASSOC_CHILDREN,
|
||||
ContentModel.ASSOC_CHILDREN,
|
||||
ContentModel.TYPE_CONTENT,
|
||||
properties).getChildRef();
|
||||
ContentWriter writer = contentService.getWriter(contentNodeRef, ContentModel.PROP_CONTENT, true);
|
||||
writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN);
|
||||
writer.putContent("INITIAL CONTENT");
|
||||
ContentData contentData = writer.getContentData();
|
||||
|
||||
// Delete the first node
|
||||
nodeService.deleteNode(contentNodeRef);
|
||||
|
||||
// Done
|
||||
return contentData;
|
||||
}
|
||||
};
|
||||
ContentData contentData = transactionService.getRetryingTransactionHelper().doInTransaction(testCallback);
|
||||
// Make sure that the content URL still exists
|
||||
ContentReader reader = contentService.getRawReader(contentData.getContentUrl());
|
||||
assertNotNull(reader);
|
||||
assertTrue("Content should not have been eagerly deleted.", reader.exists());
|
||||
|
||||
// fire the cleaner
|
||||
cleaner.setProtectDays(0);
|
||||
cleaner.execute();
|
||||
|
||||
reader = contentService.getRawReader(contentData.getContentUrl());
|
||||
// the content should have disappeared as it is not in the database
|
||||
assertFalse("Unprotected content was not deleted", store.exists(contentUrl));
|
||||
assertTrue("Content listener was not called", deletedUrls.contains(contentUrl));
|
||||
assertFalse("Unprotected content was not deleted", reader.exists());
|
||||
assertTrue("Content listener was not called", deletedUrls.contains(reader.getContentUrl()));
|
||||
}
|
||||
|
||||
public void testProtectedRemoval() throws Exception
|
||||
|
@@ -143,10 +143,6 @@ public class EagerContentStoreCleaner extends TransactionListenerAdapter
*/
public void registerNewContentUrl(String contentUrl)
{
if (!eagerOrphanCleanup)
{
return;
}
Set<String> urlsToDelete = TransactionalResourceHelper.getSet(KEY_POST_ROLLBACK_DELETION_URLS);
urlsToDelete.add(contentUrl);
// Register to listen for transaction rollback
@@ -158,7 +154,18 @@ public class EagerContentStoreCleaner extends TransactionListenerAdapter
*/
public void registerOrphanedContentUrl(String contentUrl)
{
if (!eagerOrphanCleanup)
registerOrphanedContentUrl(contentUrl, false);
}

/**
* Queues orphaned content for post-transaction removal
*
* @param force <tt>true</tt> to force the post-commit URL deletion
* regardless of the setting {@link #setEagerOrphanCleanup(boolean)}.
*/
public void registerOrphanedContentUrl(String contentUrl, boolean force)
{
if (!eagerOrphanCleanup && !force)
{
return;
}
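For context, the listeners that the cleaners notify only need the single callback used earlier in this diff (listener.beforeDelete(store, contentUrl)); a minimal hypothetical implementation, assuming that is the whole interface, could look like this:

    import org.alfresco.repo.content.ContentStore;
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    // Hypothetical example listener: just records what is about to be deleted.
    public class LoggingCleanerListener implements ContentStoreCleanerListener
    {
        private static final Log logger = LogFactory.getLog(LoggingCleanerListener.class);

        public void beforeDelete(ContentStore sourceStore, String contentUrl)
        {
            if (logger.isDebugEnabled())
            {
                logger.debug("About to delete content URL " + contentUrl + " from store " + sourceStore);
            }
        }
    }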
@@ -1,59 +0,0 @@
|
||||
/*
|
||||
* Copyright (C) 2005-2009 Alfresco Software Limited.
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU General Public License
|
||||
* as published by the Free Software Foundation; either version 2
|
||||
* of the License, or (at your option) any later version.
|
||||
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
* As a special exception to the terms and conditions of version 2.0 of
|
||||
* the GPL, you may redistribute this Program in connection with Free/Libre
|
||||
* and Open Source Software ("FLOSS") applications as described in Alfresco's
|
||||
* FLOSS exception. You should have recieved a copy of the text describing
|
||||
* the FLOSS exception, and it is also available here:
|
||||
* http://www.alfresco.com/legal/licensing"
|
||||
*/
|
||||
package org.alfresco.repo.domain.contentclean;
|
||||
|
||||
|
||||
/**
|
||||
* DAO services for <b>alf_contentclean</b> table.
|
||||
* This DAO is geared towards bulk processing of content URLs.
|
||||
* <p>
|
||||
* Content URLs are lowercased and CRC'ed
|
||||
*
|
||||
* @author Derek Hulley
|
||||
* @since 3.2
|
||||
*/
|
||||
public interface ContentCleanDAO
|
||||
{
|
||||
/**
|
||||
* Interface callback for putting and getting content URL values
|
||||
*
|
||||
* @author Derek Hulley
|
||||
* @since 3.2
|
||||
*/
|
||||
public interface ContentUrlBatchProcessor
|
||||
{
|
||||
void start();
|
||||
void processContentUrl(String contentUrl);
|
||||
void end();
|
||||
}
|
||||
|
||||
void cleanUp();
|
||||
|
||||
ContentUrlBatchProcessor getUrlInserter();
|
||||
|
||||
ContentUrlBatchProcessor getUrlRemover();
|
||||
|
||||
void listAllUrls(ContentUrlBatchProcessor batchProcessor);
|
||||
}
|
@@ -1,89 +0,0 @@
|
||||
/*
|
||||
* Copyright (C) 2005-2009 Alfresco Software Limited.
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU General Public License
|
||||
* as published by the Free Software Foundation; either version 2
|
||||
* of the License, or (at your option) any later version.
|
||||
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
* As a special exception to the terms and conditions of version 2.0 of
|
||||
* the GPL, you may redistribute this Program in connection with Free/Libre
|
||||
* and Open Source Software ("FLOSS") applications as described in Alfresco's
|
||||
* FLOSS exception. You should have recieved a copy of the text describing
|
||||
* the FLOSS exception, and it is also available here:
|
||||
* http://www.alfresco.com/legal/licensing"
|
||||
*/
|
||||
package org.alfresco.repo.domain.contentclean;
|
||||
|
||||
import org.alfresco.util.EqualsHelper;
|
||||
|
||||
/**
|
||||
* Entity bean for <b>alf_content_url</b> table.
|
||||
* <p>
|
||||
* These are unique (see {@link #equals(Object) equals} and {@link #hashCode() hashCode}) based
|
||||
* on the {@link #getContentUrl() content URL} value.
|
||||
*
|
||||
* @author Derek Hulley
|
||||
* @since 3.2
|
||||
*/
|
||||
public class ContentCleanEntity
|
||||
{
|
||||
private String contentUrl;
|
||||
|
||||
public ContentCleanEntity()
|
||||
{
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode()
|
||||
{
|
||||
return (contentUrl == null ? 0 : contentUrl.hashCode());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj)
|
||||
{
|
||||
if (this == obj)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
else if (obj instanceof ContentCleanEntity)
|
||||
{
|
||||
ContentCleanEntity that = (ContentCleanEntity) obj;
|
||||
return EqualsHelper.nullSafeEquals(this.contentUrl, that.contentUrl);
|
||||
}
|
||||
else
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString()
|
||||
{
|
||||
StringBuilder sb = new StringBuilder(512);
|
||||
sb.append("ContentCleanEntity")
|
||||
.append("[ contentUrl=").append(contentUrl)
|
||||
.append("]");
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
public String getContentUrl()
|
||||
{
|
||||
return contentUrl;
|
||||
}
|
||||
|
||||
public void setContentUrl(String contentUrl)
|
||||
{
|
||||
this.contentUrl = contentUrl;
|
||||
}
|
||||
}
|
@@ -1,254 +0,0 @@
|
||||
/*
|
||||
* Copyright (C) 2005-2009 Alfresco Software Limited.
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU General Public License
|
||||
* as published by the Free Software Foundation; either version 2
|
||||
* of the License, or (at your option) any later version.
|
||||
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
* As a special exception to the terms and conditions of version 2.0 of
|
||||
* the GPL, you may redistribute this Program in connection with Free/Libre
|
||||
* and Open Source Software ("FLOSS") applications as described in Alfresco's
|
||||
* FLOSS exception. You should have recieved a copy of the text describing
|
||||
* the FLOSS exception, and it is also available here:
|
||||
* http://www.alfresco.com/legal/licensing"
|
||||
*/
|
||||
package org.alfresco.repo.domain.contentclean.ibatis;
|
||||
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.alfresco.error.AlfrescoRuntimeException;
|
||||
import org.alfresco.repo.domain.contentclean.ContentCleanDAO;
|
||||
import org.alfresco.repo.domain.contentclean.ContentCleanEntity;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.orm.ibatis.SqlMapClientTemplate;
|
||||
|
||||
import com.ibatis.sqlmap.client.SqlMapClient;
|
||||
import com.ibatis.sqlmap.client.event.RowHandler;
|
||||
|
||||
/**
|
||||
* iBatis-specific implementation of the Content Cleaner DAO.
|
||||
*
|
||||
* @author Derek Hulley
|
||||
* @since 3.2
|
||||
*/
|
||||
public class ContentCleanDAOImpl implements ContentCleanDAO
|
||||
{
|
||||
private static Log logger = LogFactory.getLog(ContentCleanDAOImpl.class);
|
||||
|
||||
private static final int DEFAULT_BATCH_SIZE = 50;
|
||||
|
||||
private static final String INSERT_CONTENT_CLEAN = "alfresco.content.insert_ContentCleanUrl";
|
||||
private static final String SELECT_CONTENT_CLEAN_URLS = "alfresco.content.select_ContentCleanUrls";
|
||||
private static final String DELETE_CONTENT_CLEAN_BY_URL = "alfresco.content.delete_ContentCleanUrl";
|
||||
private static final String DELETE_CONTENT_CLEAN = "alfresco.content.delete_ContentCleanUrls";
|
||||
|
||||
private SqlMapClientTemplate template;
|
||||
|
||||
public void setSqlMapClientTemplate(SqlMapClientTemplate sqlMapClientTemplate)
|
||||
{
|
||||
this.template = sqlMapClientTemplate;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public ContentUrlBatchProcessor getUrlInserter()
|
||||
{
|
||||
final SqlMapClient sqlMapClient = template.getSqlMapClient();
|
||||
ContentUrlBatchProcessor processor = new ContentUrlBatchProcessor()
|
||||
{
|
||||
private int count = 0;
|
||||
private int total = 0;
|
||||
|
||||
public void start()
|
||||
{
|
||||
try
|
||||
{
|
||||
sqlMapClient.startBatch();
|
||||
count = 0;
|
||||
}
|
||||
catch (SQLException e)
|
||||
{
|
||||
// Batches not supported, so don't do batching
|
||||
count = -1;
|
||||
}
|
||||
}
|
||||
public void processContentUrl(String contentUrl)
|
||||
{
|
||||
ContentCleanEntity contentCleanEntity = new ContentCleanEntity();
|
||||
contentCleanEntity.setContentUrl(contentUrl == null ? null : contentUrl.toLowerCase());
|
||||
template.insert(INSERT_CONTENT_CLEAN, contentCleanEntity);
|
||||
// Write the batch
|
||||
executeBatch();
|
||||
total++;
|
||||
}
|
||||
public void end()
|
||||
{
|
||||
// Write the batch
|
||||
executeBatch();
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug(" Inserted " + total + " content URLs (FINISHED)");
|
||||
}
|
||||
}
|
||||
private void executeBatch()
|
||||
{
|
||||
// Are we batching?
|
||||
if (count > -1)
|
||||
{
|
||||
// Write the batch, if required
|
||||
if (++count >= DEFAULT_BATCH_SIZE)
|
||||
{
|
||||
try
|
||||
{
|
||||
sqlMapClient.executeBatch();
|
||||
sqlMapClient.startBatch();
|
||||
}
|
||||
catch (SQLException e)
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Failed to execute batch", e);
|
||||
}
|
||||
count = 0;
|
||||
}
|
||||
}
|
||||
if (logger.isDebugEnabled() && (total == 0 || (total % 1000 == 0) ))
|
||||
{
|
||||
logger.debug(" Inserted " + total + " content URLs");
|
||||
}
|
||||
}
|
||||
};
|
||||
// Done
|
||||
return processor;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public ContentUrlBatchProcessor getUrlRemover()
|
||||
{
|
||||
final SqlMapClient sqlMapClient = template.getSqlMapClient();
|
||||
ContentUrlBatchProcessor processor = new ContentUrlBatchProcessor()
|
||||
{
|
||||
private int count = 0;
|
||||
private int total = 0;
|
||||
|
||||
public void start()
|
||||
{
|
||||
try
|
||||
{
|
||||
sqlMapClient.startBatch();
|
||||
count = 0;
|
||||
}
|
||||
catch (SQLException e)
|
||||
{
|
||||
// Batches not supported, so don't do batching
|
||||
count = -1;
|
||||
}
|
||||
}
|
||||
public void processContentUrl(String contentUrl)
|
||||
{
|
||||
ContentCleanEntity contentCleanEntity = new ContentCleanEntity();
|
||||
contentCleanEntity.setContentUrl(contentUrl);
|
||||
template.delete(DELETE_CONTENT_CLEAN_BY_URL, contentCleanEntity);
|
||||
// Write the batch
|
||||
executeBatch();
|
||||
total++;
|
||||
}
|
||||
public void end()
|
||||
{
|
||||
// Write the batch
|
||||
executeBatch();
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug(" Removed " + total + " content URLs (FINISHED)");
|
||||
}
|
||||
}
|
||||
private void executeBatch()
|
||||
{
|
||||
// Are we batching?
|
||||
if (count > -1)
|
||||
{
|
||||
// Write the batch, if required
|
||||
if (++count >= DEFAULT_BATCH_SIZE)
|
||||
{
|
||||
try
|
||||
{
|
||||
sqlMapClient.executeBatch();
|
||||
sqlMapClient.startBatch();
|
||||
}
|
||||
catch (SQLException e)
|
||||
{
|
||||
throw new AlfrescoRuntimeException("Failed to execute batch", e);
|
||||
}
|
||||
count = 0;
|
||||
}
|
||||
}
|
||||
if (logger.isDebugEnabled() && (total == 0 || (total % 1000 == 0) ))
|
||||
{
|
||||
logger.debug(" Removed " + total + " content URLs");
|
||||
}
|
||||
}
|
||||
};
|
||||
// Done
|
||||
return processor;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public void listAllUrls(ContentUrlBatchProcessor batchProcessor)
|
||||
{
|
||||
ListAllRowHandler rowHandler = new ListAllRowHandler(batchProcessor);
|
||||
|
||||
batchProcessor.start();
|
||||
template.queryWithRowHandler(SELECT_CONTENT_CLEAN_URLS, rowHandler);
|
||||
batchProcessor.end();
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug(" Listed " + rowHandler.total + " content URLs");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Row handler for listing all content clean URLs
|
||||
* @author Derek Hulley
|
||||
* @since 3.2
|
||||
*/
|
||||
private static class ListAllRowHandler implements RowHandler
|
||||
{
|
||||
private final ContentUrlBatchProcessor batchProcessor;
|
||||
private int total = 0;
|
||||
private ListAllRowHandler(ContentUrlBatchProcessor batchProcessor)
|
||||
{
|
||||
this.batchProcessor = batchProcessor;
|
||||
}
|
||||
public void handleRow(Object valueObject)
|
||||
{
|
||||
batchProcessor.processContentUrl((String)valueObject);
|
||||
total++;
|
||||
if (logger.isDebugEnabled() && (total == 0 || (total % 1000 == 0) ))
|
||||
{
|
||||
logger.debug(" Listed " + total + " content URLs");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public void cleanUp()
|
||||
{
|
||||
template.delete(DELETE_CONTENT_CLEAN);
|
||||
}
|
||||
}
|
@@ -29,6 +29,8 @@ import java.util.Locale;
|
||||
import java.util.Set;
|
||||
|
||||
import org.alfresco.repo.cache.SimpleCache;
|
||||
import org.alfresco.repo.cache.lookup.EntityLookupCache;
|
||||
import org.alfresco.repo.cache.lookup.EntityLookupCache.EntityLookupCallbackDAOAdaptor;
|
||||
import org.alfresco.repo.content.cleanup.EagerContentStoreCleaner;
|
||||
import org.alfresco.repo.domain.LocaleDAO;
|
||||
import org.alfresco.repo.domain.encoding.EncodingDAO;
|
||||
@@ -37,10 +39,12 @@ import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
|
||||
import org.alfresco.repo.transaction.TransactionListenerAdapter;
|
||||
import org.alfresco.repo.transaction.TransactionalResourceHelper;
|
||||
import org.alfresco.service.cmr.repository.ContentData;
|
||||
import org.springframework.extensions.surf.util.Pair;
|
||||
import org.alfresco.util.EqualsHelper;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.dao.ConcurrencyFailureException;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.extensions.surf.util.Pair;
|
||||
|
||||
/**
|
||||
* Abstract implementation for ContentData DAO.
|
||||
@@ -56,6 +60,7 @@ import org.springframework.dao.ConcurrencyFailureException;
|
||||
*/
|
||||
public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
|
||||
{
|
||||
private static final String CACHE_REGION_CONTENT_DATA = "ContentData";
|
||||
/**
|
||||
* Content URL IDs to delete before final commit.
|
||||
*/
|
||||
@@ -63,11 +68,28 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
|
||||
|
||||
private static Log logger = LogFactory.getLog(AbstractContentDataDAOImpl.class);
|
||||
|
||||
private final ContentDataCallbackDAO contentDataCallbackDAO;
|
||||
private MimetypeDAO mimetypeDAO;
|
||||
private EncodingDAO encodingDAO;
|
||||
private LocaleDAO localeDAO;
|
||||
private EagerContentStoreCleaner contentStoreCleaner;
|
||||
private SimpleCache<Serializable, Serializable> contentDataCache;
|
||||
|
||||
/**
|
||||
* Cache for the ContentData class:<br/>
|
||||
* KEY: ID<br/>
|
||||
* VALUE: ContentData object<br/>
|
||||
* VALUE KEY: NONE<br/>
|
||||
*/
|
||||
private EntityLookupCache<Long, ContentData, Serializable> contentDataCache;
|
||||
|
||||
/**
|
||||
* Default constructor
|
||||
*/
|
||||
public AbstractContentDataDAOImpl()
|
||||
{
|
||||
this.contentDataCallbackDAO = new ContentDataCallbackDAO();
|
||||
this.contentDataCache = new EntityLookupCache<Long, ContentData, Serializable>(contentDataCallbackDAO);
|
||||
}
|
||||
|
||||
public void setMimetypeDAO(MimetypeDAO mimetypeDAO)
|
||||
{
|
||||
@@ -97,9 +119,12 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
|
||||
/**
|
||||
* @param contentDataCache the cache of IDs to ContentData and vice versa
|
||||
*/
|
||||
public void setContentDataCache(SimpleCache<Serializable, Serializable> contentDataCache)
|
||||
public void setContentDataCache(SimpleCache<Long, ContentData> contentDataCache)
|
||||
{
|
||||
this.contentDataCache = contentDataCache;
|
||||
this.contentDataCache = new EntityLookupCache<Long, ContentData, Serializable>(
|
||||
contentDataCache,
|
||||
CACHE_REGION_CONTENT_DATA,
|
||||
contentDataCallbackDAO);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -114,7 +139,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
|
||||
* A <b>content_url</b> entity was dereferenced. This makes no assumptions about the
|
||||
* current references - dereference deletion is handled in the commit phase.
|
||||
*/
|
||||
protected void registerDereferenceContentUrl(String contentUrl)
|
||||
protected void registerDereferencedContentUrl(String contentUrl)
|
||||
{
|
||||
Set<String> contentUrls = TransactionalResourceHelper.getSet(KEY_PRE_COMMIT_CONTENT_URL_DELETIONS);
|
||||
if (contentUrls.size() == 0)
|
||||
@@ -130,12 +155,12 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
|
||||
*/
|
||||
public Pair<Long, ContentData> createContentData(ContentData contentData)
|
||||
{
|
||||
/*
|
||||
* TODO: Cache
|
||||
*/
|
||||
ContentDataEntity contentDataEntity = createContentDataEntity(contentData);
|
||||
// Done
|
||||
return new Pair<Long, ContentData>(contentDataEntity.getId(), contentData);
|
||||
if (contentData == null)
|
||||
{
|
||||
throw new IllegalArgumentException("ContentData values cannot be null");
|
||||
}
|
||||
Pair<Long, ContentData> entityPair = contentDataCache.getOrCreateByValue(contentData);
|
||||
return entityPair;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -143,18 +168,36 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
|
||||
*/
|
||||
public Pair<Long, ContentData> getContentData(Long id)
|
||||
{
|
||||
/*
|
||||
* TODO: Cache
|
||||
*/
|
||||
ContentDataEntity contentDataEntity = getContentDataEntity(id);
|
||||
if (contentDataEntity == null)
|
||||
if (id == null)
|
||||
{
|
||||
return null;
|
||||
throw new IllegalArgumentException("Cannot look up ContentData by null ID.");
|
||||
}
|
||||
Pair<Long, ContentData> entityPair = contentDataCache.getByKey(id);
|
||||
if (entityPair == null)
|
||||
{
|
||||
throw new DataIntegrityViolationException("No ContentData value exists for ID " + id);
|
||||
}
|
||||
return entityPair;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public void updateContentData(Long id, ContentData contentData)
|
||||
{
|
||||
if (id == null)
|
||||
{
|
||||
throw new IllegalArgumentException("Cannot look up ContentData by null ID.");
|
||||
}
|
||||
if (contentData == null)
|
||||
{
|
||||
throw new IllegalArgumentException("Cannot update ContentData with a null.");
|
||||
}
|
||||
int updated = contentDataCache.updateValue(id, contentData);
|
||||
if (updated < 1)
|
||||
{
|
||||
throw new ConcurrencyFailureException("ContentData with ID " + id + " not updated");
|
||||
}
|
||||
// Convert back to ContentData
|
||||
ContentData contentData = makeContentData(contentDataEntity);
|
||||
// Done
|
||||
return new Pair<Long, ContentData>(id, contentData);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -162,14 +205,60 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
|
||||
*/
|
||||
public void deleteContentData(Long id)
|
||||
{
|
||||
int deleted = deleteContentDataEntity(id);
|
||||
if (id == null)
|
||||
{
|
||||
throw new IllegalArgumentException("Cannot delete ContentData by null ID.");
|
||||
}
|
||||
int deleted = contentDataCache.deleteByKey(id);
|
||||
if (deleted < 1)
|
||||
{
|
||||
throw new ConcurrencyFailureException("ContetntData with ID " + id + " no longer exists");
|
||||
throw new ConcurrencyFailureException("ContentData with ID " + id + " no longer exists");
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
/**
* Callback for <b>alf_content_data</b> DAO.
*/
private class ContentDataCallbackDAO extends EntityLookupCallbackDAOAdaptor<Long, ContentData, Serializable>
{
public Pair<Long, ContentData> createValue(ContentData value)
{
ContentDataEntity contentDataEntity = createContentDataEntity(value);
// Done
return new Pair<Long, ContentData>(contentDataEntity.getId(), value);
}

public Pair<Long, ContentData> findByKey(Long key)
{
ContentDataEntity contentDataEntity = getContentDataEntity(key);
if (contentDataEntity == null)
{
return null;
}
ContentData contentData = makeContentData(contentDataEntity);
// Done
return new Pair<Long, ContentData>(key, contentData);
}

@Override
public int updateValue(Long key, ContentData value)
{
ContentDataEntity contentDataEntity = getContentDataEntity(key);
if (contentDataEntity == null)
{
return 0; // The client (outer-level code) will decide if this is an error
}
return updateContentDataEntity(contentDataEntity, value);
}

@Override
public int deleteByKey(Long key)
{
return deleteContentDataEntity(key);
}
}
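The adaptor above is the only DB-facing piece the EntityLookupCache needs; the same four-method shape works for any keyed value. A hypothetical, self-contained example with a String-valued entity (all names here are invented for illustration; only the adaptor contract is taken from the code above):

    import java.io.Serializable;
    import java.util.HashMap;
    import java.util.Map;
    import org.alfresco.repo.cache.lookup.EntityLookupCache.EntityLookupCallbackDAOAdaptor;
    import org.springframework.extensions.surf.util.Pair;

    // Toy in-memory "DAO" demonstrating the callback contract used by ContentDataCallbackDAO.
    class NameCallbackDAO extends EntityLookupCallbackDAOAdaptor<Long, String, Serializable>
    {
        private final Map<Long, String> rows = new HashMap<Long, String>();
        private long nextId = 1L;

        public Pair<Long, String> createValue(String value)
        {
            Long id = Long.valueOf(nextId++);
            rows.put(id, value);
            return new Pair<Long, String>(id, value);
        }

        public Pair<Long, String> findByKey(Long key)
        {
            String value = rows.get(key);
            return value == null ? null : new Pair<Long, String>(key, value);
        }

        @Override
        public int updateValue(Long key, String value)
        {
            if (!rows.containsKey(key))
            {
                return 0;           // nothing to update; caller decides if that is an error
            }
            rows.put(key, value);
            return 1;
        }

        @Override
        public int deleteByKey(Long key)
        {
            return rows.remove(key) == null ? 0 : 1;
        }
    }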
|
||||
|
||||
/**
|
||||
* Translates this instance into an externally-usable <code>ContentData</code> instance.
|
||||
*/
|
||||
@@ -247,14 +336,67 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
|
||||
return contentDataEntity;
|
||||
}
|
||||
|
||||
/**
|
||||
* Translates the {@link ContentData} into persistable values using the helper DAOs
|
||||
*/
|
||||
private int updateContentDataEntity(ContentDataEntity contentDataEntity, ContentData contentData)
|
||||
{
|
||||
// Resolve the content URL
|
||||
String oldContentUrl = contentDataEntity.getContentUrl();
|
||||
String newContentUrl = contentData.getContentUrl();
|
||||
if (!EqualsHelper.nullSafeEquals(oldContentUrl, newContentUrl))
|
||||
{
|
||||
if (oldContentUrl != null)
|
||||
{
|
||||
// We have a changed value. The old content URL has been dereferenced.
|
||||
registerDereferencedContentUrl(oldContentUrl);
|
||||
}
|
||||
if (newContentUrl != null)
|
||||
{
|
||||
Long contentUrlId = getOrCreateContentUrlEntity(newContentUrl, contentData.getSize()).getId();
|
||||
contentDataEntity.setContentUrlId(contentUrlId);
|
||||
contentDataEntity.setContentUrl(newContentUrl);
|
||||
}
|
||||
else
|
||||
{
|
||||
contentDataEntity.setContentUrlId(null);
|
||||
contentDataEntity.setContentUrl(null);
|
||||
}
|
||||
}
|
||||
// Resolve the mimetype
|
||||
Long mimetypeId = null;
|
||||
String mimetype = contentData.getMimetype();
|
||||
if (mimetype != null)
|
||||
{
|
||||
mimetypeId = mimetypeDAO.getOrCreateMimetype(mimetype).getFirst();
|
||||
}
|
||||
// Resolve the encoding
|
||||
Long encodingId = null;
|
||||
String encoding = contentData.getEncoding();
|
||||
if (encoding != null)
|
||||
{
|
||||
encodingId = encodingDAO.getOrCreateEncoding(encoding).getFirst();
|
||||
}
|
||||
// Resolve the locale
|
||||
Long localeId = null;
|
||||
Locale locale = contentData.getLocale();
|
||||
if (locale != null)
|
||||
{
|
||||
localeId = localeDAO.getOrCreateLocalePair(locale).getFirst();
|
||||
}
|
||||
|
||||
contentDataEntity.setMimetypeId(mimetypeId);
|
||||
contentDataEntity.setEncodingId(encodingId);
|
||||
contentDataEntity.setLocaleId(localeId);
|
||||
|
||||
return updateContentDataEntity(contentDataEntity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Caching method that creates an entity for <b>content_url_entity</b>.
|
||||
*/
|
||||
private ContentUrlEntity getOrCreateContentUrlEntity(String contentUrl, long size)
|
||||
{
|
||||
/*
|
||||
* TODO: Check for cache requirements
|
||||
*/
|
||||
// Create the content URL entity
|
||||
ContentUrlEntity contentUrlEntity = getContentUrlEntity(contentUrl);
|
||||
// If it exists, then we can just re-use it, but check that the size is consistent
|
||||
@@ -304,10 +446,13 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
|
||||
protected abstract ContentUrlEntity getContentUrlEntityUnreferenced(String contentUrl);
|
||||
|
||||
/**
|
||||
* Delete the entity with the given ID
|
||||
* @return Returns the number of rows deleted
|
||||
* Update a content URL with the given orphan time
|
||||
*
|
||||
* @param id the unique ID of the entity
|
||||
* @param orphanTime the time (ms since epoch) that the entity was orphaned
|
||||
* @return Returns the number of rows updated
|
||||
*/
|
||||
protected abstract int deleteContentUrlEntity(Long id);
|
||||
protected abstract int updateContentUrlOrphanTime(Long id, long orphanTime);
|
||||
|
||||
/**
|
||||
* Create the row for the <b>alf_content_data<b>
|
||||
@@ -324,6 +469,14 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
|
||||
*/
|
||||
protected abstract ContentDataEntity getContentDataEntity(Long id);
|
||||
|
||||
/**
|
||||
* Update an existing <b>alf_content_data</b> entity
|
||||
*
|
||||
* @param entity the existing entity that will be updated
|
||||
* @return Returns the number of rows updated (should be 1)
|
||||
*/
|
||||
protected abstract int updateContentDataEntity(ContentDataEntity entity);
|
||||
|
||||
/**
|
||||
* Delete the entity with the given ID
|
||||
*
|
||||
@@ -347,6 +500,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
|
||||
return;
|
||||
}
|
||||
Set<String> contentUrls = TransactionalResourceHelper.getSet(KEY_PRE_COMMIT_CONTENT_URL_DELETIONS);
|
||||
long orphanTime = System.currentTimeMillis();
|
||||
for (String contentUrl : contentUrls)
|
||||
{
|
||||
ContentUrlEntity contentUrlEntity = getContentUrlEntityUnreferenced(contentUrl);
|
||||
@@ -355,9 +509,9 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
|
||||
// It is still referenced, so ignore it
|
||||
continue;
|
||||
}
|
||||
// It needs to be deleted
|
||||
// We mark the URL as orphaned.
|
||||
Long contentUrlId = contentUrlEntity.getId();
|
||||
deleteContentUrlEntity(contentUrlId);
|
||||
updateContentUrlOrphanTime(contentUrlId, orphanTime);
|
||||
// Pop this in the queue for deletion from the content store
|
||||
contentStoreCleaner.registerOrphanedContentUrl(contentUrl);
|
||||
}
|
||||
|
@@ -24,6 +24,7 @@
*/
package org.alfresco.repo.domain.contentdata;

import java.util.List;
import java.util.Set;

import org.alfresco.error.AlfrescoRuntimeException;
@@ -47,6 +48,14 @@ public interface ContentDataDAO
*/
Pair<Long, ContentData> createContentData(ContentData contentData);

/**
* Update a content data instance
*
* @param id the unique ID of the entity
* @param contentData the new data
*/
void updateContentData(Long id, ContentData contentData);

/**
* @param id the unique ID of the entity
* @return the ContentData pair (id, ContentData) or <tt>null</tt> if it doesn't exist
@@ -77,13 +86,28 @@ public interface ContentDataDAO
*/
public static interface ContentUrlHandler
{
void handle(String contentUrl);
void handle(Long id, String contentUrl, Long orphanTime);
}

/**
* Enumerate all available content URLs
* Enumerate all available content URLs that were orphaned on or before the given time
*
* @param contentUrlHandler
* @param contentUrlHandler the callback object to process the rows
* @param maxOrphanTime the maximum orphan time
*/
void getAllContentUrls(ContentUrlHandler contentUrlHandler);
void getContentUrlsOrphaned(ContentUrlHandler contentUrlHandler, long maxOrphanTime);

/**
* Enumerate all available content URLs that were orphaned on or before the given time
*
* @param contentUrlHandler the callback object to process the rows
* @param maxOrphanTime the maximum orphan time
* @param maxResults the maximum number of results (1 or greater)
*/
void getContentUrlsOrphaned(ContentUrlHandler contentUrlHandler, long maxOrphanTime, int maxResults);

/**
* Delete a batch of content URL entities.
*/
int deleteContentUrls(List<Long> ids);
}
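A short usage sketch of the orphan-related methods declared above (illustrative only; the class and method names below are invented, the 500-row batch size is an arbitrary example, and the handler implements the new three-argument callback):

    import java.util.ArrayList;
    import java.util.List;
    import org.alfresco.repo.domain.contentdata.ContentDataDAO;
    import org.alfresco.repo.domain.contentdata.ContentDataDAO.ContentUrlHandler;

    // Hypothetical helper: collect up to 500 content URL IDs orphaned before cutOffMs, then delete the rows.
    class OrphanBatchExample
    {
        static int deleteOneBatch(ContentDataDAO contentDataDAO, long cutOffMs)
        {
            final List<Long> ids = new ArrayList<Long>();
            ContentUrlHandler handler = new ContentUrlHandler()
            {
                public void handle(Long id, String contentUrl, Long orphanTime)
                {
                    ids.add(id);
                }
            };
            contentDataDAO.getContentUrlsOrphaned(handler, cutOffMs, 500);
            return ids.isEmpty() ? 0 : contentDataDAO.deleteContentUrls(ids);
        }
    }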
@@ -34,6 +34,7 @@ import org.alfresco.repo.content.ContentContext;
|
||||
import org.alfresco.repo.content.ContentStore;
|
||||
import org.alfresco.repo.content.MimetypeMap;
|
||||
import org.alfresco.repo.content.filestore.FileContentStore;
|
||||
import org.alfresco.repo.domain.contentdata.ContentDataDAO.ContentUrlHandler;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper;
|
||||
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
|
||||
import org.alfresco.service.ServiceRegistry;
|
||||
@@ -44,6 +45,7 @@ import org.springframework.extensions.surf.util.Pair;
|
||||
import org.alfresco.util.TempFileProvider;
|
||||
import org.springframework.context.ApplicationEventPublisher;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
|
||||
/**
|
||||
* @see ContentDataDAO
|
||||
@@ -85,6 +87,32 @@ public class ContentDataDAOTest extends TestCase
|
||||
return txnHelper.doInTransaction(callback, false, false);
|
||||
}
|
||||
|
||||
private Pair<Long, ContentData> update(final Long id, final ContentData contentData)
|
||||
{
|
||||
RetryingTransactionCallback<Pair<Long, ContentData>> callback = new RetryingTransactionCallback<Pair<Long, ContentData>>()
|
||||
{
|
||||
public Pair<Long, ContentData> execute() throws Throwable
|
||||
{
|
||||
contentDataDAO.updateContentData(id, contentData);
|
||||
return new Pair<Long, ContentData>(id, contentData);
|
||||
}
|
||||
};
|
||||
return txnHelper.doInTransaction(callback, false, false);
|
||||
}
|
||||
|
||||
private void delete(final Long id)
|
||||
{
|
||||
RetryingTransactionCallback<Void> callback = new RetryingTransactionCallback<Void>()
|
||||
{
|
||||
public Void execute() throws Throwable
|
||||
{
|
||||
contentDataDAO.deleteContentData(id);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
txnHelper.doInTransaction(callback, false, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves and checks the ContentData for equality
|
||||
*/
|
||||
@@ -118,7 +146,15 @@ public class ContentDataDAOTest extends TestCase
|
||||
|
||||
public void testGetWithInvalidId()
|
||||
{
|
||||
assertNull("Expected null for invalid ID", contentDataDAO.getContentData(-1L));
|
||||
try
|
||||
{
|
||||
contentDataDAO.getContentData(-1L);
|
||||
fail("Invalid ContentData IDs must generate DataIntegrityViolationException.");
|
||||
}
|
||||
catch (DataIntegrityViolationException e)
|
||||
{
|
||||
// Expected
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -164,13 +200,27 @@ public class ContentDataDAOTest extends TestCase
|
||||
getAndCheck(resultPairLower.getFirst(), contentDataLower);
|
||||
}
|
||||
|
||||
public void testUpdate() throws Exception
|
||||
{
|
||||
ContentData contentData = getContentData();
|
||||
Pair<Long, ContentData> resultPair = create(contentData);
|
||||
Long id = resultPair.getFirst();
|
||||
// Update
|
||||
contentData = ContentData.setMimetype(contentData, MimetypeMap.MIMETYPE_HTML);
|
||||
contentData = ContentData.setEncoding(contentData, "UTF-16");
|
||||
// Don't update the content itself
|
||||
update(id, contentData);
|
||||
// Check
|
||||
getAndCheck(id, contentData);
|
||||
}
|
||||
|
||||
public void testDelete() throws Exception
|
||||
{
|
||||
ContentData contentData = getContentData();
|
||||
|
||||
Pair<Long, ContentData> resultPair = create(contentData);
|
||||
getAndCheck(resultPair.getFirst(), contentData);
|
||||
contentDataDAO.deleteContentData(resultPair.getFirst());
|
||||
delete(resultPair.getFirst());
|
||||
try
|
||||
{
|
||||
getAndCheck(resultPair.getFirst(), contentData);
|
||||
@@ -182,6 +232,66 @@ public class ContentDataDAOTest extends TestCase
|
||||
}
|
||||
}
|
||||
|
||||
public void testContentUrl_FetchingOrphansNoLimit() throws Exception
|
||||
{
|
||||
ContentData contentData = getContentData();
|
||||
Pair<Long, ContentData> resultPair = create(contentData);
|
||||
getAndCheck(resultPair.getFirst(), contentData);
|
||||
delete(resultPair.getFirst());
|
||||
// The content URL is orphaned
|
||||
final String contentUrlOrphaned = contentData.getContentUrl();
|
||||
final boolean[] found = new boolean[] {false};
|
||||
|
||||
// Iterate over all orphaned content URLs and ensure that we hit the one we just orphaned
|
||||
ContentUrlHandler handler = new ContentUrlHandler()
|
||||
{
|
||||
public void handle(Long id, String contentUrl, Long orphanTime)
|
||||
{
|
||||
// Check
|
||||
if (id == null || contentUrl == null || orphanTime == null)
|
||||
{
|
||||
fail("Invalid orphan data returned to handler: " + id + "-" + contentUrl + "-" + orphanTime);
|
||||
}
|
||||
// Did we get the one we wanted?
|
||||
if (contentUrl.equals(contentUrlOrphaned))
|
||||
{
|
||||
found[0] = true;
|
||||
}
|
||||
}
|
||||
};
|
||||
contentDataDAO.getContentUrlsOrphaned(handler, Long.MAX_VALUE);
|
||||
assertTrue("Newly-orphaned content URL not found", found[0]);
|
||||
}
|
||||
|
||||
public void testContentUrl_FetchingOrphansWithLimit() throws Exception
|
||||
{
|
||||
// Orphan some content
|
||||
for (int i = 0; i < 5; i++)
|
||||
{
|
||||
ContentData contentData = getContentData();
|
||||
Pair<Long, ContentData> resultPair = create(contentData);
|
||||
getAndCheck(resultPair.getFirst(), contentData);
|
||||
delete(resultPair.getFirst());
|
||||
}
|
||||
final int[] count = new int[] {0};
|
||||
|
||||
// Iterate over all orphaned content URLs and ensure that we hit the one we just orphaned
|
||||
ContentUrlHandler handler = new ContentUrlHandler()
|
||||
{
|
||||
public void handle(Long id, String contentUrl, Long orphanTime)
|
||||
{
|
||||
// Check
|
||||
if (id == null || contentUrl == null || orphanTime == null)
|
||||
{
|
||||
fail("Invalid orphan data returned to handler: " + id + "-" + contentUrl + "-" + orphanTime);
|
||||
}
|
||||
count[0]++;
|
||||
}
|
||||
};
|
||||
contentDataDAO.getContentUrlsOrphaned(handler, Long.MAX_VALUE, 5);
|
||||
assertEquals("Expected exactly 5 results callbacks", 5, count[0]);
|
||||
}
|
||||
|
||||
private static final String[] MIMETYPES = new String[]
|
||||
{
|
||||
MimetypeMap.MIMETYPE_ACP,
|
||||
|
@@ -92,6 +92,18 @@ public class ContentDataEntity
return sb.toString();
}

public void incrementVersion()
{
if (version >= Short.MAX_VALUE)
{
this.version = 0L;
}
else
{
this.version++;
}
}

public Long getId()
{
return id;
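The new version counter above supports optimistic updates of the ContentData rows; a brief sketch of how the pieces added elsewhere in this change fit together (simplified; the exact WHERE clause of the mapped UPDATE statement is not shown in this diff and is assumed):

    // Simplified flow, mirroring the DAO code in this change:
    // 1. updateContentData(id, contentData) delegates to the entity cache, which calls updateValue(id, contentData).
    // 2. updateValue(...) loads the ContentDataEntity and hands it to updateContentDataEntity(entity, contentData).
    // 3. The iBatis implementation bumps the version before issuing the UPDATE:
    //        entity.incrementVersion();                      // wraps back to 0 at Short.MAX_VALUE
    //        return template.update(UPDATE_CONTENT_DATA, entity);
    // 4. Fewer than one updated row is treated as a concurrent modification:
    //        throw new ConcurrencyFailureException("ContentData with ID " + id + " not updated");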
@@ -43,11 +43,11 @@ public class ContentUrlEntity
|
||||
public static final String EMPTY_URL = "empty";
|
||||
|
||||
private Long id;
|
||||
private Long version;
|
||||
private String contentUrl;
|
||||
private String contentUrlShort;
|
||||
private long contentUrlCrc;
|
||||
private long size;
|
||||
private Long orphanTime;
|
||||
|
||||
public ContentUrlEntity()
|
||||
{
|
||||
@@ -86,6 +86,7 @@ public class ContentUrlEntity
|
||||
.append("[ ID=").append(id)
|
||||
.append(", contentUrl=").append(contentUrl)
|
||||
.append(", size=").append(size)
|
||||
.append(", orphanTime=").append(orphanTime)
|
||||
.append("]");
|
||||
return sb.toString();
|
||||
}
|
||||
@@ -129,16 +130,6 @@ public class ContentUrlEntity
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public Long getVersion()
|
||||
{
|
||||
return version;
|
||||
}
|
||||
|
||||
public void setVersion(Long version)
|
||||
{
|
||||
this.version = version;
|
||||
}
|
||||
|
||||
public String getContentUrl()
|
||||
{
|
||||
// Convert the persisted content URL to an external value
|
||||
@@ -195,4 +186,14 @@ public class ContentUrlEntity
|
||||
{
|
||||
this.size = size;
|
||||
}
|
||||
|
||||
public Long getOrphanTime()
|
||||
{
|
||||
return orphanTime;
|
||||
}
|
||||
|
||||
public void setOrphanTime(Long orphanTime)
|
||||
{
|
||||
this.orphanTime = orphanTime;
|
||||
}
|
||||
}
|
||||
|
@@ -47,13 +47,15 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
|
||||
private static final String SELECT_CONTENT_URL_BY_ID = "alfresco.content.select_ContentUrlById";
|
||||
private static final String SELECT_CONTENT_URL_BY_KEY = "alfresco.content.select_ContentUrlByKey";
|
||||
private static final String SELECT_CONTENT_URL_BY_KEY_UNREFERENCED = "alfresco.content.select_ContentUrlByKeyUnreferenced";
|
||||
private static final String SELECT_CONTENT_URLS = "alfresco.content.select_ContentUrls";
|
||||
private static final String SELECT_CONTENT_URLS_BY_ORPHAN_TIME = "alfresco.content.select_ContentUrlByOrphanTime";
|
||||
private static final String SELECT_CONTENT_DATA_BY_ID = "alfresco.content.select_ContentDataById";
|
||||
private static final String SELECT_CONTENT_DATA_BY_NODE_AND_QNAME = "alfresco.content.select_ContentDataByNodeAndQName";
|
||||
private static final String INSERT_CONTENT_URL = "alfresco.content.insert_ContentUrl";
|
||||
private static final String INSERT_CONTENT_DATA = "alfresco.content.insert_ContentData";
|
||||
private static final String UPDATE_CONTENT_URL_ORPHAN_TIME = "alfresco.content.update_ContentUrlOrphanTime";
|
||||
private static final String UPDATE_CONTENT_DATA = "alfresco.content.update_ContentData";
|
||||
private static final String DELETE_CONTENT_DATA = "alfresco.content.delete_ContentData";
|
||||
private static final String DELETE_CONTENT_URL = "alfresco.content.delete_ContentUrl";
|
||||
private static final String DELETE_CONTENT_URLS = "alfresco.content.delete_ContentUrls";
|
||||
|
||||
private SqlMapClientTemplate template;
|
||||
|
||||
@@ -66,9 +68,9 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
|
||||
protected ContentUrlEntity createContentUrlEntity(String contentUrl, long size)
|
||||
{
|
||||
ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
|
||||
contentUrlEntity.setVersion(ContentUrlEntity.CONST_LONG_ZERO);
|
||||
contentUrlEntity.setContentUrl(contentUrl);
|
||||
contentUrlEntity.setSize(size);
|
||||
contentUrlEntity.setOrphanTime(null);
|
||||
/* Long id = (Long) */ template.insert(INSERT_CONTENT_URL, contentUrlEntity);
|
||||
/*contentUrlEntity.setId(id);*/
|
||||
// Register the url as new
|
||||
@@ -101,12 +103,56 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
|
||||
return contentUrlEntity;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int deleteContentUrlEntity(Long id)
|
||||
public void getContentUrlsOrphaned(final ContentUrlHandler contentUrlHandler, long maxOrphanTime)
|
||||
{
|
||||
Map<String, Object> params = new HashMap<String, Object>(11);
|
||||
params.put("id", id);
|
||||
return template.delete(DELETE_CONTENT_URL, params);
|
||||
RowHandler rowHandler = new RowHandler()
|
||||
{
|
||||
public void handleRow(Object valueObject)
|
||||
{
|
||||
ContentUrlEntity contentUrlEntity = (ContentUrlEntity) valueObject;
|
||||
contentUrlHandler.handle(
|
||||
contentUrlEntity.getId(),
|
||||
contentUrlEntity.getContentUrl(),
|
||||
contentUrlEntity.getOrphanTime());
|
||||
}
|
||||
};
|
||||
ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
|
||||
contentUrlEntity.setOrphanTime(maxOrphanTime);
|
||||
template.queryWithRowHandler(SELECT_CONTENT_URLS_BY_ORPHAN_TIME, contentUrlEntity, rowHandler);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void getContentUrlsOrphaned(final ContentUrlHandler contentUrlHandler, long maxOrphanTime, int maxResults)
|
||||
{
|
||||
ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
|
||||
contentUrlEntity.setOrphanTime(maxOrphanTime);
|
||||
List<ContentUrlEntity> results = template.queryForList(
|
||||
SELECT_CONTENT_URLS_BY_ORPHAN_TIME,
|
||||
contentUrlEntity, 0, maxResults);
|
||||
// Pass the result to the callback
|
||||
for (ContentUrlEntity result : results)
|
||||
{
|
||||
contentUrlHandler.handle(
|
||||
result.getId(),
|
||||
result.getContentUrl(),
|
||||
result.getOrphanTime());
|
||||
}
|
||||
}
|
||||
|
||||
public int updateContentUrlOrphanTime(Long id, long orphanTime)
|
||||
{
|
||||
ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
|
||||
contentUrlEntity.setId(id);
|
||||
contentUrlEntity.setOrphanTime(orphanTime);
|
||||
return template.update(UPDATE_CONTENT_URL_ORPHAN_TIME, contentUrlEntity);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public int deleteContentUrls(List<Long> ids)
|
||||
{
|
||||
return template.delete(DELETE_CONTENT_URLS, ids);
|
||||
}

    @Override

@@ -151,9 +197,30 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl

        return contentDataEntity;
    }

    @Override
    protected int updateContentDataEntity(ContentDataEntity entity)
    {
        entity.incrementVersion();
        return template.update(UPDATE_CONTENT_DATA, entity);
    }

    @Override
    protected int deleteContentDataEntity(Long id)
    {
        // Get the content urls
        ContentDataEntity contentDataEntity = getContentDataEntity(id);
        // This might be null as there is no constraint ensuring that the node points to a valid ContentData entity
        if (contentDataEntity != null)
        {
            // Register the content URL for a later orphan-check
            String contentUrl = contentDataEntity.getContentUrl();
            if (contentUrl != null)
            {
                // It has been dereferenced and may be orphaned - we'll check later
                registerDereferencedContentUrl(contentUrl);
            }
        }
        // Issue the delete statement
        Map<String, Object> params = new HashMap<String, Object>(11);
        params.put("id", id);
        return template.delete(DELETE_CONTENT_DATA, params);

@@ -175,36 +242,9 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl

            // Delete each one
            for (Long id : ids)
            {
                // Get the content urls
                ContentDataEntity contentDataEntity = getContentDataEntity(id);
                // This might be null as there is no constraint ensuring that the node points to a valid ContentData entity
                if (contentDataEntity == null)
                {
                    continue;
                }
                // Only check the content URLs if one is present
                String contentUrl = contentDataEntity.getContentUrl();
                // Delete the ContentData entity
                deleteContentData(id);
                // Check if the content URL was orphaned
                if (contentUrl != null)
                {
                    // It has been dereferenced and may be orphaned - we'll check later
                    registerDereferencedContentUrl(contentUrl);
                }
            }
        }
    }

    public void getAllContentUrls(final ContentUrlHandler contentUrlHandler)
    {
        RowHandler rowHandler = new RowHandler()
        {
            public void handleRow(Object valueObject)
            {
                contentUrlHandler.handle((String) valueObject);
            }
        };
        template.queryWithRowHandler(SELECT_CONTENT_URLS, rowHandler);
    }
}

@@ -31,6 +31,7 @@ import org.alfresco.repo.transaction.AlfrescoTransactionSupport;

import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.TransactionListenerAdapter;
import org.alfresco.repo.transaction.TransactionalResourceHelper;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport.TxnReadState;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.GUID;

@@ -265,8 +266,11 @@ public class JobLockServiceImpl implements JobLockService

        try
        {
            int iterations = doWithRetry(getLockCallback, retryWait, retryCount);
            // Bind in a listener
            // Bind in a listener, if we are in a transaction
            if (AlfrescoTransactionSupport.getTransactionReadState() != TxnReadState.TXN_NONE)
            {
                AlfrescoTransactionSupport.bindListener(txnListener);
            }
            // Success
            if (logger.isDebugEnabled())
            {
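
The hunk above binds the unlock listener only when a transaction is actually active, so the same code path also works for callers that run outside a transaction, such as scheduled jobs. For orientation only, here is a caller-side sketch of guarding a piece of work with the job lock service; the lock name, time-to-live and wrapper class are invented for the example, and the getLock/releaseLock calls are assumed to match this release's JobLockService interface.

// Sketch: take a named lock before doing exclusive work, then release it.
import org.alfresco.repo.lock.JobLockService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;

public class LockedJobSketch
{
    // Hypothetical lock name and TTL, chosen only for the example
    private static final QName LOCK_QNAME =
            QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "ExampleCleanupJob");
    private static final long LOCK_TTL_MS = 60L * 1000L;

    private final JobLockService jobLockService;

    public LockedJobSketch(JobLockService jobLockService)
    {
        this.jobLockService = jobLockService;
    }

    public void runExclusively(Runnable work)
    {
        // Fails with a lock acquisition exception if another node holds the lock
        String lockToken = jobLockService.getLock(LOCK_QNAME, LOCK_TTL_MS);
        try
        {
            work.run();
        }
        finally
        {
            jobLockService.releaseLock(lockToken, LOCK_QNAME);
        }
    }
}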

@@ -41,12 +41,18 @@

    <!-- override content store cleaner to use tenant routing file content store -->
    <!-- Performs the content cleanup -->
    <bean id="contentStoreCleaner" parent="baseContentStoreCleaner">
    <bean id="eagerContentStoreCleaner" class="org.alfresco.repo.content.cleanup.EagerContentStoreCleaner" init-method="init">
        <property name="eagerOrphanCleanup" >
            <value>${system.content.eagerOrphanCleanup}</value>
        </property>
        <property name="stores" >
            <list>
                <ref bean="tenantFileContentStore" />
            </list>
        </property>
        <property name="listeners" >
            <ref bean="deletedContentBackupListeners" />
        </property>
    </bean>

    <!-- override content service to use tenant routing file content store -->