Merged HEAD-BUG-FIX (5.0/Cloud) to HEAD (5.0/Cloud)

84811: Merged PLATFORM1 (5.0/Cloud) to HEAD-BUG-FIX (5.0/Cloud)
      82464: ACE-1246 "Allow the customer to change the document encryption key"
      Encrypting content store


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@85170 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Committed by: Mark Rogers
Date: 2014-09-20 08:36:54 +00:00
parent 0620f5019b
commit afe200fbd5
31 changed files with 1162 additions and 231 deletions
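At a glance, the diff below adds an alf_content_url_encryption table (one row per alf_content_url row, holding a per-content symmetric key wrapped by a master key), a ContentUrlKeyEntity plus contentUrl/contentUrlMasterKey caches in the DAO layer, and the matching MyBatis, schema-reference and patch plumbing. The following sketch is not part of the commit; it is a hedged illustration of how the new DAO methods and entity setters visible in this diff could fit together. The helper class name and the "AES" algorithm string are assumptions; key wrapping itself happens elsewhere and is not shown in this diff.

// Hedged sketch only - wiring together the entities and DAO methods added by this commit.
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
import org.alfresco.repo.domain.contentdata.ContentUrlEntity;
import org.alfresco.repo.domain.contentdata.ContentUrlKeyEntity;

public class ContentUrlKeySketch
{
    /**
     * Attaches an already-wrapped (master-key-encrypted) symmetric key to a content URL.
     */
    public void attachWrappedKey(ContentDataDAO contentDataDAO, String contentUrl,
            byte[] wrappedKeyBytes, String masterKeystoreId, String masterKeyAlias, long unencryptedSize)
    {
        // alf_content_url row for this URL; getContentUrl(String) is one of the DAO methods added here
        ContentUrlEntity urlEntity = contentDataDAO.getContentUrl(contentUrl);
        if (urlEntity == null)
        {
            return; // nothing to attach the key to
        }

        // Maps to an alf_content_url_encryption row: the wrapped key plus the master key that can unwrap it
        ContentUrlKeyEntity keyEntity = new ContentUrlKeyEntity();
        keyEntity.setContentUrlId(urlEntity.getId());
        keyEntity.setAlgorithm("AES");                    // assumption: algorithm name is illustrative only
        keyEntity.setKeySize(wrappedKeyBytes.length * 8); // mirrors ContentUrlKeyEntity.updateEncryptedKey
        keyEntity.setEncryptedKeyAsBytes(wrappedKeyBytes);
        keyEntity.setMasterKeystoreId(masterKeystoreId);
        keyEntity.setMasterKeyAlias(masterKeyAlias);
        keyEntity.setUnencryptedFileSize(unencryptedSize);

        // Attach and persist through the new contentUrlCache-backed update path
        urlEntity.setContentUrlKey(keyEntity);
        contentDataDAO.updateContentUrl(urlEntity);
    }
}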


@@ -84,7 +84,15 @@
<bean name="contentDataSharedCache" factory-bean="cacheFactory" factory-method="createCache"> <bean name="contentDataSharedCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.contentDataSharedCache"/> <constructor-arg value="cache.contentDataSharedCache"/>
</bean> </bean>
<bean name="contentUrlSharedCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.contentUrlSharedCache"/>
</bean>
<bean name="contentUrlMasterKeySharedCache" factory-bean="cacheFactory" factory-method="createCache">
<constructor-arg value="cache.contentUrlMasterKeySharedCache"/>
</bean>
<!-- ===================================== --> <!-- ===================================== -->
<!-- ID lookup for general, shared, immutable entities --> <!-- ID lookup for general, shared, immutable entities -->
<!-- ===================================== --> <!-- ===================================== -->


@@ -78,6 +78,29 @@ cache.contentDataSharedCache.eviction-policy=LRU
cache.contentDataSharedCache.eviction-percentage=25
cache.contentDataSharedCache.merge-policy=hz.ADD_NEW_ENTRY
cache.contentUrlSharedCache.tx.maxItems=65000
cache.contentUrlSharedCache.maxItems=130000
cache.contentUrlSharedCache.timeToLiveSeconds=0
cache.contentUrlSharedCache.maxIdleSeconds=0
cache.contentUrlSharedCache.cluster.type=fully-distributed
cache.contentUrlSharedCache.backup-count=1
cache.contentUrlSharedCache.eviction-policy=LRU
cache.contentUrlSharedCache.eviction-percentage=25
cache.contentUrlSharedCache.merge-policy=hz.ADD_NEW_ENTRY
cache.contentUrlMasterKeySharedCache.tx.maxItems=65000
cache.contentUrlMasterKeySharedCache.maxItems=0
cache.contentUrlMasterKeySharedCache.timeToLiveSeconds=0
cache.contentUrlMasterKeySharedCache.maxIdleSeconds=0
cache.contentUrlMasterKeySharedCache.cluster.type=fully-distributed
cache.contentUrlMasterKeySharedCache.backup-count=1
cache.contentUrlMasterKeySharedCache.eviction-policy=NONE
cache.contentUrlMasterKeySharedCache.eviction-percentage=25
cache.contentUrlMasterKeySharedCache.merge-policy=hz.ADD_NEW_ENTRY
cache.contentUrlMasterKeySharedCache.nearCache.maxSize=50
cache.contentUrlMasterKeySharedCache.nearCache.maxIdleSeconds=0
cache.contentUrlMasterKeySharedCache.nearCache.timeToLiveSeconds=0
cache.immutableEntitySharedCache.tx.maxItems=10000
cache.immutableEntitySharedCache.maxItems=50000
cache.immutableEntitySharedCache.timeToLiveSeconds=0


@@ -168,7 +168,7 @@
<bean id="contentDataDAO" class="org.alfresco.repo.domain.contentdata.ibatis.ContentDataDAOImpl"> <bean id="contentDataDAO" class="org.alfresco.repo.domain.contentdata.ibatis.ContentDataDAOImpl">
<property name="sqlSessionTemplate" ref="contentSqlSessionTemplate"/> <property name="sqlSessionTemplate" ref="contentSqlSessionTemplate"/>
<property name="contentDataCache" ref="contentDataCache"/> <property name="contentDataCache" ref="contentDataCache"/>
<property name="controlDAO" ref="controlDAO"/> <property name="contentUrlCache" ref="contentUrlCache"/>
<property name="mimetypeDAO" ref="mimetypeDAO"/> <property name="mimetypeDAO" ref="mimetypeDAO"/>
<property name="encodingDAO" ref="encodingDAO"/> <property name="encodingDAO" ref="encodingDAO"/>
<property name="localeDAO" ref="localeDAO"/> <property name="localeDAO" ref="localeDAO"/>


@@ -0,0 +1,36 @@
--
-- Title: Create Content Encryption tables
-- Database: MySQL InnoDB
-- Since: V5.0 Schema 7006
-- Author: Steve Glover
--
-- Please contact support@alfresco.com if you need assistance with the upgrade.
--
CREATE TABLE alf_content_url_encryption
(
id BIGINT NOT NULL AUTO_INCREMENT,
content_url_id BIGINT NOT NULL,
algorithm VARCHAR(10) NOT NULL,
key_size INTEGER NOT NULL,
encrypted_key BLOB NOT NULL,
master_keystore_id VARCHAR(20) NOT NULL,
master_key_alias VARCHAR(15) NOT NULL,
unencrypted_file_size BIGINT NULL,
UNIQUE INDEX idx_alf_cont_enc_url (content_url_id),
INDEX idx_alf_cont_enc_mka (master_key_alias),
CONSTRAINT fk_alf_cont_enc_url FOREIGN KEY (content_url_id) REFERENCES alf_content_url (id) ON DELETE CASCADE,
PRIMARY KEY (id)
) ENGINE=InnoDB;
--
-- Record script finish
--
DELETE FROM alf_applied_patch WHERE id = 'patch.db-V5.0-ContentUrlEncryptionTables';
INSERT INTO alf_applied_patch
(id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
VALUES
(
'patch.db-V5.0-ContentUrlEncryptionTables', 'Manually executed script upgrade V5.0: Content Url Encryption Tables',
0, 8001, -1, 8002, null, 'UNKNOWN', ${TRUE}, ${TRUE}, 'Script completed'
);


@@ -1145,6 +1145,74 @@
</index>
</indexes>
</table>
<table name="alf_content_url_encryption">
<columns>
<column name="id" order="1">
<type>bigint</type>
<nullable>false</nullable>
<autoincrement>true</autoincrement>
</column>
<column name="content_url_id" order="2">
<type>bigint</type>
<nullable>false</nullable>
<autoincrement>false</autoincrement>
</column>
<column name="algorithm" order="3">
<type>varchar(10)</type>
<nullable>false</nullable>
<autoincrement>false</autoincrement>
</column>
<column name="key_size" order="4">
<type>int</type>
<nullable>false</nullable>
<autoincrement>false</autoincrement>
</column>
<column name="encrypted_key" order="5">
<type>blob</type>
<nullable>false</nullable>
<autoincrement>false</autoincrement>
</column>
<column name="master_keystore_id" order="6">
<type>varchar(20)</type>
<nullable>false</nullable>
<autoincrement>false</autoincrement>
</column>
<column name="master_key_alias" order="7">
<type>varchar(15)</type>
<nullable>false</nullable>
<autoincrement>false</autoincrement>
</column>
<column name="unencrypted_file_size" order="8">
<type>bigint</type>
<nullable>true</nullable>
<autoincrement>false</autoincrement>
</column>
</columns>
<primarykey name="PRIMARY">
<columnnames>
<columnname order="1">id</columnname>
</columnnames>
</primarykey>
<foreignkeys>
<foreignkey name="fk_alf_cont_enc_url">
<localcolumn>content_url_id</localcolumn>
<targettable>alf_content_url</targettable>
<targetcolumn>id</targetcolumn>
</foreignkey>
</foreignkeys>
<indexes>
<index name="idx_alf_cont_enc_url" unique="true">
<columnnames>
<columnname>content_url_id</columnname>
</columnnames>
</index>
<index name="idx_alf_cont_enc_mka" unique="false">
<columnnames>
<columnname>master_key_alias</columnname>
</columnnames>
</index>
</indexes>
</table>
<table name="alf_encoding"> <table name="alf_encoding">
<columns> <columns>
<column name="id" order="1"> <column name="id" order="1">


@@ -0,0 +1,37 @@
--
-- Title: Create Content Encryption tables
-- Database: PostgreSQL
-- Since: V5.0 Schema 7006
-- Author: Steve Glover
--
-- Please contact support@alfresco.com if you need assistance with the upgrade.
--
CREATE SEQUENCE alf_content_url_enc_seq START WITH 1 INCREMENT BY 1;
CREATE TABLE alf_content_url_encryption
(
id INT8 NOT NULL,
content_url_id INT8 NOT NULL,
algorithm VARCHAR(10) NOT NULL,
key_size INT4 NOT NULL,
encrypted_key BYTEA NOT NULL,
master_keystore_id VARCHAR(20) NOT NULL,
master_key_alias VARCHAR(15) NOT NULL,
unencrypted_file_size INT8 NULL,
CONSTRAINT fk_alf_cont_enc_url FOREIGN KEY (content_url_id) REFERENCES alf_content_url (id) ON DELETE CASCADE,
PRIMARY KEY (id)
);
CREATE UNIQUE INDEX idx_alf_cont_enc_url ON alf_content_url_encryption (content_url_id);
CREATE INDEX idx_alf_cont_enc_mka ON alf_content_url_encryption (master_key_alias);
--
-- Record script finish
--
DELETE FROM alf_applied_patch WHERE id = 'patch.db-V5.0-ContentUrlEncryptionTables';
INSERT INTO alf_applied_patch
(id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
VALUES
(
'patch.db-V5.0-ContentUrlEncryptionTables', 'Manually executed script upgrade V5.0: Content Url Encryption Tables',
0, 8001, -1, 8002, null, 'UNKNOWN', ${TRUE}, ${TRUE}, 'Script completed'
);


@@ -25,6 +25,7 @@
<sequence name="alf_child_assoc_seq"/> <sequence name="alf_child_assoc_seq"/>
<sequence name="alf_content_data_seq"/> <sequence name="alf_content_data_seq"/>
<sequence name="alf_content_url_seq"/> <sequence name="alf_content_url_seq"/>
<sequence name="alf_content_url_enc_seq"/>
<sequence name="alf_encoding_seq"/> <sequence name="alf_encoding_seq"/>
<sequence name="alf_locale_seq"/> <sequence name="alf_locale_seq"/>
<sequence name="alf_lock_resource_seq"/> <sequence name="alf_lock_resource_seq"/>
@@ -1183,6 +1184,74 @@
</index>
</indexes>
</table>
<table name="alf_content_url_encryption">
<columns>
<column name="id" order="1">
<type>int8</type>
<nullable>false</nullable>
<autoincrement>false</autoincrement>
</column>
<column name="content_url_id" order="2">
<type>int8</type>
<nullable>false</nullable>
<autoincrement>false</autoincrement>
</column>
<column name="algorithm" order="3">
<type>varchar(10)</type>
<nullable>false</nullable>
<autoincrement>false</autoincrement>
</column>
<column name="key_size" order="4">
<type>int4</type>
<nullable>false</nullable>
<autoincrement>false</autoincrement>
</column>
<column name="encrypted_key" order="5">
<type>bytea</type>
<nullable>false</nullable>
<autoincrement>false</autoincrement>
</column>
<column name="master_keystore_id" order="6">
<type>varchar(20)</type>
<nullable>false</nullable>
<autoincrement>false</autoincrement>
</column>
<column name="master_key_alias" order="7">
<type>varchar(15)</type>
<nullable>false</nullable>
<autoincrement>false</autoincrement>
</column>
<column name="unencrypted_file_size" order="8">
<type>int8</type>
<nullable>true</nullable>
<autoincrement>false</autoincrement>
</column>
</columns>
<primarykey name="alf_content_url_encryption_pkey">
<columnnames>
<columnname order="1">id</columnname>
</columnnames>
</primarykey>
<foreignkeys>
<foreignkey name="fk_alf_cont_enc_url">
<localcolumn>content_url_id</localcolumn>
<targettable>alf_content_url</targettable>
<targetcolumn>id</targetcolumn>
</foreignkey>
</foreignkeys>
<indexes>
<index name="idx_alf_cont_enc_url" unique="true">
<columnnames>
<columnname>content_url_id</columnname>
</columnnames>
</index>
<index name="idx_alf_cont_enc_mka" unique="false">
<columnnames>
<columnname>master_key_alias</columnname>
</columnnames>
</index>
</indexes>
</table>
<table name="alf_encoding"> <table name="alf_encoding">
<columns> <columns>
<column name="id" order="1"> <column name="id" order="1">


@@ -16,6 +16,7 @@
<value>classpath:alfresco/dbscripts/create/${db.script.dialect}/AlfrescoCreate-LockTables.sql</value>
<value>classpath:alfresco/dbscripts/create/${db.script.dialect}/AlfrescoCreate-ContentTables.sql</value>
<value>classpath:alfresco/dbscripts/create/${db.script.dialect}/AlfrescoCreate-PropertyValueTables.sql</value>
<value>classpath:alfresco/dbscripts/create/${db.script.dialect}/AlfrescoCreate-ContentUrlEncryptionTables.sql</value>
<value>classpath:alfresco/dbscripts/create/${db.script.dialect}/AlfrescoCreate-AuditTables.sql</value>
<value>classpath:alfresco/dbscripts/create/${db.script.dialect}/AlfrescoCreate-ActivityTables.sql</value>
<value>classpath:alfresco/dbscripts/create/${db.script.dialect}/AlfrescoCreate-UsageTables.sql</value>
@@ -86,6 +87,7 @@
<ref bean="patch.db-V4.1-ChildAssoc-OrderBy" /> <ref bean="patch.db-V4.1-ChildAssoc-OrderBy" />
<ref bean="patch.db-V4.1-createIdxAlfNodeTQN" /> <ref bean="patch.db-V4.1-createIdxAlfNodeTQN" />
<ref bean="patch.db-V4.2-restructure-idx_alf_nprop_s-MSSQL" /> <ref bean="patch.db-V4.2-restructure-idx_alf_nprop_s-MSSQL" />
<ref bean="patch.db-V5.0-ContentUrlEncryptionTables" />
</list> </list>
</property> </property>
</bean> </bean>


@@ -47,6 +47,7 @@ Inbound settings from iBatis
<typeAlias alias="ContentUrl" type="org.alfresco.repo.domain.contentdata.ContentUrlEntity"/> <typeAlias alias="ContentUrl" type="org.alfresco.repo.domain.contentdata.ContentUrlEntity"/>
<typeAlias alias="ContentUrlUpdate" type="org.alfresco.repo.domain.contentdata.ContentUrlUpdateEntity"/> <typeAlias alias="ContentUrlUpdate" type="org.alfresco.repo.domain.contentdata.ContentUrlUpdateEntity"/>
<typeAlias alias="ContentData" type="org.alfresco.repo.domain.contentdata.ContentDataEntity"/> <typeAlias alias="ContentData" type="org.alfresco.repo.domain.contentdata.ContentDataEntity"/>
<typeAlias alias="ContentUrlKey" type="org.alfresco.repo.domain.contentdata.ContentUrlKeyEntity"/>
<typeAlias alias="ContentUrlOrphanQuery" type="org.alfresco.repo.domain.contentdata.ContentUrlOrphanQuery"/> <typeAlias alias="ContentUrlOrphanQuery" type="org.alfresco.repo.domain.contentdata.ContentUrlOrphanQuery"/>
<!-- Locale --> <!-- Locale -->
@@ -176,6 +177,7 @@ Inbound settings from iBatis
<typeHandlers>
<typeHandler javaType="java.io.Serializable" jdbcType="BLOB" handler="org.alfresco.ibatis.SerializableTypeHandler"/>
<typeHandler javaType="_byte[]" jdbcType="BLOB" handler="org.alfresco.ibatis.ByteArrayTypeHandler"/>
</typeHandlers>
<mappers>


@@ -29,19 +29,36 @@
<result property="contentUrlCrc" column="content_url_crc" jdbcType="BIGINT" javaType="java.lang.Long"/> <result property="contentUrlCrc" column="content_url_crc" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="size" column="content_size" jdbcType="BIGINT" javaType="java.lang.Long"/> <result property="size" column="content_size" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="orphanTime" column="orphan_time" jdbcType="BIGINT" javaType="java.lang.Long"/> <result property="orphanTime" column="orphan_time" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="contentUrlKey.contentUrlId" column="content_url_id" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="contentUrlKey.keySize" column="key_size" jdbcType="INTEGER" javaType="int"/>
<result property="contentUrlKey.encryptedKeyAsBytes" column="encrypted_key" jdbcType="BLOB" javaType="_byte[]"/>
<result property="contentUrlKey.algorithm" column="algorithm" jdbcType="VARCHAR" javaType="java.lang.String"/>
<result property="contentUrlKey.masterKeystoreId" column="master_keystore_id" jdbcType="VARCHAR" javaType="java.lang.String"/>
<result property="contentUrlKey.masterKeyAlias" column="master_key_alias" jdbcType="VARCHAR" javaType="java.lang.String"/>
<result property="contentUrlKey.unencryptedFileSize" column="unencrypted_file_size" jdbcType="BIGINT" javaType="java.lang.Long"/>
</resultMap>
<resultMap id="result_ContentData" type="ContentData">
<result property="id" column="id" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="version" column="version" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="contentUrlId" column="content_url_id" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="contentUrl" column="content_url" jdbcType="VARCHAR" javaType="java.lang.String"/>
<result property="size" column="content_size" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="mimetypeId" column="content_mimetype_id" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="encodingId" column="content_encoding_id" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="localeId" column="content_locale_id" jdbcType="BIGINT" javaType="java.lang.Long"/>
</resultMap>
<resultMap id="result_ContentUrlKey" type="ContentUrlKey">
<result property="id" column="id" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="contentUrlId" column="content_url_id" jdbcType="BIGINT" javaType="java.lang.Long"/>
<result property="keySize" column="key_size" jdbcType="INTEGER" javaType="int"/>
<result property="encryptedKeyAsBytes" column="encrypted_key" jdbcType="BLOB" javaType="_byte[]"/>
<result property="algorithm" column="algorithm" jdbcType="VARCHAR" javaType="java.lang.String"/>
<result property="masterKeystoreId" column="master_keystore_id" jdbcType="VARCHAR" javaType="java.lang.String"/>
<result property="masterKeyAlias" column="master_key_alias" jdbcType="VARCHAR" javaType="java.lang.String"/>
<result property="unencryptedFileSize" column="unencrypted_file_size" jdbcType="BIGINT" javaType="java.lang.Long"/>
</resultMap>
<!-- -->
<!-- Parameter Maps -->
<!-- -->
@@ -70,7 +87,17 @@
<parameter property="encodingId" jdbcType="BIGINT" javaType="java.lang.Long"/> <parameter property="encodingId" jdbcType="BIGINT" javaType="java.lang.Long"/>
<parameter property="localeId" jdbcType="BIGINT" javaType="java.lang.Long"/> <parameter property="localeId" jdbcType="BIGINT" javaType="java.lang.Long"/>
</parameterMap> </parameterMap>
<parameterMap id="parameter_ContentUrlKey" type="ContentUrlKey">
<parameter property="contentUrlId" jdbcType="BIGINT" javaType="java.lang.Long"/>
<parameter property="keySize" jdbcType="INTEGER" javaType="int"/>
<parameter property="algorithm" jdbcType="VARCHAR" javaType="java.lang.String"/>
<parameter property="encryptedKeyAsBytes" jdbcType="BLOB" javaType="_byte[]"/>
<parameter property="masterKeystoreId" jdbcType="VARCHAR" javaType="java.lang.String"/>
<parameter property="masterKeyAlias" jdbcType="VARCHAR" javaType="java.lang.String"/>
<parameter property="unencryptedFileSize" jdbcType="BIGINT" javaType="java.lang.Long"/>
</parameterMap>
<!-- -->
<!-- SQL Snippets -->
<!-- -->
@@ -114,7 +141,29 @@
insert into alf_content_data (id, version, content_url_id, content_mimetype_id, content_encoding_id, content_locale_id)
values (#{id}, #{version}, #{contentUrlId,jdbcType=BIGINT}, #{mimetypeId,jdbcType=BIGINT}, #{encodingId,jdbcType=BIGINT}, #{localeId,jdbcType=BIGINT})
</sql>
<sql id="insert_KeyData_AutoIncrement">
insert into alf_content_url_encryption
(
content_url_id, key_size, algorithm, encrypted_key, master_keystore_id, master_key_alias, unencrypted_file_size
)
values
(
?, ?, ?, ?, ?, ?, ?
)
</sql>
<sql id="insert_KeyData_Sequence">
insert into alf_content_url_encryption
(
id, content_url_id, key_size, algorithm, encrypted_key, master_keystore_id, master_key_alias, unencrypted_file_size
)
values
(
#{id}, #{contentUrlId}, #{keySize}, #{algorithm}, #{encryptedKeyAsBytes}, #{masterKeystoreId}, #{masterKeyAlias}, #{unencryptedFileSize}
)
</sql>
<!-- -->
<!-- Statements -->
<!-- -->
@@ -162,19 +211,43 @@
<!-- Get the content URL entity by ID -->
<select id="select_ContentUrlById" parameterType="ContentUrl" resultMap="result_ContentUrl">
select
*
u.id as id,
u.content_url as content_url,
u.content_url_short as content_url_short,
u.content_url_crc as content_url_crc,
u.content_size as content_size,
u.orphan_time as orphan_time,
ce.algorithm as algorithm,
ce.key_size as key_size,
ce.encrypted_key as encrypted_key,
ce.master_keystore_id as master_keystore_id,
ce.master_key_alias as master_key_alias,
ce.unencrypted_file_size as unencrypted_file_size
from
alf_content_url
alf_content_url u
left join alf_content_url_encryption ce on (u.id = ce.content_url_id)
where
id = #{id}
u.id = #{id}
</select>
<!-- Get the content URL entity by unique key -->
<select id="select_ContentUrlByKey" parameterType="ContentUrl" resultMap="result_ContentUrl">
select
*
u.id as id,
u.content_url as content_url,
u.content_url_short as content_url_short,
u.content_url_crc as content_url_crc,
u.content_size as content_size,
u.orphan_time as orphan_time,
ce.algorithm as algorithm,
ce.key_size as key_size,
ce.encrypted_key as encrypted_key,
ce.master_keystore_id as master_keystore_id,
ce.master_key_alias as master_key_alias,
ce.unencrypted_file_size as unencrypted_file_size
from
alf_content_url
alf_content_url u
left join alf_content_url_encryption ce on (u.id = ce.content_url_id)
where
content_url_short = #{contentUrlShort} and
content_url_crc = #{contentUrlCrc}
@@ -246,13 +319,23 @@
</foreach>
</delete>
<delete id="delete_ContentUrlKeys" parameterType="list">
delete
from
alf_content_url_encryption
where
content_url_id in
<foreach item="item" index="index" collection="list" open="(" separator="," close=")">
#{item}
</foreach>
</delete>
<!-- Get the ContentData entity by ID -->
<select id="select_ContentDataById" parameterMap="parameter_IdMap" resultMap="result_ContentData">
select
cd.id as id,
cd.version as version,
cd.content_url_id as content_url_id,
cu.content_url as content_url,
cu.content_size as content_size,
cd.content_mimetype_id as content_mimetype_id,
cd.content_encoding_id as content_encoding_id,
@@ -270,7 +353,6 @@
cd.id as id,
cd.version as version,
cd.content_url_id as content_url_id,
cu.content_url as content_url,
cu.content_size as content_size,
cd.content_mimetype_id as content_mimetype_id,
cd.content_encoding_id as content_encoding_id,
@@ -327,4 +409,66 @@
id = ?
</delete>
<select id="select_SymmetricKeyByContentId" parameterMap="parameter_IdMap" resultMap="result_ContentUrlKey">
select
e.id,
e.content_url_id,
e.key_size,
e.algorithm,
e.encrypted_key,
e.master_keystore_id,
e.master_key_alias,
e.unencrypted_file_size
from
alf_content_url_encryption e
where
e.content_url_id = #{contentUrlId}
</select>
<update id="update_KeyData" parameterType="ContentUrlKey">
update
alf_content_url_encryption
set
key_size = #{keySize},
algorithm = #{algorithm},
encrypted_key = #{encryptedKeyAsBytes},
master_keystore_id = #{masterKeystoreId},
master_key_alias = #{masterKeyAlias},
unencrypted_file_size = #{unencryptedFileSize}
where
id = #{id}
</update>
<delete id="delete_KeyData" parameterMap="parameter_IdMap">
delete
from
alf_content_url_encryption e
where
id = ?
</delete>
<select id="select_SymmetricKeysByMasterKey" parameterType="ContentUrlKey" resultMap="result_ContentUrlKey">
select
e.id,
e.content_url_id,
e.key_size,
e.algorithm,
e.encrypted_key,
e.master_keystore_id,
e.master_key_alias,
e.unencrypted_file_size
from
alf_content_url_encryption e
where
e.master_key_alias = #{masterKeyAlias}
and <![CDATA[e.id > #{id}]]>
</select>
<select id="select_CountSymmetricKeysByMasterKey" parameterType="String" resultType="java.lang.Integer">
select count(*)
from
alf_content_url_encryption e
where
e.master_key_alias = #{masterKeyAlias}
</select>
</mapper>


@@ -20,4 +20,7 @@
<include refid="alfresco.content.insert_ContentData_AutoIncrement"/> <include refid="alfresco.content.insert_ContentData_AutoIncrement"/>
</insert> </insert>
<insert id="insert_KeyData" parameterMap="alfresco.content.parameter_ContentUrlKey" useGeneratedKeys="true" keyProperty="id">
<include refid="alfresco.content.insert_KeyData_AutoIncrement"/>
</insert>
</mapper>


@@ -43,5 +43,12 @@
<include refid="alfresco.content.insert_ContentData_Sequence"/> <include refid="alfresco.content.insert_ContentData_Sequence"/>
</insert> </insert>
<insert id="insert_KeyData" parameterType="ContentUrlKey">
<selectKey resultType="long" keyProperty="id" order="BEFORE" >
select nextVal('alf_content_url_enc_seq')
</selectKey>
<include refid="alfresco.content.insert_KeyData_Sequence"/>
</insert>
</mapper>


@@ -3276,25 +3276,25 @@
</bean>
<!-- Add cm:indexControl aspect to surf-config folders and their children -->
<bean id="patch.surfConfigFolder" class="org.alfresco.repo.admin.patch.impl.SurfConfigFolderPatch" parent="basePatch" >
<property name="id"><value>patch.surfConfigFolder</value></property>
<property name="description"><value>patch.surfConfigFolderPatch.description</value></property>
<property name="fixesFromSchema"><value>0</value></property>
<property name="fixesToSchema"><value>7004</value></property>
<property name="targetSchema"><value>7005</value></property>
<property name="requiresTransaction"><value>false</value></property>
<property name="applyToTenants"><value>false</value></property>
<!-- We have to ignore it, as this patch will be run by the scheduler in the background. The AsynchronousPatch will take care of registration -->
<property name="ignored"><value>${system.patch.surfConfigFolder.deferred}</value></property>
<property name="patchDAO" ref="patchDAO" />
<property name="nodeDAO" ref="nodeDAO" />
<property name="qnameDAO" ref="qnameDAO" />
<property name="behaviourFilter" ref="policyBehaviourFilter" />
<property name="ruleService" ref="ruleService" />
<property name="jobLockService" ref="jobLockService" />
<!-- Do we deferr running the surf-config folder patch? -->
<property name="deferred"><value>${system.patch.surfConfigFolder.deferred}</value></property>
</bean>
<bean id="patch.renameSiteAuthorityDisplayName"
class="org.alfresco.repo.admin.patch.impl.RenameSiteAuthorityDisplayName"
@@ -3376,4 +3376,14 @@
</property>
</bean>
<bean id="patch.db-V5.0-ContentUrlEncryptionTables" class="org.alfresco.repo.admin.patch.impl.SchemaUpgradeScriptPatch" parent="basePatch">
<property name="id"><value>patch.db-V5.0-ContentUrlEncryptionTables</value></property>
<property name="description"><value>patch.schemaUpgradeScript.description</value></property>
<property name="fixesFromSchema"><value>0</value></property>
<property name="fixesToSchema"><value>8003</value></property>
<property name="targetSchema"><value>8004</value></property>
<property name="scriptUrl">
<value>classpath:alfresco/dbscripts/create/${db.script.dialect}/AlfrescoCreate-ContentUrlEncryptionTables.sql</value>
</property>
</bean>
</beans>


@@ -63,7 +63,32 @@
<property name="disableSharedCache" value="${system.cache.disableMutableSharedCaches}" /> <property name="disableSharedCache" value="${system.cache.disableMutableSharedCaches}" />
</bean> </bean>
<bean name="contentUrlCache" class="org.alfresco.repo.cache.TransactionalCache">
<property name="sharedCache">
<ref bean="contentUrlSharedCache" />
</property>
<property name="name">
<value>org.alfresco.cache.contentUrlTransactionalCache</value>
</property>
<property name="maxCacheSize" value="${cache.contentUrlSharedCache.tx.maxItems}" />
<property name="mutable" value="true" />
<property name="allowEqualsChecks" value="true" />
<property name="disableSharedCache" value="${system.cache.disableMutableSharedCaches}" />
</bean>
<bean name="contentUrlMasterKeyCache" class="org.alfresco.repo.cache.TransactionalCache">
<property name="sharedCache">
<ref bean="contentUrlMasterKeySharedCache" />
</property>
<property name="name">
<value>org.alfresco.cache.contentUrlMasterKeyTransactionalCache</value>
</property>
<property name="maxCacheSize" value="${cache.contentUrlMasterKeySharedCache.tx.maxItems}" />
<property name="mutable" value="true" />
<property name="allowEqualsChecks" value="false" />
<property name="disableSharedCache" value="${system.cache.disableMutableSharedCaches}" />
</bean>
<!-- The transactional cache for immutable entities -->
<bean name="immutableEntityCache" class="org.alfresco.repo.cache.TransactionalCache">


@@ -1120,7 +1120,7 @@ abstract public class AbstractMappingMetadataExtracter implements MetadataExtrac
if (!isSupported(mimetype))
{
throw new AlfrescoRuntimeException(
"Metadata extracter does not support mimetype: \n" +
"Metadata extracter does not support mimetype: " + mimetype + "\n" +
" reader: " + reader + "\n" +
" supported: " + supportedMimetypes + "\n" +
" extracter: " + this);


@@ -19,7 +19,6 @@
package org.alfresco.repo.domain.contentdata;
import java.io.Serializable;
import java.sql.Savepoint;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
@@ -29,7 +28,6 @@ import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.cache.lookup.EntityLookupCache;
import org.alfresco.repo.cache.lookup.EntityLookupCache.EntityLookupCallbackDAOAdaptor;
import org.alfresco.repo.content.cleanup.EagerContentStoreCleaner;
import org.alfresco.repo.domain.control.ControlDAO;
import org.alfresco.repo.domain.encoding.EncodingDAO;
import org.alfresco.repo.domain.locale.LocaleDAO;
import org.alfresco.repo.domain.mimetype.MimetypeDAO;
@@ -54,11 +52,14 @@ import org.springframework.dao.DataIntegrityViolationException;
* IDs into <code>ContentData</code> instances.
*
* @author Derek Hulley
* @author sglover
* @since 3.2
*/
public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
{
private static final String CACHE_REGION_CONTENT_DATA = "ContentData";
private static final String CACHE_REGION_CONTENT_URL = "ContentUrl";
/**
* Content URL IDs to delete before final commit.
*/
@@ -67,10 +68,10 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
private static Log logger = LogFactory.getLog(AbstractContentDataDAOImpl.class);
private final ContentDataCallbackDAO contentDataCallbackDAO;
private ControlDAO controlDAO;
private MimetypeDAO mimetypeDAO;
private EncodingDAO encodingDAO;
private LocaleDAO localeDAO;
private final ContentUrlCallbackDAO contentUrlCallbackDAO;
protected MimetypeDAO mimetypeDAO;
protected EncodingDAO encodingDAO;
protected LocaleDAO localeDAO;
private EagerContentStoreCleaner contentStoreCleaner;
/**
@@ -80,24 +81,20 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
* VALUE KEY: NONE<br/>
*/
private EntityLookupCache<Long, ContentData, Serializable> contentDataCache;
private EntityLookupCache<Long, ContentUrlEntity, String> contentUrlCache;
/**
* Default constructor
*/
public AbstractContentDataDAOImpl()
{
this.contentDataCallbackDAO = new ContentDataCallbackDAO();
this.contentUrlCallbackDAO = new ContentUrlCallbackDAO();
this.contentDataCache = new EntityLookupCache<Long, ContentData, Serializable>(contentDataCallbackDAO);
this.contentUrlCache = new EntityLookupCache<Long, ContentUrlEntity, String>(contentUrlCallbackDAO);
}
/**
* @param controlDAO create Savepoints
*/
public void setControlDAO(ControlDAO controlDAO)
{
this.controlDAO = controlDAO;
}
public void setMimetypeDAO(MimetypeDAO mimetypeDAO)
{
this.mimetypeDAO = mimetypeDAO;
@@ -133,7 +130,15 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
CACHE_REGION_CONTENT_DATA,
contentDataCallbackDAO);
}
public void setContentUrlCache(SimpleCache<Long, ContentUrlEntity> contentUrlCache)
{
this.contentUrlCache = new EntityLookupCache<Long, ContentUrlEntity, String>(
contentUrlCache,
CACHE_REGION_CONTENT_URL,
contentUrlCallbackDAO);
}
/**
* A <b>content_url</b> entity was dereferenced. This makes no assumptions about the
* current references - dereference deletion is handled in the commit phase.
@@ -179,6 +184,49 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
return entityPair;
}
@Override
public void updateContentUrl(ContentUrlEntity contentUrl)
{
if (contentUrl == null)
{
throw new IllegalArgumentException("Cannot look up ContentData by null ID.");
}
Pair<Long, ContentUrlEntity> pair = contentUrlCache.getByValue(contentUrl);
if(pair != null)
{
contentUrlCache.updateValue(pair.getFirst(), contentUrl);
}
else
{
pair = contentUrlCache.getOrCreateByValue(contentUrl);
contentUrlCache.updateValue(pair.getFirst(), contentUrl);
}
}
@Override
public ContentUrlEntity getContentUrl(String contentUrl)
{
if (contentUrl == null)
{
throw new IllegalArgumentException("Cannot look up ContentData by null ID.");
}
ContentUrlEntity entity = new ContentUrlEntity();
entity.setContentUrl(contentUrl);
Pair<Long, ContentUrlEntity> pair = contentUrlCache.getByValue(entity);
return (pair == null ? null : pair.getSecond());
}
@Override
public ContentUrlEntity getContentUrl(Long contentUrlId)
{
if (contentUrlId == null)
{
throw new IllegalArgumentException("Cannot look up ContentData by null ID.");
}
Pair<Long, ContentUrlEntity> pair = contentUrlCache.getByKey(contentUrlId);
return (pair == null ? null : pair.getSecond());
}
public void cacheContentDataForNodes(Set<Long> nodeIds)
{
for (ContentDataEntity entity : getContentDataEntitiesForNodes(nodeIds))
@@ -245,7 +293,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
}
ContentData contentData = makeContentData(contentDataEntity);
// Done
return new Pair<Long, ContentData>(key, contentData);
}
@Override
@@ -265,15 +313,89 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
return deleteContentDataEntity(key);
}
}
/**
* Callback for <b>alf_content_url</b> DAO.
*/
private class ContentUrlCallbackDAO extends EntityLookupCallbackDAOAdaptor<Long, ContentUrlEntity, String>
{
/**
* @return Returns the Node's NodeRef
*/
@Override
public String getValueKey(ContentUrlEntity value)
{
return value.getContentUrl();
}
/**
* Looks the node up based on the NodeRef of the given node
*/
@Override
public Pair<Long, ContentUrlEntity> findByValue(ContentUrlEntity entity)
{
String contentUrl = entity.getContentUrl();
ContentUrlEntity ret = getContentUrlEntity(contentUrl);
return (ret != null ? new Pair<Long, ContentUrlEntity>(ret.getId(), ret) : null);
}
public Pair<Long, ContentUrlEntity> createValue(ContentUrlEntity value)
{
ContentUrlEntity contentUrlEntity = createContentUrlEntity(value.getContentUrl(), value.getSize(), value.getContentUrlKey());
// Done
return new Pair<Long, ContentUrlEntity>(contentUrlEntity.getId(), contentUrlEntity);
}
public Pair<Long, ContentUrlEntity> findByKey(Long id)
{
ContentUrlEntity contentUrlEntity = getContentUrlEntity(id);
if (contentUrlEntity == null)
{
return null;
}
// Done
return new Pair<Long, ContentUrlEntity>(contentUrlEntity.getId(), contentUrlEntity);
}
@Override
public int updateValue(Long id, ContentUrlEntity value)
{
ContentUrlEntity contentUrlEntity = getContentUrlEntity(id);
if (contentUrlEntity == null)
{
return 0; // The client (outer-level code) will decide if this is an error
}
return updateContentUrlEntity(contentUrlEntity, value);
}
@Override
public int deleteByKey(Long id)
{
return deleteContentUrlEntity(id);
}
}
/**
* Translates this instance into an externally-usable <code>ContentData</code> instance.
*/
private ContentData makeContentData(ContentDataEntity contentDataEntity)
{
// Decode content URL
String contentUrl = contentDataEntity.getContentUrl();
Long contentUrlId = contentDataEntity.getContentUrlId();
String contentUrl = null;
if(contentUrlId != null)
{
Pair<Long, ContentUrlEntity> entityPair = contentUrlCache.getByKey(contentUrlId);
if (entityPair == null)
{
throw new DataIntegrityViolationException("No ContentUrl value exists for ID " + contentUrlId);
}
ContentUrlEntity contentUrlEntity = entityPair.getSecond();
contentUrl = contentUrlEntity.getContentUrl();
}
long size = contentDataEntity.getSize() == null ? 0L : contentDataEntity.getSize().longValue();
// Decode mimetype
Long mimetypeId = contentDataEntity.getMimetypeId();
String mimetype = null;
@@ -281,6 +403,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
{
mimetype = mimetypeDAO.getMimetype(mimetypeId).getSecond();
}
// Decode encoding
Long encodingId = contentDataEntity.getEncodingId();
String encoding = null;
@@ -288,6 +411,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
{
encoding = encodingDAO.getEncoding(encodingId).getSecond();
}
// Decode locale
Long localeId = contentDataEntity.getLocaleId();
Locale locale = null;
@@ -295,16 +419,17 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
{
locale = localeDAO.getLocalePair(localeId).getSecond();
}
// Build the ContentData
ContentData contentData = new ContentData(contentUrl, mimetype, size, encoding, locale);
// Done
return contentData;
}
/**
* Translates the {@link ContentData} into persistable values using the helper DAOs
*/
private ContentDataEntity createContentDataEntity(ContentData contentData)
protected ContentDataEntity createContentDataEntity(ContentData contentData)
{
// Resolve the content URL
Long contentUrlId = null;
@@ -312,9 +437,13 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
long size = contentData.getSize();
if (contentUrl != null)
{
// We must find or create the ContentUrlEntity
contentUrlId = getOrCreateContentUrlEntity(contentUrl, size).getId();
ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
contentUrlEntity.setContentUrl(contentUrl);
contentUrlEntity.setSize(size);
Pair<Long, ContentUrlEntity> pair = contentUrlCache.getOrCreateByValue(contentUrlEntity);
contentUrlId = pair.getFirst();
}
// Resolve the mimetype
Long mimetypeId = null;
String mimetype = contentData.getMimetype();
@@ -346,10 +475,22 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
/**
* Translates the {@link ContentData} into persistable values using the helper DAOs
*/
private int updateContentDataEntity(ContentDataEntity contentDataEntity, ContentData contentData)
protected int updateContentDataEntity(ContentDataEntity contentDataEntity, ContentData contentData)
{
// Resolve the content URL
String oldContentUrl = contentDataEntity.getContentUrl();
Long oldContentUrlId = contentDataEntity.getContentUrlId();
ContentUrlEntity contentUrlEntity = null;
if(oldContentUrlId != null)
{
Pair<Long, ContentUrlEntity> entityPair = contentUrlCache.getByKey(oldContentUrlId);
if (entityPair == null)
{
throw new DataIntegrityViolationException("No ContentUrl value exists for ID " + oldContentUrlId);
}
contentUrlEntity = entityPair.getSecond();
}
String oldContentUrl = (contentUrlEntity != null ? contentUrlEntity.getContentUrl() : null);
String newContentUrl = contentData.getContentUrl();
if (!EqualsHelper.nullSafeEquals(oldContentUrl, newContentUrl))
{
@@ -360,16 +501,23 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
}
if (newContentUrl != null)
{
Long contentUrlId = getOrCreateContentUrlEntity(newContentUrl, contentData.getSize()).getId();
contentDataEntity.setContentUrlId(contentUrlId);
contentDataEntity.setContentUrl(newContentUrl);
if(contentUrlEntity == null)
{
contentUrlEntity = new ContentUrlEntity();
contentUrlEntity.setContentUrl(newContentUrl);
}
Pair<Long, ContentUrlEntity> pair = contentUrlCache.getOrCreateByValue(contentUrlEntity);
Long newContentUrlId = pair.getFirst();
contentUrlEntity.setId(newContentUrlId);
contentDataEntity.setContentUrlId(newContentUrlId);
}
else
{
contentDataEntity.setId(null);
contentDataEntity.setContentUrlId(null);
contentDataEntity.setContentUrl(null);
}
}
// Resolve the mimetype
Long mimetypeId = null;
String mimetype = contentData.getMimetype();
@@ -391,84 +539,27 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
{
localeId = localeDAO.getOrCreateLocalePair(locale).getFirst();
}
contentDataEntity.setMimetypeId(mimetypeId);
contentDataEntity.setEncodingId(encodingId);
contentDataEntity.setLocaleId(localeId);
return updateContentDataEntity(contentDataEntity);
}
/**
* Method to create (or get an existing) content URL. The URL will be unorphaned
* whether it has been created or is being re-used.
* @param isReferenced if <code>true</code> we won't worry about eagerly deleting the content on transaction rollback
*/
private ContentUrlEntity getOrCreateContentUrlEntity(String contentUrl, long size)
{
// Try to insert the content first. Usually, the insert will not clash with anything
// as content URL re-use is far less frequent than new content creation.
ContentUrlEntity contentUrlEntity = null;
Savepoint savepoint = controlDAO.createSavepoint("getOrCreateContentUrlEntity");
try
{
contentUrlEntity = createContentUrlEntity(contentUrl, size);
controlDAO.releaseSavepoint(savepoint);
}
catch (RuntimeException e)
{
controlDAO.rollbackToSavepoint(savepoint);
// See if this was caused by an existing URL
contentUrlEntity = getContentUrlEntity(contentUrl);
// If it exists, then we can just re-use it, but check that the size is consistent
if (contentUrlEntity == null)
{
// The error was caused by something else. Perhaps another, as-yet-unseen
// row clashes with this. Just propagate the exception and let retrying
// happen as required.
throw e;
}
// Reuse it
long existingSize = contentUrlEntity.getSize();
if (size != existingSize)
{
logger.warn(
"Re-using Content URL, but size is mismatched: \n" +
" Inbound: " + contentUrl + "\n" +
" Existing: " + contentUrlEntity);
}
// Check orphan state
Long oldOrphanTime = contentUrlEntity.getOrphanTime();
if (oldOrphanTime != null)
{
Long id = contentUrlEntity.getId();
int updated = updateContentUrlOrphanTime(id, null, oldOrphanTime);
if (updated == 0)
{
throw new ConcurrencyFailureException("Failed to remove orphan time: " + contentUrlEntity);
}
}
}
// Done
return contentUrlEntity;
}
/**
* @param contentUrl the content URL to create or search for
*/
protected abstract ContentUrlEntity createContentUrlEntity(String contentUrl, long size);
protected abstract ContentUrlEntity createContentUrlEntity(String contentUrl, long size, ContentUrlKeyEntity contentUrlKey);
/**
* @param id the ID of the <b>content url</b> entity
* @return Return the entity or <tt>null</tt> if it doesn't exist
*/
protected abstract ContentUrlEntity getContentUrlEntity(Long id);
/**
* @param contentUrl the URL of the <b>content url</b> entity
* @return Return the entity or <tt>null</tt> if it doesn't exist
*/
protected abstract ContentUrlEntity getContentUrlEntity(String contentUrl);
/**
* @param contentUrl the URL of the <b>content url</b> entity
@@ -501,7 +592,7 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
* @return Returns the entity or <tt>null</tt> if it doesn't exist
*/
protected abstract ContentDataEntity getContentDataEntity(Long id);
/**
* @param nodeIds the node ID
@@ -516,14 +607,16 @@ public abstract class AbstractContentDataDAOImpl implements ContentDataDAO
* @return Returns the number of rows updated (should be 1)
*/
protected abstract int updateContentDataEntity(ContentDataEntity entity);
/**
* Delete the entity with the given ID
*
* @return Returns the number of rows deleted
*/
protected abstract int deleteContentDataEntity(Long id);
protected abstract int deleteContentUrlEntity(long id);
protected abstract int updateContentUrlEntity(ContentUrlEntity existing, ContentUrlEntity entity);
/**
* Transactional listener that deletes unreferenced <b>content_url</b> entities.
*
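
Read path: getContentUrl(...) above goes through the new contentUrlCache and, because the mapper's selects now left join alf_content_url_encryption, the returned ContentUrlEntity carries a populated ContentUrlKeyEntity when the content is encrypted. The sketch below is not part of the commit; it is a hedged illustration using only getters visible in this diff (the class name and the idea of returning the raw wrapped bytes are illustrative).

// Hedged sketch only - reading back a wrapped key through the DAO methods added by this commit.
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
import org.alfresco.repo.domain.contentdata.ContentUrlEntity;
import org.alfresco.repo.domain.contentdata.ContentUrlKeyEntity;

public class ReadWrappedKeySketch
{
    /** Returns the wrapped key bytes for a content URL, or null if the content has no key row. */
    public byte[] wrappedKeyFor(ContentDataDAO contentDataDAO, String contentUrl)
    {
        ContentUrlEntity urlEntity = contentDataDAO.getContentUrl(contentUrl);
        ContentUrlKeyEntity keyEntity = (urlEntity == null ? null : urlEntity.getContentUrlKey());
        if (keyEntity == null)
        {
            return null;
        }
        // keyEntity.getMasterKeystoreId() / getMasterKeyAlias() name the master key able to unwrap these bytes
        return keyEntity.getEncryptedKeyAsBytes();
    }
}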


@@ -118,4 +118,51 @@ public interface ContentDataDAO
* Delete a batch of content URL entities.
*/
int deleteContentUrls(List<Long> ids);
/**
* Get a content url entity by contentUrl
*
* @since 5.0
* @param contentUrl
* @return
*/
ContentUrlEntity getContentUrl(String contentUrl);
/**
* Get a content url entity by contentUrlId
*
* @since 5.0
* @param contentUrlId
* @return
*/
ContentUrlEntity getContentUrl(Long contentUrlId);
/**
* Update a content url
*
* @since 5.0
* @param contentUrlEntity
* @return
*/
void updateContentUrl(ContentUrlEntity contentUrlEntity);
/**
* Get symmetric keys entities for symmetric keys that have been encrypted using the given
* master key, starting from 'fromId' and returning at most 'maxResults' entities.
*
* @since 5.0
* @param contentUrlEntity
* @return
*/
List<ContentUrlKeyEntity> getSymmetricKeysByMasterKeyAlias(String masterKeyAlias, long fromId, int maxResults);
/**
* Count symmetric keys entities for symmetric keys that have been encrypted using the given
* master key
*
* @since 5.0
* @param masterKeyAlias
* @return
*/
int countSymmetricKeysForMasterKeyAlias(String masterKeyAlias);
}
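
The two symmetric-key methods above look shaped for bulk re-keying when a master key is retired: countSymmetricKeysForMasterKeyAlias sizes the job, and getSymmetricKeysByMasterKeyAlias pages through the affected rows in id order (the mapper's select_SymmetricKeysByMasterKey filters on master_key_alias and id > #{id}). A hedged sketch of such a loop, assuming the fromId/maxResults paging behaves as the signatures suggest; the class name is illustrative.

// Hedged sketch only - paging over wrapped keys for one master key alias.
import java.util.List;
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
import org.alfresco.repo.domain.contentdata.ContentUrlKeyEntity;

public class MasterKeyIterationSketch
{
    /** Visits every wrapped key that was encrypted with the given master key alias, in id order. */
    public void forEachWrappedKey(ContentDataDAO contentDataDAO, String masterKeyAlias, int batchSize)
    {
        long fromId = 0L; // the underlying query selects rows with id > #{id}, so 0 starts at the beginning
        while (true)
        {
            List<ContentUrlKeyEntity> batch =
                    contentDataDAO.getSymmetricKeysByMasterKeyAlias(masterKeyAlias, fromId, batchSize);
            if (batch == null || batch.isEmpty())
            {
                break;
            }
            for (ContentUrlKeyEntity keyEntity : batch)
            {
                // re-wrap keyEntity with the replacement master key here; that code is not in this diff
                fromId = keyEntity.getId();
            }
        }
    }
}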


@@ -36,7 +36,6 @@ public class ContentDataEntity
private Long id;
private Long version;
private Long contentUrlId;
private String contentUrl;
private Long size;
private Long mimetypeId;
private Long encodingId;
@@ -45,8 +44,8 @@ public class ContentDataEntity
public ContentDataEntity()
{
}
@Override
public int hashCode()
{
return (id == null ? 0 : id.hashCode());
@@ -69,15 +68,14 @@ public class ContentDataEntity
return false;
}
}
@Override
public String toString()
{
StringBuilder sb = new StringBuilder(512);
sb.append("ContentDataEntity")
.append("[ ID=").append(id)
.append(", contentUrlId=").append(contentUrlId)
.append(", contentUrl=").append(contentUrl)
.append(", size=").append(size)
.append(", mimetype=").append(mimetypeId)
.append(", encoding=").append(encodingId)
@@ -128,16 +126,6 @@ public class ContentDataEntity
this.contentUrlId = contentUrlId;
}
public String getContentUrl()
{
return contentUrl;
}
public void setContentUrl(String contentUrl)
{
this.contentUrl = contentUrl;
}
public Long getSize()
{
return size;


@@ -18,6 +18,8 @@
*/
package org.alfresco.repo.domain.contentdata;
import java.io.Serializable;
import org.alfresco.repo.domain.CrcHelper;
import org.alfresco.util.EqualsHelper;
import org.alfresco.util.Pair;
@@ -31,9 +33,13 @@ import org.alfresco.util.Pair;
* @author Derek Hulley
* @since 3.2
*/
public class ContentUrlEntity
public class ContentUrlEntity implements Serializable
{
/**
*
*/
private static final long serialVersionUID = -7697859151521433536L;
public static final Long CONST_LONG_ZERO = new Long(0L);
public static final String EMPTY_URL = "empty";
private Long id;
@@ -42,7 +48,9 @@ public class ContentUrlEntity
private long contentUrlCrc;
private long size;
private Long orphanTime;
private ContentUrlKeyEntity contentUrlKey;
public ContentUrlEntity()
{
this.size = 0L;
@@ -114,7 +122,31 @@ public class ContentUrlEntity
}
}
public ContentUrlKeyEntity getContentUrlKey()
{
return contentUrlKey;
}
public static ContentUrlEntity setContentUrlKey(ContentUrlEntity existing, ContentUrlKeyEntity contentUrlKey)
{
ContentUrlEntity ret = new ContentUrlEntity();
ret.setContentUrl(existing == null ? null : existing.getContentUrl());
ret.setContentUrlShort(existing == null ? null : existing.getContentUrlShort());
ret.setContentUrlCrc(existing == null ? null : existing.getContentUrlCrc());
ret.setContentUrlKey(contentUrlKey);
ret.setOrphanTime(existing == null ? null : existing.getOrphanTime());
ret.setSize(existing == null ? null : existing.getSize());
ret.setId(existing == null ? null : existing.getId());
// done
return ret;
}
public void setContentUrlKey(ContentUrlKeyEntity contentUrlKey)
{
this.contentUrlKey = contentUrlKey;
}
public Long getId()
{ {
return id; return id;
} }
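
Editor's sketch (not part of this changeset): the static setContentUrlKey(existing, key) helper above lets callers attach a key to a copy of the entity rather than mutating an instance that may be shared. The class name, URL and key bytes below are invented for illustration.

import org.alfresco.repo.domain.contentdata.ContentUrlEntity;
import org.alfresco.repo.domain.contentdata.ContentUrlKeyEntity;

public class ContentUrlKeyCopyExample
{
    public static void main(String[] args)
    {
        // An entity as it might have been loaded and cached elsewhere (values are made up).
        ContentUrlEntity cached = new ContentUrlEntity();
        cached.setContentUrl("store://2014/9/20/example.bin");

        // A freshly created symmetric-key record for that URL.
        ContentUrlKeyEntity newKey = new ContentUrlKeyEntity();
        newKey.setAlgorithm("AES");
        newKey.setEncryptedKeyAsBytes(new byte[] {1, 2, 3});

        // The helper returns a copy carrying the new key; the original instance is untouched.
        ContentUrlEntity updated = ContentUrlEntity.setContentUrlKey(cached, newKey);
        System.out.println(updated.getContentUrlKey().getAlgorithm());
    }
}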

View File

@@ -0,0 +1,150 @@
package org.alfresco.repo.domain.contentdata;
import java.io.Serializable;
import java.nio.ByteBuffer;
import org.alfresco.service.cmr.repository.ContentUrlKey;
import org.apache.commons.codec.DecoderException;
/**
*
* @author sglover
*
*/
public class ContentUrlKeyEntity implements Serializable
{
private static final long serialVersionUID = -6594309522849585169L;
private Long id;
private Long contentUrlId;
private byte[] encryptedKeyAsBytes;
private Integer keySize;
private String algorithm;
private String masterKeystoreId;
private String masterKeyAlias;
private Long unencryptedFileSize;
public ContentUrlKeyEntity()
{
}
public ContentUrlKey getContentUrlKey() throws DecoderException
{
ContentUrlKey contentUrlKey = new ContentUrlKey();
contentUrlKey.setAlgorithm(algorithm);
contentUrlKey.setKeySize(keySize);
contentUrlKey.setEncryptedKeyBytes(ByteBuffer.wrap(encryptedKeyAsBytes));
contentUrlKey.setMasterKeyAlias(masterKeyAlias);
contentUrlKey.setMasterKeystoreId(masterKeystoreId);
contentUrlKey.setUnencryptedFileSize(unencryptedFileSize);
return contentUrlKey;
}
public Long getContentUrlId()
{
return contentUrlId;
}
public void setContentUrlId(Long contentUrlId)
{
this.contentUrlId = contentUrlId;
}
public void setEncryptedKeyAsBytes(byte[] encryptedKeyAsBytes)
{
this.encryptedKeyAsBytes = encryptedKeyAsBytes;
}
public byte[] getEncryptedKeyAsBytes()
{
return encryptedKeyAsBytes;
}
public void updateEncryptedKey(EncryptedKey encryptedKey)
{
byte[] encryptedKeyAsBytes = new byte[encryptedKey.getByteBuffer().remaining()];
encryptedKey.getByteBuffer().get(encryptedKeyAsBytes);
this.encryptedKeyAsBytes = encryptedKeyAsBytes;
setKeySize(encryptedKeyAsBytes.length*8);
setAlgorithm(encryptedKey.getAlgorithm());
setMasterKeyAlias(encryptedKey.getMasterKeyAlias());
setMasterKeystoreId(encryptedKey.getMasterKeystoreId());
}
public Long getId()
{
return id;
}
public void setId(Long id)
{
this.id = id;
}
public EncryptedKey getEncryptedKey() throws DecoderException
{
EncryptedKey encryptedKey = new EncryptedKey(getMasterKeystoreId(), getMasterKeyAlias(),
getAlgorithm(), ByteBuffer.wrap(this.encryptedKeyAsBytes));
return encryptedKey;
}
public Long getUnencryptedFileSize()
{
return unencryptedFileSize;
}
public void setUnencryptedFileSize(Long unencryptedFileSize)
{
this.unencryptedFileSize = unencryptedFileSize;
}
public void setKeySize(Integer keySize)
{
this.keySize = keySize;
}
public Integer getKeySize()
{
return keySize;
}
public String getAlgorithm()
{
return algorithm;
}
public void setAlgorithm(String algorithm)
{
this.algorithm = algorithm;
}
public String getMasterKeystoreId()
{
return masterKeystoreId;
}
public void setMasterKeystoreId(String masterKeystoreId)
{
this.masterKeystoreId = masterKeystoreId;
}
public String getMasterKeyAlias()
{
return masterKeyAlias;
}
public void setMasterKeyAlias(String masterKeyAlias)
{
this.masterKeyAlias = masterKeyAlias;
}
@Override
public String toString()
{
return "ContentUrlKeyEntity [id=" + id + ", encryptedKeyAsBytes="
+ encryptedKeyAsBytes+ ", keySize=" + keySize + ", algorithm="
+ algorithm + ", masterKeystoreId=" + masterKeystoreId
+ ", masterKeyAlias=" + masterKeyAlias
+ ", unencryptedFileSize=" + unencryptedFileSize + "]";
}
}
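
Editor's sketch (not part of the commit): how this entity and the EncryptedKey value object introduced in the next file round-trip the wrapped key material via updateEncryptedKey() and getEncryptedKey(). The keystore id, alias, algorithm and byte values are placeholders.

import java.nio.ByteBuffer;
import org.alfresco.repo.domain.contentdata.ContentUrlKeyEntity;
import org.alfresco.repo.domain.contentdata.EncryptedKey;
import org.apache.commons.codec.DecoderException;

public class ContentUrlKeyEntityRoundTrip
{
    public static void main(String[] args) throws DecoderException
    {
        // Pretend these are content-key bytes already wrapped by the master key.
        byte[] wrappedKeyBytes = new byte[] {0x10, 0x20, 0x30, 0x40};
        EncryptedKey encryptedKey = new EncryptedKey("keystore.main", "masterKey1", "AES",
                ByteBuffer.wrap(wrappedKeyBytes));

        // updateEncryptedKey() copies the wrapped bytes and the master-key metadata onto the entity.
        ContentUrlKeyEntity entity = new ContentUrlKeyEntity();
        entity.updateEncryptedKey(encryptedKey);
        entity.setUnencryptedFileSize(2048L);

        // getEncryptedKey() rebuilds an equivalent EncryptedKey from the persisted fields.
        EncryptedKey roundTripped = entity.getEncryptedKey();
        System.out.println(roundTripped.getMasterKeyAlias() + " / " + roundTripped.keySize() + " bits");
    }
}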

View File

@@ -0,0 +1,55 @@
package org.alfresco.repo.domain.contentdata;
import java.io.Serializable;
import java.nio.ByteBuffer;
public class EncryptedKey implements Serializable
{
private static final long serialVersionUID = 1L;
private String masterKeystoreId;
private String masterKeyAlias;
private final String algorithm;
private final ByteBuffer encryptedKeyBytes;
public EncryptedKey(String masterKeystoreId, String masterKeyAlias, String algorithm, ByteBuffer encryptedKeyBytes)
{
this.masterKeyAlias = masterKeyAlias;
this.masterKeystoreId = masterKeystoreId;
this.algorithm = algorithm;
this.encryptedKeyBytes = encryptedKeyBytes.asReadOnlyBuffer();
}
public String getMasterKeystoreId()
{
return masterKeystoreId;
}
public String getMasterKeyAlias()
{
return masterKeyAlias;
}
public ByteBuffer getEncryptedKeyBytes()
{
return encryptedKeyBytes;
}
public String getAlgorithm()
{
return this.algorithm;
}
public ByteBuffer getByteBuffer()
{
return this.encryptedKeyBytes.asReadOnlyBuffer();
}
public int keySize()
{
byte[] eKey = new byte[getByteBuffer().remaining()];
getByteBuffer().get(eKey);
return eKey.length * 8;
}
}
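
Editor's sketch (not part of the commit) of the buffer handling above: getByteBuffer() hands out a fresh read-only view on every call, so one consumer draining the buffer does not affect the next, and nobody can alter the stored key bytes. The class name and values are invented.

import java.nio.ByteBuffer;
import org.alfresco.repo.domain.contentdata.EncryptedKey;

public class EncryptedKeyBufferExample
{
    public static void main(String[] args)
    {
        byte[] wrapped = new byte[16];               // dummy wrapped-key bytes
        EncryptedKey key = new EncryptedKey("keystore.main", "masterKey1", "AES",
                ByteBuffer.wrap(wrapped));

        ByteBuffer first = key.getByteBuffer();
        first.get(new byte[first.remaining()]);      // drain the first view completely

        ByteBuffer second = key.getByteBuffer();
        System.out.println(second.remaining());      // still 16: each view has its own position
        System.out.println(second.isReadOnly());     // true
        System.out.println(key.keySize());           // 16 bytes * 8 = 128
    }
}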

View File

@@ -31,9 +31,11 @@ import org.alfresco.ibatis.IdsEntity;
import org.alfresco.repo.domain.contentdata.AbstractContentDataDAOImpl; import org.alfresco.repo.domain.contentdata.AbstractContentDataDAOImpl;
import org.alfresco.repo.domain.contentdata.ContentDataEntity; import org.alfresco.repo.domain.contentdata.ContentDataEntity;
import org.alfresco.repo.domain.contentdata.ContentUrlEntity; import org.alfresco.repo.domain.contentdata.ContentUrlEntity;
import org.alfresco.repo.domain.contentdata.ContentUrlKeyEntity;
import org.alfresco.repo.domain.contentdata.ContentUrlOrphanQuery; import org.alfresco.repo.domain.contentdata.ContentUrlOrphanQuery;
import org.alfresco.repo.domain.contentdata.ContentUrlUpdateEntity; import org.alfresco.repo.domain.contentdata.ContentUrlUpdateEntity;
import org.alfresco.service.cmr.repository.ContentData; import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.util.EqualsHelper;
import org.alfresco.util.Pair; import org.alfresco.util.Pair;
import org.alfresco.util.ParameterCheck; import org.alfresco.util.ParameterCheck;
import org.apache.ibatis.session.RowBounds; import org.apache.ibatis.session.RowBounds;
@@ -45,6 +47,7 @@ import org.springframework.dao.DataIntegrityViolationException;
* iBatis-specific implementation of the ContentData DAO. * iBatis-specific implementation of the ContentData DAO.
* *
* @author Derek Hulley * @author Derek Hulley
* @author sglover
* @since 3.2 * @since 3.2
*/ */
public class ContentDataDAOImpl extends AbstractContentDataDAOImpl public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
@@ -62,16 +65,20 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
private static final String UPDATE_CONTENT_DATA = "alfresco.content.update_ContentData"; private static final String UPDATE_CONTENT_DATA = "alfresco.content.update_ContentData";
private static final String DELETE_CONTENT_DATA = "alfresco.content.delete_ContentData"; private static final String DELETE_CONTENT_DATA = "alfresco.content.delete_ContentData";
private static final String DELETE_CONTENT_URLS = "alfresco.content.delete_ContentUrls"; private static final String DELETE_CONTENT_URLS = "alfresco.content.delete_ContentUrls";
private static final String DELETE_CONTENT_URL_KEYS = "alfresco.content.delete_ContentUrlKeys";
private static final String DELETE_SYMMETRIC_KEY = "alfresco.content.delete_KeyData";
private static final String UPDATE_SYMMETRIC_KEY = "alfresco.content.update_KeyData";
private static final String INSERT_SYMMETRIC_KEY = "alfresco.content.insert.insert_KeyData";
private static final String SELECT_SYMMETRIC_KEYS_BY_MASTER_KEY = "alfresco.content.select_SymmetricKeysByMasterKey";
private static final String COUNT_SYMMETRIC_KEYS_BY_MASTER_KEY = "alfresco.content.select_CountSymmetricKeysByMasterKey";
private SqlSessionTemplate template;
protected SqlSessionTemplate template;
public final void setSqlSessionTemplate(SqlSessionTemplate sqlSessionTemplate) public final void setSqlSessionTemplate(SqlSessionTemplate sqlSessionTemplate)
{ {
this.template = sqlSessionTemplate; this.template = sqlSessionTemplate;
} }
public Pair<Long, String> createContentUrlOrphaned(String contentUrl, Date orphanTime) public Pair<Long, String> createContentUrlOrphaned(String contentUrl, Date orphanTime)
{ {
ContentUrlEntity contentUrlEntity = new ContentUrlEntity(); ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
@@ -85,7 +92,7 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
} }
@Override @Override
protected ContentUrlEntity createContentUrlEntity(String contentUrl, long size)
protected ContentUrlEntity createContentUrlEntity(String contentUrl, long size, ContentUrlKeyEntity contentUrlKeyEntity)
{ {
ContentUrlEntity contentUrlEntity = new ContentUrlEntity(); ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
contentUrlEntity.setContentUrl(contentUrl); contentUrlEntity.setContentUrl(contentUrl);
@@ -93,7 +100,14 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
contentUrlEntity.setOrphanTime(null); contentUrlEntity.setOrphanTime(null);
/* Long id = (Long) */ template.insert(INSERT_CONTENT_URL, contentUrlEntity); /* Long id = (Long) */ template.insert(INSERT_CONTENT_URL, contentUrlEntity);
/*contentUrlEntity.setId(id);*/ /*contentUrlEntity.setId(id);*/
if(contentUrlKeyEntity != null)
{
template.insert(INSERT_SYMMETRIC_KEY, contentUrlKeyEntity);
// contentUrlEntity.setContentUrlKey(contentUrlKeyEntity);
}
// Done // Done
return contentUrlEntity; return contentUrlEntity;
} }
@@ -103,13 +117,13 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
{ {
ContentUrlEntity contentUrlEntity = new ContentUrlEntity(); ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
contentUrlEntity.setId(id); contentUrlEntity.setId(id);
contentUrlEntity = template.selectOne(SELECT_CONTENT_URL_BY_ID, contentUrlEntity);
contentUrlEntity = (ContentUrlEntity) template.selectOne(SELECT_CONTENT_URL_BY_ID, contentUrlEntity);
// Done // Done
return contentUrlEntity; return contentUrlEntity;
} }
@Override @Override
protected ContentUrlEntity getContentUrlEntity(String contentUrl)
public ContentUrlEntity getContentUrlEntity(String contentUrl)
{ {
ContentUrlEntity contentUrlEntity = new ContentUrlEntity(); ContentUrlEntity contentUrlEntity = new ContentUrlEntity();
contentUrlEntity.setContentUrl(contentUrl); contentUrlEntity.setContentUrl(contentUrl);
@@ -122,7 +136,6 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
return contentUrlEntity; return contentUrlEntity;
} }
@SuppressWarnings("unchecked")
public void getContentUrlsOrphaned( public void getContentUrlsOrphaned(
final ContentUrlHandler contentUrlHandler, final ContentUrlHandler contentUrlHandler,
final Long maxOrphanTimeExclusive, final Long maxOrphanTimeExclusive,
@@ -132,7 +145,7 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
ContentUrlOrphanQuery query = new ContentUrlOrphanQuery(); ContentUrlOrphanQuery query = new ContentUrlOrphanQuery();
query.setMaxOrphanTimeExclusive(maxOrphanTimeExclusive); query.setMaxOrphanTimeExclusive(maxOrphanTimeExclusive);
List<ContentUrlEntity> results = template.selectList(SELECT_CONTENT_URLS_ORPHANED, List<ContentUrlEntity> results = template.selectList(SELECT_CONTENT_URLS_ORPHANED,
query, query,
new RowBounds(0, maxResults)); new RowBounds(0, maxResults));
// Pass the result to the callback // Pass the result to the callback
@@ -159,6 +172,7 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
*/ */
public int deleteContentUrls(List<Long> ids) public int deleteContentUrls(List<Long> ids)
{ {
template.delete(DELETE_CONTENT_URL_KEYS, ids);
return template.delete(DELETE_CONTENT_URLS, ids); return template.delete(DELETE_CONTENT_URLS, ids);
} }
@@ -171,7 +185,7 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
{ {
contentUrlEntity.setContentUrlShort(contentUrlEntity.getContentUrlShort().toLowerCase()); contentUrlEntity.setContentUrlShort(contentUrlEntity.getContentUrlShort().toLowerCase());
} }
contentUrlEntity = template.selectOne(SELECT_CONTENT_URL_BY_KEY_UNREFERENCED, contentUrlEntity);
contentUrlEntity = (ContentUrlEntity) template.selectOne(SELECT_CONTENT_URL_BY_KEY_UNREFERENCED, contentUrlEntity);
// Done // Done
return contentUrlEntity; return contentUrlEntity;
} }
@@ -206,12 +220,11 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
{ {
Map<String, Object> params = new HashMap<String, Object>(11); Map<String, Object> params = new HashMap<String, Object>(11);
params.put("id", id); params.put("id", id);
ContentDataEntity contentDataEntity = template.selectOne(SELECT_CONTENT_DATA_BY_ID, params);
ContentDataEntity contentDataEntity = (ContentDataEntity) template.selectOne(SELECT_CONTENT_DATA_BY_ID, params);
// Done // Done
return contentDataEntity; return contentDataEntity;
} }
@SuppressWarnings("unchecked")
@Override @Override
protected List<ContentDataEntity> getContentDataEntitiesForNodes(Set<Long> nodeIds) protected List<ContentDataEntity> getContentDataEntitiesForNodes(Set<Long> nodeIds)
{ {
@@ -266,7 +279,6 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
IdsEntity idsEntity = new IdsEntity(); IdsEntity idsEntity = new IdsEntity();
idsEntity.setIdOne(nodeId); idsEntity.setIdOne(nodeId);
idsEntity.setIds(new ArrayList<Long>(qnameIds)); idsEntity.setIds(new ArrayList<Long>(qnameIds));
@SuppressWarnings("unchecked")
List<Long> ids = template.selectList(SELECT_CONTENT_DATA_BY_NODE_AND_QNAME, idsEntity); List<Long> ids = template.selectList(SELECT_CONTENT_DATA_BY_NODE_AND_QNAME, idsEntity);
// Delete each one // Delete each one
for (Long id : ids) for (Long id : ids)
@@ -284,4 +296,49 @@ public class ContentDataDAOImpl extends AbstractContentDataDAOImpl
} }
} }
} }
@Override
protected int updateContentUrlEntity(ContentUrlEntity existing, ContentUrlEntity entity)
{
int ret = 0;
ContentUrlKeyEntity existingContentUrlKey = existing.getContentUrlKey();
ContentUrlKeyEntity contentUrlKey = entity.getContentUrlKey();
contentUrlKey.setContentUrlId(existing.getId());
if(existingContentUrlKey == null)
{
ret = template.insert(INSERT_SYMMETRIC_KEY, contentUrlKey);
}
else if (!EqualsHelper.nullSafeEquals(existingContentUrlKey, contentUrlKey))
{
ret = template.update(UPDATE_SYMMETRIC_KEY, contentUrlKey);
}
return ret;
}
@Override
protected int deleteContentUrlEntity(long id)
{
Map<String, Object> params = new HashMap<String, Object>(11);
params.put("id", id);
return template.delete(DELETE_SYMMETRIC_KEY, params);
}
@Override
public List<ContentUrlKeyEntity> getSymmetricKeysByMasterKeyAlias(String masterKeyAlias, long fromId, int maxResults)
{
ContentUrlKeyEntity entity = new ContentUrlKeyEntity();
entity.setMasterKeyAlias(masterKeyAlias);
entity.setId(fromId);
List<ContentUrlKeyEntity> results = template.selectList(SELECT_SYMMETRIC_KEYS_BY_MASTER_KEY,
entity, new RowBounds(0, maxResults));
return results;
}
@Override
public int countSymmetricKeysForMasterKeyAlias(String masterKeyAlias)
{
return (Integer)template.selectOne(COUNT_SYMMETRIC_KEYS_BY_MASTER_KEY, masterKeyAlias);
}
} }
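
Editor's sketch (not part of the commit) of how the two new query methods might be used together to report key usage for a master key alias. It assumes they are exposed on the ContentDataDAO interface (as the @Override annotations and the interface fragment at the top of this section suggest) and that fromId acts as an exclusive lower-bound cursor; the page size and report format are invented.

import java.util.List;
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
import org.alfresco.repo.domain.contentdata.ContentUrlKeyEntity;

public class MasterKeyUsageReport
{
    // Walks every symmetric key wrapped by the given master key, one page at a time,
    // using the id of the last row seen as the cursor for the next page.
    public static void report(ContentDataDAO contentDataDAO, String masterKeyAlias)
    {
        int total = contentDataDAO.countSymmetricKeysForMasterKeyAlias(masterKeyAlias);
        System.out.println(total + " content keys are wrapped by " + masterKeyAlias);

        long fromId = 0L;
        int pageSize = 500;                       // arbitrary page size for the example
        List<ContentUrlKeyEntity> page;
        do
        {
            page = contentDataDAO.getSymmetricKeysByMasterKeyAlias(masterKeyAlias, fromId, pageSize);
            for (ContentUrlKeyEntity key : page)
            {
                System.out.println("content url id " + key.getContentUrlId()
                        + " -> " + key.getKeySize() + " bit " + key.getAlgorithm() + " key");
                fromId = key.getId();
            }
        }
        while (page.size() == pageSize);
    }
}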

View File

@@ -1,5 +1,5 @@
/* /*
* Copyright (C) 2005-2010 Alfresco Software Limited.
* Copyright (C) 2005-2014 Alfresco Software Limited.
* *
* This file is part of Alfresco * This file is part of Alfresco
* *
@@ -18,6 +18,11 @@
*/ */
package org.alfresco.repo.content; package org.alfresco.repo.content;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.channels.FileChannel; import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel; import java.nio.channels.ReadableByteChannel;
@@ -26,14 +31,19 @@ import java.util.Set;
import javax.transaction.UserTransaction; import javax.transaction.UserTransaction;
import junit.framework.TestCase;
import org.alfresco.repo.content.ContentStore.ContentUrlHandler; import org.alfresco.repo.content.ContentStore.ContentUrlHandler;
import org.alfresco.service.cmr.repository.ContentReader; import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.transaction.TransactionService; import org.alfresco.service.transaction.TransactionService;
import org.alfresco.util.ApplicationContextHelper; import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.BaseApplicationContextHelper;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
/** /**
@@ -44,14 +54,22 @@ import org.springframework.context.ApplicationContext;
* @see org.alfresco.service.cmr.repository.ContentReader * @see org.alfresco.service.cmr.repository.ContentReader
* @see org.alfresco.service.cmr.repository.ContentWriter * @see org.alfresco.service.cmr.repository.ContentWriter
* *
* @author sglover
* @author Derek Hulley * @author Derek Hulley
*/ */
public abstract class AbstractReadOnlyContentStoreTest extends TestCase
public abstract class AbstractReadOnlyContentStoreTest
{
protected static final ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
protected static ApplicationContext ctx;
private static Log logger = LogFactory.getLog(AbstractReadOnlyContentStoreTest.class);
@Rule public TestName name = new TestName();
protected String getName()
{
return name.getMethodName();
}
protected TransactionService transactionService; protected TransactionService transactionService;
private UserTransaction txn; private UserTransaction txn;
@@ -60,11 +78,17 @@ public abstract class AbstractReadOnlyContentStoreTest extends TestCase
super(); super();
} }
@BeforeClass
public static void beforeClass() throws Exception
{
ctx = BaseApplicationContextHelper.getApplicationContext(ApplicationContextHelper.CONFIG_LOCATIONS);
}
/** /**
* Starts a transaction * Starts a transaction
*/ */
@Override
public void setUp() throws Exception
@Before
public void before() throws Exception
{
transactionService = (TransactionService) ctx.getBean("TransactionService"); transactionService = (TransactionService) ctx.getBean("TransactionService");
txn = transactionService.getUserTransaction(); txn = transactionService.getUserTransaction();
@@ -74,8 +98,8 @@ public abstract class AbstractReadOnlyContentStoreTest extends TestCase
/** /**
* Rolls back the transaction * Rolls back the transaction
*/ */
@Override
public void tearDown() throws Exception
@After
public void after() throws Exception
{
try { txn.rollback(); } catch (Throwable e) {e.printStackTrace();} try { txn.rollback(); } catch (Throwable e) {e.printStackTrace();}
} }
@@ -144,6 +168,7 @@ public abstract class AbstractReadOnlyContentStoreTest extends TestCase
} }
} }
@Test
public void testSetUp() throws Exception public void testSetUp() throws Exception
{ {
// check that the store remains the same // check that the store remains the same
@@ -182,6 +207,7 @@ public abstract class AbstractReadOnlyContentStoreTest extends TestCase
/** /**
* Checks that the error handling for <i>inappropriate</i> content URLs * Checks that the error handling for <i>inappropriate</i> content URLs
*/ */
@Test
public void testIllegalReadableContentUrls() public void testIllegalReadableContentUrls()
{ {
ContentStore store = getStore(); ContentStore store = getStore();
@@ -193,13 +219,14 @@ public abstract class AbstractReadOnlyContentStoreTest extends TestCase
/** /**
* Checks that the various methods of obtaining a reader are supported. * Checks that the various methods of obtaining a reader are supported.
*/ */
@Test
public void testGetReaderForExistingContentUrl() throws Exception public void testGetReaderForExistingContentUrl() throws Exception
{ {
ContentStore store = getStore(); ContentStore store = getStore();
String contentUrl = getExistingContentUrl(); String contentUrl = getExistingContentUrl();
if (contentUrl == null) if (contentUrl == null)
{ {
logger.warn("Store test " + getName() + " not possible on " + store.getClass().getName());
logger.warn("Store test testGetReaderForExistingContentUrl not possible on " + store.getClass().getName());
return; return;
} }
// Get the reader // Get the reader
@@ -227,13 +254,14 @@ public abstract class AbstractReadOnlyContentStoreTest extends TestCase
* <p> * <p>
* Only executes if the reader implements {@link RandomAccessContent}. * Only executes if the reader implements {@link RandomAccessContent}.
*/ */
@Test
public void testRandomAccessRead() throws Exception public void testRandomAccessRead() throws Exception
{ {
ContentStore store = getStore(); ContentStore store = getStore();
String contentUrl = getExistingContentUrl(); String contentUrl = getExistingContentUrl();
if (contentUrl == null) if (contentUrl == null)
{ {
logger.warn("Store test " + getName() + " not possible on " + store.getClass().getName());
logger.warn("Store test testRandomAccessRead not possible on " + store.getClass().getName());
return; return;
} }
// Get the reader // Get the reader
@@ -256,6 +284,7 @@ public abstract class AbstractReadOnlyContentStoreTest extends TestCase
fileChannel.close(); fileChannel.close();
} }
@Test
public void testBlockedWriteOperations() throws Exception public void testBlockedWriteOperations() throws Exception
{ {
ContentStore store = getStore(); ContentStore store = getStore();
@@ -277,7 +306,7 @@ public abstract class AbstractReadOnlyContentStoreTest extends TestCase
String contentUrl = getExistingContentUrl(); String contentUrl = getExistingContentUrl();
if (contentUrl == null) if (contentUrl == null)
{ {
logger.warn("Store test " + getName() + " not possible on " + store.getClass().getName());
logger.warn("Store test testBlockedWriteOperations not possible on " + store.getClass().getName());
return; return;
} }
// Ensure that we can't delete a URL // Ensure that we can't delete a URL

View File

@@ -1,5 +1,5 @@
/* /*
* Copyright (C) 2005-2010 Alfresco Software Limited.
* Copyright (C) 2005-2014 Alfresco Software Limited.
* *
* This file is part of Alfresco * This file is part of Alfresco
* *
@@ -18,6 +18,12 @@
*/ */
package org.alfresco.repo.content; package org.alfresco.repo.content;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.File; import java.io.File;
@@ -39,6 +45,7 @@ import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.util.TempFileProvider; import org.alfresco.util.TempFileProvider;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.junit.Test;
/** /**
* Abstract base class that provides a set of tests for implementations * Abstract base class that provides a set of tests for implementations
@@ -53,7 +60,7 @@ import org.apache.commons.logging.LogFactory;
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyContentStoreTest public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyContentStoreTest
{ {
private static Log logger = LogFactory.getLog(AbstractWritableContentStoreTest.class);
protected static Log logger = LogFactory.getLog(AbstractWritableContentStoreTest.class);
public AbstractWritableContentStoreTest() public AbstractWritableContentStoreTest()
{ {
@@ -68,7 +75,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
protected String getExistingContentUrl() protected String getExistingContentUrl()
{ {
ContentWriter writer = getWriter(); ContentWriter writer = getWriter();
writer.putContent("Content for " + getName());
writer.putContent("Content for getExistingContentUrl");
return writer.getContentUrl(); return writer.getContentUrl();
} }
@@ -84,7 +91,8 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
ContentStore store = getStore(); ContentStore store = getStore();
return store.getWriter(ContentStore.NEW_CONTENT_CONTEXT); return store.getWriter(ContentStore.NEW_CONTENT_CONTEXT);
} }
@Test
public void testSetUp() throws Exception public void testSetUp() throws Exception
{ {
// check that the store remains the same // check that the store remains the same
@@ -93,6 +101,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
assertTrue("The same instance of the store must be returned for getStore", store == getStore()); assertTrue("The same instance of the store must be returned for getStore", store == getStore());
} }
@Test
public void testWritable() throws Exception public void testWritable() throws Exception
{ {
ContentStore store = getStore(); ContentStore store = getStore();
@@ -102,6 +111,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
/** /**
* Just checks that the method doesn't blow up * Just checks that the method doesn't blow up
*/ */
@Test
public void testSpaceFree() throws Exception public void testSpaceFree() throws Exception
{ {
ContentStore store = getStore(); ContentStore store = getStore();
@@ -111,6 +121,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
/** /**
* Just checks that the method doesn't blow up * Just checks that the method doesn't blow up
*/ */
@Test
public void testSpaceTotal() throws Exception public void testSpaceTotal() throws Exception
{ {
ContentStore store = getStore(); ContentStore store = getStore();
@@ -120,6 +131,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
/** /**
* Just check that the method doesn't blow up * Just check that the method doesn't blow up
*/ */
@Test
public void testRootLocation() throws Exception public void testRootLocation() throws Exception
{ {
ContentStore store = getStore(); ContentStore store = getStore();
@@ -148,6 +160,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
/** /**
* Checks that the error handling for <i>inappropriate</i> content URLs * Checks that the error handling for <i>inappropriate</i> content URLs
*/ */
@Test
public void testIllegalWritableContentUrls() public void testIllegalWritableContentUrls()
{ {
ContentStore store = getStore(); ContentStore store = getStore();
@@ -159,10 +172,11 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
/** /**
* Get a writer and write a little bit of content before reading it. * Get a writer and write a little bit of content before reading it.
*/ */
@Test
public void testSimpleUse() public void testSimpleUse()
{ {
ContentStore store = getStore(); ContentStore store = getStore();
String content = "Content for " + getName();
String content = "Content for testSimpleUse";
ContentWriter writer = store.getWriter(ContentStore.NEW_CONTENT_CONTEXT); ContentWriter writer = store.getWriter(ContentStore.NEW_CONTENT_CONTEXT);
assertNotNull("Writer may not be null", writer); assertNotNull("Writer may not be null", writer);
@@ -186,6 +200,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
/** /**
* Checks that the various methods of obtaining a reader are supported. * Checks that the various methods of obtaining a reader are supported.
*/ */
@Test
public synchronized void testGetReader() throws Exception public synchronized void testGetReader() throws Exception
{ {
ContentStore store = getStore(); ContentStore store = getStore();
@@ -200,7 +215,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
ContentReader readerFromWriterBeforeWrite = writer.getReader(); ContentReader readerFromWriterBeforeWrite = writer.getReader();
assertNotNull("A reader must always be available from the writer", readerFromWriterBeforeWrite); assertNotNull("A reader must always be available from the writer", readerFromWriterBeforeWrite);
String content = "Content for " + getName();
String content = "Content for testGetReader";
// write some content // write some content
long before = System.currentTimeMillis(); long before = System.currentTimeMillis();
@@ -255,6 +270,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
* Check that a reader is immutable, i.e. that a reader fetched before a * Check that a reader is immutable, i.e. that a reader fetched before a
* write doesn't suddenly become aware of the content once it has been written. * write doesn't suddenly become aware of the content once it has been written.
*/ */
@Test
public void testReaderImmutability() public void testReaderImmutability()
{ {
ContentWriter writer = getWriter(); ContentWriter writer = getWriter();
@@ -264,7 +280,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
assertFalse(readerBeforeWrite.exists()); assertFalse(readerBeforeWrite.exists());
// Write some content // Write some content
writer.putContent("Content for " + getName());
writer.putContent("Content for testReaderImmutability");
assertFalse("Reader's state changed after write", readerBeforeWrite.exists()); assertFalse("Reader's state changed after write", readerBeforeWrite.exists());
try try
{ {
@@ -281,6 +297,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
assertTrue("New reader after write should be directed to new content", readerAfterWrite.exists()); assertTrue("New reader after write should be directed to new content", readerAfterWrite.exists());
} }
@Test
public void testMimetypAndEncodingAndLocale() throws Exception public void testMimetypAndEncodingAndLocale() throws Exception
{ {
ContentWriter writer = getWriter(); ContentWriter writer = getWriter();
@@ -309,6 +326,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
assertEquals("Encoding and decoding of strings failed", content, contentCheck); assertEquals("Encoding and decoding of strings failed", content, contentCheck);
} }
@Test
public void testClosedState() throws Exception public void testClosedState() throws Exception
{ {
ContentWriter writer = getWriter(); ContentWriter writer = getWriter();
@@ -375,11 +393,12 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
return found[0]; return found[0];
} }
@Test
public void testDeleteSimple() throws Exception public void testDeleteSimple() throws Exception
{ {
ContentStore store = getStore(); ContentStore store = getStore();
ContentWriter writer = getWriter(); ContentWriter writer = getWriter();
writer.putContent("Content for " + getName());
writer.putContent("Content for testDeleteSimple");
String contentUrl = writer.getContentUrl(); String contentUrl = writer.getContentUrl();
assertTrue("Content must now exist", store.exists(contentUrl)); assertTrue("Content must now exist", store.exists(contentUrl));
try try
@@ -388,7 +407,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
} }
catch (UnsupportedOperationException e) catch (UnsupportedOperationException e)
{ {
logger.warn("Store test " + getName() + " not possible on " + store.getClass().getName());
logger.warn("Store test testDeleteSimple not possible on " + store.getClass().getName());
return; return;
} }
assertFalse("Content must now be removed", store.exists(contentUrl)); assertFalse("Content must now be removed", store.exists(contentUrl));
@@ -399,12 +418,13 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
* <p> * <p>
* Only applies when {@link #getStore()} returns a value. * Only applies when {@link #getStore()} returns a value.
*/ */
@Test
public void testDeleteReaderStates() throws Exception public void testDeleteReaderStates() throws Exception
{ {
ContentStore store = getStore(); ContentStore store = getStore();
ContentWriter writer = getWriter(); ContentWriter writer = getWriter();
String content = "Content for " + getName();
String content = "Content for testDeleteReaderStates";
String contentUrl = writer.getContentUrl(); String contentUrl = writer.getContentUrl();
// write some bytes, but don't close the stream // write some bytes, but don't close the stream
@@ -481,6 +501,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
/** /**
* Checks that the writer can have a listener attached * Checks that the writer can have a listener attached
*/ */
@Test
public void testWriteStreamListener() throws Exception public void testWriteStreamListener() throws Exception
{ {
ContentWriter writer = getWriter(); ContentWriter writer = getWriter();
@@ -507,6 +528,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
* If the resource accessed by {@link #getReader()} and {@link #getWriter()} is not the same, then * If the resource accessed by {@link #getReader()} and {@link #getWriter()} is not the same, then
* values written and read won't be the same. * values written and read won't be the same.
*/ */
@Test
public void testWriteAndReadString() throws Exception public void testWriteAndReadString() throws Exception
{ {
ContentWriter writer = getWriter(); ContentWriter writer = getWriter();
@@ -521,6 +543,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
assertEquals("Write and read didn't work", content, check); assertEquals("Write and read didn't work", content, check);
} }
@Test
public void testStringTruncation() throws Exception public void testStringTruncation() throws Exception
{ {
String content = "1234567890"; String content = "1234567890";
@@ -537,11 +560,12 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
assertEquals("Truncated strings don't match", "12345", checkContent); assertEquals("Truncated strings don't match", "12345", checkContent);
} }
@Test
public void testReadAndWriteFile() throws Exception public void testReadAndWriteFile() throws Exception
{ {
ContentWriter writer = getWriter(); ContentWriter writer = getWriter();
File sourceFile = TempFileProvider.createTempFile(getName(), ".txt");
File sourceFile = TempFileProvider.createTempFile("testReadAndWriteFile", ".txt");
sourceFile.deleteOnExit(); sourceFile.deleteOnExit();
// dump some content into the temp file // dump some content into the temp file
String content = "ABC"; String content = "ABC";
@@ -555,7 +579,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
assertTrue("Stream close not detected", writer.isClosed()); assertTrue("Stream close not detected", writer.isClosed());
// create a sink temp file // create a sink temp file
File sinkFile = TempFileProvider.createTempFile(getName(), ".txt");
File sinkFile = TempFileProvider.createTempFile("testReadAndWriteFile", ".txt");
sinkFile.deleteOnExit(); sinkFile.deleteOnExit();
// get the content into our temp file // get the content into our temp file
@@ -573,6 +597,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
assertEquals("Write out of and read into files failed", content, check); assertEquals("Write out of and read into files failed", content, check);
} }
@Test
public void testReadAndWriteStreamByPull() throws Exception public void testReadAndWriteStreamByPull() throws Exception
{ {
ContentWriter writer = getWriter(); ContentWriter writer = getWriter();
@@ -593,11 +618,12 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
assertEquals("Write out and read in using streams failed", content, check); assertEquals("Write out and read in using streams failed", content, check);
} }
@Test
public void testReadAndWriteStreamByPush() throws Exception public void testReadAndWriteStreamByPush() throws Exception
{ {
ContentWriter writer = getWriter(); ContentWriter writer = getWriter();
String content = "ABC";
String content = "Some Random Content";
// get the content output stream // get the content output stream
OutputStream os = writer.getContentOutputStream(); OutputStream os = writer.getContentOutputStream();
os.write(content.getBytes()); os.write(content.getBytes());
@@ -611,7 +637,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
InputStream is = reader.getContentInputStream(); InputStream is = reader.getContentInputStream();
byte[] buffer = new byte[100]; byte[] buffer = new byte[100];
int count = is.read(buffer); int count = is.read(buffer);
assertEquals("No content read", 3, count);
assertEquals("No content read", content.length(), count);
is.close(); is.close();
String check = new String(buffer, 0, count); String check = new String(buffer, 0, count);
@@ -623,6 +649,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
* <p> * <p>
* Only applies when {@link #getStore()} returns a value. * Only applies when {@link #getStore()} returns a value.
*/ */
@Test
public void testListUrls() throws Exception public void testListUrls() throws Exception
{ {
ContentStore store = getStore(); ContentStore store = getStore();
@@ -633,7 +660,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
} }
catch (UnsupportedOperationException e) catch (UnsupportedOperationException e)
{ {
logger.warn("Store test " + getName() + " not possible on " + store.getClass().getName());
logger.warn("Store test testListUrls not possible on " + store.getClass().getName());
return; return;
} }
// Proceed with the test // Proceed with the test
@@ -662,6 +689,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
* <p> * <p>
* Only executes if the writer implements {@link RandomAccessContent}. * Only executes if the writer implements {@link RandomAccessContent}.
*/ */
@Test
public void testRandomAccessWrite() throws Exception public void testRandomAccessWrite() throws Exception
{ {
ContentWriter writer = getWriter(); ContentWriter writer = getWriter();
@@ -727,6 +755,7 @@ public abstract class AbstractWritableContentStoreTest extends AbstractReadOnlyC
* <p> * <p>
* Only executes if the reader implements {@link RandomAccessContent}. * Only executes if the reader implements {@link RandomAccessContent}.
*/ */
@Test
public void testRandomAccessRead() throws Exception public void testRandomAccessRead() throws Exception
{ {
ContentWriter writer = getWriter(); ContentWriter writer = getWriter();

View File

@@ -18,6 +18,7 @@
*/ */
package org.alfresco.repo.content; package org.alfresco.repo.content;
import junit.framework.JUnit4TestAdapter;
import junit.framework.Test; import junit.framework.Test;
import junit.framework.TestSuite; import junit.framework.TestSuite;
@@ -46,15 +47,15 @@ public class ContentFullContextTestSuite extends TestSuite
// These tests need a full context, at least for now // These tests need a full context, at least for now
suite.addTestSuite(ContentStoreCleanerTest.class); suite.addTestSuite(ContentStoreCleanerTest.class);
//suite.addTestSuite(CharsetFinderTest.class); //suite.addTestSuite(CharsetFinderTest.class);
suite.addTestSuite(FileContentStoreTest.class);
suite.addTest(new JUnit4TestAdapter(FileContentStoreTest.class));
suite.addTestSuite(NoRandomAccessFileContentStoreTest.class);
suite.addTest(new JUnit4TestAdapter(NoRandomAccessFileContentStoreTest.class));
suite.addTestSuite(ReadOnlyFileContentStoreTest.class);
suite.addTest(new JUnit4TestAdapter(ReadOnlyFileContentStoreTest.class));
suite.addTestSuite(ContentStoreReplicatorTest.class);
suite.addTestSuite(ReplicatingContentStoreTest.class);
suite.addTest(new JUnit4TestAdapter(ReplicatingContentStoreTest.class));
suite.addTestSuite(ContentDataTest.class);
//suite.addTestSuite(MimetypeMapTest.class);
suite.addTestSuite(RoutingContentServiceTest.class);
suite.addTestSuite(RoutingContentStoreTest.class);
suite.addTest(new JUnit4TestAdapter(RoutingContentStoreTest.class));
try try
{ {

View File

@@ -1,5 +1,5 @@
/* /*
* Copyright (C) 2005-2010 Alfresco Software Limited.
* Copyright (C) 2005-2014 Alfresco Software Limited.
* *
* This file is part of Alfresco * This file is part of Alfresco
* *
@@ -18,6 +18,12 @@
*/ */
package org.alfresco.repo.content; package org.alfresco.repo.content;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File; import java.io.File;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
@@ -33,6 +39,8 @@ import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.test_category.OwnJVMTestsCategory; import org.alfresco.test_category.OwnJVMTestsCategory;
import org.alfresco.util.Pair; import org.alfresco.util.Pair;
import org.alfresco.util.TempFileProvider; import org.alfresco.util.TempFileProvider;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
/** /**
@@ -54,10 +62,9 @@ public class RoutingContentStoreTest extends AbstractWritableContentStoreTest
private ContentStore storeD; private ContentStore storeD;
private ContentStore routingStore; private ContentStore routingStore;
@Override
public void setUp() throws Exception
@Before
public void before() throws Exception
{
super.setUp();
File tempDir = TempFileProvider.getTempDir(); File tempDir = TempFileProvider.getTempDir();
// Create a subdirectory for A // Create a subdirectory for A
File storeADir = new File(tempDir, "A"); File storeADir = new File(tempDir, "A");
@@ -80,6 +87,7 @@ public class RoutingContentStoreTest extends AbstractWritableContentStoreTest
return routingStore; return routingStore;
} }
@Test
public void testSetUp() throws Exception public void testSetUp() throws Exception
{ {
assertNotNull(routingStore); assertNotNull(routingStore);
@@ -105,6 +113,7 @@ public class RoutingContentStoreTest extends AbstractWritableContentStoreTest
/** /**
* Checks that requests for missing content URLs are served. * Checks that requests for missing content URLs are served.
*/ */
@Test
public void testMissingUrl() public void testMissingUrl()
{ {
String missingContentUrl = FileContentStore.createNewFileStoreUrl(); String missingContentUrl = FileContentStore.createNewFileStoreUrl();
@@ -123,13 +132,14 @@ public class RoutingContentStoreTest extends AbstractWritableContentStoreTest
} }
} }
@Test
public void testGeneralUse() public void testGeneralUse()
{ {
for (int i = 0 ; i < 20; i++) for (int i = 0 ; i < 20; i++)
{ {
ContentContext contentContext = new ContentContext(null, null); ContentContext contentContext = new ContentContext(null, null);
ContentWriter writer = routingStore.getWriter(contentContext); ContentWriter writer = routingStore.getWriter(contentContext);
String content = "This was generated by " + this.getClass().getName() + "#" + getName() + " number " + i;
String content = "This was generated by " + this.getClass().getName() + "#testGeneralUse number " + i;
writer.putContent(content); writer.putContent(content);
// Check that it exists // Check that it exists
String contentUrl = writer.getContentUrl(); String contentUrl = writer.getContentUrl();

View File

@@ -1,5 +1,5 @@
/* /*
* Copyright (C) 2005-2011 Alfresco Software Limited.
* Copyright (C) 2005-2014 Alfresco Software Limited.
* *
* This file is part of Alfresco * This file is part of Alfresco
* *
@@ -18,12 +18,15 @@
*/ */
package org.alfresco.repo.content.caching; package org.alfresco.repo.content.caching;
import java.io.File;
import java.util.Arrays;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.File;
import java.util.Arrays;
import org.alfresco.repo.cache.DefaultSimpleCache; import org.alfresco.repo.cache.DefaultSimpleCache;
import org.alfresco.repo.cache.SimpleCache; import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.repo.content.AbstractWritableContentStoreTest; import org.alfresco.repo.content.AbstractWritableContentStoreTest;
@@ -34,11 +37,11 @@ import org.alfresco.repo.content.filestore.FileContentStore;
import org.alfresco.service.cmr.repository.ContentWriter; import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.test_category.OwnJVMTestsCategory; import org.alfresco.test_category.OwnJVMTestsCategory;
import org.alfresco.util.TempFileProvider; import org.alfresco.util.TempFileProvider;
import org.junit.Before;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import org.junit.internal.runners.JUnit38ClassRunner; import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
/** /**
* Tests for the CachingContentStore that benefit from a full set of tests * Tests for the CachingContentStore that benefit from a full set of tests
* defined in AbstractWritableContentStoreTest. * defined in AbstractWritableContentStoreTest.
@@ -54,11 +57,9 @@ public class CachingContentStoreSpringTest extends AbstractWritableContentStoreT
private ContentCacheImpl cache; private ContentCacheImpl cache;
@Override
public void setUp() throws Exception
@Before
public void before() throws Exception
{
super.setUp();
File tempDir = TempFileProvider.getTempDir(); File tempDir = TempFileProvider.getTempDir();
backingStore = new FileContentStore(ctx, backingStore = new FileContentStore(ctx,

View File

@@ -18,6 +18,12 @@
*/ */
package org.alfresco.repo.content.filestore; package org.alfresco.repo.content.filestore;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File; import java.io.File;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
@@ -32,6 +38,7 @@ import org.alfresco.service.cmr.repository.ContentIOException;
import org.alfresco.service.cmr.repository.ContentWriter; import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.test_category.OwnJVMTestsCategory; import org.alfresco.test_category.OwnJVMTestsCategory;
import org.alfresco.util.TempFileProvider; import org.alfresco.util.TempFileProvider;
import org.junit.Before;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
/** /**
@@ -46,11 +53,9 @@ public class FileContentStoreTest extends AbstractWritableContentStoreTest
{ {
private FileContentStore store; private FileContentStore store;
@Override
public void setUp() throws Exception
@Before
public void before() throws Exception
{
super.setUp();
// create a store that uses a subdirectory of the temp directory // create a store that uses a subdirectory of the temp directory
File tempDir = TempFileProvider.getTempDir(); File tempDir = TempFileProvider.getTempDir();
store = new FileContentStore(ctx, store = new FileContentStore(ctx,

View File

@@ -24,6 +24,7 @@ import org.alfresco.repo.content.AbstractWritableContentStoreTest;
import org.alfresco.repo.content.ContentStore; import org.alfresco.repo.content.ContentStore;
import org.alfresco.test_category.OwnJVMTestsCategory; import org.alfresco.test_category.OwnJVMTestsCategory;
import org.alfresco.util.TempFileProvider; import org.alfresco.util.TempFileProvider;
import org.junit.Before;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
/** /**
@@ -39,11 +40,9 @@ public class NoRandomAccessFileContentStoreTest extends AbstractWritableContentS
{ {
private FileContentStore store; private FileContentStore store;
@Override
public void setUp() throws Exception
@Before
public void before() throws Exception
{
super.setUp();
// create a store that uses a subdirectory of the temp directory // create a store that uses a subdirectory of the temp directory
File tempDir = TempFileProvider.getTempDir(); File tempDir = TempFileProvider.getTempDir();
store = new FileContentStore(ctx, store = new FileContentStore(ctx,

View File

@@ -21,10 +21,10 @@ package org.alfresco.repo.content.filestore;
import java.io.File; import java.io.File;
import org.alfresco.repo.content.AbstractReadOnlyContentStoreTest; import org.alfresco.repo.content.AbstractReadOnlyContentStoreTest;
import org.alfresco.repo.content.AbstractWritableContentStoreTest;
import org.alfresco.repo.content.ContentStore; import org.alfresco.repo.content.ContentStore;
import org.alfresco.test_category.OwnJVMTestsCategory; import org.alfresco.test_category.OwnJVMTestsCategory;
import org.alfresco.util.TempFileProvider; import org.alfresco.util.TempFileProvider;
import org.junit.Before;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
/** /**
@@ -40,11 +40,9 @@ public class ReadOnlyFileContentStoreTest extends AbstractReadOnlyContentStoreTe
{ {
private FileContentStore store; private FileContentStore store;
@Override
public void setUp() throws Exception
@Before
public void before() throws Exception
{
super.setUp();
// create a store that uses a subdirectory of the temp directory // create a store that uses a subdirectory of the temp directory
File tempDir = TempFileProvider.getTempDir(); File tempDir = TempFileProvider.getTempDir();
store = new FileContentStore(ctx, store = new FileContentStore(ctx,

View File

@@ -1,5 +1,5 @@
/* /*
* Copyright (C) 2005-2010 Alfresco Software Limited.
* Copyright (C) 2005-2014 Alfresco Software Limited.
* *
* This file is part of Alfresco * This file is part of Alfresco
* *
@@ -18,6 +18,10 @@
*/ */
package org.alfresco.repo.content.replication; package org.alfresco.repo.content.replication;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File; import java.io.File;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashSet; import java.util.HashSet;
@@ -38,6 +42,7 @@ import org.alfresco.service.cmr.repository.ContentWriter;
import org.alfresco.test_category.OwnJVMTestsCategory; import org.alfresco.test_category.OwnJVMTestsCategory;
import org.alfresco.util.GUID; import org.alfresco.util.GUID;
import org.alfresco.util.TempFileProvider; import org.alfresco.util.TempFileProvider;
import org.junit.Before;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
/** /**
@@ -59,11 +64,9 @@ public class ReplicatingContentStoreTest extends AbstractWritableContentStoreTes
private ContentStore primaryStore; private ContentStore primaryStore;
private List<ContentStore> secondaryStores; private List<ContentStore> secondaryStores;
@Override
public void setUp() throws Exception
@Before
public void before() throws Exception
{
super.setUp();
File tempDir = TempFileProvider.getTempDir(); File tempDir = TempFileProvider.getTempDir();
// create a primary file store // create a primary file store
String storeDir = tempDir.getAbsolutePath() + File.separatorChar + GUID.generate(); String storeDir = tempDir.getAbsolutePath() + File.separatorChar + GUID.generate();