diff --git a/.externalToolBuilders/JibX.launch b/.externalToolBuilders/JibX.launch
index a5578dd881..24b9b9b84e 100644
--- a/.externalToolBuilders/JibX.launch
+++ b/.externalToolBuilders/JibX.launch
@@ -21,11 +21,12 @@
-
+
+
diff --git a/config/alfresco/authentication-services-context.xml b/config/alfresco/authentication-services-context.xml
index 846c2642b6..19c1d3e0a4 100644
--- a/config/alfresco/authentication-services-context.xml
+++ b/config/alfresco/authentication-services-context.xml
@@ -208,6 +208,20 @@
-->
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -242,6 +256,9 @@
+
+
+
diff --git a/config/alfresco/avm-services-context.xml b/config/alfresco/avm-services-context.xml
index e1ef29c112..518779a809 100644
--- a/config/alfresco/avm-services-context.xml
+++ b/config/alfresco/avm-services-context.xml
@@ -91,12 +91,6 @@
-
-
-
-
-
-
@@ -106,12 +100,6 @@
-
-
-
-
-
-
@@ -149,15 +137,9 @@
-
-
-
-
-
-
diff --git a/config/alfresco/bootstrap-context.xml b/config/alfresco/bootstrap-context.xml
index 5a1a605d7a..2886791b52 100644
--- a/config/alfresco/bootstrap-context.xml
+++ b/config/alfresco/bootstrap-context.xml
@@ -64,15 +64,23 @@
+
+
+
+
+
+
@@ -518,8 +526,6 @@
-
-
@@ -529,15 +535,6 @@
-
-
-
-
-
-
-
-
-
@@ -578,5 +575,22 @@
${spaces.store}/${spaces.company_home.childname}
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/config/alfresco/bootstrap/system.xml b/config/alfresco/bootstrap/system.xml
index 018740746d..774b0c54d5 100644
--- a/config/alfresco/bootstrap/system.xml
+++ b/config/alfresco/bootstrap/system.xml
@@ -31,7 +31,7 @@
-
+ ${alfresco_user_store.adminusername}Administrator
@@ -41,7 +41,7 @@
bootstrapHomeFolderProvider
-
+ guest
diff --git a/config/alfresco/cache-context.xml b/config/alfresco/cache-context.xml
index d3036e2a74..99d52afe19 100644
--- a/config/alfresco/cache-context.xml
+++ b/config/alfresco/cache-context.xml
@@ -41,9 +41,6 @@
defaultCache
-
@@ -66,6 +63,78 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+ defaultCache
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.alfresco.cache.localeEntityTransactionalCache
+
+
+ 100
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.alfresco.cache.storeAndNodeIdCache
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.alfresco.storeAndNodeIdTransactionalCache
+
+
+ 500
+
+
+
diff --git a/config/alfresco/core-services-context.xml b/config/alfresco/core-services-context.xml
index 9063e686b5..210b6e07d5 100644
--- a/config/alfresco/core-services-context.xml
+++ b/config/alfresco/core-services-context.xml
@@ -1102,22 +1102,6 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/config/alfresco/dbscripts/create/2.2/org.hibernate.dialect.Dialect/AlfrescoPostCreate-2.2-Extra.sql b/config/alfresco/dbscripts/create/2.2/org.hibernate.dialect.Dialect/AlfrescoPostCreate-2.2-Extra.sql
index a188f1d2f4..4d4ceb5396 100644
--- a/config/alfresco/dbscripts/create/2.2/org.hibernate.dialect.Dialect/AlfrescoPostCreate-2.2-Extra.sql
+++ b/config/alfresco/dbscripts/create/2.2/org.hibernate.dialect.Dialect/AlfrescoPostCreate-2.2-Extra.sql
@@ -15,17 +15,20 @@
-- Explicit indexes and constraints not declared in the mappings
--
-CREATE INDEX fk_alf_na_qn ON alf_node_aspects (qname_id);
-ALTER TABLE alf_node_aspects ADD CONSTRAINT fk_alf_na_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
+CREATE INDEX fk_alf_nasp_qn ON alf_node_aspects (qname_id);
+ALTER TABLE alf_node_aspects ADD CONSTRAINT fk_alf_nasp_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
-CREATE INDEX fk_alf_np_qn ON alf_node_properties (qname_id);
-ALTER TABLE alf_node_properties ADD CONSTRAINT fk_alf_np_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
+CREATE INDEX fk_alf_nprop_qn ON alf_node_properties (qname_id);
+ALTER TABLE alf_node_properties ADD CONSTRAINT fk_alf_nprop_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
-CREATE INDEX fk_avm_na_qn ON avm_aspects_new (qname_id);
-ALTER TABLE avm_aspects_new ADD CONSTRAINT fk_avm_na_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
+CREATE INDEX fk_alf_nprop_loc ON alf_node_properties (locale_id);
+ALTER TABLE alf_node_properties ADD CONSTRAINT fk_alf_nprop_loc FOREIGN KEY (locale_id) REFERENCES alf_locale (id);
-CREATE INDEX fk_avm_np_qn ON avm_node_properties_new (qname_id);
-ALTER TABLE avm_node_properties_new ADD CONSTRAINT fk_avm_np_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
+CREATE INDEX fk_avm_nasp_qn ON avm_aspects (qname_id);
+ALTER TABLE avm_aspects ADD CONSTRAINT fk_avm_nasp_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
+
+CREATE INDEX fk_avm_nprop_qn ON avm_node_properties (qname_id);
+ALTER TABLE avm_node_properties ADD CONSTRAINT fk_avm_nprop_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
CREATE INDEX idx_avm_hl_revpk ON avm_history_links (descendent, ancestor);
diff --git a/config/alfresco/dbscripts/create/2.2/org.hibernate.dialect.Dialect/AlfrescoPostCreate-2.2-MappedFKIndexes.sql b/config/alfresco/dbscripts/create/2.2/org.hibernate.dialect.Dialect/AlfrescoPostCreate-2.2-MappedFKIndexes.sql
index 86cd9ed914..e70b358cd9 100644
--- a/config/alfresco/dbscripts/create/2.2/org.hibernate.dialect.Dialect/AlfrescoPostCreate-2.2-MappedFKIndexes.sql
+++ b/config/alfresco/dbscripts/create/2.2/org.hibernate.dialect.Dialect/AlfrescoPostCreate-2.2-MappedFKIndexes.sql
@@ -54,8 +54,7 @@ CREATE INDEX fk_alf_na_tnode ON alf_node_assoc (target_node_id);
CREATE INDEX fk_alf_perm_tqn ON alf_permission (type_qname_id);
-CREATE INDEX fk_alf_n_prop ON alf_node_properties (node_id);
-CREATE INDEX fk_alf_np_attr ON alf_node_properties (attribute_value);
+CREATE INDEX fk_alf_nprop_n ON alf_node_properties (node_id);
CREATE INDEX fk_alf_ns_node ON alf_node_status (node_id);
CREATE INDEX fk_alf_ns_trans ON alf_node_status (transaction_id);
diff --git a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/AlfrescoSchemaUpdate-OrclBLOB.sql b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/AlfrescoSchemaUpdate-OrclBLOB.sql
new file mode 100644
index 0000000000..0e0ef4983a
--- /dev/null
+++ b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/AlfrescoSchemaUpdate-OrclBLOB.sql
@@ -0,0 +1,21 @@
+--
+-- Title: Change Oracle LONG RAW columns to BLOB
+-- Database: Generic
+-- Since: V2.2 Schema 92
+-- Author: Derek Hulley
+--
+-- Please contact support@alfresco.com if you need assistance with the upgrade.
+--
+-- No effect on non-Oracle DBs
+
+--
+-- Record script finish
+--
+DELETE FROM alf_applied_patch WHERE id = 'patch.db-V2.2-OrclBLOB';
+INSERT INTO alf_applied_patch
+ (id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
+ VALUES
+ (
+ 'patch.db-V2.2-OrclBLOB', 'Did nothing for non-Oracle DB.',
+ 0, 91, -1, 92, null, 'UNKOWN', 1, 1, 'Script completed'
+ );
\ No newline at end of file
diff --git a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/AlfrescoSchemaUpdate-Person.sql b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/AlfrescoSchemaUpdate-Person.sql
new file mode 100644
index 0000000000..ee3adbd85f
--- /dev/null
+++ b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/AlfrescoSchemaUpdate-Person.sql
@@ -0,0 +1,65 @@
+--
+-- Title: Move user name to be part of the association QNAME
+-- Database: Generic
+-- Since: V2.2 Schema 91
+-- Author: Andy Hind
+--
+-- Please contact support@alfresco.com if you need assistance with the upgrade.
+--
+-- Path was previously unused and unindexed - now that we use it, the index is required.
+
+UPDATE
+ alf_child_assoc c
+ SET
+ c.qname_ns_id =
+ (
+ SELECT
+ id
+ FROM
+ alf_namespace n
+ WHERE
+ n.uri = 'http://www.alfresco.org/model/content/1.0'),
+ c.qname_localname =
+ (
+ SELECT
+ p.string_value
+ FROM
+ alf_node_properties p
+ JOIN alf_qname q on p.qname_id = q.id
+ JOIN alf_namespace n on q.ns_id = n.id
+ WHERE
+ p.node_id = c.child_node_id AND
+ q.local_name ='userName' AND n.uri = 'http://www.alfresco.org/model/content/1.0'
+ )
+ WHERE exists
+ (
+ SELECT
+ 0
+ FROM alf_node_properties pp
+ JOIN alf_qname qq on pp.qname_id = qq.id
+ JOIN alf_namespace nn on qq.ns_id = nn.id
+ WHERE
+ pp.node_id = c.child_node_id AND
+ qq.local_name ='userName' AND
+ nn.uri = 'http://www.alfresco.org/model/content/1.0'
+ )
+;
+
+-- Validation query
+-- select count(*) from alf_child_assoc c
+-- JOIN alf_node_properties pp ON c.child_node_id = pp.node_id AND c.qname_localname = pp.string_value
+-- JOIN alf_qname qq on pp.qname_id = qq.id
+-- JOIN alf_namespace nn on qq.ns_id = nn.id AND c.qname_ns_id = nn.id
+-- WHERE qq.local_name ='userName' AND nn.uri = 'http://www.alfresco.org/model/content/1.0'
+
+--
+-- Record script finish
+--
+DELETE FROM alf_applied_patch WHERE id = 'patch.db-V2.2-Person';
+INSERT INTO alf_applied_patch
+ (id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
+ VALUES
+ (
+ 'patch.db-V2.2-Person', 'Manually executed script upgrade V2.2: Person user name also in the association qname',
+ 0, 90, -1, 91, null, 'UNKOWN', 1, 1, 'Script completed'
+ );
diff --git a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/upgrade-4-extra-indexes-and-constraints.sql b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/upgrade-4-extra-indexes-and-constraints.sql
index 38ccfa48d3..904b865aaa 100644
--- a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/upgrade-4-extra-indexes-and-constraints.sql
+++ b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/upgrade-4-extra-indexes-and-constraints.sql
@@ -57,8 +57,11 @@ CREATE INDEX idx_alf_acl_inh ON alf_access_control_list (inherits, inherits_from
CREATE INDEX fk_alf_na_qn ON alf_node_aspects (qname_id);
ALTER TABLE alf_node_aspects ADD CONSTRAINT fk_alf_na_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
-CREATE INDEX fk_alf_np_qn ON alf_node_properties (qname_id);
-ALTER TABLE alf_node_properties ADD CONSTRAINT fk_alf_np_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
+-- alf_node_properties is fully rebuilt in another script
+-- CREATE INDEX fk_alf_nprop_qn ON alf_node_properties (qname_id);
+-- ALTER TABLE alf_node_properties ADD CONSTRAINT fk_alf_nprop_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
+-- CREATE INDEX fk_alf_nprop_loc ON alf_node_properties (locale_id);
+-- ALTER TABLE alf_node_properties ADD CONSTRAINT fk_alf_nprop_loc FOREIGN KEY (locale_id) REFERENCES alf_locale (id);
CREATE INDEX fk_avm_na_qn ON avm_aspects_new (qname_id);
ALTER TABLE avm_aspects_new ADD CONSTRAINT fk_avm_na_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id);
diff --git a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/upgrade-5-mapped-fk-indexes.sql b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/upgrade-5-mapped-fk-indexes.sql
index 96c3f249f8..41e0b40b8f 100644
--- a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/upgrade-5-mapped-fk-indexes.sql
+++ b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Dialect/upgrade-5-mapped-fk-indexes.sql
@@ -54,8 +54,8 @@ CREATE INDEX fk_alf_na_tnode ON alf_node_assoc (target_node_id);
CREATE INDEX fk_alf_perm_tqn ON alf_permission (type_qname_id);
-CREATE INDEX fk_alf_n_prop ON alf_node_properties (node_id);
-CREATE INDEX fk_alf_np_attr ON alf_node_properties (attribute_value);
+-- alf_node_properties is fully rebuilt in another script
+-- CREATE INDEX fk_alf_nprop_n ON alf_node_properties (node_id);
CREATE INDEX fk_alf_ns_node ON alf_node_status (node_id);
CREATE INDEX fk_alf_ns_trans ON alf_node_status (transaction_id);
diff --git a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/AlfrescoSchemaUpdate-2.2-ACL.sql b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/AlfrescoSchemaUpdate-2.2-ACL.sql
index 4fea339df4..3fdb3faa33 100644
--- a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/AlfrescoSchemaUpdate-2.2-ACL.sql
+++ b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/AlfrescoSchemaUpdate-2.2-ACL.sql
@@ -112,17 +112,18 @@ ALTER TABLE alf_authority_alias ADD CONSTRAINT fk_alf_autha_aut FOREIGN KEY (aut
-- Tidy up unused cols on ace table and add the FK constraint back
-- finish take out of ACL_ID
-ALTER TABLE alf_access_control_entry DROP INDEX FKFFF41F99B9553F6C, DROP FOREIGN KEY FKFFF41F99B9553F6C;
-ALTER TABLE alf_access_control_entry DROP INDEX FKFFF41F9960601995, DROP FOREIGN KEY FKFFF41F9960601995;
-ALTER TABLE alf_access_control_entry DROP COLUMN acl_id, DROP COLUMN authority_id;
ALTER TABLE alf_access_control_entry
- CHANGE auth_id authority_id BIGINT NOT NULL;
-CREATE INDEX fk_alf_ace_auth ON alf_access_control_entry (authority_id);
-ALTER TABLE alf_access_control_entry ADD CONSTRAINT fk_alf_ace_auth FOREIGN KEY (authority_id) REFERENCES alf_authority (id);
-CREATE INDEX fk_alf_ace_perm ON alf_access_control_entry (permission_id);
-ALTER TABLE alf_access_control_entry ADD CONSTRAINT fk_alf_ace_perm FOREIGN KEY (permission_id) REFERENCES alf_permission (id);
-CREATE INDEX fk_alf_ace_ctx ON alf_access_control_entry (context_id);
-ALTER TABLE alf_access_control_entry ADD CONSTRAINT fk_alf_ace_ctx FOREIGN KEY (context_id) REFERENCES alf_ace_context (id);
+ DROP INDEX FKFFF41F99B9553F6C, DROP FOREIGN KEY FKFFF41F99B9553F6C,
+ DROP INDEX FKFFF41F9960601995, DROP FOREIGN KEY FKFFF41F9960601995,
+ DROP COLUMN acl_id, DROP COLUMN authority_id,
+ CHANGE auth_id authority_id BIGINT NOT NULL,
+ ADD INDEX fk_alf_ace_auth (authority_id),
+ ADD CONSTRAINT fk_alf_ace_auth FOREIGN KEY (authority_id) REFERENCES alf_authority (id),
+ ADD INDEX fk_alf_ace_perm (permission_id),
+ ADD CONSTRAINT fk_alf_ace_perm FOREIGN KEY (permission_id) REFERENCES alf_permission (id),
+ ADD INDEX fk_alf_ace_ctx (context_id),
+ ADD CONSTRAINT fk_alf_ace_ctx FOREIGN KEY (context_id) REFERENCES alf_ace_context (id)
+;
CREATE TABLE alf_tmp_min_ace (
@@ -135,7 +136,17 @@ CREATE TABLE alf_tmp_min_ace (
) ENGINE=InnoDB;
INSERT INTO alf_tmp_min_ace (min, permission_id, authority_id, allowed, applies)
- SELECT min(ace1.id), ace1.permission_id, ace1.authority_id, ace1.allowed, ace1.applies FROM alf_access_control_entry ace1 group by ace1.permission_id, ace1.authority_id, ace1.allowed, ace1.applies;
+ SELECT
+ min(ace1.id),
+ ace1.permission_id,
+ ace1.authority_id,
+ ace1.allowed,
+ ace1.applies
+ FROM
+ alf_access_control_entry ace1
+ GROUP BY
+ ace1.permission_id, ace1.authority_id, ace1.allowed, ace1.applies
+;
-- Update members to point to the first use of an access control entry
diff --git a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/upgrade-1-fulldm.sql b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/upgrade-1-fulldm.sql
new file mode 100644
index 0000000000..3296e91af0
--- /dev/null
+++ b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/upgrade-1-fulldm.sql
@@ -0,0 +1,815 @@
+--
+-- Title: Apply all DM schema modifications
+-- Database: MySQL
+-- Since: V2.2 Schema 91
+-- Author: Derek Hulley
+--
+-- In order to streamline the upgrade, all modifications to large tables need to
+-- be handled in as few steps as possible. This usually involves as few ALTER TABLE
+-- statements as possible. The general approach is:
+-- Create a table with the correct structure, including indexes and CONSTRAINTs
+-- Copy pristine data into the new table
+-- Drop the old table
+-- Rename the new table
+--
+-- Please contact support@alfresco.com if you need assistance with the upgrade.
+--
+
+---------------------------------
+-- Build Namespaces and QNames --
+---------------------------------
+
+CREATE TABLE alf_namespace
+(
+ id BIGINT NOT NULL AUTO_INCREMENT,
+ version BIGINT NOT NULL,
+ uri VARCHAR(100) NOT NULL,
+ PRIMARY KEY (id),
+ UNIQUE (uri)
+) ENGINE=InnoDB;
+
+CREATE TABLE alf_qname
+(
+ id BIGINT NOT NULL AUTO_INCREMENT,
+ version BIGINT NOT NULL,
+ ns_id BIGINT NOT NULL,
+ local_name VARCHAR(200) NOT NULL,
+ INDEX fk_alf_qname_ns (ns_id),
+ CONSTRAINT fk_alf_qname_ns FOREIGN KEY (ns_id) REFERENCES alf_namespace (id),
+ PRIMARY KEY (id),
+ UNIQUE (ns_id, local_name)
+) ENGINE=InnoDB;
+
+-- Create temporary table for dynamic (child) QNames
+CREATE TABLE t_qnames_dyn
+(
+ qname VARCHAR(100) NOT NULL,
+ namespace VARCHAR(100),
+ namespace_id BIGINT,
+ local_name VARCHAR(100),
+ INDEX tidx_qnd_qn (qname),
+ INDEX tidx_qnd_ns (namespace)
+) ENGINE=InnoDB;
+
+-- Populate the table with the child association paths
+-- Query OK, 415312 rows affected (1 min 11.91 sec)
+INSERT INTO t_qnames_dyn (qname)
+(
+ SELECT distinct(qname) FROM alf_child_assoc
+);
+
+-- Extract the Namespace
+-- Query OK, 415312 rows affected (20.03 sec)
+UPDATE t_qnames_dyn SET namespace = CONCAT('FILLER-', SUBSTR(SUBSTRING_INDEX(qname, '}', 1), 2));
+
+-- Extract the Localname
+-- Query OK, 415312 rows affected (16.22 sec)
+UPDATE t_qnames_dyn SET local_name = SUBSTRING_INDEX(qname, '}', -1);
+
+-- Move the namespaces to their new home
+-- Query OK, 4 rows affected (34.59 sec)
+INSERT INTO alf_namespace (uri, version)
+(
+ SELECT
+ distinct(x.namespace), 1
+ FROM
+ (
+ SELECT t.namespace, n.uri FROM t_qnames_dyn t LEFT OUTER JOIN alf_namespace n ON (n.uri = t.namespace)
+ ) x
+ WHERE
+ x.uri IS NULL
+);
+
+-- Record the new namespace IDs
+-- Query OK, 415312 rows affected (10.41 sec)
+UPDATE t_qnames_dyn t SET t.namespace_id = (SELECT ns.id FROM alf_namespace ns WHERE ns.uri = t.namespace);
+
+-- Recoup some storage
+ALTER TABLE t_qnames_dyn DROP COLUMN namespace;
+OPTIMIZE TABLE t_qnames_dyn;
+
+-- Create temporary table to hold static QNames
+CREATE TABLE t_qnames
+(
+ qname VARCHAR(200) NOT NULL,
+ namespace VARCHAR(100),
+ localname VARCHAR(100),
+ qname_id BIGINT,
+ INDEX tidx_tqn_qn (qname),
+ INDEX tidx_tqn_ns (namespace),
+ INDEX tidx_tqn_ln (localname)
+) ENGINE=InnoDB;
+
+-- Populate the table with all known static QNames
+INSERT INTO t_qnames (qname)
+(
+ SELECT DISTINCT s.type_qname FROM alf_node s LEFT OUTER JOIN t_qnames t ON (s.type_qname = t.qname) WHERE t.qname IS NULL
+);
+INSERT INTO t_qnames (qname)
+(
+ SELECT DISTINCT s.qname FROM alf_node_aspects s LEFT OUTER JOIN t_qnames t ON (s.qname = t.qname) WHERE t.qname IS NULL
+);
+INSERT INTO t_qnames (qname)
+(
+ SELECT DISTINCT s.qname FROM alf_node_properties s LEFT OUTER JOIN t_qnames t ON (s.qname = t.qname) WHERE t.qname IS NULL
+);
+INSERT INTO t_qnames (qname)
+(
+ SELECT DISTINCT s.qname FROM avm_aspects s LEFT OUTER JOIN t_qnames t ON (s.qname = t.qname) WHERE t.qname IS NULL
+);
+INSERT INTO t_qnames (qname)
+(
+ SELECT DISTINCT s.name FROM avm_aspects_new s LEFT OUTER JOIN t_qnames t ON (s.name = t.qname) WHERE t.qname IS NULL
+);
+INSERT INTO t_qnames (qname)
+(
+ SELECT DISTINCT s.qname FROM avm_node_properties s LEFT OUTER JOIN t_qnames t ON (s.qname = t.qname) WHERE t.qname IS NULL
+);
+INSERT INTO t_qnames (qname)
+(
+ SELECT DISTINCT s.qname FROM avm_node_properties_new s LEFT OUTER JOIN t_qnames t ON (s.qname = t.qname) WHERE t.qname IS NULL
+);
+INSERT INTO t_qnames (qname)
+(
+ SELECT DISTINCT s.qname FROM avm_store_properties s LEFT OUTER JOIN t_qnames t ON (s.qname = t.qname) WHERE t.qname IS NULL
+);
+INSERT INTO t_qnames (qname)
+(
+ SELECT DISTINCT s.type_qname FROM alf_node_assoc s LEFT OUTER JOIN t_qnames t ON (s.type_qname = t.qname) WHERE t.qname IS NULL
+);
+INSERT INTO t_qnames (qname)
+(
+ SELECT DISTINCT s.type_qname FROM alf_child_assoc s LEFT OUTER JOIN t_qnames t ON (s.type_qname = t.qname) WHERE t.qname IS NULL
+);
+INSERT INTO t_qnames (qname)
+(
+ SELECT DISTINCT s.type_qname FROM alf_permission s LEFT OUTER JOIN t_qnames t ON (s.type_qname = t.qname) WHERE t.qname IS NULL
+);
+
+-- Extract the namespace and localnames from the QNames
+UPDATE t_qnames SET namespace = CONCAT('FILLER-', SUBSTR(SUBSTRING_INDEX(qname, '}', 1), 2));
+UPDATE t_qnames SET localname = SUBSTRING_INDEX(qname, '}', -1);
+
+-- Move the Namespaces to their new home
+INSERT INTO alf_namespace (uri, version)
+(
+ SELECT
+ distinct(x.namespace), 1
+ FROM
+ (
+ SELECT t.namespace, n.uri FROM t_qnames t LEFT OUTER JOIN alf_namespace n ON (n.uri = t.namespace)
+ ) x
+ WHERE
+ x.uri IS NULL
+);
+
+-- Move the Localnames to their new home
+INSERT INTO alf_qname (ns_id, local_name, version)
+(
+ SELECT
+ x.ns_id, x.t_localname, 1
+ FROM
+ (
+ SELECT n.id AS ns_id, t.localname AS t_localname, q.local_name AS q_localname
+ FROM t_qnames t
+ JOIN alf_namespace n ON (n.uri = t.namespace)
+ LEFT OUTER JOIN alf_qname q ON (q.local_name = t.localname)
+ ) x
+ WHERE
+ q_localname IS NULL
+ GROUP BY x.ns_id, x.t_localname
+);
+
+-- Record the new qname IDs
+UPDATE t_qnames t SET t.qname_id =
+(
+ SELECT q.id FROM alf_qname q
+ JOIN alf_namespace ns ON (q.ns_id = ns.id)
+ WHERE ns.uri = t.namespace AND q.local_name = t.localname
+);
+
+------------------------------
+-- Populate the Permissions --
+------------------------------
+
+-- This is a small table so we change it in place
+ALTER TABLE alf_permission DROP INDEX type_qname;
+ALTER TABLE alf_permission ADD COLUMN type_qname_id BIGINT NULL AFTER id;
+UPDATE alf_permission p SET p.type_qname_id =
+(
+ SELECT q.id
+ FROM alf_qname q
+ JOIN alf_namespace ns ON (q.ns_id = ns.id)
+ WHERE CONCAT('{', SUBSTR(ns.uri, 8), '}', q.local_name) = p.type_qname
+);
+ALTER TABLE alf_permission DROP COLUMN type_qname;
+ALTER TABLE alf_permission MODIFY COLUMN type_qname_id BIGINT NOT NULL AFTER id;
+ALTER TABLE alf_permission ADD UNIQUE (type_qname_id, name);
+
+---------------------
+-- Build new Store --
+---------------------
+
+CREATE TABLE t_alf_store
+(
+ id BIGINT NOT NULL AUTO_INCREMENT,
+ version BIGINT NOT NULL,
+ protocol VARCHAR(50) NOT NULL,
+ identifier VARCHAR(100) NOT NULL,
+ root_node_id BIGINT,
+ PRIMARY KEY (id),
+ UNIQUE (protocol, identifier)
+) TYPE=InnoDB;
+
+CREATE TABLE t_alf_node (
+ id BIGINT NOT NULL AUTO_INCREMENT,
+ version BIGINT NOT NULL,
+ store_id BIGINT NOT NULL,
+ uuid VARCHAR(36) NOT NULL,
+ transaction_id BIGINT NOT NULL,
+ node_deleted bit NOT NULL,
+ type_qname_id BIGINT NOT NULL,
+ acl_id BIGINT,
+ audit_creator VARCHAR(255) NOT NULL,
+ audit_created VARCHAR(30) NOT NULL,
+ audit_modifier VARCHAR(255) NOT NULL,
+ audit_modified VARCHAR(30) NOT NULL,
+ audit_accessed VARCHAR(30),
+ INDEX idx_alf_node_del (node_deleted),
+ INDEX fk_alf_node_acl (acl_id),
+ INDEX fk_alf_node_tqn (type_qname_id),
+ INDEX fk_alf_node_txn (transaction_id),
+ INDEX fk_alf_node_store (store_id),
+ CONSTRAINT fk_alf_node_acl FOREIGN KEY (acl_id) REFERENCES alf_access_control_list (id),
+ CONSTRAINT fk_alf_node_tqn FOREIGN KEY (type_qname_id) REFERENCES alf_qname (id),
+ CONSTRAINT fk_alf_node_txn FOREIGN KEY (transaction_id) REFERENCES alf_transaction (id),
+ CONSTRAINT fk_alf_node_store FOREIGN KEY (store_id) REFERENCES t_alf_store (id),
+ PRIMARY KEY (id),
+ UNIQUE (store_id, uuid)
+) TYPE=InnoDB;
+
+-- Fill the store table
+INSERT INTO t_alf_store (version, protocol, identifier, root_node_id)
+ SELECT 1, protocol, identifier, root_node_id FROM alf_store
+;
+
+----------------------------
+-- Populate the new nodes --
+----------------------------
+
+-- Query OK, 830222 rows affected (2 min 18.96 sec)
+INSERT INTO t_alf_node
+ (
+ id, version, store_id, uuid, transaction_id, node_deleted, type_qname_id,
+ audit_creator, audit_created, audit_modifier, audit_modified
+ )
+ SELECT
+ n.id, 1, s.id, n.uuid, nstat.transaction_id, false, q.qname_id,
+ 'unknown', '2008-09-17T02:23:37.212+01:00', 'unkown', '2008-09-17T02:23:37.212+01:00'
+ FROM
+ t_qnames q
+ JOIN alf_node n ON (q.qname = n.type_qname)
+ JOIN alf_node_status nstat ON (nstat.node_id = n.id)
+ JOIN t_alf_store s ON (s.protocol = nstat.protocol AND s.identifier = nstat.identifier)
+;
+
+-- Hook the store up to the root node
+ALTER TABLE t_alf_store
+ ADD INDEX fk_alf_store_root (root_node_id),
+ ADD CONSTRAINT fk_alf_store_root FOREIGN KEY (root_node_id) REFERENCES t_alf_node (id)
+;
+
+-------------------------------
+-- Populate the Child Assocs --
+-------------------------------
+
+CREATE TABLE t_alf_child_assoc
+(
+ id BIGINT NOT NULL AUTO_INCREMENT,
+ version BIGINT NOT NULL,
+ parent_node_id BIGINT NOT NULL,
+ type_qname_id BIGINT NOT NULL,
+ child_node_name VARCHAR(50) NOT NULL,
+ child_node_name_crc BIGINT NOT NULL,
+ child_node_id BIGINT NOT NULL,
+ qname_ns_id BIGINT NOT NULL,
+ qname_localname VARCHAR(100) NOT NULL,
+ is_primary BIT,
+ assoc_index INTEGER,
+ INDEX idx_alf_cass_qnln (qname_localname),
+ INDEX fk_alf_cass_pnode (parent_node_id),
+ INDEX fk_alf_cass_cnode (child_node_id),
+ INDEX fk_alf_cass_tqn (type_qname_id),
+ INDEX fk_alf_cass_qnns (qname_ns_id),
+ CONSTRAINT fk_alf_cass_pnode foreign key (parent_node_id) REFERENCES t_alf_node (id),
+ CONSTRAINT fk_alf_cass_cnode foreign key (child_node_id) REFERENCES t_alf_node (id),
+ CONSTRAINT fk_alf_cass_tqn foreign key (type_qname_id) REFERENCES alf_qname (id),
+ CONSTRAINT fk_alf_cass_qnns foreign key (qname_ns_id) REFERENCES alf_namespace (id),
+ PRIMARY KEY (id),
+ UNIQUE (parent_node_id, type_qname_id, child_node_name, child_node_name_crc)
+) TYPE=InnoDB;
+
+-- Query OK, 830217 rows affected (11 min 59.10 sec)
+INSERT INTO t_alf_child_assoc
+ (
+ id, version,
+ parent_node_id, child_node_id,
+ child_node_name, child_node_name_crc,
+ type_qname_id,
+ qname_ns_id, qname_localname,
+ is_primary, assoc_index
+ )
+ SELECT
+ ca.id, 1,
+ ca.parent_node_id, ca.child_node_id,
+ ca.child_node_name, child_node_name_crc,
+ tqn.qname_id,
+ tqndyn.namespace_id, tqndyn.local_name,
+ ca.is_primary, ca.assoc_index
+ FROM
+ alf_child_assoc ca
+ JOIN t_qnames_dyn tqndyn ON (ca.qname = tqndyn.qname)
+ JOIN t_qnames tqn ON (ca.type_qname = tqn.qname)
+;
+
+-- Clean up
+DROP TABLE t_qnames_dyn;
+DROP TABLE alf_child_assoc;
+ALTER TABLE t_alf_child_assoc RENAME TO alf_child_assoc;
+
+------------------------------
+-- Populate the Node Assocs --
+------------------------------
+
+CREATE TABLE t_alf_node_assoc
+(
+ id BIGINT NOT NULL AUTO_INCREMENT,
+ version BIGINT NOT NULL,
+ source_node_id BIGINT NOT NULL,
+ target_node_id BIGINT NOT NULL,
+ type_qname_id BIGINT NOT NULL,
+ INDEX fk_alf_nass_snode (source_node_id),
+ INDEX fk_alf_nass_tnode (target_node_id),
+ INDEX fk_alf_nass_tqn (type_qname_id),
+ CONSTRAINT fk_alf_nass_snode FOREIGN KEY (source_node_id) REFERENCES t_alf_node (id),
+ CONSTRAINT fk_alf_nass_tnode FOREIGN KEY (target_node_id) REFERENCES t_alf_node (id),
+ CONSTRAINT fk_alf_nass_tqn FOREIGN KEY (type_qname_id) REFERENCES alf_qname (id),
+ PRIMARY KEY (id),
+ UNIQUE (source_node_id, target_node_id, type_qname_id)
+) TYPE=InnoDB;
+
+INSERT INTO t_alf_node_assoc
+ (
+ id, version,
+ source_node_id, target_node_id,
+ type_qname_id
+ )
+ SELECT
+ na.id, 1,
+ na.source_node_id, na.source_node_id,
+ tqn.qname_id
+ FROM
+ alf_node_assoc na
+ JOIN t_qnames tqn ON (na.type_qname = tqn.qname)
+;
+
+-- Clean up
+DROP TABLE alf_node_assoc;
+ALTER TABLE t_alf_node_assoc RENAME TO alf_node_assoc;
+
+-------------------------------
+-- Populate the Node Aspects --
+-------------------------------
+
+CREATE TABLE t_alf_node_aspects
+(
+ node_id BIGINT NOT NULL,
+ qname_id BIGINT NOT NULL,
+ INDEX fk_alf_nasp_n (node_id),
+ INDEX fk_alf_nasp_qn (qname_id),
+ CONSTRAINT fk_alf_nasp_n FOREIGN KEY (node_id) REFERENCES t_alf_node (id),
+ CONSTRAINT fk_alf_nasp_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id),
+ PRIMARY KEY (node_id, qname_id)
+) TYPE=InnoDB;
+
+-- Note the omission of sys:referencable and cm:auditable. These are implicit.
+-- Query OK, 415051 rows affected (17.59 sec)
+INSERT INTO t_alf_node_aspects
+ (
+ node_id, qname_id
+ )
+ SELECT
+ na.node_id,
+ tqn.qname_id
+ FROM
+ alf_node_aspects na
+ JOIN t_qnames tqn ON (na.qname = tqn.qname)
+ WHERE
+ tqn.qname NOT IN
+ (
+ '{http://www.alfresco.org/model/system/1.0}referenceable',
+ '{http://www.alfresco.org/model/content/1.0}auditable'
+ )
+;
+
+-- Clean up
+DROP TABLE alf_node_aspects;
+ALTER TABLE t_alf_node_aspects RENAME TO alf_node_aspects;
+
+-----------------------------------
+-- Populate the AVM Node Aspects --
+-----------------------------------
+
+CREATE TABLE t_avm_aspects
+(
+ node_id BIGINT NOT NULL,
+ qname_id BIGINT NOT NULL,
+ INDEX fk_avm_nasp_n (node_id),
+ INDEX fk_avm_nasp_qn (qname_id),
+ CONSTRAINT fk_avm_nasp_n FOREIGN KEY (node_id) REFERENCES avm_nodes (id),
+ CONSTRAINT fk_avm_nasp_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id),
+ PRIMARY KEY (node_id, qname_id)
+) TYPE=InnoDB;
+
+INSERT INTO t_avm_aspects
+ (
+ node_id, qname_id
+ )
+ SELECT
+ aspects_old.node_id,
+ tqn.qname_id
+ FROM
+ avm_aspects aspects_old
+ JOIN t_qnames tqn ON (aspects_old.qname = tqn.qname)
+;
+INSERT INTO t_avm_aspects
+ (
+ node_id, qname_id
+ )
+ SELECT
+ anew.id,
+ tqn.qname_id
+ FROM
+ avm_aspects_new anew
+ JOIN t_qnames tqn ON (anew.name = tqn.qname)
+ LEFT JOIN avm_aspects aold ON (anew.id = aold.node_id AND anew.name = aold.qname)
+ WHERE
+ aold.id IS NULL
+;
+
+-- Clean up
+DROP TABLE avm_aspects;
+DROP TABLE avm_aspects_new;
+ALTER TABLE t_avm_aspects RENAME TO avm_aspects;
+
+------------------------------------
+-- Migrate Sundry Property Tables --
+------------------------------------
+
+-- Create temporary mapping for property types
+CREATE TABLE t_prop_types
+(
+ type_name VARCHAR(15) NOT NULL,
+ type_id INTEGER NOT NULL,
+ PRIMARY KEY (type_name)
+);
+INSERT INTO t_prop_types values ('NULL', 0);
+INSERT INTO t_prop_types values ('BOOLEAN', 1);
+INSERT INTO t_prop_types values ('INTEGER', 2);
+INSERT INTO t_prop_types values ('LONG', 3);
+INSERT INTO t_prop_types values ('FLOAT', 4);
+INSERT INTO t_prop_types values ('DOUBLE', 5);
+INSERT INTO t_prop_types values ('STRING', 6);
+INSERT INTO t_prop_types values ('DATE', 7);
+INSERT INTO t_prop_types values ('DB_ATTRIBUTE', 8);
+INSERT INTO t_prop_types values ('SERIALIZABLE', 9);
+INSERT INTO t_prop_types values ('MLTEXT', 10);
+INSERT INTO t_prop_types values ('CONTENT', 11);
+INSERT INTO t_prop_types values ('NODEREF', 12);
+INSERT INTO t_prop_types values ('CHILD_ASSOC_REF', 13);
+INSERT INTO t_prop_types values ('ASSOC_REF', 14);
+INSERT INTO t_prop_types values ('QNAME', 15);
+INSERT INTO t_prop_types values ('PATH', 16);
+INSERT INTO t_prop_types values ('LOCALE', 17);
+INSERT INTO t_prop_types values ('VERSION_NUMBER', 18);
+
+-- Modify the avm_node_properties_new table
+CREATE TABLE t_avm_node_properties_new
+(
+ node_id BIGINT NOT NULL,
+ actual_type_n INTEGER NOT NULL,
+ persisted_type_n INTEGER NOT NULL,
+ multi_valued BIT NOT NULL,
+ boolean_value BIT,
+ long_value BIGINT,
+ float_value FLOAT,
+ double_value DOUBLE PRECISION,
+ string_value TEXT,
+ serializable_value BLOB,
+ qname_id BIGINT NOT NULL,
+ INDEX fk_avm_nprop_n (node_id),
+ INDEX fk_avm_nprop_qn (qname_id),
+ CONSTRAINT fk_avm_nprop_n FOREIGN KEY (node_id) REFERENCES avm_nodes (id),
+ CONSTRAINT fk_avm_nprop_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id),
+ PRIMARY KEY (node_id, qname_id)
+) TYPE=InnoDB;
+INSERT INTO t_avm_node_properties_new
+ (
+ node_id,
+ qname_id,
+ actual_type_n, persisted_type_n,
+ multi_valued, boolean_value, long_value, float_value, double_value, string_value, serializable_value
+ )
+ SELECT
+ p.node_id,
+ tqn.qname_id,
+ ptypes_actual.type_id, ptypes_persisted.type_id,
+ p.multi_valued, p.boolean_value, p.long_value, p.float_value, p.double_value, p.string_value, p.serializable_value
+ FROM
+ avm_node_properties_new p
+ JOIN t_qnames tqn ON (p.qname = tqn.qname)
+ JOIN t_prop_types ptypes_actual ON (ptypes_actual.type_name = p.actual_type)
+ JOIN t_prop_types ptypes_persisted ON (ptypes_persisted.type_name = p.persisted_type)
+;
+DROP TABLE avm_node_properties_new;
+ALTER TABLE t_avm_node_properties_new RENAME TO avm_node_properties_new;
+
+-- Modify the avm_store_properties table
+CREATE TABLE t_avm_store_properties
+(
+ id BIGINT NOT NULL AUTO_INCREMENT,
+ avm_store_id BIGINT,
+ qname_id BIGINT NOT NULL,
+ actual_type_n integer NOT NULL,
+ persisted_type_n integer NOT NULL,
+ multi_valued bit NOT NULL,
+ boolean_value bit,
+ long_value BIGINT,
+ float_value float,
+ double_value DOUBLE PRECISION,
+ string_value TEXT,
+ serializable_value blob,
+ INDEX fk_avm_sprop_store (avm_store_id),
+ INDEX fk_avm_sprop_qname (qname_id),
+ CONSTRAINT fk_avm_sprop_store FOREIGN KEY (avm_store_id) REFERENCES avm_stores (id),
+ CONSTRAINT fk_avm_sprop_qname FOREIGN KEY (qname_id) REFERENCES alf_qname (id),
+ PRIMARY KEY (id)
+) TYPE=InnoDB;
+INSERT INTO t_avm_store_properties
+ (
+ avm_store_id,
+ qname_id,
+ actual_type_n, persisted_type_n,
+ multi_valued, boolean_value, long_value, float_value, double_value, string_value, serializable_value
+ )
+ SELECT
+ p.avm_store_id,
+ tqn.qname_id,
+ ptypes_actual.type_id, ptypes_persisted.type_id,
+ p.multi_valued, p.boolean_value, p.long_value, p.float_value, p.double_value, p.string_value, p.serializable_value
+ FROM
+ avm_store_properties p
+ JOIN t_qnames tqn ON (p.qname = tqn.qname)
+ JOIN t_prop_types ptypes_actual ON (ptypes_actual.type_name = p.actual_type)
+ JOIN t_prop_types ptypes_persisted ON (ptypes_persisted.type_name = p.persisted_type)
+;
+DROP TABLE avm_store_properties;
+ALTER TABLE t_avm_store_properties RENAME TO avm_store_properties;
+
+-- Modify the avm_node_properties table
+-- This table is old, so the data will be extracted and it will be replaced
+CREATE TABLE t_avm_node_properties
+(
+ node_id BIGINT NOT NULL,
+ actual_type_n INTEGER NOT NULL,
+ persisted_type_n INTEGER NOT NULL,
+ multi_valued BIT NOT NULL,
+ boolean_value BIT,
+ long_value BIGINT,
+ float_value FLOAT,
+ double_value DOUBLE PRECISION,
+ string_value TEXT,
+ serializable_value BLOB,
+ qname_id BIGINT NOT NULL,
+ PRIMARY KEY (node_id, qname_id)
+) TYPE=InnoDB;
+INSERT INTO t_avm_node_properties
+ (
+ node_id,
+ qname_id,
+ actual_type_n, persisted_type_n,
+ multi_valued, boolean_value, long_value, float_value, double_value, string_value, serializable_value
+ )
+ SELECT
+ p.node_id,
+ tqn.qname_id,
+ ptypes_actual.type_id, ptypes_persisted.type_id,
+ p.multi_valued, p.boolean_value, p.long_value, p.float_value, p.double_value, p.string_value, p.serializable_value
+ FROM
+ avm_node_properties p
+ JOIN t_qnames tqn ON (p.qname = tqn.qname)
+ JOIN t_prop_types ptypes_actual ON (ptypes_actual.type_name = p.actual_type)
+ JOIN t_prop_types ptypes_persisted ON (ptypes_persisted.type_name = p.persisted_type)
+;
+-- Copy values to new table. Duplicates are avoided just in case.
+INSERT INTO avm_node_properties_new
+ (
+ node_id,
+ qname_id,
+ actual_type_n, persisted_type_n,
+ multi_valued, boolean_value, long_value, float_value, double_value, string_value, serializable_value
+ )
+ SELECT
+ p.node_id,
+ p.qname_id,
+ p.actual_type_n, p.persisted_type_n,
+ p.multi_valued, p.boolean_value, p.long_value, p.float_value, p.double_value, p.string_value, p.serializable_value
+ FROM
+ t_avm_node_properties p
+ LEFT OUTER JOIN avm_node_properties_new pnew ON (pnew.node_id = p.node_id AND pnew.qname_id = p.qname_id)
+ WHERE
+ pnew.qname_id is null
+;
+DROP TABLE t_avm_node_properties;
+DROP TABLE avm_node_properties;
+ALTER TABLE avm_node_properties_new RENAME TO avm_node_properties;
+
+
+-------------------
+-- Build Locales --
+-------------------
+
+CREATE TABLE alf_locale
+(
+ id BIGINT NOT NULL AUTO_INCREMENT,
+ version BIGINT NOT NULL DEFAULT 1,
+ locale_str VARCHAR(20) NOT NULL,
+ PRIMARY KEY (id),
+ UNIQUE (locale_str)
+) TYPE=InnoDB;
+
+INSERT INTO alf_locale (id, locale_str) VALUES (1, '.default');
+
+-- Locales come from the attribute table which was used to support MLText persistence
+-- Query OK, 0 rows affected (17.22 sec)
+INSERT INTO alf_locale (locale_str)
+ SELECT DISTINCT(ma.mkey)
+ FROM alf_node_properties np
+ JOIN alf_attributes a1 ON (np.attribute_value = a1.id)
+ JOIN alf_map_attribute_entries ma ON (ma.map_id = a1.id)
+;
+
+---------------------------------
+-- Migrate ADM Property Tables --
+---------------------------------
+
+CREATE TABLE t_alf_node_properties
+(
+ node_id BIGINT NOT NULL,
+ qname_id BIGINT NOT NULL,
+ locale_id BIGINT NOT NULL,
+ list_index smallint NOT NULL,
+ actual_type_n INTEGER NOT NULL,
+ persisted_type_n INTEGER NOT NULL,
+ boolean_value BIT,
+ long_value BIGINT,
+ float_value FLOAT,
+ double_value DOUBLE PRECISION,
+ string_value TEXT,
+ serializable_value BLOB,
+ INDEX fk_alf_nprop_n (node_id),
+ INDEX fk_alf_nprop_qn (qname_id),
+ INDEX fk_alf_nprop_loc (locale_id),
+ CONSTRAINT fk_alf_nprop_n FOREIGN KEY (node_id) REFERENCES t_alf_node (id),
+ CONSTRAINT fk_alf_nprop_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id),
+ CONSTRAINT fk_alf_nprop_loc FOREIGN KEY (locale_id) REFERENCES alf_locale (id),
+ PRIMARY KEY (node_id, qname_id, list_index, locale_id)
+) TYPE=InnoDB;
+
+-- Copy all simple values over
+-- Query OK, 2905008 rows affected (7 min 11.49 sec)
+INSERT INTO t_alf_node_properties
+ (
+ node_id, qname_id, list_index, locale_id,
+ actual_type_n, persisted_type_n,
+ boolean_value, long_value, float_value, double_value,
+ string_value,
+ serializable_value
+ )
+ SELECT
+ np.node_id, tqn.qname_id, -1, 1,
+ ptypes_actual.type_id, ptypes_persisted.type_id,
+ np.boolean_value, np.long_value, np.float_value, np.double_value,
+ np.string_value,
+ np.serializable_value
+ FROM
+ alf_node_properties np
+ JOIN t_qnames tqn ON (np.qname = tqn.qname)
+ JOIN t_prop_types ptypes_actual ON (ptypes_actual.type_name = np.actual_type)
+ JOIN t_prop_types ptypes_persisted ON (ptypes_persisted.type_name = np.persisted_type)
+ WHERE
+ np.attribute_value is null AND
+ tqn.qname NOT IN
+ (
+ '{http://www.alfresco.org/model/content/1.0}created',
+ '{http://www.alfresco.org/model/content/1.0}creator',
+ '{http://www.alfresco.org/model/content/1.0}modified',
+ '{http://www.alfresco.org/model/content/1.0}modifier'
+ )
+;
+
+-- Copy all MLText values over
+INSERT INTO t_alf_node_properties
+ (
+ node_id, qname_id, list_index, locale_id,
+ actual_type_n, persisted_type_n,
+ boolean_value, long_value, float_value, double_value,
+ string_value,
+ serializable_value
+ )
+ SELECT
+ np.node_id, tqn.qname_id, -1, loc.id,
+ -1, 0,
+ FALSE, 0, 0, 0,
+ a2.string_value,
+ a2.serializable_value
+ FROM
+ alf_node_properties np
+ JOIN t_qnames tqn ON (np.qname = tqn.qname)
+ JOIN alf_attributes a1 ON (np.attribute_value = a1.id)
+ JOIN alf_map_attribute_entries ma ON (ma.map_id = a1.id)
+ JOIN alf_locale loc ON (ma.mkey = loc.locale_str)
+ JOIN alf_attributes a2 ON (ma.attribute_id = a2.id)
+; -- (OPTIONAL)
+UPDATE t_alf_node_properties
+ SET actual_type_n = 6, persisted_type_n = 6, serializable_value = NULL
+ WHERE actual_type_n = -1 AND string_value IS NOT NULL
+;
+UPDATE t_alf_node_properties
+ SET actual_type_n = 9, persisted_type_n = 9
+ WHERE actual_type_n = -1 AND serializable_value IS NOT NULL
+;
+
+-- Delete the node properties and move the fixed values over
+DROP TABLE alf_node_properties;
+ALTER TABLE t_alf_node_properties RENAME TO alf_node_properties;
+
+CREATE TABLE t_del_attributes
+(
+ id BIGINT NOT NULL,
+ PRIMARY KEY (id)
+);
+INSERT INTO t_del_attributes
+ SELECT id FROM alf_attributes WHERE type = 'M'
+;
+DELETE t_del_attributes
+ FROM t_del_attributes
+ JOIN alf_map_attribute_entries ma ON (ma.attribute_id = t_del_attributes.id)
+;
+DELETE t_del_attributes
+ FROM t_del_attributes
+ JOIN alf_list_attribute_entries la ON (la.attribute_id = t_del_attributes.id)
+;
+DELETE t_del_attributes
+ FROM t_del_attributes
+ JOIN alf_global_attributes ga ON (ga.attribute = t_del_attributes.id)
+;
+INSERT INTO t_del_attributes
+ SELECT a.id FROM t_del_attributes t
+ JOIN alf_map_attribute_entries ma ON (ma.map_id = t.id)
+ JOIN alf_attributes a ON (ma.attribute_id = a.id)
+;
+DELETE alf_map_attribute_entries
+ FROM alf_map_attribute_entries
+ JOIN t_del_attributes t ON (alf_map_attribute_entries.map_id = t.id)
+;
+DELETE alf_attributes
+ FROM alf_attributes
+ JOIN t_del_attributes t ON (alf_attributes.id = t.id)
+;
+DROP TABLE t_del_attributes;
+
+--------------------
+-- Final clean up --
+--------------------
+DROP TABLE t_qnames;
+DROP TABLE t_prop_types;
+DROP TABLE alf_node_status;
+ALTER TABLE alf_store DROP INDEX FKBD4FF53D22DBA5BA, DROP FOREIGN KEY FKBD4FF53D22DBA5BA; -- (OPTIONAL)
+ALTER TABLE alf_store DROP FOREIGN KEY alf_store_root; -- (OPTIONAL)
+DROP TABLE alf_node;
+ALTER TABLE t_alf_node RENAME TO alf_node;
+DROP TABLE alf_store;
+ALTER TABLE t_alf_store RENAME TO alf_store;
+
+--
+-- Record script finish
+--
+DELETE FROM alf_applied_patch WHERE id = 'patch.db-V2.2-1-FullDmUpgrade';
+INSERT INTO alf_applied_patch
+ (id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
+ VALUES
+ (
+ 'patch.db-V2.2-1-FullDmUpgrade', 'Manually executed script upgrade V2.2: ADM ',
+ 0, 85, -1, 91, null, 'UNKOWN', 1, 1, 'Script completed'
+ );
diff --git a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/upgrade-1-mltext.sql b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/upgrade-1-mltext.sql
new file mode 100644
index 0000000000..3fd6b5e70b
--- /dev/null
+++ b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.MySQLInnoDBDialect/upgrade-1-mltext.sql
@@ -0,0 +1,157 @@
+--
+-- Title: Pull MLText Values into Node Properties
+-- Database: MySQL
+-- Since: V2.2 Schema 91
+-- Author: Derek Hulley
+--
+-- MLText values must be pulled back from attributes into localizable properties.
+-- Several statements are not relevant to upgrades from below 77. These are optional.
+--
+-- Please contact support@alfresco.com if you need assistance with the upgrade.
+--
+
+CREATE TABLE alf_locale
+(
+ id BIGINT NOT NULL AUTO_INCREMENT,
+ version BIGINT NOT NULL DEFAULT 1,
+ locale_str VARCHAR(20) NOT NULL,
+ PRIMARY KEY (id),
+ UNIQUE (locale_str)
+) TYPE=InnoDB;
+
+INSERT INTO alf_locale (id, locale_str) VALUES (1, '.default');
+
+INSERT INTO alf_locale (locale_str)
+ SELECT DISTINCT(ma.mkey)
+ FROM alf_node_properties np
+ JOIN alf_attributes a1 ON (np.attribute_value = a1.id)
+ JOIN alf_map_attribute_entries ma ON (ma.map_id = a1.id)
+; -- (OPTIONAL)
+
+-- Create a temporary table to hold the attribute_value information that needs replacing
+CREATE TABLE t_alf_node_properties
+(
+ node_id BIGINT NOT NULL,
+ qname_id BIGINT NOT NULL,
+ list_index integer NOT NULL,
+ locale_id BIGINT NOT NULL,
+ actual_type_n integer NOT NULL,
+ persisted_type_n integer NOT NULL,
+ boolean_value BIT,
+ long_value BIGINT,
+ float_value FLOAT,
+ double_value DOUBLE PRECISION,
+ string_value TEXT,
+ serializable_value BLOB,
+ INDEX fk_alf_nprop_n (node_id),
+ CONSTRAINT fk_alf_nprop_n FOREIGN KEY (node_id) REFERENCES alf_node (id),
+ INDEX fk_alf_nprop_qn (qname_id),
+ CONSTRAINT fk_alf_nprop_qn FOREIGN KEY (qname_id) REFERENCES alf_qname (id),
+ INDEX fk_alf_nprop_loc (locale_id),
+ CONSTRAINT fk_alf_nprop_loc FOREIGN KEY (locale_id) REFERENCES alf_locale (id),
+ PRIMARY KEY (node_id, qname_id, list_index, locale_id)
+) TYPE=InnoDB;
+
+-- Copy all simple values over
+INSERT INTO t_alf_node_properties
+ (
+ node_id, qname_id, list_index, locale_id,
+ actual_type_n, persisted_type_n,
+ boolean_value, long_value, float_value, double_value,
+ string_value,
+ serializable_value
+ )
+ SELECT
+ np.node_id, np.qname_id, -1, 1,
+ np.actual_type_n, np.persisted_type_n,
+ np.boolean_value, np.long_value, np.float_value, np.double_value,
+ np.string_value,
+ np.serializable_value
+ FROM alf_node_properties np
+ WHERE
+ np.attribute_value is null
+;
+
+-- Copy all MLText values over
+INSERT INTO t_alf_node_properties
+ (
+ node_id, qname_id, list_index, locale_id,
+ actual_type_n, persisted_type_n,
+ boolean_value, long_value, float_value, double_value,
+ string_value,
+ serializable_value
+ )
+ SELECT
+ np.node_id, np.qname_id, -1, loc.id,
+ -1, 0,
+ FALSE, 0, 0, 0,
+ a2.string_value,
+ a2.serializable_value
+ FROM alf_node_properties np
+ JOIN alf_attributes a1 ON (np.attribute_value = a1.id)
+ JOIN alf_map_attribute_entries ma ON (ma.map_id = a1.id)
+ JOIN alf_locale loc ON (ma.mkey = loc.locale_str)
+ JOIN alf_attributes a2 ON (ma.attribute_id = a2.id)
+; -- (OPTIONAL)
+UPDATE t_alf_node_properties
+ SET actual_type_n = 6, persisted_type_n = 6, serializable_value = NULL
+ WHERE actual_type_n = -1 AND string_value IS NOT NULL
+;
+UPDATE t_alf_node_properties
+ SET actual_type_n = 9, persisted_type_n = 9
+ WHERE actual_type_n = -1 AND serializable_value IS NOT NULL
+;
+
+-- Delete the node properties and move the fixed values over
+DROP TABLE alf_node_properties;
+ALTER TABLE t_alf_node_properties RENAME TO alf_node_properties;
+
+-- Clean up unused attribute values
+
+CREATE TABLE t_del_attributes
+(
+ id BIGINT NOT NULL,
+ PRIMARY KEY (id)
+);
+INSERT INTO t_del_attributes
+ SELECT id FROM alf_attributes WHERE type = 'M'
+;
+DELETE t_del_attributes
+ FROM t_del_attributes
+ JOIN alf_map_attribute_entries ma ON (ma.attribute_id = t_del_attributes.id)
+;
+DELETE t_del_attributes
+ FROM t_del_attributes
+ JOIN alf_list_attribute_entries la ON (la.attribute_id = t_del_attributes.id)
+;
+DELETE t_del_attributes
+ FROM t_del_attributes
+ JOIN alf_global_attributes ga ON (ga.attribute = t_del_attributes.id)
+;
+INSERT INTO t_del_attributes
+ SELECT a.id FROM t_del_attributes t
+ JOIN alf_map_attribute_entries ma ON (ma.map_id = t.id)
+ JOIN alf_attributes a ON (ma.attribute_id = a.id)
+;
+DELETE alf_map_attribute_entries
+ FROM alf_map_attribute_entries
+ JOIN t_del_attributes t ON (alf_map_attribute_entries.map_id = t.id)
+;
+DELETE alf_attributes
+ FROM alf_attributes
+ JOIN t_del_attributes t ON (alf_attributes.id = t.id)
+;
+DROP TABLE t_del_attributes;
+
+
+--
+-- Record script finish
+--
+DELETE FROM alf_applied_patch WHERE id = 'patch.db-V2.2-1-MLText';
+INSERT INTO alf_applied_patch
+ (id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
+ VALUES
+ (
+ 'patch.db-V2.2-1-MLText', 'Manually executed script upgrade V2.2: Moved MLText values',
+ 86, 90, -1, 91, null, 'UNKOWN', 1, 1, 'Script completed'
+ );
diff --git a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Oracle9Dialect/AlfrescoSchemaUpdate-OrclBLOB.sql b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Oracle9Dialect/AlfrescoSchemaUpdate-OrclBLOB.sql
new file mode 100644
index 0000000000..556d918402
--- /dev/null
+++ b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Oracle9Dialect/AlfrescoSchemaUpdate-OrclBLOB.sql
@@ -0,0 +1,27 @@
+--
+-- Title: Change Oracle LONG RAW columns to BLOB
+-- Database: Oracle
+-- Since: V2.2 Schema 92
+-- Author: Derek Hulley
+--
+-- Please contact support@alfresco.com if you need assistance with the upgrade.
+
+-- TODO: This needs to be replaced with a creation of new tables, copying values over with TO_LOB and then
+-- renaming the values back.
+
+ALTER TABLE alf_attributes MODIFY (serializable_value BLOB NULL);
+ALTER TABLE avm_node_properties MODIFY (serializable_value BLOB NULL);
+ALTER TABLE avm_node_properties_new MODIFY (serializable_value BLOB NULL);
+ALTER TABLE avm_store_properties MODIFY (serializable_value BLOB NULL);
+
+--
+-- Record script finish
+--
+DELETE FROM alf_applied_patch WHERE id = 'patch.db-V2.2-OrclBLOB';
+INSERT INTO alf_applied_patch
+ (id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
+ VALUES
+ (
+ 'patch.db-V2.2-OrclBLOB', 'Modified serializable_value columns from LONG RAW to BLOB.',
+ 0, 91, -1, 92, null, 'UNKOWN', 1, 1, 'Script completed'
+ );
\ No newline at end of file
diff --git a/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Oracle9Dialect/upgrade-1-mltext.sql b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Oracle9Dialect/upgrade-1-mltext.sql
new file mode 100644
index 0000000000..35857f5bb9
--- /dev/null
+++ b/config/alfresco/dbscripts/upgrade/2.2/org.hibernate.dialect.Oracle9Dialect/upgrade-1-mltext.sql
@@ -0,0 +1,177 @@
+--
+-- Title: Pull MLText Values into Node Properties
+-- Database: Oracle
+-- Since: V2.2 Schema 91
+-- Author: Derek Hulley
+--
+-- MLText values must be pulled back from attributes into localizable properties.
+-- Several statements are not relevant to upgrades from below 77. These are optional.
+--
+-- Please contact support@alfresco.com if you need assistance with the upgrade.
+--
+
+CREATE TABLE alf_locale
+(
+ id NUMBER(19,0) DEFAULT 0 NOT NULL,
+ version NUMBER(19,0) DEFAULT 1 NOT NULL,
+ locale_str VARCHAR2(20 CHAR) NOT NULL,
+ UNIQUE (locale_str)
+);
+
+INSERT INTO alf_locale (id, locale_str) VALUES (1, '.default');
+
+INSERT INTO alf_locale (locale_str)
+(
+ SELECT DISTINCT(ma.mkey)
+ FROM alf_node_properties np
+ JOIN alf_attributes a1 ON (np.attribute_value = a1.id)
+ JOIN alf_map_attribute_entries ma ON (ma.map_id = a1.id)
+); -- (OPTIONAL)
+UPDATE alf_locale SET id = hibernate_sequence.nextval WHERE id != 1;
+ALTER TABLE alf_locale ADD PRIMARY KEY (id);
+
+-- Create a temporary table to hold the attribute_value information that needs replacing
+CREATE TABLE t_alf_node_properties
+(
+ node_id NUMBER(19,0) NOT NULL,
+ qname_id NUMBER(19,0) NOT NULL,
+ list_index NUMBER(10,0) NOT NULL,
+ locale_id NUMBER(19,0) NOT NULL,
+ actual_type_n NUMBER(10,0) NOT NULL,
+ persisted_type_n NUMBER(10,0) NOT NULL,
+ boolean_value NUMBER(1,0),
+ long_value NUMBER(19,0),
+ float_value FLOAT,
+ double_value DOUBLE PRECISION,
+ string_value VARCHAR2(1024 char),
+ serializable_value BLOB,
+ CONSTRAINT fk_alf_nprop_n FOREIGN KEY (node_id) REFERENCES alf_node,
+ CONSTRAINT fk_alf_nprop_qn FOREIGN KEY (qname_id) REFERENCES alf_qname,
+ CONSTRAINT fk_alf_nprop_loc FOREIGN KEY (locale_id) REFERENCES alf_locale,
+ PRIMARY KEY (node_id, qname_id, list_index, locale_id)
+);
+CREATE INDEX fk_alf_nprop_n ON t_alf_node_properties (node_id);
+CREATE INDEX fk_alf_nprop_qn ON t_alf_node_properties (qname_id);
+CREATE INDEX fk_alf_nprop_loc ON t_alf_node_properties (locale_id);
+
+-- Copy all simple values over
+INSERT INTO t_alf_node_properties
+ (
+ node_id, qname_id, list_index, locale_id,
+ actual_type_n, persisted_type_n,
+ boolean_value, long_value, float_value, double_value,
+ string_value,
+ serializable_value
+ )
+ SELECT
+ np.node_id, np.qname_id, -1, 1,
+ np.actual_type_n, np.persisted_type_n,
+ np.boolean_value, np.long_value, np.float_value, np.double_value,
+ np.string_value,
+ TO_LOB(np.serializable_value)
+ FROM alf_node_properties np
+ WHERE
+ np.attribute_value is null
+;
+
+-- Copy all MLText values over
+INSERT INTO t_alf_node_properties
+ (
+ node_id, qname_id, list_index, locale_id,
+ actual_type_n, persisted_type_n,
+ boolean_value, long_value, float_value, double_value,
+ string_value,
+ serializable_value
+ )
+ SELECT
+ np.node_id, np.qname_id, -1, loc.id,
+ -1, 0,
+ 0, 0, 0, 0,
+ a2.string_value,
+ TO_LOB(a2.serializable_value)
+ FROM alf_node_properties np
+ JOIN alf_attributes a1 ON (np.attribute_value = a1.id)
+ JOIN alf_map_attribute_entries ma ON (ma.map_id = a1.id)
+ JOIN alf_locale loc ON (ma.mkey = loc.locale_str)
+ JOIN alf_attributes a2 ON (ma.attribute_id = a2.id)
+; -- (OPTIONAL)
+UPDATE t_alf_node_properties
+ SET actual_type_n = 6, persisted_type_n = 6, serializable_value = NULL
+ WHERE actual_type_n = -1 AND string_value IS NOT NULL
+;
+UPDATE t_alf_node_properties
+ SET actual_type_n = 9, persisted_type_n = 9
+ WHERE actual_type_n = -1 AND serializable_value IS NOT NULL
+;
+
+-- Delete the node properties and move the fixed values over
+DROP TABLE alf_node_properties;
+ALTER TABLE t_alf_node_properties RENAME TO alf_node_properties;
+
+-- Clean up unused attribute values
+
+CREATE TABLE t_del_attributes
+(
+ id NUMBER(19, 0) NOT NULL,
+ PRIMARY KEY (id)
+);
+INSERT INTO t_del_attributes
+(
+ SELECT id FROM alf_attributes WHERE type = 'M'
+)
+;
+DELETE
+ FROM t_del_attributes t
+ WHERE t.id =
+ (
+ SELECT ma.attribute_id FROM alf_map_attribute_entries ma WHERE ma.attribute_id = t.id
+ )
+;
+DELETE
+ FROM t_del_attributes t
+ WHERE t.id =
+ (
+ SELECT la.attribute_id FROM alf_list_attribute_entries la WHERE la.attribute_id = t.id
+ )
+;
+DELETE
+ FROM t_del_attributes t
+ WHERE t.id =
+ (
+ SELECT ga.attribute FROM alf_global_attributes ga WHERE ga.attribute = t.id
+ )
+;
+INSERT INTO t_del_attributes
+(
+ SELECT a.id FROM t_del_attributes t
+ JOIN alf_map_attribute_entries ma ON (ma.map_id = t.id)
+ JOIN alf_attributes a ON (ma.attribute_id = a.id)
+);
+DELETE
+ FROM alf_map_attribute_entries ma
+ WHERE ma.map_id =
+ (
+ SELECT t.id FROM t_del_attributes t WHERE t.id = ma.map_id
+ )
+;
+DELETE
+ FROM alf_attributes a
+ WHERE a.id =
+ (
+ SELECT t.id FROM t_del_attributes t WHERE t.id = a.id
+ )
+;
+DROP TABLE t_del_attributes;
+
+
+--
+-- Record script finish
+--
+DELETE FROM alf_applied_patch WHERE id = 'patch.db-V2.2-1-MLText';
+INSERT INTO alf_applied_patch
+ (id, description, fixes_from_schema, fixes_to_schema, applied_to_schema, target_schema, applied_on_date, applied_to_server, was_executed, succeeded, report)
+ VALUES
+ (
+ 'patch.db-V2.2-1-MLText', 'Manually executed script upgrade V2.2: Moved MLText values',
+ 0, 90, -1, 91, null, 'UNKOWN', 1, 1, 'Script completed'
+ );
\ No newline at end of file
diff --git a/config/alfresco/ehcache-default.xml b/config/alfresco/ehcache-default.xml
index 61444af599..31d0627fee 100644
--- a/config/alfresco/ehcache-default.xml
+++ b/config/alfresco/ehcache-default.xml
@@ -43,21 +43,14 @@
-
-
@@ -289,6 +282,12 @@
timeToLiveSeconds="300"
overflowToDisk="false"
/>
+
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.alfresco.repo.security.permissions.PermissionServiceSPI
+
+
+
+
+
+
+
+
+
+ ${server.transaction.mode.default}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ alfresco/model/permissionDefinitions.xml
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ROLE_
+
+
+
+
+
+
+
+
+
+ GROUP_
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ${system.acl.maxPermissionCheckTimeMillis}
+
+
+ ${system.acl.maxPermissionChecks}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/config/alfresco/hibernate-context.xml b/config/alfresco/hibernate-context.xml
index 5a7daf70d1..8348be39c8 100644
--- a/config/alfresco/hibernate-context.xml
+++ b/config/alfresco/hibernate-context.xml
@@ -54,9 +54,9 @@
+ org/alfresco/repo/domain/hibernate/Locale.hbm.xmlorg/alfresco/repo/domain/hibernate/QName.hbm.xmlorg/alfresco/repo/domain/hibernate/Node.hbm.xml
- org/alfresco/repo/domain/hibernate/Store.hbm.xmlorg/alfresco/repo/domain/hibernate/Transaction.hbm.xmlorg/alfresco/repo/domain/hibernate/VersionCount.hbm.xmlorg/alfresco/repo/domain/hibernate/AppliedPatch.hbm.xml
@@ -172,7 +172,6 @@
${cache.strategy}${cache.strategy}${cache.strategy}
- ${cache.strategy}${cache.strategy}${cache.strategy}${cache.strategy}
@@ -341,9 +340,18 @@
+
+
+
+
+
+
+
+
+
@@ -355,8 +363,7 @@
-
-
+
@@ -364,12 +371,32 @@
- dbNodeDaoServiceDirtySessionInterceptor
+ daoServiceDirtySessionInterceptordbNodeDaoServiceTxnRegistration
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ daoServiceDirtySessionInterceptor
+
+
+
+
diff --git a/config/alfresco/messages/schema-update.properties b/config/alfresco/messages/schema-update.properties
index 36a1824267..3843c26bdb 100644
--- a/config/alfresco/messages/schema-update.properties
+++ b/config/alfresco/messages/schema-update.properties
@@ -11,6 +11,7 @@ schema.update.msg.optional_statement_failed=Optional statement execution failed:
schema.update.warn.dialect_unsupported=Alfresco should not be used with database dialect {0}.
schema.update.warn.dialect_hsql=Alfresco is using the HSQL default database. Please only use this while evaluating Alfresco, it is NOT recommended for production or deployment!
schema.update.warn.dialect_derby=Alfresco is using the Apache Derby default database. Please only use this while evaluating Alfresco, it is NOT recommended for production or deployment!
+schema.update.warn.dialect_substituting=The dialect ''{0}'' is being changed to ''{1}''.
schema.update.err.found_multiple=\nMore than one Alfresco schema was found when querying the database metadata.\n Limit the database user's permissions or set the 'hibernate.default_schema' property in 'custom-hibernate-dialect.properties'.
schema.update.err.previous_failed=A previous schema upgrade failed or was not completed. Revert to the original database before attempting the upgrade again.
schema.update.err.statement_failed=Statement execution failed:\n SQL: {0}\n Error: {1}\n File: {2}\n Line: {3}
diff --git a/config/alfresco/patch/patch-services-context.xml b/config/alfresco/patch/patch-services-context.xml
index 3c48f650e6..420d4baed0 100644
--- a/config/alfresco/patch/patch-services-context.xml
+++ b/config/alfresco/patch/patch-services-context.xml
@@ -827,34 +827,6 @@
-
- patch.AVMAspects
- patch.AVMAspects.description
- 0
- 60
- 61
-
-
-
-
-
-
-
-
-
- patch.AVMProperties
- patch.AVMProperties.description
- 0
- 61
- 62
-
-
-
-
-
-
-
-
patch.db-V2.1-JBPMProcessKeypatch.schemaUpgradeScript.description
@@ -1237,6 +1209,7 @@
+
+
+
+
+ patch.db-V2.2-1-MLText
+ patch.schemaUpgradeScript.description
+ 86
+ 90
+ 91
+
+ classpath:alfresco/dbscripts/upgrade/2.2/${db.script.dialect}/upgrade-1-mltext.sql
+
+
+
+
+
+
+
+
+ patch.db-V2.2-1-FullDmUpgrade
+ patch.schemaUpgradeScript.description
+ 0
+ 85
+ 91
+
+ classpath:alfresco/dbscripts/upgrade/2.2/${db.script.dialect}/upgrade-1-fulldm.sql
+
+
+
+
+
+
+ patch.wcmPermissionPatch
@@ -1391,12 +1409,12 @@
-
+
-
-
+
+ patch.updateDmPermissionspatch.updateDmPermissions.description0
@@ -1565,4 +1583,32 @@
-
+
+
+ patch.db-V2.2-Person
+ patch.schemaUpgradeScript.description
+ 0
+ 134
+ 135
+
+ classpath:alfresco/dbscripts/upgrade/2.2/${db.script.dialect}/AlfrescoSchemaUpdate-Person.sql
+
+
+
+
+ patch.db-V2.2-OrclBLOB
+ patch.schemaUpgradeScript.description
+ 0
+ 134
+ 135
+
+ classpath:alfresco/dbscripts/upgrade/2.2/${db.script.dialect}/AlfrescoSchemaUpdate-OrclBLOB.sql
+
+
+
+
+
+
+
+
+
diff --git a/config/alfresco/version.properties b/config/alfresco/version.properties
index aa01ee725f..932fee6c1f 100644
--- a/config/alfresco/version.properties
+++ b/config/alfresco/version.properties
@@ -19,4 +19,4 @@ version.build=@build-number@
# Schema number
-version.schema=134
+version.schema=135
diff --git a/source/java/org/alfresco/repo/admin/patch/impl/AVMAspectsPatch.java b/source/java/org/alfresco/repo/admin/patch/impl/AVMAspectsPatch.java
deleted file mode 100644
index e41d4e0184..0000000000
--- a/source/java/org/alfresco/repo/admin/patch/impl/AVMAspectsPatch.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (C) 2005-2007 Alfresco Software Limited.
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
-
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
-
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-
- * As a special exception to the terms and conditions of version 2.0 of
- * the GPL, you may redistribute this Program in connection with Free/Libre
- * and Open Source Software ("FLOSS") applications as described in Alfresco's
- * FLOSS exception. You should have recieved a copy of the text describing
- * the FLOSS exception, and it is also available here:
- * http://www.alfresco.com/legal/licensing
- */
-
-package org.alfresco.repo.admin.patch.impl;
-
-import java.util.Iterator;
-
-import org.alfresco.i18n.I18NUtil;
-import org.alfresco.repo.admin.patch.AbstractPatch;
-import org.alfresco.repo.avm.AVMAspectName;
-import org.alfresco.repo.avm.AVMAspectNameDAO;
-import org.alfresco.repo.domain.QNameDAO;
-import org.alfresco.repo.domain.QNameEntity;
-import org.alfresco.service.namespace.QName;
-
-/**
- * Patches from old style aspect storage for AVM to new style.
- * @author britt
- */
-public class AVMAspectsPatch extends AbstractPatch
-{
- private static final String MSG_SUCCESS = "patch.AVMAspects.result";
-
- private AVMAspectNameDAO fAVMAspectDAO;
- private QNameDAO qnameDAO;
-
- public void setAvmAspectNameDAO(AVMAspectNameDAO dao)
- {
- fAVMAspectDAO = dao;
- }
-
- public void setQnameDAO(QNameDAO qnameDAO)
- {
- this.qnameDAO = qnameDAO;
- }
-
- /* (non-Javadoc)
- * @see org.alfresco.repo.admin.patch.AbstractPatch#applyInternal()
- */
- @Override
- protected String applyInternal() throws Exception
- {
- Iterator iter = fAVMAspectDAO.iterator();
- while (iter.hasNext())
- {
- AVMAspectName aspect = iter.next();
- QName aspectQName = aspect.getName();
- QNameEntity aspectQNameEntity = qnameDAO.getOrCreateQNameEntity(aspectQName);
- aspect.getNode().getAspects().add(aspectQNameEntity.getId());
- fAVMAspectDAO.delete(aspect);
- }
- return I18NUtil.getMessage(MSG_SUCCESS);
- }
-}
diff --git a/source/java/org/alfresco/repo/admin/patch/impl/AVMPropertiesPatch.java b/source/java/org/alfresco/repo/admin/patch/impl/AVMPropertiesPatch.java
deleted file mode 100644
index 40192e26ce..0000000000
--- a/source/java/org/alfresco/repo/admin/patch/impl/AVMPropertiesPatch.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (C) 2005-2007 Alfresco Software Limited.
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
-
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
-
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-
- * As a special exception to the terms and conditions of version 2.0 of
- * the GPL, you may redistribute this Program in connection with Free/Libre
- * and Open Source Software ("FLOSS") applications as described in Alfresco's
- * FLOSS exception. You should have recieved a copy of the text describing
- * the FLOSS exception, and it is also available here:
- * http://www.alfresco.com/legal/licensing
- */
-
-package org.alfresco.repo.admin.patch.impl;
-
-import java.util.Iterator;
-
-import org.alfresco.i18n.I18NUtil;
-import org.alfresco.repo.admin.patch.AbstractPatch;
-import org.alfresco.repo.avm.AVMNodeProperty;
-import org.alfresco.repo.avm.AVMNodePropertyDAO;
-import org.alfresco.repo.domain.QNameDAO;
-import org.alfresco.service.namespace.QName;
-
-/**
- * Patch more remapping AVM properties.
- * @author britt
- */
-public class AVMPropertiesPatch extends AbstractPatch
-{
- private static final String MSG_SUCCESS = "patch.AVMProperties.result";
-
- private QNameDAO qnameDAO;
- private AVMNodePropertyDAO fAVMNodePropertyDAO;
-
- public void setQnameDAO(QNameDAO qnameDAO)
- {
- this.qnameDAO = qnameDAO;
- }
-
- public void setAvmNodePropertyDAO(AVMNodePropertyDAO dao)
- {
- fAVMNodePropertyDAO = dao;
- }
-
- /* (non-Javadoc)
- * @see org.alfresco.repo.admin.patch.AbstractPatch#applyInternal()
- */
- @Override
- protected String applyInternal() throws Exception
- {
- Iterator iter = fAVMNodePropertyDAO.iterate();
- while (iter.hasNext())
- {
- AVMNodeProperty prop = iter.next();
- QName propertyQName = prop.getName();
- Long propertyQNameEntityId = qnameDAO.getOrCreateQNameEntity(propertyQName).getId();
- prop.getNode().getProperties().put(propertyQNameEntityId, prop.getValue());
- fAVMNodePropertyDAO.delete(prop.getNode(), prop.getName());
- }
- return I18NUtil.getMessage(MSG_SUCCESS);
- }
-}
diff --git a/source/java/org/alfresco/repo/admin/patch/impl/NodePropertySerializablePatch.java b/source/java/org/alfresco/repo/admin/patch/impl/NodePropertySerializablePatch.java
index 2007dcdfb2..ce8874187f 100644
--- a/source/java/org/alfresco/repo/admin/patch/impl/NodePropertySerializablePatch.java
+++ b/source/java/org/alfresco/repo/admin/patch/impl/NodePropertySerializablePatch.java
@@ -31,7 +31,8 @@ import java.util.Map;
import org.alfresco.i18n.I18NUtil;
import org.alfresco.repo.admin.patch.AbstractPatch;
import org.alfresco.repo.domain.Node;
-import org.alfresco.repo.domain.PropertyValue;
+import org.alfresco.repo.domain.NodePropertyValue;
+import org.alfresco.repo.domain.PropertyMapKey;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.hibernate.Query;
import org.hibernate.Session;
@@ -103,22 +104,17 @@ public class NodePropertySerializablePatch extends AbstractPatch
{
Node node = iterator.next();
// retrieve the node properties
- Map properties = node.getProperties();
+ Map properties = node.getProperties();
// check each property
- for (Map.Entry entry : properties.entrySet())
+ for (Map.Entry entry : properties.entrySet())
{
- PropertyValue propertyValue = entry.getValue();
+ NodePropertyValue propertyValue = entry.getValue();
if (propertyValue.getSerializableValue() == null)
{
// the property was not persisted as a serializable - nothing to do
continue;
}
- else if (propertyValue.isMultiValued())
- {
- // this is a persisted collection - nothing to do
- continue;
- }
- else if (!"SERIALIZABLE".equals(propertyValue.getActualType()))
+ else if (!"SERIALIZABLE".equals(propertyValue.getActualTypeString()))
{
// only handle actual types that were pushed in as any old type
continue;
@@ -126,7 +122,7 @@ public class NodePropertySerializablePatch extends AbstractPatch
// make sure that this value is persisted correctly
Serializable value = propertyValue.getSerializableValue();
// put it back
- PropertyValue newPropertyValue = new PropertyValue(DataTypeDefinition.ANY, value);
+ NodePropertyValue newPropertyValue = new NodePropertyValue(DataTypeDefinition.ANY, value);
entry.setValue(newPropertyValue);
count++;
}
diff --git a/source/java/org/alfresco/repo/audit/AuditableAspect.java b/source/java/org/alfresco/repo/audit/AuditableAspect.java
deleted file mode 100644
index 5808869cfa..0000000000
--- a/source/java/org/alfresco/repo/audit/AuditableAspect.java
+++ /dev/null
@@ -1,286 +0,0 @@
-/*
- * Copyright (C) 2005-2007 Alfresco Software Limited.
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
-
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
-
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-
- * As a special exception to the terms and conditions of version 2.0 of
- * the GPL, you may redistribute this Program in connection with Free/Libre
- * and Open Source Software ("FLOSS") applications as described in Alfresco's
- * FLOSS exception. You should have recieved a copy of the text describing
- * the FLOSS exception, and it is also available here:
- * http://www.alfresco.com/legal/licensing"
- */
-package org.alfresco.repo.audit;
-
-import java.io.Serializable;
-import java.util.Date;
-import java.util.Map;
-
-import org.alfresco.model.ContentModel;
-import org.alfresco.repo.policy.Behaviour;
-import org.alfresco.repo.policy.JavaBehaviour;
-import org.alfresco.repo.policy.PolicyComponent;
-import org.alfresco.repo.policy.PolicyScope;
-import org.alfresco.repo.policy.Behaviour.NotificationFrequency;
-import org.alfresco.repo.security.authentication.AuthenticationUtil;
-import org.alfresco.repo.tenant.TenantService;
-import org.alfresco.service.cmr.repository.ChildAssociationRef;
-import org.alfresco.service.cmr.repository.InvalidNodeRefException;
-import org.alfresco.service.cmr.repository.NodeRef;
-import org.alfresco.service.cmr.repository.NodeService;
-import org.alfresco.service.cmr.repository.StoreRef;
-import org.alfresco.service.cmr.security.AuthenticationService;
-import org.alfresco.service.namespace.NamespaceService;
-import org.alfresco.service.namespace.QName;
-import org.alfresco.util.PropertyMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-
-/**
- * This aspect maintains the audit properties of the Auditable aspect.
- *
- * @author David Caruana
- */
-public class AuditableAspect
-{
- // Logger
- private static final Log logger = LogFactory.getLog(AuditableAspect.class);
-
- // Unknown user, for when authentication has not occured
- private static final String USERNAME_UNKNOWN = "unknown";
-
- // Dependencies
- private NodeService nodeService;
- private AuthenticationService authenticationService;
- private PolicyComponent policyComponent;
- private TenantService tenantService;
-
- // Behaviours
- private Behaviour onCreateAudit;
- private Behaviour onAddAudit;
- private Behaviour onUpdateAudit;
-
-
- /**
- * @param nodeService the node service to use for audit property maintenance
- */
- public void setNodeService(NodeService nodeService)
- {
- this.nodeService = nodeService;
- }
-
- /**
- * @param policyComponent the policy component
- */
- public void setPolicyComponent(PolicyComponent policyComponent)
- {
- this.policyComponent = policyComponent;
- }
-
- /**
- * @param authenticationService the authentication service
- */
- public void setAuthenticationService(AuthenticationService authenticationService)
- {
- this.authenticationService = authenticationService;
- }
-
- /**
- * @param tenantService the tenant service
- */
- public void setTenantService(TenantService tenantService)
- {
- this.tenantService = tenantService;
- }
-
-
- /**
- * Initialise the Auditable Aspect
- */
- public void init()
- {
- // Create behaviours
- onCreateAudit = new JavaBehaviour(this, "onCreateAudit", NotificationFrequency.FIRST_EVENT);
- onAddAudit = new JavaBehaviour(this, "onAddAudit", NotificationFrequency.FIRST_EVENT);
- onUpdateAudit = new JavaBehaviour(this, "onUpdateAudit", NotificationFrequency.TRANSACTION_COMMIT);
-
- // Bind behaviours to node policies
- policyComponent.bindClassBehaviour(QName.createQName(NamespaceService.ALFRESCO_URI, "onCreateNode"), ContentModel.ASPECT_AUDITABLE, onCreateAudit);
- policyComponent.bindClassBehaviour(QName.createQName(NamespaceService.ALFRESCO_URI, "onAddAspect"), ContentModel.ASPECT_AUDITABLE, onAddAudit);
- policyComponent.bindClassBehaviour(QName.createQName(NamespaceService.ALFRESCO_URI, "onUpdateNode"), ContentModel.ASPECT_AUDITABLE, onUpdateAudit);
-
- // Register onCopy class behaviour
- policyComponent.bindClassBehaviour(QName.createQName(NamespaceService.ALFRESCO_URI, "onCopyNode"), ContentModel.ASPECT_AUDITABLE, new JavaBehaviour(this, "onCopy"));
- }
-
- /**
- * Maintain audit properties on creation of Node
- *
- * @param childAssocRef the association to the child created
- */
- public void onCreateAudit(ChildAssociationRef childAssocRef)
- {
- NodeRef nodeRef = childAssocRef.getChildRef();
- onAddAudit(nodeRef, null);
- }
-
- /**
- * Maintain audit properties on addition of audit aspect to a node
- *
- * @param nodeRef the node to which auditing has been added
- * @param aspect the aspect added
- */
- public void onAddAudit(NodeRef nodeRef, QName aspect)
- {
- // Get the current properties
- PropertyMap properties = new PropertyMap();
-
- // Set created / updated date
- Date now = new Date(System.currentTimeMillis());
- properties.put(ContentModel.PROP_CREATED, now);
- properties.put(ContentModel.PROP_MODIFIED, now);
-
- // Set creator (but do not override, if explicitly set)
- String creator = (String)properties.get(ContentModel.PROP_CREATOR);
- if (creator == null || creator.length() == 0)
- {
- creator = getUsername();
- properties.put(ContentModel.PROP_CREATOR, creator);
- }
- properties.put(ContentModel.PROP_MODIFIER, creator);
-
- try
- {
- // Set the updated property values (but do not cascade to update audit behaviour)
- onUpdateAudit.disable();
-
- // note: in MT case, this will run in System context of user's domain ... checkForLock requires System
- AuthenticationUtil.runAs(new SetAuditProperties(nodeService, nodeRef, properties), AuthenticationUtil.getSystemUserName());
- }
- finally
- {
- onUpdateAudit.enable();
- }
-
- if (logger.isDebugEnabled())
- logger.debug("Auditable node " + nodeRef + " created [created,modified=" + now + ";creator,modifier=" + creator + "]");
- }
-
- /**
- * Maintain audit properties on update of node
- *
- * @param nodeRef the updated node
- */
- public void onUpdateAudit(NodeRef nodeRef)
- {
- // Get the current properties
- try
- {
- PropertyMap properties = new PropertyMap();
-
- // Set updated date
- Date now = new Date(System.currentTimeMillis());
- properties.put(ContentModel.PROP_MODIFIED, now);
-
- // Set modifier
- String modifier = getUsername();
- properties.put(ContentModel.PROP_MODIFIER, modifier);
-
- // Set the updated property values
-
- // note: in MT case, this will run in System context of user's domain ... checkForLock requires System
- AuthenticationUtil.runAs(new SetAuditProperties(nodeService, nodeRef, properties), AuthenticationUtil.getSystemUserName());
-
- if (logger.isDebugEnabled())
- logger.debug("Auditable node " + nodeRef + " updated [modified=" + now + ";modifier=" + modifier + "]");
- }
- catch(InvalidNodeRefException e)
- {
- if (logger.isDebugEnabled())
- logger.debug("Warning: Auditable node " + nodeRef + " no longer exists - cannot update");
- }
- }
-
- /**
- * @return the current username (or unknown, if unknown)
- */
- private String getUsername()
- {
- String currentUserName = authenticationService.getCurrentUserName();
- if (currentUserName != null)
- {
- if (tenantService.isEnabled() && authenticationService.isCurrentUserTheSystemUser())
- {
- return tenantService.getBaseNameUser(currentUserName);
- }
- return currentUserName;
- }
- return USERNAME_UNKNOWN;
- }
-
- /**
- * OnCopy behaviour implementation for the lock aspect.
- *
- * Ensures that the propety values of the lock aspect are not copied onto
- * the destination node.
- *
- * @see org.alfresco.repo.copy.CopyServicePolicies.OnCopyNodePolicy#onCopyNode(QName, NodeRef, StoreRef, boolean, PolicyScope)
- */
- public void onCopy(
- QName sourceClassRef,
- NodeRef sourceNodeRef,
- StoreRef destinationStoreRef,
- boolean copyToNewNode,
- PolicyScope copyDetails)
- {
- // The auditable aspect should not be copied
- }
-
-
- /**
- * Helper to set Audit Properties as System User
- */
- private static class SetAuditProperties implements AuthenticationUtil.RunAsWork
- {
- private NodeService nodeService;
- private NodeRef nodeRef;
- private Map properties;
-
- /**
- * Construct
- */
- private SetAuditProperties(NodeService nodeService, NodeRef nodeRef, Map properties)
- {
- this.nodeService = nodeService;
- this.nodeRef = nodeRef;
- this.properties = properties;
- }
-
- public Boolean doWork() throws Exception
- {
- for (Map.Entry entry : properties.entrySet())
- {
- QName propertyQName = entry.getKey();
- Serializable propertyValue = entry.getValue();
- nodeService.setProperty(nodeRef, propertyQName, propertyValue);
- }
- return Boolean.TRUE;
-
- }
- }
-
-
-}
diff --git a/source/java/org/alfresco/repo/audit/AuditableAspectTest.java b/source/java/org/alfresco/repo/audit/AuditableAspectTest.java
index e13006c33f..fcf96a36f4 100644
--- a/source/java/org/alfresco/repo/audit/AuditableAspectTest.java
+++ b/source/java/org/alfresco/repo/audit/AuditableAspectTest.java
@@ -25,9 +25,9 @@
package org.alfresco.repo.audit;
import java.io.Serializable;
+import java.util.Date;
import java.util.HashMap;
import java.util.Map;
-import java.util.Set;
import org.alfresco.model.ContentModel;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
@@ -88,68 +88,21 @@ public class AuditableAspectTest extends BaseSpringTest
System.out.println(NodeStoreInspector.dumpNodeStore(nodeService, storeRef));
}
-
+ /**
+ * @deprecated cm:auditable is always present (2.2.2)
+ */
public void testNoAudit()
{
- // Create a person (which doesn't have auditable capability by default)
- Map personProps = new HashMap();
- personProps.put(ContentModel.PROP_USERNAME, "test person");
- personProps.put(ContentModel.PROP_HOMEFOLDER, rootNodeRef);
- personProps.put(ContentModel.PROP_FIRSTNAME, "test first name");
- personProps.put(ContentModel.PROP_LASTNAME, "test last name");
-
- ChildAssociationRef childAssocRef = nodeService.createNode(
- rootNodeRef,
- ContentModel.ASSOC_CHILDREN,
- QName.createQName("{test}testperson"),
- ContentModel.TYPE_PERSON,
- personProps);
-
- // Assert the person is not auditable
- Set aspects = nodeService.getAspects(childAssocRef.getChildRef());
- assertFalse(aspects.contains(ContentModel.ASPECT_AUDITABLE));
-
- System.out.println(NodeStoreInspector.dumpNodeStore(nodeService, storeRef));
}
-
+ /**
+ * @deprecated cm:auditable is always present (2.2.2)
+ */
public void testAddAudit()
{
- // Create a person
- Map personProps = new HashMap();
- personProps.put(ContentModel.PROP_USERNAME, "test person");
- personProps.put(ContentModel.PROP_HOMEFOLDER, rootNodeRef);
- personProps.put(ContentModel.PROP_FIRSTNAME, "test first name");
- personProps.put(ContentModel.PROP_LASTNAME, "test last name");
-
- ChildAssociationRef childAssocRef = nodeService.createNode(
- rootNodeRef,
- ContentModel.ASSOC_CHILDREN,
- QName.createQName("{test}testperson"),
- ContentModel.TYPE_PERSON,
- personProps);
-
- // Assert the person is not auditable
- Set aspects = nodeService.getAspects(childAssocRef.getChildRef());
- assertFalse(aspects.contains(ContentModel.ASPECT_AUDITABLE));
-
- // Add auditable capability
- nodeService.addAspect(childAssocRef.getChildRef(), ContentModel.ASPECT_AUDITABLE, null);
-
- nodeService.addAspect(childAssocRef.getChildRef(), ContentModel.ASPECT_TITLED, null);
-
- // Assert the person is now audiable
- aspects = nodeService.getAspects(childAssocRef.getChildRef());
- assertTrue(aspects.contains(ContentModel.ASPECT_AUDITABLE));
-
- // Assert the person's auditable property
- assertAuditableProperties(childAssocRef.getChildRef());
-
- System.out.println(NodeStoreInspector.dumpNodeStore(nodeService, storeRef));
}
-
- public void testAddAspect()
+ public synchronized void testAddAspect() throws Exception
{
// Create a person (which doesn't have auditable capability by default)
Map personProps = new HashMap();
@@ -158,20 +111,45 @@ public class AuditableAspectTest extends BaseSpringTest
personProps.put(ContentModel.PROP_FIRSTNAME, "test first name ");
personProps.put(ContentModel.PROP_LASTNAME, "test last name");
+ long t1 = System.currentTimeMillis();
+ this.wait(100);
+
ChildAssociationRef childAssocRef = nodeService.createNode(
rootNodeRef,
ContentModel.ASSOC_CHILDREN,
QName.createQName("{test}testperson"),
ContentModel.TYPE_PERSON,
personProps);
-
+ NodeRef nodeRef = childAssocRef.getChildRef();
+
+ assertAuditableProperties(nodeRef);
+
+ long t2 = System.currentTimeMillis();
+
+ // Check that the dates were set correctly
+ Date aspectCreatedDate1 = (Date) nodeService.getProperty(nodeRef, ContentModel.PROP_CREATED);
+ Date aspectModifiedDate1 = (Date) nodeService.getProperty(nodeRef, ContentModel.PROP_MODIFIED);
+ assertTrue("Created date should be later than t1", t1 < aspectCreatedDate1.getTime());
+ assertTrue(
+ "Modified date must be after or on creation date",
+ aspectCreatedDate1.getTime() <= aspectModifiedDate1.getTime() &&
+ aspectModifiedDate1.getTime() < t2);
+
+ long t3 = System.currentTimeMillis();
+ this.wait(100);
+
// Add auditable capability
- nodeService.addAspect(childAssocRef.getChildRef(), ContentModel.ASPECT_TITLED, null);
+ nodeService.addAspect(nodeRef, ContentModel.ASPECT_TITLED, null);
+
+ // Check that the dates were set correctly
+ Date aspectCreatedDate2 = (Date) nodeService.getProperty(nodeRef, ContentModel.PROP_CREATED);
+ Date aspectModifiedDate2 = (Date) nodeService.getProperty(nodeRef, ContentModel.PROP_MODIFIED);
+ assertEquals("The created date must not change", aspectCreatedDate1, aspectCreatedDate2);
+ assertTrue("New modified date should be later than t3", t3 < aspectModifiedDate2.getTime());
System.out.println(NodeStoreInspector.dumpNodeStore(nodeService, storeRef));
}
-
private void assertAuditableProperties(NodeRef nodeRef)
{
Map props = nodeService.getProperties(nodeRef);
diff --git a/source/java/org/alfresco/repo/avm/AVMAspectNameDAO.java b/source/java/org/alfresco/repo/avm/AVMAspectNameDAO.java
deleted file mode 100644
index 265d699780..0000000000
--- a/source/java/org/alfresco/repo/avm/AVMAspectNameDAO.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (C) 2005-2007 Alfresco Software Limited.
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
-
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
-
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- * As a special exception to the terms and conditions of version 2.0 of
- * the GPL, you may redistribute this Program in connection with Free/Libre
- * and Open Source Software ("FLOSS") applications as described in Alfresco's
- * FLOSS exception. You should have recieved a copy of the text describing
- * the FLOSS exception, and it is also available here:
- * http://www.alfresco.com/legal/licensing" */
-
-package org.alfresco.repo.avm;
-
-import java.util.Iterator;
-import java.util.List;
-
-import org.alfresco.service.namespace.QName;
-
-/**
- * DAO for AVMAspectNames.
- * @author britt
- */
-public interface AVMAspectNameDAO
-{
- /**
- * Persist an aspect name.
- * @param aspectName The item to persist.
- */
- public void save(AVMAspectName aspectName);
-
- /**
- * Delete an Aspect Name.
- * @param aspectName The item to delete.
- */
- public void delete(AVMAspectName aspectName);
-
- /**
- * Delete a single aspect name from a node.
- * @param node The node.
- * @param aspectName The aspect name.
- */
- public void delete(AVMNode node, QName aspectName);
-
- /**
- * Delete all Aspect Names on a given node.
- * @param node The given node.
- */
- public void delete(AVMNode node);
-
- /**
- * Get all Aspect Names for a given node.
- * @param node The AVM Node.
- * @return A List of AVMAspectNames.
- */
- public List get(AVMNode node);
-
- /**
- * Does the given node have the given asset.
- * @param node The AVM node.
- * @param name The QName of the Aspect.
- * @return Whether the aspect is there.
- */
- public boolean exists(AVMNode node, QName name);
-
- /**
- * Get an iterator over all aspect instances.
- * @return
- */
- public Iterator iterator();
-}
diff --git a/source/java/org/alfresco/repo/avm/AVMAspectNameImpl.java b/source/java/org/alfresco/repo/avm/AVMAspectNameImpl.java
deleted file mode 100644
index 2877651b22..0000000000
--- a/source/java/org/alfresco/repo/avm/AVMAspectNameImpl.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Copyright (C) 2005-2007 Alfresco Software Limited.
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
-
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
-
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- * As a special exception to the terms and conditions of version 2.0 of
- * the GPL, you may redistribute this Program in connection with Free/Libre
- * and Open Source Software ("FLOSS") applications as described in Alfresco's
- * FLOSS exception. You should have recieved a copy of the text describing
- * the FLOSS exception, and it is also available here:
- * http://www.alfresco.com/legal/licensing" */
-
-package org.alfresco.repo.avm;
-
-import java.io.Serializable;
-
-import org.alfresco.service.namespace.QName;
-
-/**
- * Simple bean that implements AVMAspectName.
- * @author britt
- */
-class AVMAspectNameImpl implements AVMAspectName, Serializable
-{
- private static final long serialVersionUID = -6282415309583571934L;
-
- /**
- * The Primary Key.
- */
- private Long fID;
-
- /**
- * The Node that has the named aspect.
- */
- private AVMNode fNode;
-
- /**
- * The name of the Aspect.
- */
- private QName fName;
-
- /**
- * Default constructor.
- */
- public AVMAspectNameImpl()
- {
- }
-
- /**
- * Set the node that has the Aspect.
- * @param node The node.
- */
- public void setNode(AVMNode node)
- {
- fNode = node;
- }
-
- /**
- * Get the node that has this Aspect name.
- * @return The AVM Node.
- */
- public AVMNode getNode()
- {
- return fNode;
- }
-
- /**
- * Set the name of the Aspect.
- * @param name The QName of the Aspect.
- */
- public void setName(QName name)
- {
- fName = name;
- }
-
- /**
- * Get the name of this Aspect.
- * @return The QName of this aspect.
- */
- public QName getName()
- {
- return fName;
- }
-
- /**
- * Set the primary key (For Hibernate)
- * @param id The primary key.
- */
- protected void setId(Long id)
- {
- fID = id;
- }
-
- /**
- * Get the primary key (For Hibernate)
- * @return The primary key.
- */
- protected Long getId()
- {
- return fID;
- }
-
- @Override
- public boolean equals(Object obj)
- {
- if (this == obj)
- {
- return true;
- }
- if (!(obj instanceof AVMAspectName))
- {
- return false;
- }
- AVMAspectName o = (AVMAspectName)obj;
- return fNode.equals(o.getNode()) && fName.equals(o.getName());
- }
-
- @Override
- public int hashCode()
- {
- return fNode.hashCode() + fName.hashCode();
- }
-}
diff --git a/source/java/org/alfresco/repo/avm/AVMDAOs.java b/source/java/org/alfresco/repo/avm/AVMDAOs.java
index 2eca72438b..d42812fef9 100644
--- a/source/java/org/alfresco/repo/avm/AVMDAOs.java
+++ b/source/java/org/alfresco/repo/avm/AVMDAOs.java
@@ -74,21 +74,11 @@ public class AVMDAOs
*/
public MergeLinkDAO fMergeLinkDAO;
- /**
- * The AVMNodePropertyDAO
- */
- public AVMNodePropertyDAO fAVMNodePropertyDAO;
-
/**
* The AVMStorePropertyDAO
*/
public AVMStorePropertyDAO fAVMStorePropertyDAO;
- /**
- * The AVMAspectNameDAO
- */
- public AVMAspectNameDAO fAVMAspectNameDAO;
-
public AttributeDAO fAttributeDAO;
public MapEntryDAO fMapEntryDAO;
@@ -160,21 +150,11 @@ public class AVMDAOs
fIssuerDAO = issuerDAO;
}
- public void setAvmNodePropertyDAO(AVMNodePropertyDAO avmNodePropertyDAO)
- {
- fAVMNodePropertyDAO = avmNodePropertyDAO;
- }
-
public void setAvmStorePropertyDAO(AVMStorePropertyDAO avmStorePropertyDAO)
{
fAVMStorePropertyDAO = avmStorePropertyDAO;
}
- public void setAvmAspectNameDAO(AVMAspectNameDAO avmAspectNameDAO)
- {
- fAVMAspectNameDAO = avmAspectNameDAO;
- }
-
public void setAttributeDAO(AttributeDAO dao)
{
fAttributeDAO = dao;
diff --git a/source/java/org/alfresco/repo/avm/AVMNodeProperty.java b/source/java/org/alfresco/repo/avm/AVMNodeProperty.java
deleted file mode 100644
index 5555cd83f9..0000000000
--- a/source/java/org/alfresco/repo/avm/AVMNodeProperty.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (C) 2005-2007 Alfresco Software Limited.
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
-
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
-
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- * As a special exception to the terms and conditions of version 2.0 of
- * the GPL, you may redistribute this Program in connection with Free/Libre
- * and Open Source Software ("FLOSS") applications as described in Alfresco's
- * FLOSS exception. You should have recieved a copy of the text describing
- * the FLOSS exception, and it is also available here:
- * http://www.alfresco.com/legal/licensing" */
-
-package org.alfresco.repo.avm;
-
-import org.alfresco.repo.domain.PropertyValue;
-import org.alfresco.service.namespace.QName;
-
-/**
- * Alfresco Properties for AVM..
- * @author britt
- */
-public interface AVMNodeProperty
-{
- /**
- * Set the node that owns this property.
- * @param node The AVMNode.
- */
- public void setNode(AVMNode node);
-
- /**
- * Get the node that owns this property.
- * @return An AVMNode.
- */
- public AVMNode getNode();
-
- /**
- * Get the name for this property.
- * @return A QName.
- */
- public QName getName();
-
- /**
- * Set the name for the property.
- * @param id A QName.
- */
- public void setName(QName id);
-
- /**
- * Get the actual property value.
- * @return A PropertyValue.
- */
- public PropertyValue getValue();
-
- /**
- * Set the value of this property.
- * @param value A PropertyValue.
- */
- public void setValue(PropertyValue value);
-}
diff --git a/source/java/org/alfresco/repo/avm/AVMNodePropertyDAO.java b/source/java/org/alfresco/repo/avm/AVMNodePropertyDAO.java
deleted file mode 100644
index 3ef9580bf0..0000000000
--- a/source/java/org/alfresco/repo/avm/AVMNodePropertyDAO.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (C) 2005-2007 Alfresco Software Limited.
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
-
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
-
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- * As a special exception to the terms and conditions of version 2.0 of
- * the GPL, you may redistribute this Program in connection with Free/Libre
- * and Open Source Software ("FLOSS") applications as described in Alfresco's
- * FLOSS exception. You should have recieved a copy of the text describing
- * the FLOSS exception, and it is also available here:
- * http://www.alfresco.com/legal/licensing" */
-
-package org.alfresco.repo.avm;
-
-import java.util.Iterator;
-import java.util.List;
-
-import org.alfresco.service.namespace.QName;
-
-/**
- * DAO for AVMNodeProperty.
- * @author britt
- */
-public interface AVMNodePropertyDAO
-{
- /**
- * Save the given AVMNodeProperty.
- * @param prop
- */
- public void save(AVMNodeProperty prop);
-
- /**
- * Get an AVMNodeProperty by owner and name.
- * @param owner An AVMNode.
- * @param name The QName.
- * @return The found AVMNodeProperty or null if not found.
- */
- public AVMNodeProperty get(AVMNode owner, QName name);
-
- /**
- * Get a List of all properties for an owning node.
- * @param node The owning node.
- * @return A List of properties belonging to the given node.
- */
- public List get(AVMNode node);
-
- /**
- * Update a property entry.
- * @param prop The property.
- */
- public void update(AVMNodeProperty prop);
-
- /**
- * Delete all properties associated with a node.
- * @param node The AVMNode whose properties should be deleted.
- */
- public void deleteAll(AVMNode node);
-
- /**
- * Delete the given property from the given node.
- * @param node The node to delete the property to delete.
- * @param name The name of the property to delete.
- */
- public void delete(AVMNode node, QName name);
-
- /**
- * Get an iterator over all properties.
- * @return
- */
- public Iterator iterate();
-}
diff --git a/source/java/org/alfresco/repo/avm/AVMNodePropertyImpl.java b/source/java/org/alfresco/repo/avm/AVMNodePropertyImpl.java
deleted file mode 100644
index 08f843f141..0000000000
--- a/source/java/org/alfresco/repo/avm/AVMNodePropertyImpl.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- * Copyright (C) 2005-2007 Alfresco Software Limited.
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
-
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
-
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- * As a special exception to the terms and conditions of version 2.0 of
- * the GPL, you may redistribute this Program in connection with Free/Libre
- * and Open Source Software ("FLOSS") applications as described in Alfresco's
- * FLOSS exception. You should have recieved a copy of the text describing
- * the FLOSS exception, and it is also available here:
- * http://www.alfresco.com/legal/licensing" */
-
-package org.alfresco.repo.avm;
-
-import java.io.Serializable;
-
-import org.alfresco.repo.domain.PropertyValue;
-import org.alfresco.service.namespace.QName;
-
-/**
- * A Property attached to an AVMNode.
- * @author britt
- */
-class AVMNodePropertyImpl implements AVMNodeProperty, Serializable
-{
- private static final long serialVersionUID = -7194228119659288619L;
-
- /**
- * The primary key.
- */
- private Long fID;
-
- /**
- * The node that owns this.
- */
- private AVMNode fNode;
-
- /**
- * The QName of this property.
- */
- private QName fName;
-
- /**
- * The PropertyValue.
- */
- private PropertyValue fValue;
-
- /**
- * Default constructor.
- */
- public AVMNodePropertyImpl()
- {
- }
-
- /**
- * Get the owning node.
- * @return The AVMNode.
- */
- public AVMNode getNode()
- {
- return fNode;
- }
-
- /**
- * Set the owning node.
- * @param node The AVMNode to set.
- */
- public void setNode(AVMNode node)
- {
- fNode = node;
- }
-
- /**
- * Get the name, a QName
- * @return A QName.
- */
- public QName getName()
- {
- return fName;
- }
-
- /**
- * Set the name, a QName.
- * @param name The QName.
- */
- public void setName(QName name)
- {
- fName = name;
- }
-
- /**
- * Get the value.
- * @return A PropertyValue
- */
- public PropertyValue getValue()
- {
- return fValue;
- }
-
- /**
- * Set the value.
- * @param value A PropertyValue.
- */
- public void setValue(PropertyValue value)
- {
- fValue = value;
- }
-
- /**
- * Set the primary key. (For Hibernate)
- * @param id The primary key.
- */
- protected void setId(Long id)
- {
- fID = id;
- }
-
- /**
- * Get the primary key. (For Hibernate)
- * @return The primary key.
- */
- protected Long getId()
- {
- return fID;
- }
-
- @Override
- public boolean equals(Object other)
- {
- if (this == other)
- {
- return true;
- }
- if (!(other instanceof AVMNodeProperty))
- {
- return false;
- }
- AVMNodeProperty o = (AVMNodeProperty)other;
- return fNode.equals(o.getNode()) && fName.equals(o.getName());
- }
-
- @Override
- public int hashCode()
- {
- return fNode.hashCode() + fName.hashCode();
- }
-}
diff --git a/source/java/org/alfresco/repo/avm/AVMNodeService.java b/source/java/org/alfresco/repo/avm/AVMNodeService.java
index c4a4abb9fd..528b0e20c3 100644
--- a/source/java/org/alfresco/repo/avm/AVMNodeService.java
+++ b/source/java/org/alfresco/repo/avm/AVMNodeService.java
@@ -25,6 +25,7 @@ package org.alfresco.repo.avm;
import java.io.Serializable;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
@@ -46,6 +47,8 @@ import org.alfresco.service.cmr.avm.AVMService;
import org.alfresco.service.cmr.avm.AVMStoreDescriptor;
import org.alfresco.service.cmr.dictionary.AspectDefinition;
import org.alfresco.service.cmr.dictionary.ClassDefinition;
+import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
+import org.alfresco.service.cmr.dictionary.DictionaryException;
import org.alfresco.service.cmr.dictionary.InvalidAspectException;
import org.alfresco.service.cmr.dictionary.InvalidTypeException;
import org.alfresco.service.cmr.dictionary.PropertyDefinition;
@@ -63,6 +66,7 @@ import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.Path;
import org.alfresco.service.cmr.repository.StoreExistsException;
import org.alfresco.service.cmr.repository.StoreRef;
+import org.alfresco.service.cmr.repository.datatype.TypeConversionException;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.QNamePattern;
@@ -108,6 +112,115 @@ public class AVMNodeService extends AbstractNodeServiceImpl implements NodeServi
{
fInvokePolicies = invoke;
}
+
+ /**
+ * Helper method to convert the Serializable value into a full,
+ * persistable {@link PropertyValue}.
+ *
+ * Where the property definition is null, the value will take on the
+ * {@link DataTypeDefinition#ANY generic ANY} value.
+ *
+ * Where the property definition specifies a multi-valued property but the
+ * value provided is not a collection, the value will be wrapped in a collection.
+ *
+ * @param propertyDef the property dictionary definition, may be null
+ * @param value the value, which will be converted according to the definition -
+ * may be null
+ * @return Returns the persistable property value
+ */
+ protected PropertyValue makePropertyValue(PropertyDefinition propertyDef, Serializable value)
+ {
+ // get property attributes
+ QName propertyTypeQName = null;
+ if (propertyDef == null) // property not recognised
+ {
+ // allow it for now - persisting excess properties can be useful sometimes
+ propertyTypeQName = DataTypeDefinition.ANY;
+ }
+ else
+ {
+ propertyTypeQName = propertyDef.getDataType().getName();
+ // check that multi-valued properties are allowed
+ boolean isMultiValued = propertyDef.isMultiValued();
+ if (isMultiValued && !(value instanceof Collection))
+ {
+ if (value != null)
+ {
+ // put the value into a collection
+ // the implementation gives back a Serializable list
+ value = (Serializable) Collections.singletonList(value);
+ }
+ }
+ else if (!isMultiValued && (value instanceof Collection))
+ {
+ // we only allow this case if the property type is ANY
+ if (!propertyTypeQName.equals(DataTypeDefinition.ANY))
+ {
+ throw new DictionaryException(
+ "A single-valued property of this type may not be a collection: \n" +
+ " Property: " + propertyDef + "\n" +
+ " Type: " + propertyTypeQName + "\n" +
+ " Value: " + value);
+ }
+ }
+ }
+ try
+ {
+ PropertyValue propertyValue = new PropertyValue(propertyTypeQName, value);
+ // done
+ return propertyValue;
+ }
+ catch (TypeConversionException e)
+ {
+ throw new TypeConversionException(
+ "The property value is not compatible with the type defined for the property: \n" +
+ " property: " + (propertyDef == null ? "unknown" : propertyDef) + "\n" +
+ " value: " + value + "\n" +
+ " value type: " + value.getClass(),
+ e);
+ }
+ }
+
+ /**
+ * Extracts the externally-visible property from the {@link PropertyValue propertyValue}.
+ *
+ * @param propertyDef the model property definition - may be null
+ * @param propertyValue the persisted property
+ * @return Returns the value of the property in the format dictated by the property
+ * definition, or null if the property value is null
+ */
+ protected Serializable makeSerializableValue(PropertyDefinition propertyDef, PropertyValue propertyValue)
+ {
+ if (propertyValue == null)
+ {
+ return null;
+ }
+ // get property attributes
+ QName propertyTypeQName = null;
+ if (propertyDef == null)
+ {
+ // allow this for now
+ propertyTypeQName = DataTypeDefinition.ANY;
+ }
+ else
+ {
+ propertyTypeQName = propertyDef.getDataType().getName();
+ }
+ try
+ {
+ Serializable value = propertyValue.getValue(propertyTypeQName);
+ // done
+ return value;
+ }
+ catch (TypeConversionException e)
+ {
+ throw new TypeConversionException(
+ "The property value is not compatible with the type defined for the property: \n" +
+ " property: " + (propertyDef == null ? "unknown" : propertyDef) + "\n" +
+ " property value: " + propertyValue,
+ e);
+ }
+ }
/**
* Gets a list of all available node store references
diff --git a/source/java/org/alfresco/repo/avm/hibernate/AVM.hbm.xml b/source/java/org/alfresco/repo/avm/hibernate/AVM.hbm.xml
index 510e222072..d3766d6c22 100644
--- a/source/java/org/alfresco/repo/avm/hibernate/AVM.hbm.xml
+++ b/source/java/org/alfresco/repo/avm/hibernate/AVM.hbm.xml
@@ -46,14 +46,14 @@
-
+
-
+
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
-
-
+
+
-
+
@@ -226,7 +235,7 @@
name="source"
class="org.alfresco.repo.domain.hibernate.NodeImpl"
optimistic-lock="false"
- foreign-key="fk_alf_na_snode"
+ foreign-key="fk_alf_nass_snode"
lazy="false"
fetch="join"
not-null="true" >
@@ -237,7 +246,7 @@
name="target"
class="org.alfresco.repo.domain.hibernate.NodeImpl"
optimistic-lock="false"
- foreign-key="fk_alf_na_tnode"
+ foreign-key="fk_alf_nass_tnode"
lazy="false"
fetch="join"
not-null="true" >
@@ -248,7 +257,7 @@
name="typeQName"
class="org.alfresco.repo.domain.hibernate.QNameEntityImpl"
column="type_qname_id"
- foreign-key="fk_alf_na_tqn"
+ foreign-key="fk_alf_nass_tqn"
lazy="proxy"
fetch="select"
unique="false"
@@ -259,6 +268,17 @@
+
+ select
+ store
+ from
+ org.alfresco.repo.domain.hibernate.StoreImpl as store
+ join store.rootNode
+ where
+ store.protocol = :protocol and
+ store.identifier = :identifier
+
+
select
store
@@ -266,6 +286,16 @@
org.alfresco.repo.domain.hibernate.StoreImpl as store
+
+ select
+ node
+ from
+ org.alfresco.repo.domain.hibernate.NodeImpl as node
+ where
+ node.store.id = :storeId and
+ node.uuid = :uuid
+
+
update
org.alfresco.repo.domain.hibernate.ChildAssocImpl assoc
@@ -313,19 +343,6 @@
assoc.target.id = :nodeId
-
- select
- status
- from
- org.alfresco.repo.domain.hibernate.NodeStatusImpl as status,
- org.alfresco.repo.domain.hibernate.ChildAssocImpl as assoc
- join assoc.child as child
- where
- assoc.parent.id = :parentId and
- assoc.isPrimary = true and
- status.node.id = childId
-
-
select
child.id
@@ -402,13 +419,14 @@
assoc.isPrimary,
assoc.index,
child.id,
- child.store.key.protocol,
- child.store.key.identifier,
- child.uuid as parentUuid
+ store.protocol,
+ store.identifier,
+ child.uuid
from
org.alfresco.repo.domain.hibernate.ChildAssocImpl as assoc
join assoc.parent as parent
join assoc.child as child
+ join child.store as store
where
assoc.parent.id = :parentId
order by
@@ -425,13 +443,14 @@
assoc.isPrimary,
assoc.index,
child.id,
- child.store.key.protocol,
- child.store.key.identifier,
- child.uuid as parentUuid
+ store.protocol,
+ store.identifier,
+ child.uuid
from
org.alfresco.repo.domain.hibernate.ChildAssocImpl as assoc
join assoc.parent as parent
join assoc.child as child
+ join child.store as store
where
assoc.parent.id = :parentId and
assoc.qnameNamespace = :qnameNamespace and
@@ -450,13 +469,14 @@
assoc.isPrimary,
assoc.index,
child.id,
- child.store.key.protocol,
- child.store.key.identifier,
+ store.protocol,
+ store.identifier,
child.uuid
from
org.alfresco.repo.domain.hibernate.ChildAssocImpl as assoc
join assoc.parent as parent
join assoc.child as child
+ join child.store as store
where
assoc.parent.id = :parentId and
assoc.typeQName.id in (:childAssocTypeQNameIds)
@@ -474,13 +494,14 @@
assoc.isPrimary,
assoc.index,
child.id,
- child.store.key.protocol,
- child.store.key.identifier,
+ store.protocol,
+ store.identifier,
child.uuid
from
org.alfresco.repo.domain.hibernate.ChildAssocImpl as assoc
join assoc.parent as parent
join assoc.child as child
+ join child.store as store
where
assoc.parent.id = :parentId and
assoc.typeQName = :typeQName and
@@ -500,13 +521,14 @@
assoc.isPrimary,
assoc.index,
child.id,
- child.store.key.protocol,
- child.store.key.identifier,
+ store.protocol,
+ store.identifier,
child.uuid
from
org.alfresco.repo.domain.hibernate.ChildAssocImpl as assoc
join assoc.parent as parent
join assoc.child as child
+ join child.store as store
where
assoc.parent.id = :parentId and
assoc.isPrimary = true
@@ -524,20 +546,18 @@
assoc.isPrimary,
assoc.index,
child.id,
- child.store.key.protocol,
- child.store.key.identifier,
+ store.protocol,
+ store.identifier,
child.uuid
from
org.alfresco.repo.domain.hibernate.ChildAssocImpl as assoc
join assoc.parent as parent
join assoc.child as child
+ join child.store as store
where
assoc.parent.id = :parentId and
assoc.isPrimary = true and
- (
- child.store.key.protocol != parent.store.key.protocol or
- child.store.key.identifier != parent.store.key.identifier
- )
+ child.store.id != parent.store.id
order by
assoc.index,
assoc.id
@@ -546,20 +566,18 @@
select
parent.id,
- parent.store.key.protocol,
- parent.store.key.identifier,
+ parentStore.protocol,
+ parentStore.identifier,
parent.uuid
from
org.alfresco.repo.domain.hibernate.ChildAssocImpl as assoc
join assoc.parent as parent
+ join parent.store as parentStore
join assoc.child as child
where
+ child.store.id != parent.store.id and
parent.id > :minNodeId and
- assoc.isPrimary = true and
- (
- child.store.key.protocol != parent.store.key.protocol or
- child.store.key.identifier != parent.store.key.identifier
- )
+ assoc.isPrimary = true
order by
parent.id
@@ -567,8 +585,8 @@
select
node.id,
- node.store.key.protocol,
- node.store.key.identifier,
+ node.store.protocol,
+ node.store.identifier,
node.uuid
from
org.alfresco.repo.domain.hibernate.NodeImpl as node
@@ -629,11 +647,12 @@
node.typeQName
from
org.alfresco.repo.domain.hibernate.NodeImpl as node
+ join node.store as store
join node.properties prop
where
- node.store.key.protocol = :protocol and
- node.store.key.identifier = :identifier and
- index(prop) = :propQNameId and
+ store.protocol = :protocol and
+ store.identifier = :identifier and
+ index(prop) = :propKey and
prop.stringValue = :propStringValue
@@ -659,8 +678,7 @@
org.alfresco.repo.domain.hibernate.NodeImpl as node
join node.properties as props
where
- props.serializableValue is not null and
- props.multiValued = false
+ props.serializableValue is not null
@@ -679,63 +697,4 @@
]]>
-
- select
- status
- from
- org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
- where
- status.key.protocol = :protocol and
- status.key.identifier = :identifier
-
-
-
- select
- assoc
- from
- org.alfresco.repo.domain.hibernate.ChildAssocImpl as assoc
- where
- assoc.parent.id in (select
- node.id
- from
- org.alfresco.repo.domain.hibernate.NodeImpl node
- where
- node.store.key.protocol = :protocol and
- node.store.key.identifier = :identifier)
-
-
-
- select
- node
- from
- org.alfresco.repo.domain.hibernate.NodeImpl as node
- where
- node.store.key.protocol = :nodeProtocol and
- node.store.key.identifier = :nodeIdentifier and
- node.id != (select
- rootNode.id
- from
- org.alfresco.repo.domain.hibernate.StoreImpl store
- where
- store.key.protocol = :storeProtocol and
- store.key.identifier = :storeIdentifier)
-
-
-
- select
- count(node.id)
- from
- org.alfresco.repo.domain.hibernate.NodeImpl as node
-
-
-
- select
- count(node.id)
- from
- org.alfresco.repo.domain.hibernate.NodeImpl as node
- where
- node.store.key.protocol = :protocol and
- node.store.key.identifier = :identifier
-
-
diff --git a/source/java/org/alfresco/repo/domain/hibernate/NodeImpl.java b/source/java/org/alfresco/repo/domain/hibernate/NodeImpl.java
index c651827328..8ee5e30a71 100644
--- a/source/java/org/alfresco/repo/domain/hibernate/NodeImpl.java
+++ b/source/java/org/alfresco/repo/domain/hibernate/NodeImpl.java
@@ -25,7 +25,6 @@
package org.alfresco.repo.domain.hibernate;
import java.io.Serializable;
-import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@@ -34,12 +33,14 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
-import org.alfresco.repo.domain.ChildAssoc;
+import org.alfresco.repo.domain.AuditableProperties;
import org.alfresco.repo.domain.DbAccessControlList;
import org.alfresco.repo.domain.Node;
-import org.alfresco.repo.domain.PropertyValue;
+import org.alfresco.repo.domain.NodePropertyValue;
+import org.alfresco.repo.domain.PropertyMapKey;
import org.alfresco.repo.domain.QNameEntity;
import org.alfresco.repo.domain.Store;
+import org.alfresco.repo.domain.Transaction;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.util.EqualsHelper;
@@ -60,10 +61,12 @@ public class NodeImpl extends LifecycleAdapter implements Node, Serializable
private Store store;
private String uuid;
private QNameEntity typeQName;
- private Set aspects;
- private Collection parentAssocs;
- private Map properties;
+ private Transaction transaction;
+ private boolean deleted;
private DbAccessControlList accessControlList;
+ private Set aspects;
+ private Map properties;
+ private AuditableProperties auditableProperties;
private transient ReadLock refReadLock;
private transient WriteLock refWriteLock;
@@ -71,13 +74,13 @@ public class NodeImpl extends LifecycleAdapter implements Node, Serializable
public NodeImpl()
{
- aspects = new HashSet(5);
- parentAssocs = new HashSet(5);
- properties = new HashMap(5);
-
ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
refReadLock = lock.readLock();
refWriteLock = lock.writeLock();
+
+ aspects = new HashSet(5);
+ properties = new HashMap(5);
+ auditableProperties = new AuditableProperties();
}
/**
@@ -120,7 +123,14 @@ public class NodeImpl extends LifecycleAdapter implements Node, Serializable
*/
public String toString()
{
- return getNodeRef().toString();
+ StringBuilder sb = new StringBuilder(50);
+ sb.append("Node")
+ .append("[id=").append(id)
+ .append(", ref=").append(getNodeRef())
+ .append(", txn=").append(transaction)
+ .append(", deleted=").append(deleted)
+ .append("]");
+ return sb.toString();
}
public boolean equals(Object obj)
@@ -167,20 +177,6 @@ public class NodeImpl extends LifecycleAdapter implements Node, Serializable
this.id = id;
}
- public Long getVersion()
- {
- return version;
- }
-
- /**
- * For Hibernate use
- */
- @SuppressWarnings("unused")
- private void setVersion(Long version)
- {
- this.version = version;
- }
-
public Store getStore()
{
return store;
@@ -219,6 +215,40 @@ public class NodeImpl extends LifecycleAdapter implements Node, Serializable
}
}
+ public Long getVersion()
+ {
+ return version;
+ }
+
+ /**
+ * For Hibernate use
+ */
+ @SuppressWarnings("unused")
+ private void setVersion(Long version)
+ {
+ this.version = version;
+ }
+
+ public Transaction getTransaction()
+ {
+ return transaction;
+ }
+
+ public void setTransaction(Transaction transaction)
+ {
+ this.transaction = transaction;
+ }
+
+ public boolean getDeleted()
+ {
+ return deleted;
+ }
+
+ public void setDeleted(boolean deleted)
+ {
+ this.deleted = deleted;
+ }
+
public QNameEntity getTypeQName()
{
return typeQName;
@@ -229,6 +259,16 @@ public class NodeImpl extends LifecycleAdapter implements Node, Serializable
this.typeQName = typeQName;
}
+ public DbAccessControlList getAccessControlList()
+ {
+ return accessControlList;
+ }
+
+ public void setAccessControlList(DbAccessControlList accessControlList)
+ {
+ this.accessControlList = accessControlList;
+ }
+
public Set getAspects()
{
return aspects;
@@ -243,21 +283,7 @@ public class NodeImpl extends LifecycleAdapter implements Node, Serializable
this.aspects = aspects;
}
- public Collection getParentAssocs()
- {
- return parentAssocs;
- }
-
- /**
- * For Hibernate use
- */
- @SuppressWarnings("unused")
- private void setParentAssocs(Collection parentAssocs)
- {
- this.parentAssocs = parentAssocs;
- }
-
- public Map getProperties()
+ public Map getProperties()
{
return properties;
}
@@ -266,18 +292,18 @@ public class NodeImpl extends LifecycleAdapter implements Node, Serializable
* For Hibernate use
*/
@SuppressWarnings("unused")
- private void setProperties(Map properties)
+ private void setProperties(Map properties)
{
this.properties = properties;
}
- public DbAccessControlList getAccessControlList()
+ public AuditableProperties getAuditableProperties()
{
- return accessControlList;
+ return auditableProperties;
}
- public void setAccessControlList(DbAccessControlList accessControlList)
+ public void setAuditableProperties(AuditableProperties auditableProperties)
{
- this.accessControlList = accessControlList;
+ this.auditableProperties = (auditableProperties == null ? new AuditableProperties() : auditableProperties);
}
}
diff --git a/source/java/org/alfresco/repo/domain/hibernate/NodeStatusImpl.java b/source/java/org/alfresco/repo/domain/hibernate/NodeStatusImpl.java
deleted file mode 100644
index e4319f22e8..0000000000
--- a/source/java/org/alfresco/repo/domain/hibernate/NodeStatusImpl.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright (C) 2005-2007 Alfresco Software Limited.
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
-
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
-
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-
- * As a special exception to the terms and conditions of version 2.0 of
- * the GPL, you may redistribute this Program in connection with Free/Libre
- * and Open Source Software ("FLOSS") applications as described in Alfresco's
- * FLOSS exception. You should have recieved a copy of the text describing
- * the FLOSS exception, and it is also available here:
- * http://www.alfresco.com/legal/licensing"
- */
-package org.alfresco.repo.domain.hibernate;
-
-import java.io.Serializable;
-
-import org.alfresco.repo.domain.Node;
-import org.alfresco.repo.domain.NodeKey;
-import org.alfresco.repo.domain.NodeStatus;
-import org.alfresco.repo.domain.Transaction;
-import org.alfresco.util.EqualsHelper;
-
-/**
- * Hibernate implementation of a node status
- *
- * @author Derek Hulley
- */
-public class NodeStatusImpl implements NodeStatus, Serializable
-{
- private static final long serialVersionUID = -802747893314715639L;
-
- private NodeKey key;
- private Long version;
- private Node node;
- private Transaction transaction;
-
- @Override
- public String toString()
- {
- StringBuilder sb = new StringBuilder(50);
- sb.append("NodeStatus")
- .append("[key=").append(key)
- .append(", node=").append(node == null ? null : node.getNodeRef())
- .append(", txn=").append(transaction)
- .append("]");
- return sb.toString();
- }
-
- public int hashCode()
- {
- return (key == null) ? 0 : key.hashCode();
- }
-
- public boolean equals(Object obj)
- {
- if (obj == this)
- return true;
- else if (obj == null)
- return false;
- else if (!(obj instanceof NodeStatusImpl))
- return false;
- NodeStatus that = (NodeStatus) obj;
- return (EqualsHelper.nullSafeEquals(this.key, that.getKey()));
-
- }
-
- public NodeKey getKey()
- {
- return key;
- }
-
- public void setKey(NodeKey key)
- {
- this.key = key;
- }
-
- public Long getVersion()
- {
- return version;
- }
-
- /**
- * For Hibernate use
- */
- @SuppressWarnings("unused")
- private void setVersion(Long version)
- {
- this.version = version;
- }
-
- public Node getNode()
- {
- return node;
- }
-
- public void setNode(Node node)
- {
- this.node = node;
- }
-
- public Transaction getTransaction()
- {
- return transaction;
- }
-
- public void setTransaction(Transaction transaction)
- {
- this.transaction = transaction;
- }
-
- public boolean isDeleted()
- {
- return (node == null);
- }
-}
diff --git a/source/java/org/alfresco/repo/domain/hibernate/Store.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/Store.hbm.xml
deleted file mode 100644
index 4890153a0a..0000000000
--- a/source/java/org/alfresco/repo/domain/hibernate/Store.hbm.xml
+++ /dev/null
@@ -1,36 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/source/java/org/alfresco/repo/domain/hibernate/StoreImpl.java b/source/java/org/alfresco/repo/domain/hibernate/StoreImpl.java
index 856791cbdf..4dcb4a4e31 100644
--- a/source/java/org/alfresco/repo/domain/hibernate/StoreImpl.java
+++ b/source/java/org/alfresco/repo/domain/hibernate/StoreImpl.java
@@ -31,8 +31,8 @@ import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import org.alfresco.repo.domain.Node;
import org.alfresco.repo.domain.Store;
-import org.alfresco.repo.domain.StoreKey;
import org.alfresco.service.cmr.repository.StoreRef;
+import org.alfresco.util.EqualsHelper;
/**
* Hibernate-specific implementation of the domain entity store.
@@ -41,9 +41,11 @@ import org.alfresco.service.cmr.repository.StoreRef;
*/
public class StoreImpl implements Store, Serializable
{
- private static final long serialVersionUID = -6135740209100885890L;
-
- private StoreKey key;
+ private static final long serialVersionUID = -5501292033972362796L;
+
+ private Long id;
+ private String protocol;
+ private String identifier;
private Long version;
private Node rootNode;
@@ -83,7 +85,7 @@ public class StoreImpl implements Store, Serializable
// double check
if (storeRef == null )
{
- storeRef = new StoreRef(getKey().getProtocol(), getKey().getIdentifier());
+ storeRef = new StoreRef(protocol, identifier);
}
return storeRef;
}
@@ -119,7 +121,7 @@ public class StoreImpl implements Store, Serializable
return false;
}
Store that = (Store) obj;
- return (this.getKey().equals(that.getKey()));
+ return EqualsHelper.nullSafeEquals(this.getStoreRef(), that.getStoreRef());
}
/**
@@ -127,20 +129,53 @@ public class StoreImpl implements Store, Serializable
*/
public int hashCode()
{
- return getKey().hashCode();
+ return protocol.hashCode() + identifier.hashCode();
}
- public StoreKey getKey()
+ public Long getId()
{
- return key;
+ return id;
}
- public void setKey(StoreKey key)
+ /**
+ * For Hibernate use
+ */
+ @SuppressWarnings("unused")
+ private void setId(Long id)
+ {
+ this.id = id;
+ }
+
+ public String getProtocol()
+ {
+ return protocol;
+ }
+
+ public void setProtocol(String protocol)
{
refWriteLock.lock();
try
{
- this.key = key;
+ this.protocol = protocol;
+ this.storeRef = null;
+ }
+ finally
+ {
+ refWriteLock.unlock();
+ }
+ }
+
+ public String getIdentifier()
+ {
+ return identifier;
+ }
+
+ public void setIdentifier(String identifier)
+ {
+ refWriteLock.lock();
+ try
+ {
+ this.identifier = identifier;
this.storeRef = null;
}
finally
diff --git a/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml
index 095362b8ff..207bb6f43f 100644
--- a/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml
+++ b/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml
@@ -122,53 +122,46 @@
]]>
-
- select
- count(txn.id)
- from
- org.alfresco.repo.domain.hibernate.TransactionImpl as txn
-
-
select
- count(status.key.guid)
+ count(node.uuid)
from
- org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
- join status.transaction as txn
+ org.alfresco.repo.domain.hibernate.NodeImpl as node
+ join node.transaction as txn
where
txn.id = :txnId and
- status.node is not null
+ node.deleted = false
select
- count(status.key.guid)
+ count(node.uuid)
from
- org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
- join status.transaction as txn
+ org.alfresco.repo.domain.hibernate.NodeImpl as node
+ join node.transaction as txn
where
txn.id = :txnId and
- status.node is null
+ node.deleted = true
select
- status
+ node
from
- org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
+ org.alfresco.repo.domain.hibernate.NodeImpl as node
where
- status.transaction.id = :txnId and
- status.key.protocol = :protocol and
- status.key.identifier = :identifier
+ node.transaction.id = :txnId and
+ node.store.protocol = :protocol and
+ node.store.identifier = :identifier
select
- status
+ node
from
- org.alfresco.repo.domain.hibernate.NodeStatusImpl as status
+ org.alfresco.repo.domain.hibernate.NodeImpl as node
where
- status.transaction.id = :txnId
+ node.transaction.id = :txnId
diff --git a/source/java/org/alfresco/repo/domain/hibernate/VersionCount.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/VersionCount.hbm.xml
index 4a0c2a102c..aa6a521934 100644
--- a/source/java/org/alfresco/repo/domain/hibernate/VersionCount.hbm.xml
+++ b/source/java/org/alfresco/repo/domain/hibernate/VersionCount.hbm.xml
@@ -15,17 +15,37 @@
select-before-update="false"
optimistic-lock="version" >
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+ select
+ cnt
+ from
+ org.alfresco.repo.domain.hibernate.VersionCountImpl as cnt
+ where
+ cnt.store.protocol = :protocol and
+ cnt.store.identifier = :identifier
+
+
diff --git a/source/java/org/alfresco/repo/domain/hibernate/VersionCountImpl.java b/source/java/org/alfresco/repo/domain/hibernate/VersionCountImpl.java
index 97fa223638..4c3023dd5a 100644
--- a/source/java/org/alfresco/repo/domain/hibernate/VersionCountImpl.java
+++ b/source/java/org/alfresco/repo/domain/hibernate/VersionCountImpl.java
@@ -26,7 +26,7 @@ package org.alfresco.repo.domain.hibernate;
import java.io.Serializable;
-import org.alfresco.repo.domain.StoreKey;
+import org.alfresco.repo.domain.Store;
import org.alfresco.repo.domain.VersionCount;
/**
@@ -36,9 +36,10 @@ import org.alfresco.repo.domain.VersionCount;
*/
public class VersionCountImpl implements VersionCount, Serializable
{
- private static final long serialVersionUID = 6420375860928877809L;
+ private static final long serialVersionUID = 7778431129424069297L;
- private StoreKey key;
+ private Long id;
+ private Store store;
private long version;
private int versionCount;
@@ -65,7 +66,7 @@ public class VersionCountImpl implements VersionCount, Serializable
return false;
}
VersionCount that = (VersionCount) obj;
- return (this.getKey().equals(that.getKey()));
+ return (this.getStore().equals(that.getStore()));
}
/**
@@ -73,7 +74,7 @@ public class VersionCountImpl implements VersionCount, Serializable
*/
public int hashCode()
{
- return getKey().hashCode();
+ return getStore().hashCode();
}
/**
@@ -81,16 +82,31 @@ public class VersionCountImpl implements VersionCount, Serializable
*/
public String toString()
{
- return getKey().toString();
+ return getStore().toString();
}
- public StoreKey getKey() {
- return key;
+ public Long getId()
+ {
+ return id;
+ }
+
+ /**
+ * For Hibernate use
+ */
+ @SuppressWarnings("unused")
+ private void setId(Long id)
+ {
+ this.id = id;
+ }
+
+ public Store getStore()
+ {
+ return store;
}
- public void setKey(StoreKey key)
+ public void setStore(Store store)
{
- this.key = key;
+ this.store = store;
}
public Long getVersion()
diff --git a/source/java/org/alfresco/repo/domain/hibernate/VersionCounterDaoComponentImpl.java b/source/java/org/alfresco/repo/domain/hibernate/VersionCounterDaoComponentImpl.java
index 0803035f88..ba7c3a4831 100644
--- a/source/java/org/alfresco/repo/domain/hibernate/VersionCounterDaoComponentImpl.java
+++ b/source/java/org/alfresco/repo/domain/hibernate/VersionCounterDaoComponentImpl.java
@@ -24,16 +24,14 @@
*/
package org.alfresco.repo.domain.hibernate;
-import org.alfresco.repo.domain.StoreKey;
+import org.alfresco.repo.domain.Store;
import org.alfresco.repo.domain.VersionCount;
-import org.alfresco.repo.node.NodeServicePolicies;
-import org.alfresco.repo.policy.JavaBehaviour;
-import org.alfresco.repo.policy.PolicyComponent;
import org.alfresco.repo.version.common.counter.VersionCounterService;
+import org.alfresco.service.cmr.repository.InvalidStoreRefException;
import org.alfresco.service.cmr.repository.StoreRef;
-import org.alfresco.service.namespace.NamespaceService;
-import org.alfresco.service.namespace.QName;
-import org.hibernate.LockMode;
+import org.hibernate.Query;
+import org.hibernate.Session;
+import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
/**
@@ -50,48 +48,80 @@ public class VersionCounterDaoComponentImpl
extends HibernateDaoSupport
implements VersionCounterService
{
+ private static final String QUERY_GET_VERSION_COUNT_FOR_STORE = "versionCount.GetVersionCountForStore";
+ private static final String QUERY_GET_STORE_BY_ALL = "store.GetStoreByAll";
+
/**
* Retrieves or creates a version counter. This locks the counter against updates for the
* current transaction.
*
- * @param storeKey the primary key of the counter
- * @return Returns a current or new version counter
+ * @param storeKey the primary key of the counter
+ * @param create true to create on demand
+ * @return Returns a current or new version counter
*/
- private VersionCount getVersionCounter(StoreRef storeRef)
+ private VersionCount getVersionCounter(final StoreRef storeRef, boolean create)
{
- final StoreKey storeKey = new StoreKey(storeRef.getProtocol(), storeRef.getIdentifier());
-
- // check if it exists
- VersionCount versionCount = (VersionCount) getHibernateTemplate().get(
- VersionCountImpl.class,
- storeKey,
- LockMode.UPGRADE);
- if (versionCount == null)
+ HibernateCallback callback = new HibernateCallback()
{
- // This could fail on some databases with concurrent adds. But it is only those databases
- // that don't lock the index against an addition of the row, and then it will only fail once.
- versionCount = new VersionCountImpl();
- versionCount.setKey(storeKey);
- getHibernateTemplate().save(versionCount);
- // debug
- if (logger.isDebugEnabled())
+ public Object doInHibernate(Session session)
{
- logger.debug("Created version counter: \n" +
- " Thread: " + Thread.currentThread().getName() + "\n" +
- " Version count: " + versionCount.getVersionCount());
+ Query query = session
+ .getNamedQuery(VersionCounterDaoComponentImpl.QUERY_GET_VERSION_COUNT_FOR_STORE)
+ .setString("protocol", storeRef.getProtocol())
+ .setString("identifier", storeRef.getIdentifier());
+ return query.uniqueResult();
}
- }
- else
+ };
+ VersionCount versionCount = (VersionCount) getHibernateTemplate().execute(callback);
+
+ // Done if it exists
+ if (versionCount != null)
{
- // debug
+ // Debug
if (logger.isDebugEnabled())
{
logger.debug("Got version counter: \n" +
" Thread: " + Thread.currentThread().getName() + "\n" +
" Version count: " + versionCount.getVersionCount());
}
+ // Done
+ return versionCount;
}
- // done
+ else if (!create)
+ {
+ return null;
+ }
+
+ // We have permission to create
+ callback = new HibernateCallback()
+ {
+ public Object doInHibernate(Session session)
+ {
+ Query query = session
+ .getNamedQuery(VersionCounterDaoComponentImpl.QUERY_GET_STORE_BY_ALL)
+ .setString("protocol", storeRef.getProtocol())
+ .setString("identifier", storeRef.getIdentifier());
+ return query.uniqueResult();
+ }
+ };
+ Store store = (Store) getHibernateTemplate().execute(callback);
+ if (store == null)
+ {
+ throw new InvalidStoreRefException(storeRef);
+ }
+ versionCount = new VersionCountImpl();
+ versionCount.setStore(store);
+ getHibernateTemplate().save(versionCount);
+
+ // Debug
+ if (logger.isDebugEnabled())
+ {
+ logger.debug("Created version counter: \n" +
+ " Thread: " + Thread.currentThread().getName() + "\n" +
+ " Version count: " + versionCount.getVersionCount());
+ }
+
+ // Done
return versionCount;
}
@@ -104,7 +134,7 @@ public class VersionCounterDaoComponentImpl
public int nextVersionNumber(StoreRef storeRef)
{
// get the version counter
- VersionCount versionCount = getVersionCounter(storeRef);
+ VersionCount versionCount = getVersionCounter(storeRef, true);
// get an incremented count
int nextCount = versionCount.incrementVersionCount();
@@ -127,9 +157,9 @@ public class VersionCounterDaoComponentImpl
public int currentVersionNumber(StoreRef storeRef)
{
// get the version counter
- VersionCount versionCounter = getVersionCounter(storeRef);
+ VersionCount versionCounter = getVersionCounter(storeRef, false);
// get an incremented count
- return versionCounter.getVersionCount();
+ return versionCounter == null ? 0 : versionCounter.getVersionCount();
}
/**
@@ -143,7 +173,7 @@ public class VersionCounterDaoComponentImpl
public synchronized void resetVersionNumber(StoreRef storeRef)
{
// get the version counter
- VersionCount versionCounter = getVersionCounter(storeRef);
+ VersionCount versionCounter = getVersionCounter(storeRef, true);
// get an incremented count
versionCounter.resetVersionCount();
}
@@ -160,9 +190,8 @@ public class VersionCounterDaoComponentImpl
public synchronized void setVersionNumber(StoreRef storeRef, int versionCount)
{
// get the version counter
- VersionCount versionCounter = getVersionCounter(storeRef);
+ VersionCount versionCounter = getVersionCounter(storeRef, true);
// get an incremented count
versionCounter.setVersionCount(versionCount);
}
-
}
diff --git a/source/java/org/alfresco/repo/domain/hibernate/dialect/AlfrescoOracle10gDialect.java b/source/java/org/alfresco/repo/domain/hibernate/dialect/AlfrescoOracle10gDialect.java
new file mode 100644
index 0000000000..8f4e70793a
--- /dev/null
+++ b/source/java/org/alfresco/repo/domain/hibernate/dialect/AlfrescoOracle10gDialect.java
@@ -0,0 +1,20 @@
+package org.alfresco.repo.domain.hibernate.dialect;
+
+import java.sql.Types;
+
+import org.hibernate.dialect.Oracle10gDialect;
+
+/**
+ * Does away with the deprecated LONG datatype.
+ *
+ * @author Derek Hulley
+ * @since 2.2.2
+ */
+public class AlfrescoOracle10gDialect extends Oracle10gDialect
+{
+ public AlfrescoOracle10gDialect()
+ {
+ super();
+ registerColumnType( Types.VARCHAR, "blob" );
+ }
+}
diff --git a/source/java/org/alfresco/repo/domain/hibernate/dialect/AlfrescoOracle9iDialect.java b/source/java/org/alfresco/repo/domain/hibernate/dialect/AlfrescoOracle9iDialect.java
new file mode 100644
index 0000000000..199eb8d140
--- /dev/null
+++ b/source/java/org/alfresco/repo/domain/hibernate/dialect/AlfrescoOracle9iDialect.java
@@ -0,0 +1,20 @@
+package org.alfresco.repo.domain.hibernate.dialect;
+
+import java.sql.Types;
+
+import org.hibernate.dialect.Oracle9iDialect;
+
+/**
+ * Does away with the deprecated LONG datatype.
+ *
+ * @author Derek Hulley
+ * @since 2.2.2
+ */
+public class AlfrescoOracle9iDialect extends Oracle9iDialect
+{
+ public AlfrescoOracle9iDialect()
+ {
+ super();
+ registerColumnType( Types.VARCHAR, "blob" );
+ }
+}
diff --git a/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java b/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java
index 7a55e720a6..60e31e435e 100644
--- a/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java
+++ b/source/java/org/alfresco/repo/domain/schema/SchemaBootstrap.java
@@ -46,6 +46,8 @@ import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.admin.patch.impl.SchemaUpgradeScriptPatch;
import org.alfresco.repo.content.filestore.FileContentWriter;
import org.alfresco.repo.domain.PropertyValue;
+import org.alfresco.repo.domain.hibernate.dialect.AlfrescoOracle10gDialect;
+import org.alfresco.repo.domain.hibernate.dialect.AlfrescoOracle9iDialect;
import org.alfresco.repo.domain.hibernate.dialect.AlfrescoSQLServerDialect;
import org.alfresco.repo.domain.hibernate.dialect.AlfrescoSybaseAnywhereDialect;
import org.alfresco.service.ServiceRegistry;
@@ -71,7 +73,10 @@ import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.MySQL5Dialect;
import org.hibernate.dialect.MySQLDialect;
import org.hibernate.dialect.MySQLInnoDBDialect;
+import org.hibernate.dialect.Oracle10gDialect;
import org.hibernate.dialect.Oracle9Dialect;
+import org.hibernate.dialect.Oracle9iDialect;
+import org.hibernate.dialect.OracleDialect;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.engine.ActionQueue;
import org.hibernate.tool.hbm2ddl.DatabaseMetadata;
@@ -103,6 +108,7 @@ public class SchemaBootstrap extends AbstractLifecycleBean
private static final String MSG_EXECUTING_STATEMENT = "schema.update.msg.executing_statement";
private static final String MSG_OPTIONAL_STATEMENT_FAILED = "schema.update.msg.optional_statement_failed";
private static final String WARN_DIALECT_UNSUPPORTED = "schema.update.warn.dialect_unsupported";
+ private static final String WARN_DIALECT_SUBSTITUTING = "schema.update.warn.dialect_substituting";
private static final String WARN_DIALECT_HSQL = "schema.update.warn.dialect_hsql";
private static final String WARN_DIALECT_DERBY = "schema.update.warn.dialect_derby";
private static final String ERR_MULTIPLE_SCHEMAS = "schema.update.err.found_multiple";
@@ -509,6 +515,40 @@ public class SchemaBootstrap extends AbstractLifecycleBean
}
}
+ /**
+ * Determines the originally installed schema number (version.schema) for this repository.
+ * The only way to determine the original installed schema number is by querying for the minimum value in
+ * alf_applied_patch.applied_to_schema. This might not work if an upgrade is attempted straight from
+ * Alfresco v1.0!
+ *
+ * @return the installed schema number or -1 if the installation is new.
+ */
+ private int getInstalledSchemaNumber(Connection connection) throws Exception
+ {
+ Statement stmt = connection.createStatement();
+ try
+ {
+ ResultSet rs = stmt.executeQuery(
+ "select min(applied_to_schema) from alf_applied_patch where applied_to_schema > -1;");
+ if (!rs.next())
+ {
+ // Nothing in the table
+ return -1;
+ }
+ if (rs.getObject(1) == null)
+ {
+ // Nothing in the table
+ return -1;
+ }
+ int installedSchema = rs.getInt(1);
+ return installedSchema;
+ }
+ finally
+ {
+ try { stmt.close(); } catch (Throwable e) {}
+ }
+ }
+
private static class LockFailedException extends Exception
{
private static final long serialVersionUID = -6676398230191205456L;
@@ -655,6 +695,8 @@ public class SchemaBootstrap extends AbstractLifecycleBean
// and patches will not have been applied yet
return;
}
+ // Retrieve the first installed schema number
+ int installedSchema = getInstalledSchemaNumber(connection);
for (SchemaUpgradeScriptPatch patch : scriptPatches)
{
@@ -669,6 +711,11 @@ public class SchemaBootstrap extends AbstractLifecycleBean
// with the patch bean present.
continue;
}
+ else if (!patch.applies(installedSchema))
+ {
+ // Patch does not apply to the installed schema number
+ continue;
+ }
else if (!apply)
{
// the script was not run and may not be run automatically
@@ -820,7 +867,7 @@ public class SchemaBootstrap extends AbstractLifecycleBean
{
// Get the end of statement
int endIndex = sql.lastIndexOf(';');
- if (endIndex > 0)
+ if (endIndex > -1)
{
sql = sql.substring(0, endIndex);
execute = true;
@@ -892,6 +939,42 @@ public class SchemaBootstrap extends AbstractLifecycleBean
}
}
+ /**
+ * Substitute the dialect with an alternative, if possible.
+ */
+ private void changeDialect(Configuration cfg)
+ {
+ String dialectName = cfg.getProperty(Environment.DIALECT);
+ if (dialectName == null)
+ {
+ return;
+ }
+ else if (dialectName.equals(Oracle9iDialect.class.getName()))
+ {
+ String subst = AlfrescoOracle9iDialect.class.getName();
+ LogUtil.warn(logger, WARN_DIALECT_SUBSTITUTING, dialectName, subst);
+ cfg.setProperty(Environment.DIALECT, subst);
+ }
+ else if (dialectName.equals(Oracle10gDialect.class.getName()))
+ {
+ String subst = AlfrescoOracle10gDialect.class.getName();
+ LogUtil.warn(logger, WARN_DIALECT_SUBSTITUTING, dialectName, subst);
+ cfg.setProperty(Environment.DIALECT, subst);
+ }
+ else if (dialectName.equals(MySQLDialect.class.getName()))
+ {
+ String subst = MySQLInnoDBDialect.class.getName();
+ LogUtil.warn(logger, WARN_DIALECT_SUBSTITUTING, dialectName, subst);
+ cfg.setProperty(Environment.DIALECT, subst);
+ }
+ else if (dialectName.equals(MySQL5Dialect.class.getName()))
+ {
+ String subst = MySQLInnoDBDialect.class.getName();
+ LogUtil.warn(logger, WARN_DIALECT_SUBSTITUTING, dialectName, subst);
+ cfg.setProperty(Environment.DIALECT, subst);
+ }
+ }
+
/**
* Performs dialect-specific checking. This includes checking for InnoDB, dumping the dialect being used
* as well as setting any runtime, dialect-specific properties.
@@ -904,14 +987,18 @@ public class SchemaBootstrap extends AbstractLifecycleBean
{
LogUtil.warn(logger, WARN_DIALECT_UNSUPPORTED, dialectClazz.getName());
}
- if (dialectClazz.equals(HSQLDialect.class))
+ else if (dialectClazz.equals(HSQLDialect.class))
{
LogUtil.info(logger, WARN_DIALECT_HSQL);
}
- if (dialectClazz.equals(DerbyDialect.class))
+ else if (dialectClazz.equals(DerbyDialect.class))
{
LogUtil.info(logger, WARN_DIALECT_DERBY);
}
+ else if (dialectClazz.equals(OracleDialect.class) || dialectClazz.equals(Oracle9Dialect.class))
+ {
+ LogUtil.warn(logger, WARN_DIALECT_UNSUPPORTED, dialectClazz.getName());
+ }
int maxStringLength = SchemaBootstrap.DEFAULT_MAX_STRING_LENGTH;
// Adjust the maximum allowable String length according to the dialect
@@ -945,10 +1032,10 @@ public class SchemaBootstrap extends AbstractLifecycleBean
// serializable_value blob,
maxStringLength = Integer.MAX_VALUE;
}
- else if (dialect instanceof Oracle9Dialect)
+ else if (dialect instanceof OracleDialect)
{
// string_value varchar2(1024 char),
- // serializable_value long raw,
+ // serializable_value blob,
maxStringLength = SchemaBootstrap.DEFAULT_MAX_STRING_LENGTH;
}
else if (dialect instanceof PostgreSQLDialect)
@@ -980,6 +1067,9 @@ public class SchemaBootstrap extends AbstractLifecycleBean
Configuration cfg = localSessionFactory.getConfiguration();
+ // Fix the dialect
+ changeDialect(cfg);
+
// Check and dump the dialect being used
Dialect dialect = Dialect.getDialect(cfg.getProperties());
checkDialect(dialect);
diff --git a/source/java/org/alfresco/repo/node/AbstractNodeServiceImpl.java b/source/java/org/alfresco/repo/node/AbstractNodeServiceImpl.java
index 86ee356f9d..eb631a8992 100644
--- a/source/java/org/alfresco/repo/node/AbstractNodeServiceImpl.java
+++ b/source/java/org/alfresco/repo/node/AbstractNodeServiceImpl.java
@@ -25,7 +25,6 @@
package org.alfresco.repo.node;
import java.io.Serializable;
-import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
@@ -34,7 +33,6 @@ import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
import org.alfresco.model.ContentModel;
-import org.alfresco.repo.domain.PropertyValue;
import org.alfresco.repo.node.NodeServicePolicies.BeforeAddAspectPolicy;
import org.alfresco.repo.node.NodeServicePolicies.BeforeCreateChildAssociationPolicy;
import org.alfresco.repo.node.NodeServicePolicies.BeforeCreateNodeAssociationPolicy;
@@ -65,7 +63,6 @@ import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.authentication.AuthenticationUtil.RunAsWork;
import org.alfresco.service.cmr.dictionary.ClassDefinition;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
-import org.alfresco.service.cmr.dictionary.DictionaryException;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.dictionary.PropertyDefinition;
import org.alfresco.service.cmr.repository.AssociationRef;
@@ -74,7 +71,6 @@ import org.alfresco.service.cmr.repository.InvalidNodeRefException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
-import org.alfresco.service.cmr.repository.datatype.TypeConversionException;
import org.alfresco.service.namespace.QName;
import org.alfresco.service.namespace.QNamePattern;
import org.alfresco.service.namespace.RegexQNamePattern;
@@ -605,115 +601,6 @@ public abstract class AbstractNodeServiceImpl implements NodeService
{
return getChildAssocs(nodeRef, RegexQNamePattern.MATCH_ALL, RegexQNamePattern.MATCH_ALL);
}
-
- /**
- * Helper method to convert the Serializable value into a full,
- * persistable {@link PropertyValue}.
- *
- * Where the property definition is null, the value will take on the
- * {@link DataTypeDefinition#ANY generic ANY} value.
- *
- * Where the property definition specifies a multi-valued property but the
- * value provided is not a collection, the value will be wrapped in a collection.
- *
- * @param propertyDef the property dictionary definition, may be null
- * @param value the value, which will be converted according to the definition -
- * may be null
- * @return Returns the persistable property value
- */
- protected PropertyValue makePropertyValue(PropertyDefinition propertyDef, Serializable value)
- {
- // get property attributes
- QName propertyTypeQName = null;
- if (propertyDef == null) // property not recognised
- {
- // allow it for now - persisting excess properties can be useful sometimes
- propertyTypeQName = DataTypeDefinition.ANY;
- }
- else
- {
- propertyTypeQName = propertyDef.getDataType().getName();
- // check that multi-valued properties are allowed
- boolean isMultiValued = propertyDef.isMultiValued();
- if (isMultiValued && !(value instanceof Collection))
- {
- if (value != null)
- {
- // put the value into a collection
- // the implementation gives back a Serializable list
- value = (Serializable) Collections.singletonList(value);
- }
- }
- else if (!isMultiValued && (value instanceof Collection))
- {
- // we only allow this case if the property type is ANY
- if (!propertyTypeQName.equals(DataTypeDefinition.ANY))
- {
- throw new DictionaryException(
- "A single-valued property of this type may not be a collection: \n" +
- " Property: " + propertyDef + "\n" +
- " Type: " + propertyTypeQName + "\n" +
- " Value: " + value);
- }
- }
- }
- try
- {
- PropertyValue propertyValue = new PropertyValue(propertyTypeQName, value);
- // done
- return propertyValue;
- }
- catch (TypeConversionException e)
- {
- throw new TypeConversionException(
- "The property value is not compatible with the type defined for the property: \n" +
- " property: " + (propertyDef == null ? "unknown" : propertyDef) + "\n" +
- " value: " + value + "\n" +
- " value type: " + value.getClass(),
- e);
- }
- }
-
- /**
- * Extracts the externally-visible property from the {@link PropertyValue propertyValue}.
- *
- * @param propertyDef the model property definition - may be null
- * @param propertyValue the persisted property
- * @return Returns the value of the property in the format dictated by the property
- * definition, or null if the property value is null
- */
- protected Serializable makeSerializableValue(PropertyDefinition propertyDef, PropertyValue propertyValue)
- {
- if (propertyValue == null)
- {
- return null;
- }
- // get property attributes
- QName propertyTypeQName = null;
- if (propertyDef == null)
- {
- // allow this for now
- propertyTypeQName = DataTypeDefinition.ANY;
- }
- else
- {
- propertyTypeQName = propertyDef.getDataType().getName();
- }
- try
- {
- Serializable value = propertyValue.getValue(propertyTypeQName);
- // done
- return value;
- }
- catch (TypeConversionException e)
- {
- throw new TypeConversionException(
- "The property value is not compatible with the type defined for the property: \n" +
- " property: " + (propertyDef == null ? "unknown" : propertyDef) + "\n" +
- " property value: " + propertyValue,
- e);
- }
- }
protected Map getDefaultProperties(QName typeQName)
{
diff --git a/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java b/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java
index d6a5498028..de37f923e3 100644
--- a/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java
+++ b/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java
@@ -102,6 +102,7 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest
public static final String TEST_PREFIX = "test";
public static final QName TYPE_QNAME_TEST_CONTENT = QName.createQName(NAMESPACE, "content");
public static final QName TYPE_QNAME_TEST_MANY_PROPERTIES = QName.createQName(NAMESPACE, "many-properties");
+ public static final QName TYPE_QNAME_TEST_MANY_ML_PROPERTIES = QName.createQName(NAMESPACE, "many-ml-properties");
public static final QName TYPE_QNAME_EXTENDED_CONTENT = QName.createQName(NAMESPACE, "extendedcontent");
public static final QName ASPECT_QNAME_TEST_TITLED = QName.createQName(NAMESPACE, "titled");
public static final QName ASPECT_QNAME_TEST_MARKER = QName.createQName(NAMESPACE, "marker");
@@ -127,6 +128,7 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest
public static final QName PROP_QNAME_LOCALE_VALUE = QName.createQName(NAMESPACE, "localeValue");
public static final QName PROP_QNAME_NULL_VALUE = QName.createQName(NAMESPACE, "nullValue");
public static final QName PROP_QNAME_MULTI_VALUE = QName.createQName(NAMESPACE, "multiValue");
+ public static final QName PROP_QNAME_MULTI_ML_VALUE = QName.createQName(NAMESPACE, "multiMLValue");
public static final QName PROP_QNAME_PROP1 = QName.createQName(NAMESPACE, "prop1");
public static final QName PROP_QNAME_PROP2 = QName.createQName(NAMESPACE, "prop2");
public static final QName ASSOC_TYPE_QNAME_TEST_CHILDREN = ContentModel.ASSOC_CHILDREN;
@@ -414,6 +416,7 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest
return ret;
}
+ @SuppressWarnings("unchecked")
private int countNodesByReference(NodeRef nodeRef)
{
String query =
@@ -422,8 +425,9 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest
NodeImpl.class.getName() + " node" +
" where" +
" node.uuid = ? and" +
- " node.store.key.protocol = ? and" +
- " node.store.key.identifier = ?";
+ " node.deleted = false and" +
+ " node.store.protocol = ? and" +
+ " node.store.identifier = ?";
Session session = getSession();
List results = session.createQuery(query)
.setString(0, nodeRef.getId())
@@ -944,6 +948,7 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest
endTransaction();
}
+ @SuppressWarnings("unchecked")
private int countChildrenOfNode(NodeRef nodeRef)
{
String query =
@@ -951,7 +956,7 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest
" from " +
ChildAssocImpl.class.getName() + " childAssoc" +
" join childAssoc.parent node" +
- " where node.uuid = ? and node.store.key.protocol = ? and node.store.key.identifier = ?";
+ " where node.uuid = ? and node.store.protocol = ? and node.store.identifier = ?";
Session session = getSession();
List results = session.createQuery(query)
.setString(0, nodeRef.getId())
@@ -1356,6 +1361,7 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest
* Check that properties go in and come out in the correct format.
* @see #getCheckPropertyValues(Map)
*/
+ @SuppressWarnings("unchecked")
public void testPropertyTypes() throws Exception
{
ArrayList listProperty = new ArrayList(2);
@@ -1415,6 +1421,127 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest
assertTrue("Collection doesn't contain value", ((Collection)checkProperty).contains("GHI"));
}
+ /**
+ * Checks that empty collections can be persisted
+ */
+ @SuppressWarnings("unchecked")
+ public void testEmptyCollections() throws Exception
+ {
+ NodeRef nodeRef = nodeService.createNode(
+ rootNodeRef,
+ ASSOC_TYPE_QNAME_TEST_CHILDREN,
+ QName.createQName("pathA"),
+ TYPE_QNAME_TEST_MANY_PROPERTIES).getChildRef();
+
+ List filledCollection = new ArrayList(2);
+ filledCollection.add("ABC");
+ filledCollection.add("DEF");
+ List emptyCollection = Collections.emptyList();
+
+ nodeService.setProperty(nodeRef, PROP_QNAME_MULTI_VALUE, (Serializable) filledCollection);
+ List checkFilledCollection = (List) nodeService.getProperty(nodeRef, PROP_QNAME_MULTI_VALUE);
+ assertEquals("Filled collection didn't come back with correct values", filledCollection, checkFilledCollection);
+
+ nodeService.setProperty(nodeRef, PROP_QNAME_MULTI_VALUE, (Serializable) emptyCollection);
+ List checkEmptyCollection = (List) nodeService.getProperty(nodeRef, PROP_QNAME_MULTI_VALUE);
+ assertEquals("Empty collection didn't come back with correct values", emptyCollection, checkEmptyCollection);
+
+ // Check that a null value is returned as null
+ nodeService.setProperty(nodeRef, PROP_QNAME_MULTI_VALUE, null);
+ List checkNullCollection = (List) nodeService.getProperty(nodeRef, PROP_QNAME_MULTI_VALUE);
+ assertNull("Null property should stay null", checkNullCollection);
+ }
+
+ /**
+ * Checks that large collections can be persisted
+ */
+ @SuppressWarnings("unchecked")
+ public void testBigCollections() throws Exception
+ {
+ NodeRef nodeRef = nodeService.createNode(
+ rootNodeRef,
+ ASSOC_TYPE_QNAME_TEST_CHILDREN,
+ QName.createQName("pathA"),
+ TYPE_QNAME_TEST_MANY_PROPERTIES).getChildRef();
+
+ for (int inc = 0; inc < 5; inc++)
+ {
+ System.out.println("----------------------------------------------");
+ int collectionSize = (int) Math.pow(10, inc);
+ List largeCollection = new ArrayList(collectionSize);
+ for (int i = 0; i < collectionSize; i++)
+ {
+ largeCollection.add(String.format("Large-collection-value-%05d", i));
+ }
+ List emptyCollection = Collections.emptyList();
+
+ long t1 = System.nanoTime();
+ nodeService.setProperty(nodeRef, PROP_QNAME_MULTI_VALUE, (Serializable) largeCollection);
+ double tDelta = (double)(System.nanoTime() - t1)/1E6;
+ System.out.println("Setting " + collectionSize + " multi-valued property took: " + tDelta + "ms");
+ // Now get it back
+ t1 = System.nanoTime();
+ List checkLargeCollection = (List) nodeService.getProperty(nodeRef, PROP_QNAME_MULTI_VALUE);
+ tDelta = (double)(System.nanoTime() - t1)/1E6;
+ System.out.println("First fetch of " + collectionSize + " multi-valued property took: " + tDelta + "ms");
+ assertEquals("Large collection didn't come back with correct values", largeCollection, checkLargeCollection);
+
+ // Get it back again
+ t1 = System.nanoTime();
+ checkLargeCollection = (List) nodeService.getProperty(nodeRef, PROP_QNAME_MULTI_VALUE);
+ tDelta = (double)(System.nanoTime() - t1)/1E6;
+ System.out.println("Second fetch of " + collectionSize + " multi-valued property took: " + tDelta + "ms");
+
+ // Add a value
+ largeCollection.add("First addition");
+ t1 = System.nanoTime();
+ nodeService.setProperty(nodeRef, PROP_QNAME_MULTI_VALUE, (Serializable) largeCollection);
+ tDelta = (double)(System.nanoTime() - t1)/1E6;
+ System.out.println("Re-setting " + largeCollection.size() + " multi-valued property took: " + tDelta + "ms");
+
+ // Add another value
+ largeCollection.add("Second addition");
+ t1 = System.nanoTime();
+ nodeService.setProperty(nodeRef, PROP_QNAME_MULTI_VALUE, (Serializable) largeCollection);
+ tDelta = (double)(System.nanoTime() - t1)/1E6;
+ System.out.println("Re-setting " + largeCollection.size() + " multi-valued property took: " + tDelta + "ms");
+
+ nodeService.setProperty(nodeRef, PROP_QNAME_MULTI_VALUE, (Serializable) emptyCollection);
+ List checkEmptyCollection = (List) nodeService.getProperty(nodeRef, PROP_QNAME_MULTI_VALUE);
+ assertEquals("Empty collection didn't come back with correct values", emptyCollection, checkEmptyCollection);
+
+ // Check that a null value is returned as null
+ nodeService.setProperty(nodeRef, PROP_QNAME_MULTI_VALUE, null);
+ List checkNullCollection = (List) nodeService.getProperty(nodeRef, PROP_QNAME_MULTI_VALUE);
+ assertNull("Null property should stay null", checkNullCollection);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testMultiValueMLTextProperties() throws Exception
+ {
+ NodeRef nodeRef = nodeService.createNode(
+ rootNodeRef,
+ ASSOC_TYPE_QNAME_TEST_CHILDREN,
+ QName.createQName("pathA"),
+ TYPE_QNAME_TEST_MANY_ML_PROPERTIES).getChildRef();
+
+ // Create MLText properties and add to a collection
+ List mlTextCollection = new ArrayList(2);
+ MLText mlText0 = new MLText();
+ mlText0.addValue(Locale.ENGLISH, "Hello");
+ mlText0.addValue(Locale.FRENCH, "Bonjour");
+ mlTextCollection.add(mlText0);
+ MLText mlText1 = new MLText();
+ mlText1.addValue(Locale.ENGLISH, "Bye bye");
+ mlText1.addValue(Locale.FRENCH, "Au revoir");
+ mlTextCollection.add(mlText1);
+
+ nodeService.setProperty(nodeRef, PROP_QNAME_MULTI_ML_VALUE, (Serializable) mlTextCollection);
+ Collection mlTextCollectionCheck = (Collection) nodeService.getProperty(nodeRef, PROP_QNAME_MULTI_ML_VALUE);
+ assertEquals("MLText collection didn't come back correctly.", mlTextCollection, mlTextCollectionCheck);
+ }
+
/**
* Checks that the {@link ContentModel#ASPECT_REFERENCEABLE referencable} properties
* are present
@@ -2083,13 +2210,13 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest
public void testAR782() throws Exception
{
Map properties = nodeService.getProperties(rootNodeRef);
- // Set cm:created correctly
- properties.put(ContentModel.PROP_CREATED, new Date());
+ // Set usr:accountExpiryDate correctly
+ properties.put(ContentModel.PROP_ACCOUNT_EXPIRY_DATE, new Date());
nodeService.setProperties(rootNodeRef, properties);
try
{
- // Set cm:created using something that can't be converted to a Date
- properties.put(ContentModel.PROP_CREATED, "blah");
+ // Set usr:accountExpiryDate using something that can't be converted to a Date
+ properties.put(ContentModel.PROP_ACCOUNT_EXPIRY_DATE, "blah");
nodeService.setProperties(rootNodeRef, properties);
fail("Failed to catch type conversion issue early.");
}
diff --git a/source/java/org/alfresco/repo/node/BaseNodeServiceTest_model.xml b/source/java/org/alfresco/repo/node/BaseNodeServiceTest_model.xml
index a5a6c057fe..49f5f11b5a 100644
--- a/source/java/org/alfresco/repo/node/BaseNodeServiceTest_model.xml
+++ b/source/java/org/alfresco/repo/node/BaseNodeServiceTest_model.xml
@@ -275,6 +275,18 @@
+
+ Busy2
+ sys:base
+
+
+ d:mltext
+ true
+ true
+
+
+
+
Rendition Pagesys:base
diff --git a/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java b/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java
index 38bd762ac0..314395e9d3 100644
--- a/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java
+++ b/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java
@@ -40,7 +40,6 @@ import java.util.Stack;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.domain.Node;
-import org.alfresco.repo.domain.PropertyValue;
import org.alfresco.repo.node.AbstractNodeServiceImpl;
import org.alfresco.repo.node.StoreArchiveMap;
import org.alfresco.repo.node.db.NodeDaoService.NodeRefQueryCallback;
@@ -52,7 +51,6 @@ import org.alfresco.service.cmr.dictionary.AspectDefinition;
import org.alfresco.service.cmr.dictionary.AssociationDefinition;
import org.alfresco.service.cmr.dictionary.ChildAssociationDefinition;
import org.alfresco.service.cmr.dictionary.ClassDefinition;
-import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.InvalidAspectException;
import org.alfresco.service.cmr.dictionary.InvalidTypeException;
import org.alfresco.service.cmr.dictionary.PropertyDefinition;
@@ -313,21 +311,19 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
// set the properties passed in
if (properties.size() > 0)
{
- Map propertiesConverted = convertProperties(properties);
- nodeDaoService.addNodeProperties(childNodePair.getFirst(), propertiesConverted);
+ nodeDaoService.addNodeProperties(childNodePair.getFirst(), properties);
}
- Map propertiesAfterValues = nodeDaoService.getNodeProperties(childNodePair.getFirst());
+ Map propertiesAfter = nodeDaoService.getNodeProperties(childNodePair.getFirst());
// Ensure child uniqueness
- String newName = extractNameProperty(propertiesAfterValues);
+ String newName = extractNameProperty(propertiesAfter);
// Ensure uniqueness. Note that the cm:name may be null, in which case the uniqueness is still
setChildNameUnique(childAssocPair, newName, null); // ensure uniqueness
// Invoke policy behaviour
invokeOnCreateNode(childAssocRef);
invokeOnCreateChildAssociation(childAssocRef, true);
- Map propertiesAfter = convertPropertyValues(propertiesAfterValues);
addIntrinsicProperties(childNodePair, propertiesAfter);
invokeOnUpdateProperties(
childAssocRef.getChildRef(),
@@ -364,16 +360,20 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
}
// Get the existing values
Long nodeId = nodePair.getFirst();
- Map existingPropertyValues = nodeDaoService.getNodeProperties(nodeId);
+ Map existingProperties = nodeDaoService.getNodeProperties(nodeId);
Set existingAspects = nodeDaoService.getNodeAspects(nodeId);
- return addDefaultAspects(nodePair, existingAspects, existingPropertyValues, typeQName);
+ return addDefaultAspects(nodePair, existingAspects, existingProperties, typeQName);
}
/**
* Add the default aspects to a given node
* @return Returns true if any aspects were added
*/
- private boolean addDefaultAspects(Pair nodePair, Set existingAspects, Map existingPropertyValues, QName typeQName)
+ private boolean addDefaultAspects(
+ Pair nodePair,
+ Set existingAspects,
+ Map existingProperties,
+ QName typeQName)
{
ClassDefinition classDefinition = dictionaryService.getClass(typeQName);
if (classDefinition == null)
@@ -428,33 +428,32 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
}
// Get the existing values
Long nodeId = nodePair.getFirst();
- Map existingPropertyValues = nodeDaoService.getNodeProperties(nodeId);
- return addDefaultProperties(nodePair, existingPropertyValues, typeQName);
+ Map existingProperties = nodeDaoService.getNodeProperties(nodeId);
+ return addDefaultProperties(nodePair, existingProperties, typeQName);
}
/**
* Adds default properties for the given type to the node. Default values will not be set if there are existing values.
*/
- private boolean addDefaultProperties(Pair nodePair, Map existingPropertyValues, QName typeQName)
+ private boolean addDefaultProperties(Pair nodePair, Map existingProperties, QName typeQName)
{
Long nodeId = nodePair.getFirst();
// Get the default properties for this aspect
Map defaultProperties = getDefaultProperties(typeQName);
- Map defaultPropertyValues = this.convertProperties(defaultProperties);
// Remove all default values where a value already exists
- for (Map.Entry entry : existingPropertyValues.entrySet())
+ for (Map.Entry entry : existingProperties.entrySet())
{
QName existingPropertyQName = entry.getKey();
- PropertyValue existingPropertyValue = entry.getValue();
- if (existingPropertyValue != null)
+ Serializable existingProperty = entry.getValue();
+ if (existingProperty != null)
{
- defaultPropertyValues.remove(existingPropertyQName);
+ defaultProperties.remove(existingPropertyQName);
}
}
// Add the properties to the node - but only if there is anything to set
- if (defaultPropertyValues.size() > 0)
+ if (defaultProperties.size() > 0)
{
- nodeDaoService.addNodeProperties(nodeId, defaultPropertyValues);
+ nodeDaoService.addNodeProperties(nodeId, defaultProperties);
return true;
}
else
@@ -567,8 +566,7 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
if (aspectProperties.size() > 0)
{
- Map aspectPropertyValues = convertProperties(aspectProperties);
- nodeDaoService.addNodeProperties(nodeId, aspectPropertyValues);
+ nodeDaoService.addNodeProperties(nodeId, aspectProperties);
}
if (!nodeDaoService.hasNodeAspect(nodeId, aspectTypeQName))
@@ -995,20 +993,16 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
return nodeId;
}
- PropertyValue propertyValue = nodeDaoService.getNodeProperty(nodeId, qname);
+ Serializable property = nodeDaoService.getNodeProperty(nodeId, qname);
// check if we need to provide a spoofed name
- if (propertyValue == null && qname.equals(ContentModel.PROP_NAME))
+ if (property == null && qname.equals(ContentModel.PROP_NAME))
{
return nodeRef.getId();
}
- // get the property definition
- PropertyDefinition propertyDef = dictionaryService.getProperty(qname);
- // convert to the correct type
- Serializable value = makeSerializableValue(propertyDef, propertyValue);
// done
- return value;
+ return property;
}
public Map getProperties(NodeRef nodeRef) throws InvalidNodeRefException
@@ -1023,24 +1017,11 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
private Map getPropertiesImpl(Pair nodePair) throws InvalidNodeRefException
{
Long nodeId = nodePair.getFirst();
- Map nodeProperties = nodeDaoService.getNodeProperties(nodeId);
- Map ret = new HashMap(nodeProperties.size());
- // copy values
- for (Map.Entry entry: nodeProperties.entrySet())
- {
- QName propertyQName = entry.getKey();
- PropertyValue propertyValue = entry.getValue();
- // get the property definition
- PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName);
- // convert to the correct type
- Serializable value = makeSerializableValue(propertyDef, propertyValue);
- // copy across
- ret.put(propertyQName, value);
- }
+ Map nodeProperties = nodeDaoService.getNodeProperties(nodeId);
// spoof referencable properties
- addIntrinsicProperties(nodePair, ret);
+ addIntrinsicProperties(nodePair, nodeProperties);
// done
- return ret;
+ return nodeProperties;
}
/**
@@ -1099,10 +1080,7 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
}
}
// Set the property
- PropertyDefinition propertyDef = dictionaryService.getProperty(qname);
- // get a persistable value
- PropertyValue propertyValue = makePropertyValue(propertyDef, value);
- nodeDaoService.addNodeProperty(nodeId, qname, propertyValue);
+ nodeDaoService.addNodeProperty(nodeId, qname, value);
}
}
@@ -1155,10 +1133,8 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
}
// Now remove special properties
extractIntrinsicProperties(properties);
- // convert the map
- Map propertyValues = convertProperties(properties);
// Update the node
- nodeDaoService.setNodeProperties(nodeId, propertyValues);
+ nodeDaoService.setNodeProperties(nodeId, properties);
}
public void removeProperty(NodeRef nodeRef, QName qname) throws InvalidNodeRefException
@@ -1194,42 +1170,42 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
nodeIndexer.indexUpdateNode(nodeRef);
}
- private Map convertProperties(Map properties) throws InvalidNodeRefException
- {
- Map convertedProperties = new HashMap(17);
-
- // check the property type and copy the values across
- for (QName propertyQName : properties.keySet())
- {
- PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName);
- Serializable value = properties.get(propertyQName);
- // get a persistable value
- PropertyValue propertyValue = makePropertyValue(propertyDef, value);
- convertedProperties.put(propertyQName, propertyValue);
- }
-
- // Return the converted properties
- return convertedProperties;
- }
-
- private Map convertPropertyValues(Map propertyValues) throws InvalidNodeRefException
- {
- Map convertedProperties = new HashMap(17);
-
- // check the property type and copy the values across
- for (Map.Entry entry : propertyValues.entrySet())
- {
- QName propertyQName = entry.getKey();
- PropertyValue propertyValue = entry.getValue();
- PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName);
- Serializable property = makeSerializableValue(propertyDef, propertyValue);
- convertedProperties.put(propertyQName, property);
- }
-
- // Return the converted properties
- return convertedProperties;
- }
-
+// private Map convertProperties(Map properties) throws InvalidNodeRefException
+// {
+// Map convertedProperties = new HashMap(17);
+//
+// // check the property type and copy the values across
+// for (QName propertyQName : properties.keySet())
+// {
+// PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName);
+// Serializable value = properties.get(propertyQName);
+// // get a persistable value
+// PropertyValue propertyValue = makePropertyValue(propertyDef, value);
+// convertedProperties.put(propertyQName, propertyValue);
+// }
+//
+// // Return the converted properties
+// return convertedProperties;
+// }
+//
+// private Map convertPropertyValues(Map propertyValues) throws InvalidNodeRefException
+// {
+// Map convertedProperties = new HashMap(17);
+//
+// // check the property type and copy the values across
+// for (Map.Entry entry : propertyValues.entrySet())
+// {
+// QName propertyQName = entry.getKey();
+// PropertyValue propertyValue = entry.getValue();
+// PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName);
+// Serializable property = makeSerializableValue(propertyDef, propertyValue);
+// convertedProperties.put(propertyQName, property);
+// }
+//
+// // Return the converted properties
+// return convertedProperties;
+// }
+//
public Collection getParents(NodeRef nodeRef) throws InvalidNodeRefException
{
// Get the node
@@ -1708,41 +1684,29 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
Long nodeId = nodePair.getFirst();
Pair primaryParentAssocPair = nodeDaoService.getPrimaryParentAssoc(nodeId);
Set newAspects = new HashSet(5);
- Map existingPropertyValues = nodeDaoService.getNodeProperties(nodeId);
- Map newPropertyValues = new HashMap(11);
+ Map existingProperties = nodeDaoService.getNodeProperties(nodeId);
+ Map newProperties = new HashMap(11);
// add the aspect
newAspects.add(ContentModel.ASPECT_ARCHIVED);
- PropertyValue archivedByProperty = makePropertyValue(
- dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_BY),
- AuthenticationUtil.getCurrentUserName());
- newPropertyValues.put(ContentModel.PROP_ARCHIVED_BY, archivedByProperty);
- PropertyValue archivedDateProperty = makePropertyValue(
- dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_DATE),
- new Date());
- newPropertyValues.put(ContentModel.PROP_ARCHIVED_DATE, archivedDateProperty);
- PropertyValue archivedPrimaryParentNodeRefProperty = makePropertyValue(
- dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC),
- primaryParentAssocPair.getSecond());
- newPropertyValues.put(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC, archivedPrimaryParentNodeRefProperty);
- PropertyValue originalOwnerProperty = existingPropertyValues.get(ContentModel.PROP_OWNER);
- PropertyValue originalCreatorProperty = existingPropertyValues.get(ContentModel.PROP_CREATOR);
- if (originalOwnerProperty != null || originalCreatorProperty != null)
+ newProperties.put(ContentModel.PROP_ARCHIVED_BY, AuthenticationUtil.getCurrentUserName());
+ newProperties.put(ContentModel.PROP_ARCHIVED_DATE, new Date());
+ newProperties.put(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC, primaryParentAssocPair.getSecond());
+ Serializable originalOwner = existingProperties.get(ContentModel.PROP_OWNER);
+ Serializable originalCreator = existingProperties.get(ContentModel.PROP_CREATOR);
+ if (originalOwner != null || originalCreator != null)
{
- newPropertyValues.put(
+ newProperties.put(
ContentModel.PROP_ARCHIVED_ORIGINAL_OWNER,
- originalOwnerProperty != null ? originalOwnerProperty : originalCreatorProperty);
+ originalOwner != null ? originalOwner : originalCreator);
}
// change the node ownership
newAspects.add(ContentModel.ASPECT_OWNABLE);
- PropertyValue newOwnerProperty = makePropertyValue(
- dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_ORIGINAL_OWNER),
- AuthenticationUtil.getCurrentUserName());
- newPropertyValues.put(ContentModel.PROP_OWNER, newOwnerProperty);
+ newProperties.put(ContentModel.PROP_OWNER, AuthenticationUtil.getCurrentUserName());
// Set the aspects and properties
- nodeDaoService.addNodeProperties(nodeId, newPropertyValues);
+ nodeDaoService.addNodeProperties(nodeId, newProperties);
nodeDaoService.addNodeAspects(nodeId, newAspects);
// move the node
@@ -1760,18 +1724,17 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
Long archivedNodeId = archivedNodePair.getFirst();
Set existingAspects = nodeDaoService.getNodeAspects(archivedNodeId);
Set newAspects = new HashSet(5);
- Map existingPropertyValues = nodeDaoService.getNodeProperties(archivedNodeId);
- Map newPropertyValues = new HashMap(11);
+ Map existingProperties = nodeDaoService.getNodeProperties(archivedNodeId);
+ Map newProperties = new HashMap(11);
// the node must be a top-level archive node
if (!existingAspects.contains(ContentModel.ASPECT_ARCHIVED))
{
throw new AlfrescoRuntimeException("The node to restore is not an archive node");
}
- ChildAssociationRef originalPrimaryParentAssocRef = (ChildAssociationRef) makeSerializableValue(
- dictionaryService.getProperty(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC),
- existingPropertyValues.get(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC));
- PropertyValue originalOwnerProperty = existingPropertyValues.get(ContentModel.PROP_ARCHIVED_ORIGINAL_OWNER);
+ ChildAssociationRef originalPrimaryParentAssocRef = (ChildAssociationRef) existingProperties.get(
+ ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC);
+ Serializable originalOwner = existingProperties.get(ContentModel.PROP_ARCHIVED_ORIGINAL_OWNER);
// remove the archived aspect
Set removePropertyQNames = new HashSet(11);
removePropertyQNames.add(ContentModel.PROP_ARCHIVED_ORIGINAL_PARENT_ASSOC);
@@ -1782,10 +1745,10 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
nodeDaoService.removeNodeAspects(archivedNodeId, Collections.singleton(ContentModel.ASPECT_ARCHIVED));
// restore the original ownership
- if (originalOwnerProperty != null)
+ if (originalOwner != null)
{
newAspects.add(ContentModel.ASPECT_OWNABLE);
- newPropertyValues.put(ContentModel.PROP_OWNER, originalOwnerProperty);
+ newProperties.put(ContentModel.PROP_OWNER, originalOwner);
}
if (destinationParentNodeRef == null)
@@ -2100,27 +2063,19 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl
}
}
- private String extractNameProperty(Map propertyValues)
+ private String extractNameProperty(Map properties)
{
- PropertyValue nameValue = propertyValues.get(ContentModel.PROP_NAME);
- if (nameValue == null)
- {
- return null;
- }
- String name = (String) nameValue.getValue(DataTypeDefinition.TEXT);
+ Serializable nameValue = properties.get(ContentModel.PROP_NAME);
+ String name = (String) DefaultTypeConverter.INSTANCE.convert(String.class, nameValue);
return name;
}
private void setChildNameUnique(Pair childAssocPair, Pair childNodePair)
{
// Get the node's existing name
- PropertyValue namePropertyValue = nodeDaoService.getNodeProperty(childNodePair.getFirst(), ContentModel.PROP_NAME);
- String nameValue = null;
- if (namePropertyValue != null)
- {
- nameValue = (String) namePropertyValue.getValue(DataTypeDefinition.TEXT);
- }
- setChildNameUnique(childAssocPair, nameValue, null);
+ Serializable nameValue = nodeDaoService.getNodeProperty(childNodePair.getFirst(), ContentModel.PROP_NAME);
+ String name = (String) DefaultTypeConverter.INSTANCE.convert(String.class, nameValue);
+ setChildNameUnique(childAssocPair, name, null);
}
/**
diff --git a/source/java/org/alfresco/repo/node/db/DbNodeServiceImplTest.java b/source/java/org/alfresco/repo/node/db/DbNodeServiceImplTest.java
index 47633e8f1b..6c4d5fa83e 100644
--- a/source/java/org/alfresco/repo/node/db/DbNodeServiceImplTest.java
+++ b/source/java/org/alfresco/repo/node/db/DbNodeServiceImplTest.java
@@ -26,6 +26,7 @@ package org.alfresco.repo.node.db;
import java.io.Serializable;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Locale;
@@ -33,11 +34,11 @@ import java.util.Map;
import javax.transaction.UserTransaction;
+import org.alfresco.i18n.I18NUtil;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.content.MimetypeMap;
import org.alfresco.repo.domain.ChildAssoc;
import org.alfresco.repo.domain.Node;
-import org.alfresco.repo.domain.NodeStatus;
import org.alfresco.repo.node.BaseNodeServiceTest;
import org.alfresco.repo.node.StoreArchiveMap;
import org.alfresco.repo.node.db.NodeDaoService.NodePropertyHandler;
@@ -352,6 +353,38 @@ public class DbNodeServiceImplTest extends BaseNodeServiceTest
mlTextProperty,
propertiesDirect.get(BaseNodeServiceTest.PROP_QNAME_ML_TEXT_VALUE));
}
+
+ /**
+ * Ensure that plain strings going into MLText properties is handled
+ */
+ @SuppressWarnings("unchecked")
+ public void testStringIntoMLTextProperty() throws Exception
+ {
+ String text = "Hello";
+ nodeService.setProperty(rootNodeRef, PROP_QNAME_ML_TEXT_VALUE, text);
+ Serializable mlTextCheck = nodeService.getProperty(rootNodeRef, PROP_QNAME_ML_TEXT_VALUE);
+ assertTrue("Plain string insertion should be returned as MLText", mlTextCheck instanceof MLText);
+ Locale defaultLocale = I18NUtil.getLocale();
+ MLText mlTextCheck2 = (MLText) mlTextCheck;
+ String mlTextDefaultCheck = mlTextCheck2.getDefaultValue();
+ assertEquals("Default MLText value was not set correctly", text, mlTextDefaultCheck);
+
+ // Reset the property
+ nodeService.setProperty(rootNodeRef, PROP_QNAME_ML_TEXT_VALUE, null);
+ Serializable nullValueCheck = nodeService.getProperty(rootNodeRef, PROP_QNAME_ML_TEXT_VALUE);
+
+ // Now, just pass a String in
+ nodeService.setProperty(rootNodeRef, PROP_QNAME_ML_TEXT_VALUE, text);
+ // Now update the property with some MLText
+ MLText mlText = new MLText();
+ mlText.addValue(Locale.ENGLISH, "Very good!");
+ mlText.addValue(Locale.FRENCH, "Très bon!");
+ mlText.addValue(Locale.GERMAN, "Sehr gut!");
+ nodeService.setProperty(rootNodeRef, PROP_QNAME_ML_TEXT_VALUE, mlText);
+ // Get it back and check
+ mlTextCheck = nodeService.getProperty(rootNodeRef, PROP_QNAME_ML_TEXT_VALUE);
+ assertEquals("Setting of MLText over String failed.", mlText, mlTextCheck);
+ }
public void testDuplicatePrimaryParentHandling() throws Exception
{
diff --git a/source/java/org/alfresco/repo/node/db/NodeDaoService.java b/source/java/org/alfresco/repo/node/db/NodeDaoService.java
index 64d08c3bd6..a982f8bc62 100644
--- a/source/java/org/alfresco/repo/node/db/NodeDaoService.java
+++ b/source/java/org/alfresco/repo/node/db/NodeDaoService.java
@@ -32,7 +32,6 @@ import java.util.Set;
import org.alfresco.repo.domain.ChildAssoc;
import org.alfresco.repo.domain.NodeAssoc;
-import org.alfresco.repo.domain.PropertyValue;
import org.alfresco.repo.domain.Transaction;
import org.alfresco.repo.domain.hibernate.DirtySessionAnnotation;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
@@ -132,22 +131,22 @@ public interface NodeDaoService
public void updateNode(Long nodeId, StoreRef storeRef, String uuid, QName nodeTypeQName);
@DirtySessionAnnotation(markDirty=false)
- public PropertyValue getNodeProperty(Long nodeId, QName propertyQName);
+ public Serializable getNodeProperty(Long nodeId, QName propertyQName);
@DirtySessionAnnotation(markDirty=false)
- public Map getNodeProperties(Long nodeId);
+ public Map getNodeProperties(Long nodeId);
@DirtySessionAnnotation(markDirty=true)
- public void addNodeProperty(Long nodeId, QName qname, PropertyValue propertyValue);
+ public void addNodeProperty(Long nodeId, QName qname, Serializable value);
@DirtySessionAnnotation(markDirty=true)
- public void addNodeProperties(Long nodeId, Map properties);
+ public void addNodeProperties(Long nodeId, Map properties);
@DirtySessionAnnotation(markDirty=true)
public void removeNodeProperties(Long nodeId, Set propertyQNames);
@DirtySessionAnnotation(markDirty=true)
- public void setNodeProperties(Long nodeId, Map properties);
+ public void setNodeProperties(Long nodeId, Map properties);
@DirtySessionAnnotation(markDirty=false)
public Set getNodeAspects(Long nodeId);
@@ -399,17 +398,6 @@ public interface NodeDaoService
@DirtySessionAnnotation(markDirty=true)
public void getPropertyValuesByActualType(DataTypeDefinition actualDataTypeDefinition, NodePropertyHandler handler);
- /**
- * @return Returns the total number of nodes in the ADM repository
- */
- @DirtySessionAnnotation(markDirty=false)
- public int getNodeCount();
- /**
- * @return Returns the total number of nodes in the ADM store
- */
- @DirtySessionAnnotation(markDirty=false)
- public int getNodeCount(final StoreRef storeRef);
-
/**
* Iterface to handle callbacks when iterating over properties
*
diff --git a/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java b/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java
index c804bc431a..db13bc6f91 100644
--- a/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java
+++ b/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java
@@ -25,34 +25,41 @@
package org.alfresco.repo.node.db.hibernate;
import java.io.Serializable;
+import java.io.UnsupportedEncodingException;
import java.net.InetAddress;
import java.net.UnknownHostException;
-import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.Iterator;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
import java.util.zip.CRC32;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.repo.cache.SimpleCache;
+import org.alfresco.repo.domain.AuditableProperties;
import org.alfresco.repo.domain.ChildAssoc;
import org.alfresco.repo.domain.DbAccessControlList;
+import org.alfresco.repo.domain.LocaleDAO;
import org.alfresco.repo.domain.NamespaceEntity;
import org.alfresco.repo.domain.Node;
import org.alfresco.repo.domain.NodeAssoc;
-import org.alfresco.repo.domain.NodeKey;
-import org.alfresco.repo.domain.NodeStatus;
+import org.alfresco.repo.domain.NodePropertyValue;
+import org.alfresco.repo.domain.PropertyMapKey;
import org.alfresco.repo.domain.PropertyValue;
import org.alfresco.repo.domain.QNameDAO;
import org.alfresco.repo.domain.QNameEntity;
import org.alfresco.repo.domain.Server;
import org.alfresco.repo.domain.Store;
-import org.alfresco.repo.domain.StoreKey;
import org.alfresco.repo.domain.Transaction;
import org.alfresco.repo.domain.UsageDeltaDAO;
import org.alfresco.repo.domain.hibernate.ChildAssocImpl;
@@ -61,7 +68,6 @@ import org.alfresco.repo.domain.hibernate.DbAccessControlListImpl;
import org.alfresco.repo.domain.hibernate.DirtySessionMethodInterceptor;
import org.alfresco.repo.domain.hibernate.NodeAssocImpl;
import org.alfresco.repo.domain.hibernate.NodeImpl;
-import org.alfresco.repo.domain.hibernate.NodeStatusImpl;
import org.alfresco.repo.domain.hibernate.ServerImpl;
import org.alfresco.repo.domain.hibernate.StoreImpl;
import org.alfresco.repo.domain.hibernate.TransactionImpl;
@@ -71,34 +77,39 @@ import org.alfresco.repo.security.permissions.AccessControlListProperties;
import org.alfresco.repo.security.permissions.SimpleAccessControlListProperties;
import org.alfresco.repo.security.permissions.impl.AclChange;
import org.alfresco.repo.security.permissions.impl.AclDaoComponent;
+import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.repo.transaction.TransactionAwareSingleton;
import org.alfresco.repo.transaction.TransactionalDao;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
+import org.alfresco.service.cmr.dictionary.DictionaryException;
+import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.dictionary.InvalidTypeException;
+import org.alfresco.service.cmr.dictionary.PropertyDefinition;
import org.alfresco.service.cmr.repository.AssociationExistsException;
import org.alfresco.service.cmr.repository.AssociationRef;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException;
+import org.alfresco.service.cmr.repository.EntityRef;
+import org.alfresco.service.cmr.repository.InvalidNodeRefException;
import org.alfresco.service.cmr.repository.InvalidStoreRefException;
+import org.alfresco.service.cmr.repository.MLText;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreExistsException;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter;
+import org.alfresco.service.cmr.repository.datatype.TypeConversionException;
import org.alfresco.service.cmr.repository.datatype.TypeConverter;
import org.alfresco.service.namespace.QName;
+import org.alfresco.util.EqualsHelper;
import org.alfresco.util.GUID;
import org.alfresco.util.Pair;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.hibernate.HibernateException;
-import org.hibernate.ObjectDeletedException;
import org.hibernate.Query;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;
-import org.hibernate.StaleStateException;
-import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
@@ -110,7 +121,9 @@ import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
*/
public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements NodeDaoService, TransactionalDao
{
+ private static final String QUERY_GET_STORE_BY_ALL = "store.GetStoreByAll";
private static final String QUERY_GET_ALL_STORES = "store.GetAllStores";
+ private static final String QUERY_GET_NODE_BY_STORE_ID_AND_UUID = "node.GetNodeByStoreIdAndUuid";
private static final String QUERY_GET_CHILD_NODE_IDS = "node.GetChildNodeIds";
private static final String QUERY_GET_CHILD_ASSOCS_BY_ALL = "node.GetChildAssocsByAll";
private static final String QUERY_GET_CHILD_ASSOC_BY_TYPE_AND_NAME = "node.GetChildAssocByTypeAndName";
@@ -130,10 +143,9 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
private static final String QUERY_GET_NODES_WITH_PROPERTY_VALUES_BY_STRING_AND_STORE = "node.GetNodesWithPropertyValuesByStringAndStore";
private static final String QUERY_GET_NODES_WITH_PROPERTY_VALUES_BY_ACTUAL_TYPE = "node.GetNodesWithPropertyValuesByActualType";
private static final String QUERY_GET_SERVER_BY_IPADDRESS = "server.getServerByIpAddress";
-
- private static final String QUERY_GET_NODE_COUNT = "node.GetNodeCount";
- private static final String QUERY_GET_NODE_COUNT_FOR_STORE = "node.GetNodeCountForStore";
+ private static final Long NULL_CACHE_VALUE = new Long(-1);
+
private static Log logger = LogFactory.getLog(HibernateNodeDaoServiceImpl.class);
/** Log to trace parent association caching: classname + .ParentAssocsCache */
private static Log loggerParentAssocsCache = LogFactory.getLog(HibernateNodeDaoServiceImpl.class.getName() + ".ParentAssocsCache");
@@ -141,6 +153,10 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
private QNameDAO qnameDAO;
private UsageDeltaDAO usageDeltaDAO;
private AclDaoComponent aclDaoComponent;
+ private LocaleDAO localeDAO;
+ private DictionaryService dictionaryService;
+ /** A cache mapping StoreRef and NodeRef instances to the entity IDs (primary key) */
+ private SimpleCache storeAndNodeIdCache;
/** A cache for more performant lookups of the parent associations */
private SimpleCache> parentAssocsCache;
private boolean isDebugEnabled = logger.isDebugEnabled();
@@ -216,6 +232,32 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
this.aclDaoComponent = aclDaoComponent;
}
+ /**
+ * Set the component for creating Locale entities
+ */
+ public void setLocaleDAO(LocaleDAO localeDAO)
+ {
+ this.localeDAO = localeDAO;
+ }
+
+ /**
+ * Set the component for querying the dictionary model
+ */
+ public void setDictionaryService(DictionaryService dictionaryService)
+ {
+ this.dictionaryService = dictionaryService;
+ }
+
+    /**
+     * Set the transaction-aware cache used to store Store and Root Node IDs by Store Reference
+ *
+ * @param storeAndNodeIdCache the cache
+ */
+ public void setStoreAndNodeIdCache(SimpleCache storeAndNodeIdCache)
+ {
+ this.storeAndNodeIdCache = storeAndNodeIdCache;
+ }
+
/**
* Set the transaction-aware cache to store parent associations by child node id
*
@@ -398,18 +440,66 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
getSession().flush();
}
- private Store getStore(StoreRef storeRef)
+ /**
+ * @return Returns the Store entity or null
+ */
+ private Store getStore(final StoreRef storeRef)
{
- StoreKey storeKey = new StoreKey(storeRef);
- Store store = (Store) getHibernateTemplate().get(StoreImpl.class, storeKey);
+ // Look it up in the cache
+ Long storeId = storeAndNodeIdCache.get(storeRef);
+ // Load it
+ if (storeId != null)
+ {
+ // Check for null persistence (previously missed value)
+ if (storeId.equals(NULL_CACHE_VALUE))
+ {
+ // There is no such value matching
+ return null;
+ }
+ // Don't use the method that throws an exception as the cache might be invalid.
+ Store store = (Store) getSession().get(StoreImpl.class, storeId);
+ if (store == null)
+ {
+ // It is not available, so we need to go the query route.
+ // But first remove the cache entry
+ storeAndNodeIdCache.remove(storeRef);
+ // Recurse, but this time there is no cache entry
+ return getStore(storeRef);
+ }
+ else
+ {
+ return store;
+ }
+ }
+ // Query for it
+ HibernateCallback callback = new HibernateCallback()
+ {
+ public Object doInHibernate(Session session)
+ {
+ Query query = session
+ .getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_STORE_BY_ALL)
+ .setString("protocol", storeRef.getProtocol())
+ .setString("identifier", storeRef.getIdentifier());
+ return query.uniqueResult();
+ }
+ };
+ Store store = (Store) getHibernateTemplate().execute(callback);
+ if (store == null)
+ {
+ // Persist the null entry
+ storeAndNodeIdCache.put(storeRef, NULL_CACHE_VALUE);
+ }
+ else
+ {
+ storeAndNodeIdCache.put(storeRef, store.getId());
+ }
// done
return store;
}
private Store getStoreNotNull(StoreRef storeRef)
{
- StoreKey storeKey = new StoreKey(storeRef);
- Store store = (Store) getHibernateTemplate().get(StoreImpl.class, storeKey);
+ Store store = getStore(storeRef);
if (store == null)
{
throw new InvalidStoreRefException(storeRef);
@@ -525,9 +615,10 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
store = new StoreImpl();
- // set key
- store.setKey(new StoreKey(storeRef));
- // persist so that it is present in the hibernate cache
+ // set key values
+ store.setProtocol(storeRef.getProtocol());
+ store.setIdentifier(storeRef.getIdentifier());
+ // The root node may be null exactly because the Store needs an ID before it can be assigned to a node
getHibernateTemplate().save(store);
// create and assign a root node
Node rootNode = newNode(
@@ -545,186 +636,236 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
DbAccessControlList acl = aclDaoComponent.getDbAccessControlList(id);
rootNode.setAccessControlList(acl);
- // done
+ // Cache the value
+ storeAndNodeIdCache.put(storeRef, store.getId());
+ // Done
return new Pair(rootNode.getId(), rootNode.getNodeRef());
}
public NodeRef.Status getNodeRefStatus(NodeRef nodeRef)
{
- NodeStatus nodeStatus = getNodeStatusOrNull(nodeRef);
- if (nodeStatus == null) // node never existed
+ // Get the store
+ StoreRef storeRef = nodeRef.getStoreRef();
+ Store store = getStore(storeRef);
+ if (store == null)
+ {
+ // No such store therefore no such node reference
+ return null;
+ }
+ Node node = getNodeOrNull(store, nodeRef.getId());
+ if (node == null) // node never existed
{
return null;
}
else
{
return new NodeRef.Status(
- nodeStatus.getTransaction().getChangeTxnId(),
- nodeStatus.isDeleted());
+ node.getTransaction().getChangeTxnId(),
+ node.getDeleted());
}
}
- private NodeStatus getNodeStatusOrNull(NodeRef nodeRef)
+ private Node getNodeOrNull(final Store store, final String uuid)
{
- NodeKey nodeKey = new NodeKey(nodeRef);
- NodeStatus status = null;
- try
+ NodeRef nodeRef = new NodeRef(store.getStoreRef(), uuid);
+ // Look it up in the cache
+ Long nodeId = storeAndNodeIdCache.get(nodeRef);
+ // Load it
+ if (nodeId != null)
{
- status = (NodeStatus) getHibernateTemplate().get(NodeStatusImpl.class, nodeKey);
- }
- catch (DataAccessException e)
- {
- if (e.contains(ObjectDeletedException.class))
+ // Check for null persistence (previously missed value)
+ if (nodeId.equals(NULL_CACHE_VALUE))
{
- throw new StaleStateException("Node status was deleted: " + nodeKey);
+ // There is no such value matching
+ return null;
+ }
+ // Don't use the method that throws an exception as the cache might be invalid.
+ Node node = (Node) getSession().get(NodeImpl.class, nodeId);
+ if (node == null)
+ {
+ // It is not available, so we need to go the query route.
+ // But first remove the cache entry
+ storeAndNodeIdCache.remove(nodeRef);
+ // Recurse, but this time there is no cache entry
+ return getNodeOrNull(store, uuid);
+ }
+ else
+ {
+ return node;
}
- throw e;
}
- return status;
- }
-
- private void recordNodeUpdate(Node node)
- {
- NodeRef nodeRef = node.getNodeRef();
- Transaction currentTxn = getCurrentTransaction();
- NodeStatus status = getNodeStatusOrNull(nodeRef);
- if (status == null)
+ // Query for it
+ HibernateCallback callback = new HibernateCallback()
{
- NodeKey key = new NodeKey(nodeRef);
- // We need to to create a status entry for it
- status = new NodeStatusImpl();
- status.setKey(key);
- status.setNode(node);
- status.setTransaction(currentTxn);
- getHibernateTemplate().save(status);
+ public Object doInHibernate(Session session)
+ {
+ Query query = session
+ .getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_NODE_BY_STORE_ID_AND_UUID)
+ .setLong("storeId", store.getId())
+ .setString("uuid", uuid);
+ DirtySessionMethodInterceptor.setQueryFlushMode(session, query);
+ return query.uniqueResult();
+ }
+ };
+ Node node = (Node) getHibernateTemplate().execute(callback);
+ // Cache the value
+ if (node == null)
+ {
+ storeAndNodeIdCache.put(nodeRef, NULL_CACHE_VALUE);
}
else
{
- status.setNode(node);
- status.setTransaction(currentTxn);
+ storeAndNodeIdCache.put(nodeRef, node.getId());
}
+ // TODO: Fill cache here
+ return node;
+ }
+
+ private void updateNodeStatus(Node node, boolean deleted)
+ {
+ Transaction currentTxn = getCurrentTransaction();
+ // Update it if required
+ if (!EqualsHelper.nullSafeEquals(node.getTransaction(), currentTxn))
+ {
+ // Txn has changed
+ DirtySessionMethodInterceptor.setSessionDirty();
+ node.setTransaction(currentTxn);
+ }
+ if (node.getDeleted() != deleted)
+ {
+ DirtySessionMethodInterceptor.setSessionDirty();
+ node.setDeleted(deleted);
+ }
+ }
+
+ private static final String UNKOWN_USER = "unkown";
+ private String getCurrentUser()
+ {
+ String user = AuthenticationUtil.getCurrentUserName();
+ return (user == null) ? UNKOWN_USER : user;
+ }
+
+ private void recordNodeCreate(Node node)
+ {
+ updateNodeStatus(node, false);
+ // Handle cm:auditable
+ String currentUser = getCurrentUser();
+ Date currentDate = new Date();
+ AuditableProperties auditableProperties = node.getAuditableProperties();
+ auditableProperties.setAuditValues(currentUser, currentDate, true);
}
- private void recordNodeDelete(NodeRef nodeRef)
+ private void recordNodeUpdate(Node node)
{
- Transaction currentTxn = getCurrentTransaction();
- NodeStatus status = getNodeStatusOrNull(nodeRef);
- if (status == null)
- {
- NodeKey key = new NodeKey(nodeRef);
- // We need to to create a status entry for it
- status = new NodeStatusImpl();
- status.setKey(key);
- status.setNode(null);
- status.setTransaction(currentTxn);
- getHibernateTemplate().save(status);
- }
- else
- {
- status.setNode(null);
- status.setTransaction(currentTxn);
- }
+ updateNodeStatus(node, false);
+ // Handle cm:auditable
+ String currentUser = getCurrentUser();
+ Date currentDate = new Date();
+ AuditableProperties auditableProperties = node.getAuditableProperties();
+ auditableProperties.setAuditValues(currentUser, currentDate, false);
+ }
+
+ private void recordNodeDelete(Node node)
+ {
+ updateNodeStatus(node, true);
+ // Handle cm:auditable
+ String currentUser = getCurrentUser();
+ Date currentDate = new Date();
+ AuditableProperties auditableProperties = node.getAuditableProperties();
+ auditableProperties.setAuditValues(currentUser, currentDate, false);
}
public Pair newNode(StoreRef storeRef, String uuid, QName nodeTypeQName) throws InvalidTypeException
{
- Store store = (Store) getHibernateTemplate().load(StoreImpl.class, new StoreKey(storeRef));
+ Store store = (Store) getStoreNotNull(storeRef);
Node newNode = newNode(store, uuid, nodeTypeQName);
- return new Pair(newNode.getId(), newNode.getNodeRef());
+ Long nodeId = newNode.getId();
+ NodeRef nodeRef = newNode.getNodeRef();
+ return new Pair(nodeId, nodeRef);
}
private Node newNode(Store store, String uuid, QName nodeTypeQName) throws InvalidTypeException
{
- NodeKey key = new NodeKey(store.getKey(), uuid);
-
- // create (or reuse) the mandatory node status
- NodeStatus status = (NodeStatus) getHibernateTemplate().get(NodeStatusImpl.class, key);
- if (status != null)
- {
- // The node existed at some point.
- // Although unlikely, it is possible that the node was deleted in this transaction.
- // If that is the case, then the session has to be flushed so that the database
- // constraints aren't violated as the node creation will write to the database to
- // get an ID
- if (status.getTransaction().getChangeTxnId().equals(AlfrescoTransactionSupport.getTransactionId()))
- {
- // flush
- HibernateCallback callback = new HibernateCallback()
- {
- public Object doInHibernate(Session session) throws HibernateException, SQLException
- {
- DirtySessionMethodInterceptor.flushSession(session);
- return null;
- }
- };
- getHibernateTemplate().execute(callback);
- }
- }
-
// Get the qname for the node type
QNameEntity nodeTypeQNameEntity = qnameDAO.getOrCreateQNameEntity(nodeTypeQName);
- // build a concrete node based on a bootstrap type
- Node node = new NodeImpl();
- // set other required properties
- node.setStore(store);
- node.setUuid(uuid);
- node.setTypeQName(nodeTypeQNameEntity);
- // persist the node
- getHibernateTemplate().save(node);
-
- // Record change ID
- recordNodeUpdate(node);
+ // Get any existing Node. A node with this UUID may have existed before, but must be marked
+ // deleted; otherwise it will be considered live and valid
+ Node node = getNodeOrNull(store, uuid);
+ // If there is already a node attached, then there is a clash
+ if (node != null)
+ {
+ if (!node.getDeleted())
+ {
+ throw new InvalidNodeRefException("Live Node exists: " + node.getNodeRef(), node.getNodeRef());
+ }
+ // Set clean values
+ node.setTypeQName(nodeTypeQNameEntity);
+ node.setDeleted(false);
+ node.setAccessControlList(null);
+ // Record node change
+ recordNodeCreate(node);
+ }
+ else
+ {
+ // There is no existing node, deleted or otherwise.
+ node = new NodeImpl();
+ node.setStore(store);
+ node.setUuid(uuid);
+ node.setTypeQName(nodeTypeQNameEntity);
+ node.setDeleted(false);
+ node.setAccessControlList(null);
+ // Record node change
+ recordNodeCreate(node);
+ // Persist it
+ getHibernateTemplate().save(node);
+
+ // Update the cache
+ storeAndNodeIdCache.put(node.getNodeRef(), node.getId());
+ }
- // done
+ // Done
return node;
}
+ /**
+ * This moves the entire node, ensuring that a trail is left behind. It is more
+ * efficient to move the node and recreate a deleted node in its wake because of
+ * the other properties and aspects that need to go with the node.
+ */
public Pair moveNodeToStore(Long nodeId, StoreRef storeRef)
{
- Store store = getStoreNotNull(storeRef);
Node node = getNodeNotNull(nodeId);
- // Only do anything if the store has changed
- Store oldStore = node.getStore();
- if (oldStore.getKey().equals(store.getKey()))
- {
- // No change
- return new Pair(node.getId(), node.getNodeRef());
- }
- NodeRef oldNodeRef = node.getNodeRef();
+ // Update the node
+ updateNode(nodeId, storeRef, null, null);
+ NodeRef nodeRef = node.getNodeRef();
- // Set the store
- node.setStore(store);
-
- // Record change ID
- recordNodeDelete(oldNodeRef);
- recordNodeUpdate(node);
-
- return new Pair(node.getId(), node.getNodeRef());
+ return new Pair(node.getId(), nodeRef);
}
public Pair getNodePair(NodeRef nodeRef)
{
- // get it via the node status
- NodeStatus status = getNodeStatusOrNull(nodeRef);
- if (status == null)
+ Store store = getStore(nodeRef.getStoreRef());
+ if (store == null)
{
- // no status implies no node
+ return null;
+ }
+ // Get the node: none, deleted or live
+ Node node = getNodeOrNull(store, nodeRef.getId());
+ if (node == null)
+ {
+ // The node doesn't exist even as a deleted reference
+ return null;
+ }
+ else if (node.getDeleted())
+ {
+ // The reference exists, but only as a deleted node
return null;
}
else
{
- // a status may have a node
- Node node = status.getNode();
- // The node might be null (a deleted node)
- if (node != null)
- {
- return new Pair(node.getId(), nodeRef);
- }
- else
- {
- return null;
- }
+ // The node is live
+ return new Pair(node.getId(), nodeRef);
}
}
@@ -735,6 +876,11 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{
return null;
}
+ else if (node.getDeleted())
+ {
+ // The node reference exists, but it is officially deleted
+ return null;
+ }
else
{
return new Pair(nodeId, node.getNodeRef());
@@ -786,132 +932,277 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
}
- public void updateNode(Long nodeId, StoreRef storeRef, String uuid, QName nodeTypeQName)
+ public void updateNode(Long nodeId, StoreRef storeRefAfter, String uuidAfter, QName nodeTypeQName)
{
Node node = getNodeNotNull(nodeId);
+ Store storeBefore = node.getStore();
+ String uuidBefore = node.getUuid();
NodeRef nodeRefBefore = node.getNodeRef();
- if (storeRef != null && storeRef.equals(node.getStore().getStoreRef()))
- {
- Store store = getStoreNotNull(storeRef);
- node.setStore(store);
- }
- if (uuid != null)
- {
- node.setUuid(uuid);
- }
- if (nodeTypeQName != null)
- {
- QNameEntity nodeTypeQNameEntity = qnameDAO.getOrCreateQNameEntity(nodeTypeQName);
- node.setTypeQName(nodeTypeQNameEntity);
- }
- NodeRef nodeRefAfter = node.getNodeRef();
- // Record change ID
- if (nodeRefBefore.equals(nodeRefAfter))
+ final Store storeAfter;
+ if (storeRefAfter == null)
{
- recordNodeUpdate(node);
+ storeAfter = storeBefore;
+ storeRefAfter = storeBefore.getStoreRef();
}
else
{
- recordNodeDelete(nodeRefBefore);
+ storeAfter = getStoreNotNull(storeRefAfter);
+ }
+ if (uuidAfter == null)
+ {
+ uuidAfter = uuidBefore;
+ }
+
+ NodeRef nodeRefAfter = new NodeRef(storeRefAfter, uuidAfter);
+ if (!nodeRefAfter.equals(nodeRefBefore))
+ {
+ Node conflictingNode = getNodeOrNull(storeAfter, uuidAfter);
+ if (conflictingNode != null)
+ {
+ if (!conflictingNode.getDeleted())
+ {
+ throw new InvalidNodeRefException("Live Node exists: " + node.getNodeRef(), node.getNodeRef());
+ }
+ // It is a deleted node so just remove the conflict
+ getHibernateTemplate().delete(conflictingNode);
+ // Flush immediately to ensure that the record is deleted
+ DirtySessionMethodInterceptor.flushSession(getSession(), true);
+ // The cache entry will be overwritten so we don't need to do it here
+ }
+
+ // Change the store
+ node.setStore(storeAfter);
+ node.setUuid(uuidAfter);
+ // We will need to record the change for the new node
+ recordNodeUpdate(node);
+ // Flush immediately to ensure that the record changes
+ DirtySessionMethodInterceptor.flushSession(getSession(), true);
+
+ // We need to create a dummy reference for the node that was just moved away
+ Node oldNodeDummy = new NodeImpl();
+ oldNodeDummy.setStore(storeBefore);
+ oldNodeDummy.setUuid(uuidBefore);
+ oldNodeDummy.setTypeQName(node.getTypeQName());
+ recordNodeDelete(oldNodeDummy);
+ // Persist
+ getHibernateTemplate().save(oldNodeDummy);
+
+ // Update cache entries
+ NodeRef nodeRef = node.getNodeRef();
+ storeAndNodeIdCache.put(nodeRef, node.getId());
+ storeAndNodeIdCache.put(nodeRefBefore, oldNodeDummy.getId());
+ }
+
+ if (nodeTypeQName != null && !nodeTypeQName.equals(node.getTypeQName().getQName()))
+ {
+ QNameEntity nodeTypeQNameEntity = qnameDAO.getOrCreateQNameEntity(nodeTypeQName);
+ node.setTypeQName(nodeTypeQNameEntity);
+ // We will need to record the change
recordNodeUpdate(node);
}
}
- public PropertyValue getNodeProperty(Long nodeId, QName propertyQName)
+ public Serializable getNodeProperty(Long nodeId, QName propertyQName)
{
+ Node node = getNodeNotNull(nodeId);
+
+ // Handle cm:auditable
+ if (AuditableProperties.isAuditableProperty(propertyQName))
+ {
+ AuditableProperties auditableProperties = node.getAuditableProperties();
+ return auditableProperties.getAuditableProperty(propertyQName);
+ }
+
QNameEntity propertyQNameEntity = qnameDAO.getQNameEntity(propertyQName);
if (propertyQNameEntity == null)
{
return null;
}
- Node node = getNodeNotNull(nodeId);
- Map nodeProperties = node.getProperties();
- return nodeProperties.get(propertyQNameEntity.getId());
+ Map nodeProperties = node.getProperties();
+ Serializable propertyValue = HibernateNodeDaoServiceImpl.getPublicProperty(
+ nodeProperties,
+ propertyQName,
+ qnameDAO, localeDAO, dictionaryService);
+ return propertyValue;
}
- public Map getNodeProperties(Long nodeId)
+ public Map getNodeProperties(Long nodeId)
{
Node node = getNodeNotNull(nodeId);
- Map nodeProperties = node.getProperties();
+ Map nodeProperties = node.getProperties();
// Convert the QName IDs
- Map converted = new HashMap(nodeProperties.size(), 1.0F);
- for (Map.Entry entry : nodeProperties.entrySet())
- {
- Long qnameEntityId = entry.getKey();
- QName qname = qnameDAO.getQName(qnameEntityId);
- converted.put(qname, entry.getValue());
- }
+ Map converted = HibernateNodeDaoServiceImpl.convertToPublicProperties(
+ nodeProperties,
+ qnameDAO,
+ localeDAO,
+ dictionaryService);
- // Make immutable
+ // Handle cm:auditable
+ AuditableProperties auditableProperties = node.getAuditableProperties();
+ converted.putAll(auditableProperties.getAuditableProperties());
+
+ // Done
return converted;
}
- public void addNodeProperty(Long nodeId, QName qname, PropertyValue propertyValue)
+ private void addNodePropertyImpl(Node node, QName qname, Serializable value, Long localeId)
+ {
+ // Handle cm:auditable
+ if (AuditableProperties.isAuditableProperty(qname))
+ {
+ // This is never set manually
+ return;
+ }
+
+ PropertyDefinition propertyDef = dictionaryService.getProperty(qname);
+ Long qnameId = qnameDAO.getOrCreateQNameEntity(qname).getId();
+
+ Map persistableProperties = new HashMap(3);
+
+ HibernateNodeDaoServiceImpl.addValueToPersistedProperties(
+ persistableProperties,
+ propertyDef,
+ (short)-1,
+ qnameId,
+ localeId,
+ value,
+ localeDAO);
+
+ Map nodeProperties = node.getProperties();
+
+ Iterator oldPropertyKeysIterator = nodeProperties.keySet().iterator();
+ while (oldPropertyKeysIterator.hasNext())
+ {
+ PropertyMapKey oldPropertyKey = oldPropertyKeysIterator.next();
+ // If the qname doesn't match, then ignore
+ if (!oldPropertyKey.getQnameId().equals(qnameId))
+ {
+ continue;
+ }
+ // The qname matches, but is the key present in the new values
+ if (persistableProperties.containsKey(oldPropertyKey))
+ {
+ // The key is present in both maps so it'll be updated
+ continue;
+ }
+ // Remove the entry from the node's properties
+ oldPropertyKeysIterator.remove();
+ }
+
+ // Now add all the new properties. They will overwrite and/or add values.
+ nodeProperties.putAll(persistableProperties);
+ }
+
+ public void addNodeProperty(Long nodeId, QName qname, Serializable propertyValue)
{
Node node = getNodeNotNull(nodeId);
- QNameEntity qnameEntity = qnameDAO.getOrCreateQNameEntity(qname);
- Map nodeProperties = node.getProperties();
- nodeProperties.put(qnameEntity.getId(), propertyValue);
+ Long localeId = localeDAO.getOrCreateDefaultLocalePair().getFirst();
+ addNodePropertyImpl(node, qname, propertyValue, localeId);
// Record change ID
recordNodeUpdate(node);
}
-
- public void addNodeProperties(Long nodeId, Map properties)
+
+ @SuppressWarnings("unchecked")
+ public void addNodeProperties(Long nodeId, Map properties)
{
Node node = getNodeNotNull(nodeId);
- Map nodeProperties = node.getProperties();
- for (Map.Entry entry : properties.entrySet())
+ Long localeId = localeDAO.getOrCreateDefaultLocalePair().getFirst();
+ for (Map.Entry entry : properties.entrySet())
{
- QNameEntity qnameEntity = qnameDAO.getOrCreateQNameEntity(entry.getKey());
- nodeProperties.put(qnameEntity.getId(), entry.getValue());
+ QName qname = entry.getKey();
+ Serializable value = entry.getValue();
+ addNodePropertyImpl(node, qname, value, localeId);
}
// Record change ID
recordNodeUpdate(node);
}
- public void removeNodeProperties(Long nodeId, Set propertyQNames)
+ public void setNodeProperties(Long nodeId, Map propertiesIncl)
{
Node node = getNodeNotNull(nodeId);
- Map nodeProperties = node.getProperties();
-
- for (QName qname : propertyQNames)
+
+ // Handle cm:auditable. These need to be removed from the properties.
+ Map properties = new HashMap(propertiesIncl.size());
+ for (Map.Entry entry : propertiesIncl.entrySet())
{
- QNameEntity qnameEntity = qnameDAO.getOrCreateQNameEntity(qname);
- nodeProperties.remove(qnameEntity.getId());
+ QName propertyQName = entry.getKey();
+ Serializable value = entry.getValue();
+ if (AuditableProperties.isAuditableProperty(propertyQName))
+ {
+ continue;
+ }
+ // The value was NOT an auditable value
+ properties.put(propertyQName, value);
+ }
+
+ // Convert
+ Map persistableProperties = HibernateNodeDaoServiceImpl.convertToPersistentProperties(
+ properties,
+ qnameDAO,
+ localeDAO,
+ dictionaryService);
+
+ // Get the persistent map attached to the node
+ Map nodeProperties = node.getProperties();
+
+ // In order to make as few changes as possible, we need to update existing properties wherever possible.
+ // This means keeping track of map keys that weren't updated
+ Set toRemove = new HashSet(nodeProperties.keySet());
+
+ // Loop over the converted values and update the persisted node properties map
+ for (Map.Entry entry : persistableProperties.entrySet())
+ {
+ PropertyMapKey key = entry.getKey();
+ toRemove.remove(key);
+ // Add the value to the node's map
+ nodeProperties.put(key, entry.getValue());
+ }
+
+ // Now remove all untouched keys
+ for (PropertyMapKey key : toRemove)
+ {
+ nodeProperties.remove(key);
}
// Record change ID
recordNodeUpdate(node);
}
- public void setNodeProperties(Long nodeId, Map properties)
+ public void removeNodeProperties(Long nodeId, Set propertyQNamesIncl)
{
Node node = getNodeNotNull(nodeId);
- Map nodeProperties = node.getProperties();
- nodeProperties.clear();
-
- Set toRemove = new HashSet(nodeProperties.keySet());
-
- for (Map.Entry entry : properties.entrySet())
+ // Handle cm:auditable. These need to be removed from the list.
+ Set propertyQNames = new HashSet(propertyQNamesIncl.size());
+ for (QName propertyQName : propertyQNamesIncl)
{
- QNameEntity qnameEntity = qnameDAO.getOrCreateQNameEntity(entry.getKey());
- Long qnameEntityId = qnameEntity.getId();
- nodeProperties.put(qnameEntityId, entry.getValue());
- // It's live
- toRemove.remove(qnameEntityId);
+ if (AuditableProperties.isAuditableProperty(propertyQName))
+ {
+ continue;
+ }
+ propertyQNames.add(propertyQName);
}
- // Remove all entries that weren't in the updated set
- for (Long qnameEntityIdToRemove : toRemove)
+ Map nodeProperties = node.getProperties();
+
+ Set propertyQNameIds = qnameDAO.convertQNamesToIds(propertyQNames, true);
+
+ // Loop over the current properties and remove any that have the same qname.
+ // Make a copy as we will modify the original map.
+ Set entrySet = new HashSet(nodeProperties.keySet());
+ for (PropertyMapKey propertyMapKey : entrySet)
{
- nodeProperties.remove(qnameEntityIdToRemove);
+ Long propertyQNameId = propertyMapKey.getQnameId();
+ if (propertyQNameIds.contains(propertyQNameId))
+ {
+ nodeProperties.remove(propertyMapKey);
+ }
}
// Record change ID
@@ -933,6 +1224,9 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
// Add sys:referenceable
nodeAspectQNames.add(ContentModel.ASPECT_REFERENCEABLE);
+ // Add cm:auditable
+ nodeAspectQNames.add(ContentModel.ASPECT_AUDITABLE);
+
// Make immutable
return nodeAspectQNames;
}
@@ -941,12 +1235,11 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{
Node node = getNodeNotNull(nodeId);
+ aspectQNames = new HashSet(aspectQNames);
// Remove sys:referenceable
- if (aspectQNames.contains(ContentModel.ASPECT_REFERENCEABLE))
- {
- aspectQNames = new HashSet(aspectQNames);
- aspectQNames.remove(ContentModel.ASPECT_REFERENCEABLE);
- }
+ aspectQNames.remove(ContentModel.ASPECT_REFERENCEABLE);
+ // Handle cm:auditable
+ aspectQNames.remove(ContentModel.ASPECT_AUDITABLE);
Set nodeAspects = node.getAspects();
@@ -964,12 +1257,11 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{
Node node = getNodeNotNull(nodeId);
+ aspectQNames = new HashSet(aspectQNames);
// Remove sys:referenceable
- if (aspectQNames.contains(ContentModel.ASPECT_REFERENCEABLE))
- {
- aspectQNames = new HashSet(aspectQNames);
- aspectQNames.remove(ContentModel.ASPECT_REFERENCEABLE);
- }
+ aspectQNames.remove(ContentModel.ASPECT_REFERENCEABLE);
+ // Handle cm:auditable
+ aspectQNames.remove(ContentModel.ASPECT_AUDITABLE);
Set nodeAspects = node.getAspects();
@@ -992,6 +1284,11 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{
return true;
}
+ // Shortcut cm:auditable
+ else if (aspectQName.equals(ContentModel.ASPECT_AUDITABLE))
+ {
+ return true;
+ }
QNameEntity aspectQNameEntity = qnameDAO.getQNameEntity(aspectQName);
if (aspectQNameEntity == null)
@@ -1013,7 +1310,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
deleteNodeInternal(node, false, deletedChildAssocIds);
// Record change ID
- recordNodeDelete(node.getNodeRef());
+ recordNodeDelete(node);
}
private static final String QUERY_DELETE_PARENT_ASSOCS = "node.DeleteParentAssocs";
@@ -1021,6 +1318,10 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
private static final String QUERY_DELETE_NODE_ASSOCS = "node.DeleteNodeAssocs";
/**
+ * Does a full cleanup of the node if the deleted flag is off. If
+ * the node is marked as deleted then the cleanup is assumed to be
+ * unnecessary and the node entry itself is cleaned up.
+ *
* @param node the node to delete
* @param cascade true to cascade delete
* @param deletedChildAssocIds previously deleted child associations
@@ -1109,8 +1410,10 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
// Delete deltas
usageDeltaDAO.deleteDeltas(nodeId);
- // finally delete the node
- getHibernateTemplate().delete(node);
+// // finally delete the node
+// getHibernateTemplate().delete(node);
+ node.setDeleted(true);
+
// Remove node from cache
parentAssocsCache.remove(nodeId);
if (isDebugParentAssocCacheEnabled)
@@ -1125,7 +1428,14 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
private long getCrc(String str)
{
CRC32 crc = new CRC32();
- crc.update(str.getBytes());
+ try
+ {
+ crc.update(str.getBytes("UTF-8")); // https://issues.alfresco.com/jira/browse/ALFCOM-1335
+ }
+ catch (UnsupportedEncodingException e)
+ {
+ throw new RuntimeException("UTF-8 encoding is not supported");
+ }
return crc.getValue();
}
@@ -1349,7 +1659,6 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
else
{
- recordNodeDelete(oldChildNodeRef);
recordNodeUpdate(newChildNode);
}
@@ -2332,6 +2641,11 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
return;
}
final Long propQNameEntityId = propQNameEntity.getId();
+ final Long defaultLocaleEntityId = localeDAO.getDefaultLocalePair().getFirst();
+ final PropertyMapKey propKey = new PropertyMapKey();
+ propKey.setQnameId(propQNameEntityId);
+ propKey.setLocaleId(defaultLocaleEntityId);
+ propKey.setListIndex((short)0);
// Run the query
HibernateCallback callback = new HibernateCallback()
{
@@ -2341,7 +2655,7 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
.getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_NODES_WITH_PROPERTY_VALUES_BY_STRING_AND_STORE)
.setString("protocol", storeRef.getProtocol())
.setString("identifier", storeRef.getIdentifier())
- .setLong("propQNameId", propQNameEntityId)
+ .setParameter("propKey", propKey)
.setString("propStringValue", value)
;
DirtySessionMethodInterceptor.setQueryFlushMode(session, query);
@@ -2387,16 +2701,17 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
{
Node node = (Node) results.get()[0];
// loop through all the node properties
- Map properties = node.getProperties();
- for (Map.Entry entry : properties.entrySet())
+ Map properties = node.getProperties();
+ for (Map.Entry entry : properties.entrySet())
{
- Long propertyQNameId = entry.getKey();
- PropertyValue propertyValue = entry.getValue();
+ PropertyMapKey propertyKey = entry.getKey();
+ NodePropertyValue propertyValue = entry.getValue();
// ignore nulls
if (propertyValue == null)
{
continue;
}
+ Long propertyQNameId = propertyKey.getQnameId();
// Get the actual value(s) as a collection
Collection values = propertyValue.getCollection(DataTypeDefinition.ANY);
// attempt to convert instance in the collection
@@ -2435,50 +2750,6 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
}
}
- /**
- * {@inheritDoc}
- */
- public int getNodeCount()
- {
- HibernateCallback callback = new HibernateCallback()
- {
- public Object doInHibernate(Session session)
- {
- Query query = session.getNamedQuery(QUERY_GET_NODE_COUNT);
- query.setMaxResults(1)
- .setReadOnly(true);
- DirtySessionMethodInterceptor.setQueryFlushMode(session, query);
- return query.uniqueResult();
- }
- };
- Long count = (Long) getHibernateTemplate().execute(callback);
- // done
- return count.intValue();
- }
-
- /**
- * {@inheritDoc}
- */
- public int getNodeCount(final StoreRef storeRef)
- {
- HibernateCallback callback = new HibernateCallback()
- {
- public Object doInHibernate(Session session)
- {
- Query query = session.getNamedQuery(QUERY_GET_NODE_COUNT_FOR_STORE);
- query.setString("protocol", storeRef.getProtocol())
- .setString("identifier", storeRef.getIdentifier())
- .setMaxResults(1)
- .setReadOnly(true);
- DirtySessionMethodInterceptor.setQueryFlushMode(session, query);
- return query.uniqueResult();
- }
- };
- Long count = (Long) getHibernateTemplate().execute(callback);
- // done
- return count.intValue();
- }
-
/*
* Queries for transactions
*/
@@ -2722,12 +2993,12 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
return query.list();
}
};
- List results = (List) getHibernateTemplate().execute(callback);
+ List results = (List) getHibernateTemplate().execute(callback);
// transform into a simpler form
List nodeRefs = new ArrayList(results.size());
- for (NodeStatus nodeStatus : results)
+ for (Node node : results)
{
- NodeRef nodeRef = new NodeRef(storeRef, nodeStatus.getKey().getGuid());
+ NodeRef nodeRef = node.getNodeRef();
nodeRefs.add(nodeRef);
}
// done
@@ -2748,18 +3019,575 @@ public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements
return query.list();
}
};
- List results = (List) getHibernateTemplate().execute(callback);
+ List results = (List) getHibernateTemplate().execute(callback);
// transform into a simpler form
List nodeRefs = new ArrayList(results.size());
- for (NodeStatus nodeStatus : results)
+ for (Node node : results)
{
- NodeRef nodeRef = new NodeRef(
- nodeStatus.getKey().getProtocol(),
- nodeStatus.getKey().getIdentifier(),
- nodeStatus.getKey().getGuid());
+ NodeRef nodeRef = node.getNodeRef();
nodeRefs.add(nodeRef);
}
// done
return nodeRefs;
}
+
+ //============ PROPERTY HELPER METHODS =================//
+
+ public static Map convertToPersistentProperties(
+ Map in,
+ QNameDAO qnameDAO,
+ LocaleDAO localeDAO,
+ DictionaryService dictionaryService)
+ {
+ Map propertyMap = new HashMap(in.size() + 5);
+ for (Map.Entry entry : in.entrySet())
+ {
+ Serializable value = entry.getValue();
+ // Get the qname ID
+ QName propertyQName = entry.getKey();
+ Long propertyQNameId = qnameDAO.getOrCreateQNameEntity(propertyQName).getId();
+ // Get the locale ID
+ Long propertylocaleId = localeDAO.getOrCreateDefaultLocalePair().getFirst();
+ // Get the property definition, if available
+ PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName);
+ // Add it to the map
+ HibernateNodeDaoServiceImpl.addValueToPersistedProperties(
+ propertyMap,
+ propertyDef,
+ HibernateNodeDaoServiceImpl.IDX_NO_COLLECTION,
+ propertyQNameId,
+ propertylocaleId,
+ value,
+ localeDAO);
+ }
+ // Done
+ return propertyMap;
+ }
+
+ /**
+ * The collection index used to indicate that the value is not part of a collection.
+ * All values from zero up are used for real collection indexes.
+ */
+ private static final short IDX_NO_COLLECTION = -1;
+
+ /**
+ * A method that adds properties to the given map. It copes with collections.
+ *
+ * @param propertyDef the property definition (null is allowed)
+ * @param collectionIndex the index of the property in the collection or -1 if
+ * we are not yet processing a collection
+ */
+ private static void addValueToPersistedProperties(
+ Map propertyMap,
+ PropertyDefinition propertyDef,
+ short collectionIndex,
+ Long propertyQNameId,
+ Long propertyLocaleId,
+ Serializable value,
+ LocaleDAO localeDAO)
+ {
+ if (value == null)
+ {
+ // The property is null. Null is null and cannot be massaged any other way.
+ NodePropertyValue npValue = HibernateNodeDaoServiceImpl.makeNodePropertyValue(propertyDef, null);
+ PropertyMapKey npKey = new PropertyMapKey();
+ npKey.setListIndex(collectionIndex);
+ npKey.setQnameId(propertyQNameId);
+ npKey.setLocaleId(propertyLocaleId);
+ // Add it to the map
+ propertyMap.put(npKey, npValue);
+ // Done
+ return;
+ }
+
+ // Get or spoof the property datatype
+ QName propertyTypeQName;
+ if (propertyDef == null) // property not recognised
+ {
+ // allow it for now - persisting excess properties can be useful sometimes
+ propertyTypeQName = DataTypeDefinition.ANY;
+ }
+ else
+ {
+ propertyTypeQName = propertyDef.getDataType().getName();
+ }
+
+ // A property may appear to be multi-valued if the model definition is loose and
+ // an unexploded collection is passed in. Otherwise, use the model-defined behaviour
+ // strictly.
+ boolean isMultiValued;
+ if (propertyTypeQName.equals(DataTypeDefinition.ANY))
+ {
+ // It is multi-valued if required (we are not in a collection and the property is a new collection)
+ isMultiValued = (value != null) && (value instanceof Collection) && (collectionIndex == IDX_NO_COLLECTION);
+ }
+ else
+ {
+ isMultiValued = propertyDef.isMultiValued();
+ }
+
+ // Handle different scenarios.
+ // - Do we need to explode a collection?
+ // - Does the property allow collections?
+ if (collectionIndex == IDX_NO_COLLECTION && isMultiValued && !(value instanceof Collection))
+ {
+ // We are not (yet) processing a collection but the property should be part of a collection
+ HibernateNodeDaoServiceImpl.addValueToPersistedProperties(
+ propertyMap,
+ propertyDef,
+ (short) 0,
+ propertyQNameId,
+ propertyLocaleId,
+ value,
+ localeDAO);
+ }
+ else if (collectionIndex == IDX_NO_COLLECTION && value instanceof Collection)
+ {
+ // We are not (yet) processing a collection and the property is a collection i.e. needs exploding
+ // Check that multi-valued properties are supported if the property is a collection
+ if (!isMultiValued)
+ {
+ throw new DictionaryException(
+ "A single-valued property of this type may not be a collection: \n" +
+ " Property: " + propertyDef + "\n" +
+ " Type: " + propertyTypeQName + "\n" +
+ " Value: " + value);
+ }
+ // We have an allowable collection.
+ @SuppressWarnings("unchecked")
+ Collection