Derek Hulley 4872eb9909 Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-5 to HEAD:
20678: DAO5 branch: Preparation for merge back to HEAD
   20689: Merged DAO4 to DAO5
        - Removed all 'dbscripts/create/3.x/SomeDialect' and replaced with 'dbscripts/create/SomeDialect'
          DB create scripts are taken from latest DAO4
        - TODO: FixAuthoritiesCrcValuesPatch needs query implementation in PatchDAO
        Merged DAO3 to DAO4
           - Reapplied fixes for ALF-713 (race condition on Usages)
           19350: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-2 to BRANCHES/DEV/V3.3-DAO-REFACTOR-3:
               18939: SAIL-4 :2nd stage branch for DAO refactor off HEAD rev 18898
               18948: Merged V3.3-DAO-REFACTOR to V3.3-DAO-REFACTOR-2
                    18202: Dev branch for DAO refactor
                    18252: SAIL-233: QName.hbm.xml
                    18295: Added missing CREATE TABLE statements for QName-related code
                    18324: SAIL-234: Node.hbm.xml: Node aspects initial integration
                    18355: Added 'setValue' method to manually update the cached value
                    18356: MV property stressing lowered to speed test up
                    18357: SAIL-234: Node.hbm.xml
                    18376: Pulled all Alfresco-related create SQL into script
                    18389: SAIL-234: Permissions DAO refactor - initial checkpoint
                    18390: Formatting only (line-endings)
                    18400: SAIL-234: Node.hbm.xml
                    18418: SAIL-234: Node.hbm.xml: 'alf_node_assoc' CRUD
                    18429: SAIL-234: Node.hbm.xml: Cleaned out all Hibernate references to NodeAssocImpl
                    18457: SAIL-234: Permissions DAO refactor
               18959: Merged DEV/V3.3-DAO-REFACTOR to DEV/V3.3-DAO-REFACTOR-2
                    18479: SAIL-234: Node.hbm.xml - fix updateNode (missing id when saving oldDummyNode)
                    18482: SAIL-235: remove Permissions.hbm.xml
                    18517: SAIL-235: Permissions DAO refactor
                    18523: SAIL-234: Node.hbm.xml
                    18524: SAIL-235: Permissions DAO refactor
               18960: Merged DEV/V3.3-DAO-REFACTOR to DEV/V3.3-DAO-REFACTOR-2
                    18533: Flipped back to Windows line endings
                    18535: Formatting-only (eol)
                    18540: Formatting-only (eol)
                    18541: SAIL-235: Permissions DAO refactor
                    18543: SAIL-234: Node.hbm.xml: Start alf_store changes
                    18567: SAIL-235: Permissions DAO refactor
                    18596: SAIL-305:  Alfresco DDL - formatted/rationalized and added missing indexes & fk constraints
                    18603: SAIL-311: Minor cleanup for schema upgrade scripts (V3.3)
                    18604: SAIL-311: Remove empty dirs
                    18619: SAIL-274: Locale.hbm.xml
                    18621: Added method to create default ACL
                    18622: SAIL-234: Node.hbm.xml: Store, Transaction, Server and some node
                    18624: Formatting only (eol)
                    18631: SAIL-235: Permissions DAO refactor
                    18633: SAIL-235: Permissions DAO refactor - do not expose CRUD for AceContext (or AuthorityAlias) since currently unused
                    18639: getLocale(Locale) should return null if it doesn't exist
                    18640: SAIL-234 NodeDAO: More replacement of node queries and updates
                    18648: SAIL-310: Create SQL script for core repo tables (All DB ports)
                    18651: SAIL-234 NodeDAO: Moves across stores handle presence of target deleted nodes
               18961: Merged DEV/V3.3-DAO-REFACTOR to DEV/V3.3-DAO-REFACTOR-2
                    18658: SAIL-274 Locale DAO: Missing getValueKey() method
                    18662: SAIL-235: Permissions DAO refactor - further cleanup (of DbAccessControlList usage, including copyACLs)
                    18664: DB scripts porting for PostgreSQL finished.
                    18668: SAIL-234 Node DAO: Note in case Transaction Change ID is dropped from indexes
                    18669: SAIL-234 Node DAO: deleteNode and archive (store move) fixes
                    18672: DB scripts porting for Oracle finished. 
                    18675: SAIL-235: Permissions DAO refactor 
                    18677: DB scripts porting for DB2 finished.
               18964: Merged DEV/V3.3-DAO-REFACTOR to DEV/V3.3-DAO-REFACTOR-2
                    18687: Execute a callback with retries
                    18688: SAIL-234 Node DAO: Child association creation
                    18690: SAIL-234 Node DAO: Comment out raw creation of stores as it breaks subsequent bootstrap checks
                    18691: SAIL-234 Node DAO: More replacement of alf_child_assoc handling
                    18713: Commented about needing a more efficient removeChildAssociation method
                    18714: SAIL-234 Node DAO: Replaced queries on alf_child_assoc
                    18715: SAIL-234 Node DAO: More alf_child_assoc query replacement
                    18727: SAIL-234 Node DAO: alf_child_assoc queries complete
                    18737: SAIL-234 Node DAO: Tweaks to newNode and implemented prependPaths
                    18741: SAIL-234 and SAIL-334: Moved UsageDelta Hibernate code and queries over to UsageDeltaDAO
                    18748: SAIL-234 Node DAO: fix NPE (EditionServiceImplTest)
                    18769: SAIL-234 Node DAO: alf_node_properties ground work
                    18786: SAIL-234 Node DAO: alf_node_properties and cm:auditable properties
                    18810: Added EqualsHelper.getMapComparison
                    18813: TransactionalCache propagates cache clears and removals during rollback
                    18826: SAIL-234 Node DAO: Moved over sundry references to NodeDaoService to NodeDAO
                    18849: SAIL-237: UsageDelta.hbm.xml - eol formatting only (including removal of unwanted svn:eol-style=native property)
                    18869: SAIL-234 NodeDAO: Fixed more references to 'nodeDaoService'
                    18895: SAIL-234 NodeDAO: Queries for alf_transaction
                    18899: SAIL-234 Node DAO: Fixed bean fetching for 'nodeDAO'
                    18909: SAIL-234 NodeDAO: Fixes to getNodeRefStatus and various txn queries
                    18916: SAIL-234 NodeDAO: Fixed moveNode alf_child_assoc updates
                    18922: SAIL-235: DAO refactoring: Permission.hbm.xml
                    18930: SAIL-235: DAO refactoring: Permission.hbm.xml
                    18932: SAIL-234 NodeDAO: Fixing up gotchas, javadocs and some naming
                    18933: SAIL-234 NodeDAO: Minor neatening
                    18935: SAIL-234 Node DAO: Caches for ID to NodeRef and StoreRef
                    18936: EHCache config files line endings
                    18938: SAIL-237: Usage DAO refactor - initial checkpoint
                    18945: SAIL-235: DAO refactoring: Permission.hbm.xml. Move Node.
               18975: Fix for move-node ACL jiggery-pokery
               19067: SAIL-4: fix VersionHistoryImpl.getSuccessors (causing VersionServiceImplTest.testGetVersionHistorySameWorkspace failure)
               19068: SAIL-234: fix VersionMigratorTest.testMigrateOneVersion
               19074: SAIL-237: Usage DAO - update to common iBatis mapping pattern(s) to ease DB porting
               19076: SAIL-231: Activities DAO - update to common iBatis mapping pattern(s)
               19077: SAIL-232: AppliedPatch DAO - minor cleanup (comments & formatting only)
               19092: Merging HEAD to DEV/V3.3-DAO-REFACTOR-2
                     18973: Temporarily comment out AVMTestSuite and run AVM tests individually
                     19056: AVM unit test improvements
               19097: SAIL-235: DAO refactoring: Permission.hbm.xml: Additional index to support queries to find the id and acl id for the primary children of a node.
               19185: SAIL-238: Permissions DAO - (minor) update to common iBatis mapping pattern
               19289: SAIL-234 NodeDAO: Node cache replaces NodeRef cache
               19302: SAIL-234 Node DAO: Added cache for node properties
               19318: SAIL-4: AVM DAO - (minor) update to common iBatis mapping pattern
   20690: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-4 to BRANCHES/DEV/V3.3-DAO-REFACTOR-5:
        20063: (RECORD ONLY) DAO refactor branch V4
        20146: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-3 to BRANCHES/DEV/V3.3-DAO-REFACTOR-4:
             19401: SAIL-234 Node DAO: Fix permission service tests (setPrimaryChildrenSharedAclId needs to invalidate nodesCache)
             19428: Fixed TransactionalCache issue with null and NullValueMarker
             19429: Took empty cm:content creation out of FileFolderService#createImpl
             19430: SAIL-234 Node DAO: Tweaks around caching and cm:auditable
             19431: SAIL-4 DAO Refactor: Exception thrown when attempting writes in read-only txn have changed
             19436: SAIL-234 Node DAO: Fix NPE during cm:auditable update
             19475: Allow debugging of code without stepping into trivial stuff
             19476: Follow-up on 19429 by ensuring CIFS/FTP set a  mimetype on the ContentWriter
             19477: SAIL-234 Node DAO: Leverage DAO better for NodeService.addProperties
             19478: SAIL-234 NodeDAO: Added toString() for ParentAssocsInfo (cache value for parent assocs)
             19479: SAIL-234 Node DAO: Fixed for parent association and property caches
             19480: Made TransactionAwareSingleton bind-key a GUID
             19481: SAIL-234 Node DAO: Reinstated 100K collection property tests
             19482: SAIL-234 Node DAO: Node and property cache fixes highlighted by unit tests
             19483: SAIL-234 Node DAO: Start on NodeBulkLoader implementation
             19595: SAIL-234 Node DAO: Fix moveNode to detect cyclic relationship prior to updating ACLs for moved tree FileFolderServiceImplTest.testETHREEOH_3088_MoveIntoSelf)
        20147: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-3 to BRANCHES/DEV/V3.3-DAO-REFACTOR-4:
             19602: (RECORD ONLY) Reintegrated with HEAD up to rev 19433
             19621: (RECORD ONLY) SAIL-347
             19683: (RECORD ONLY) Reverse-merged 19621 for SAIL-347
             19722: (RECORD ONLY) Merged /alfresco/HEAD:r19434-19721
        20150: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-3 to BRANCHES/DEV/V3.3-DAO-REFACTOR-4:
             19741: Merged DEV\V3.3-DAO-REFACTOR-2 to DEV\V3.3-DAO-REFACTOR-3
                   19739: Extended "move" tests
             19743: Fix AuditableAspectTest.testAddAspect (to allow for node modified date tolerance)
             19748: Remaining part of merge from HEAD to V3.3-DAO-REFACTOR-3
                   19367: Merged BRANCHES/V3.2 to HEAD:
                       19286: Fix for ALF-626 "Using 'null' as an authority argument in clearPermissions() cause a java.lang.NullPointerException"
             19755: SAIL-234 Node DAO: Fix RepoAdminServiceImplTest.testConcurrentDynamicModelDelete (handle InvalidNodeRefException after getChildAssocs)
   20692: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-4 to BRANCHES/DEV/V3.3-DAO-REFACTOR-5:
        - Retired all 1.3 and 1.4 upgrade scripts ... R.I.P.
        - Fixed CRC patch for Authorities (only tested on MySQL)
        - Fixed SQL patch revision numbers and bumped version schema number up
        20158: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-3 to BRANCHES/DEV/V3.3-DAO-REFACTOR-4:
             19773: SQL mappings and scripts: SAIL-310, SAIL-304, SAIL-303 and SAIL-347
             19774: Futher fix for SAIL-310: Sequence patch must take into account sequences created for 3.3
             19851: SAIL-371 (SAIL-294) NodeDAO fallout: Fix QName and Namespace read/write handling and bean name in unit test
        20183: Merged DAO3 to DAO4
             19852: SAIL-370: Remove LinkValidation
             19853: SAIL-239 (SAIL-294) Attributes.hbm.xml: Added ability to attach arbitrary property to unique context
             19857: SAIL-373 Fallout from Permissions DAO refactor (SAIL-235)
             19864: SAIL-239 (SAIL-294): Removed AttributeService RMI API
             19865: More SAIL-239 (SAIL-294): Removed AttributeService RMI API
        20208: DAO-refactor implementation of ALF-2712 query improvements
        20209: Merged BRANCHES/DEV/V3.3-DAO-REFACTOR-3 to BRANCHES/DEV/V3.3-DAO-REFACTOR-4:
             20060: Removal of AttributeService for SAIL-239 (SAIL-294)
        20348: SAIL-371 (SAIL-294): Protect collection properties during map insert and retrieval
        20547: SAIL-371 (SAIL-294) Attributes.hbm.xml: implement getAttributes + fixes
        20573: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests and other fallout
        20597: SAIL-239 Attributes.hbm.xml: WCM/AVM locking test fixes (wip)
        20598: SAIL-239 Attributes.hbm.xml: WCM/AVM locking test fixes (wip) - fix AssetServiceImplTest.testSimpleLockFile NPE
        20600: Fix PropertyValueDAOTest.testPropertyValue_Enum (follow-on to r20060 for SAIL-239 - which introduces ENUM prop vals)
        20601: Fix UsageDAOTest.testCreateAndDeleteUsageDeltas NPE (would also affect ContentStoreCleanerScalabilityRunner)
        20603: Fix CMISPropertyServiceTest.* (fallout from r20146 <- r19429 <- Took empty cm:content creation out of FileFolderService#createImpl)
        20604: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests - TransferServiceImplTest.*
        20618: SAIL-371 (SAIL-294): NodeDAO: AuditableAspectTest (fix testCreateNodeWithAuditableProperties_ALF_2565 + add remove aspect test)
        20624: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests - UserUsageTest.*
        20626: Fixed random keys for RuleTrigger NodeRef tracking
        20635: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests - PersonTest.testSplitDuplicates
        20642: SAIL-371 (SAIL-294) DAO: Fixed CacheTest
        20643: Removed must of the 'distribute' target's dependencies.  Not for HEAD
        20645: Follow-on to r20643 (Removed most of the 'distribute' target's dependencies.  Not for HEAD)
        20654: SAIL-371 (SAIL-294): NodeDAO: DMDeploymentTargetTest.* (do not try to remove mandatory aspects)
        20655: SAIL-371 (SAIL-294): NodeDAO: Initial fix for TaggingServiceImplTest.testTagScopeUpdateViaNodePolicies (+ minor test cleanup)
        20657: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests - VersionMigratorTest.testMigrateOneVersion (cm:accessed not returned if null)
        20658: Merged (back merge only - no merge info) BRANCHES/V3.3 to BRANCHES/DEV/V3.3-DAO-REFACTOR-4:
             20090: Dynamic models: minor improvements to DictionaryModelType
             20554: Improvement to model delete validation (investigating intermittent failure of RepoAdminServiceImplTest.testSimpleDynamicModelViaNodeService)
        20662: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests - RecordsManagementAuditServiceImplTest.* (we now ignore attempt to update 'cm:modifier' prop so update 'cm:title' prop instead)
        20666: SAIL-371 (SAIL-294): NodeDAO: Fix unit tests - ADMLuceneTest.*
        20668: SAIL-239 (SAIL-294) - delete WCM locks + tests (follow-on to r20060)
        20674: SAIL-371 (SAIL-294) NodeDAO fallout: Cleaner and additional checks for ContentStoreCleaner
        20675: SAIL-371 (SAIL-294) NodeDAO fallout: Fixed handling of ContentData


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@20693 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
2010-06-17 19:35:49 +00:00

658 lines
29 KiB
Java

/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing"
*/
package org.alfresco.repo.domain.node;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.domain.ContentDataId;
import org.alfresco.repo.domain.contentdata.ContentDataDAO;
import org.alfresco.repo.domain.locale.LocaleDAO;
import org.alfresco.repo.domain.qname.QNameDAO;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryException;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.dictionary.PropertyDefinition;
import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.service.cmr.repository.MLText;
import org.alfresco.service.cmr.repository.datatype.TypeConversionException;
import org.alfresco.service.namespace.QName;
import org.alfresco.util.Pair;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* This class provides services for translating exploded properties
* (as persisted in <b>alf_node_properties</b>) in the public form, which is a
* <tt>Map</tt> of values keyed by their <tt>QName</tt>.
*
* @author Derek Hulley
* @since 3.4
*/
public class NodePropertyHelper
{
private static final Log logger = LogFactory.getLog(NodePropertyHelper.class);
private final DictionaryService dictionaryService;
private final QNameDAO qnameDAO;
private final LocaleDAO localeDAO;
private final ContentDataDAO contentDataDAO;
/**
 * Construct the helper with the appropriate DAOs and services.
 *
 * @param dictionaryService used to look up {@link PropertyDefinition}s for property QNames
 * @param qnameDAO resolves QNames to/from their persisted IDs
 * @param localeDAO resolves Locales to/from their persisted IDs (default locale and MLText locales)
 * @param contentDataDAO DAO for content data (currently unused here; retained for ID-based
 *            ContentData conversion — see the commented-out code in addValueToPersistedProperties)
 */
public NodePropertyHelper(
        DictionaryService dictionaryService,
        QNameDAO qnameDAO,
        LocaleDAO localeDAO,
        ContentDataDAO contentDataDAO)
{
    this.dictionaryService = dictionaryService;
    this.qnameDAO = qnameDAO;
    this.localeDAO = localeDAO;
    this.contentDataDAO = contentDataDAO;
}
/**
 * Convert a public property map (keyed by {@link QName}) into the exploded, persistable
 * form keyed by {@link NodePropertyKey} as stored in <b>alf_node_properties</b>.
 *
 * @param in the public node properties, keyed by property QName
 * @return the equivalent persistable key-value pairs
 */
public Map<NodePropertyKey, NodePropertyValue> convertToPersistentProperties(Map<QName, Serializable> in)
{
    // Leave a little headroom over the source size; collections explode into multiple rows
    Map<NodePropertyKey, NodePropertyValue> persistableMap = new HashMap<NodePropertyKey, NodePropertyValue>(
            in.size() + 5);
    for (Map.Entry<QName, Serializable> propertyEntry : in.entrySet())
    {
        QName propertyQName = propertyEntry.getKey();
        Serializable propertyValue = propertyEntry.getValue();
        // Resolve (or create) the persisted IDs for the QName and the default locale
        Long qnameId = qnameDAO.getOrCreateQName(propertyQName).getFirst();
        Long localeId = localeDAO.getOrCreateDefaultLocalePair().getFirst();
        // The dictionary definition may be null for properties not present in the model
        PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName);
        // Explode the value (collections, MLText, etc.) into the persistable map
        addValueToPersistedProperties(
                persistableMap,
                propertyDef,
                NodePropertyHelper.IDX_NO_COLLECTION,
                qnameId,
                localeId,
                propertyValue);
    }
    return persistableMap;
}
/**
 * The collection index used to indicate that the value is not part of a collection. All values from zero up are
 * used for real collection indexes.
 */
private static final int IDX_NO_COLLECTION = -1;
/**
 * A method that adds properties to the given map. It copes with collections.
 * <p>
 * Null values, MLText, ContentData and collections are each mapped onto one or more
 * {@link NodePropertyKey}-{@link NodePropertyValue} pairs. Collection members are
 * recursed into with their zero-based position recorded as the key's list index.
 *
 * @param propertyMap the accumulator receiving the persistable key-value pairs
 * @param propertyDef the property definition (<tt>null</tt> is allowed)
 * @param collectionIndex the index of the property in the collection or <tt>-1</tt> if we are not yet processing a
 *            collection
 * @param propertyQNameId the pre-resolved persisted ID of the property QName
 * @param propertyLocaleId the pre-resolved locale ID to persist against; MLText entries
 *            override this with each entry's own locale ID
 * @param value the value to persist; may be <tt>null</tt>
 * @throws DictionaryException if a collection is supplied for a single-valued property, or
 *             a nested collection is supplied for a type other than d:any
 * @throws IllegalArgumentException if a collection member is not {@link Serializable}
 */
private void addValueToPersistedProperties(
        Map<NodePropertyKey, NodePropertyValue> propertyMap,
        PropertyDefinition propertyDef,
        int collectionIndex,
        Long propertyQNameId,
        Long propertyLocaleId,
        Serializable value)
{
    if (value == null)
    {
        // The property is null. Null is null and cannot be massaged any other way.
        NodePropertyValue npValue = makeNodePropertyValue(propertyDef, null);
        NodePropertyKey npKey = new NodePropertyKey();
        npKey.setListIndex(collectionIndex);
        npKey.setQnameId(propertyQNameId);
        npKey.setLocaleId(propertyLocaleId);
        // Add it to the map
        propertyMap.put(npKey, npValue);
        // Done
        return;
    }
    // Get or spoof the property datatype
    QName propertyTypeQName;
    if (propertyDef == null) // property not recognised
    {
        // allow it for now - persisting excess properties can be useful sometimes
        propertyTypeQName = DataTypeDefinition.ANY;
    }
    else
    {
        propertyTypeQName = propertyDef.getDataType().getName();
    }
    // A property may appear to be multi-valued if the model definition is loose and
    // an unexploded collection is passed in. Otherwise, use the model-defined behaviour
    // strictly.
    boolean isMultiValued;
    if (propertyTypeQName.equals(DataTypeDefinition.ANY))
    {
        // It is multi-valued if required (we are not in a collection and the property is a new collection)
        // NOTE: value != null always holds here (nulls returned early above); the check is redundant but harmless
        isMultiValued = (value != null) && (value instanceof Collection<?>)
                && (collectionIndex == IDX_NO_COLLECTION);
    }
    else
    {
        // propertyDef is non-null here: a null definition always takes the ANY branch above
        isMultiValued = propertyDef.isMultiValued();
    }
    // Handle different scenarios.
    // - Do we need to explode a collection?
    // - Does the property allow collections?
    if (collectionIndex == IDX_NO_COLLECTION && isMultiValued && !(value instanceof Collection<?>))
    {
        // We are not (yet) processing a collection but the property should be part of a collection
        // Recurse once with index 0 so the single value is persisted as a one-element collection
        addValueToPersistedProperties(
                propertyMap,
                propertyDef,
                0,
                propertyQNameId,
                propertyLocaleId,
                value);
    }
    else if (collectionIndex == IDX_NO_COLLECTION && value instanceof Collection<?>)
    {
        // We are not (yet) processing a collection and the property is a collection i.e. needs exploding
        // Check that multi-valued properties are supported if the property is a collection
        if (!isMultiValued)
        {
            throw new DictionaryException("A single-valued property of this type may not be a collection: \n"
                    + " Property: " + propertyDef + "\n" + " Type: " + propertyTypeQName + "\n" + " Value: "
                    + value);
        }
        // We have an allowable collection.
        @SuppressWarnings("unchecked")
        Collection<Object> collectionValues = (Collection<Object>) value;
        // Persist empty collections directly. This is handled by the NodePropertyValue.
        // (The explode loop below is a no-op for an empty collection, so this marker row
        // is the only record that the empty collection existed.)
        if (collectionValues.size() == 0)
        {
            NodePropertyValue npValue = makeNodePropertyValue(null,
                    (Serializable) collectionValues);
            NodePropertyKey npKey = new NodePropertyKey();
            npKey.setListIndex(NodePropertyHelper.IDX_NO_COLLECTION);
            npKey.setQnameId(propertyQNameId);
            npKey.setLocaleId(propertyLocaleId);
            // Add it to the map
            propertyMap.put(npKey, npValue);
        }
        // Break it up and recurse to persist the values.
        // collectionIndex is reused as the running member index (pre-incremented to 0 on first pass)
        collectionIndex = -1;
        for (Object collectionValueObj : collectionValues)
        {
            collectionIndex++;
            if (collectionValueObj != null && !(collectionValueObj instanceof Serializable))
            {
                throw new IllegalArgumentException("Node properties must be fully serializable, "
                        + "including values contained in collections. \n" + " Property: " + propertyDef + "\n"
                        + " Index: " + collectionIndex + "\n" + " Value: " + collectionValueObj);
            }
            Serializable collectionValue = (Serializable) collectionValueObj;
            try
            {
                addValueToPersistedProperties(
                        propertyMap,
                        propertyDef,
                        collectionIndex,
                        propertyQNameId,
                        propertyLocaleId,
                        collectionValue);
            }
            catch (Throwable e)
            {
                // Wrap with the member's context (property, index, value) to aid diagnosis
                throw new AlfrescoRuntimeException("Failed to persist collection entry: \n" + " Property: "
                        + propertyDef + "\n" + " Index: " + collectionIndex + "\n" + " Value: "
                        + collectionValue, e);
            }
        }
    }
    else
    {
        // We are either processing collection elements OR the property is not a collection
        // Collections of collections are only supported by type d:any
        if (value instanceof Collection<?> && !propertyTypeQName.equals(DataTypeDefinition.ANY))
        {
            throw new DictionaryException(
                    "Collections of collections (Serializable) are only supported by type 'd:any': \n"
                            + " Property: " + propertyDef + "\n" + " Type: " + propertyTypeQName + "\n"
                            + " Value: " + value);
        }
        // Handle ContentData
        // We used to check the property type, but we now handle d:any ContentData as well
        if (value instanceof ContentData)
        {
            // We keep the ContentData i.e. we treat it as a low-level property that will be handled externally.
            // This will be converted to a String and persisted as such unless the value is ultimately
            // replaced by and ID-based ContentData reference
            // // Needs converting to an ID
            // ContentData contentData = (ContentData) value;
            // Long contentDataId = contentDataDAO.createContentData(contentData).getFirst();
            // value = new ContentDataId(contentDataId);
        }
        // Handle MLText
        if (value instanceof MLText)
        {
            // This needs to be split up into individual strings
            // Each (Locale, String) entry becomes its own row, persisted against that
            // entry's locale ID rather than the propertyLocaleId passed in
            MLText mlTextValue = (MLText) value;
            for (Map.Entry<Locale, String> mlTextEntry : mlTextValue.entrySet())
            {
                Locale mlTextLocale = mlTextEntry.getKey();
                String mlTextStr = mlTextEntry.getValue();
                // Get the Locale ID for the text
                Long mlTextLocaleId = localeDAO.getOrCreateLocalePair(mlTextLocale).getFirst();
                // This is persisted against the current locale, but as a d:text instance
                NodePropertyValue npValue = new NodePropertyValue(DataTypeDefinition.TEXT, mlTextStr);
                NodePropertyKey npKey = new NodePropertyKey();
                npKey.setListIndex(collectionIndex);
                npKey.setQnameId(propertyQNameId);
                npKey.setLocaleId(mlTextLocaleId);
                // Add it to the map
                propertyMap.put(npKey, npValue);
            }
        }
        else
        {
            // Plain single value (including ContentData, which falls through to here)
            NodePropertyValue npValue = makeNodePropertyValue(propertyDef, value);
            NodePropertyKey npKey = new NodePropertyKey();
            npKey.setListIndex(collectionIndex);
            npKey.setQnameId(propertyQNameId);
            npKey.setLocaleId(propertyLocaleId);
            // Add it to the map
            propertyMap.put(npKey, npValue);
        }
    }
}
/**
 * Helper method to convert the <code>Serializable</code> value into a full, persistable {@link NodePropertyValue}.
 * <p>
 * Where the property definition is null, the value will take on the {@link DataTypeDefinition#ANY generic ANY}
 * value.
 * <p>
 * Collections are NOT supported. These must be split up by the calling code before calling this method. Map
 * instances are supported as plain serializable instances.
 *
 * @param propertyDef the property dictionary definition, may be null
 * @param value the value, which will be converted according to the definition - may be null
 * @return Returns the persistable property value
 * @throws TypeConversionException if the value cannot be converted to the property's type
 */
private NodePropertyValue makeNodePropertyValue(PropertyDefinition propertyDef, Serializable value)
{
    // get property attributes
    final QName propertyTypeQName;
    if (propertyDef == null) // property not recognised
    {
        // allow it for now - persisting excess properties can be useful sometimes
        propertyTypeQName = DataTypeDefinition.ANY;
    }
    else
    {
        propertyTypeQName = propertyDef.getDataType().getName();
    }
    try
    {
        NodePropertyValue propertyValue = new NodePropertyValue(propertyTypeQName, value);
        // done
        return propertyValue;
    }
    catch (TypeConversionException e)
    {
        // BUGFIX: guard value.getClass() - this method is legitimately called with a null
        // value, and an NPE here would mask the real conversion failure
        throw new TypeConversionException(
                "The property value is not compatible with the type defined for the property: \n" + " property: "
                        + (propertyDef == null ? "unknown" : propertyDef) + "\n" + " value: " + value + "\n"
                        + " value type: " + (value == null ? "null" : value.getClass()), e);
    }
}
/**
 * Extract the public (collapsed) value of a single property from a map of raw
 * persisted values.
 *
 * @param propertyValues the raw persisted key-value pairs to search
 * @param propertyQName the QName of the property to extract
 * @return the collapsed public value, or <tt>null</tt> if the QName has never been
 *         persisted or no values carry its ID
 */
public Serializable getPublicProperty(
        Map<NodePropertyKey,
        NodePropertyValue> propertyValues,
        QName propertyQName)
{
    // Resolve the QName to its persisted ID; an unknown QName cannot match anything
    Pair<Long, QName> qnamePair = qnameDAO.getQName(propertyQName);
    if (qnamePair == null)
    {
        return null;
    }
    Long qnameId = qnamePair.getFirst();
    // Gather (sorted) every raw value persisted under that QName ID
    SortedMap<NodePropertyKey, NodePropertyValue> matching = new TreeMap<NodePropertyKey, NodePropertyValue>();
    for (Map.Entry<NodePropertyKey, NodePropertyValue> rawEntry : propertyValues.entrySet())
    {
        NodePropertyKey rawKey = rawEntry.getKey();
        if (rawKey.getQnameId().equals(qnameId))
        {
            matching.put(rawKey, rawEntry.getValue());
        }
    }
    if (matching.isEmpty())
    {
        return null;
    }
    // Collapse the raw values (list indexes, MLText locales) into one public value
    PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName);
    return collapsePropertiesWithSameQName(propertyDef, matching);
}
/**
 * Convert raw persisted key-value pairs (as stored in <b>alf_node_properties</b>) back
 * into the public property map keyed by {@link QName}.
 * <p>
 * The raw entries are processed in sorted key order; consecutive runs sharing a QName ID
 * are buffered and then collapsed into a single public value per QName.
 *
 * @param propertyValues the raw persisted key-value pairs
 * @return the equivalent public property map
 */
public Map<QName, Serializable> convertToPublicProperties(Map<NodePropertyKey, NodePropertyValue> propertyValues)
{
    Map<QName, Serializable> propertyMap = new HashMap<QName, Serializable>(propertyValues.size(), 1.0F);
    // Shortcut
    if (propertyValues.size() == 0)
    {
        return propertyMap;
    }
    // We need to process the properties in order
    SortedMap<NodePropertyKey, NodePropertyValue> sortedPropertyValues = new TreeMap<NodePropertyKey, NodePropertyValue>(
            propertyValues);
    // A working map. Ordering is important.
    SortedMap<NodePropertyKey, NodePropertyValue> scratch = new TreeMap<NodePropertyKey, NodePropertyValue>();
    // Iterate (sorted) over the map entries and extract values with the same qname
    // Long.MIN_VALUE is a sentinel that cannot collide with a real QName ID
    Long currentQNameId = Long.MIN_VALUE;
    Iterator<Map.Entry<NodePropertyKey, NodePropertyValue>> iterator = sortedPropertyValues.entrySet().iterator();
    // One-entry-lookahead loop: each pass peeks the next entry (null at end of data),
    // flushes the buffered run if the QName is about to change, then buffers the entry
    while (true)
    {
        Long nextQNameId = null;
        NodePropertyKey nextPropertyKey = null;
        NodePropertyValue nextPropertyValue = null;
        // Record the next entry's values
        if (iterator.hasNext())
        {
            Map.Entry<NodePropertyKey, NodePropertyValue> entry = iterator.next();
            nextPropertyKey = entry.getKey();
            nextPropertyValue = entry.getValue();
            nextQNameId = nextPropertyKey.getQnameId();
        }
        // If the QName is going to change, and we have some entries to process, then process them.
        if (scratch.size() > 0 && (nextQNameId == null || !nextQNameId.equals(currentQNameId)))
        {
            QName currentQName = qnameDAO.getQName(currentQNameId).getSecond();
            PropertyDefinition currentPropertyDef = dictionaryService.getProperty(currentQName);
            // We have added something to the scratch properties but the qname has just changed
            Serializable collapsedValue = null;
            // We can shortcut if there is only one value
            if (scratch.size() == 1)
            {
                // There is no need to collapse list indexes
                collapsedValue = collapsePropertiesWithSameQNameAndListIndex(currentPropertyDef, scratch);
            }
            else
            {
                // There is more than one value so the list indexes need to be collapsed
                collapsedValue = collapsePropertiesWithSameQName(currentPropertyDef, scratch);
            }
            // If the property is multi-valued then the output property must be a collection
            if (currentPropertyDef != null && currentPropertyDef.isMultiValued())
            {
                if (collapsedValue != null && !(collapsedValue instanceof Collection<?>))
                {
                    // Can't use Collections.singletonList: ETHREEOH-1172
                    ArrayList<Serializable> collection = new ArrayList<Serializable>(1);
                    collection.add(collapsedValue);
                    collapsedValue = collection;
                }
            }
            // Store the value
            propertyMap.put(currentQName, collapsedValue);
            // Reset
            scratch.clear();
        }
        if (nextQNameId != null)
        {
            // Add to the current entries
            scratch.put(nextPropertyKey, nextPropertyValue);
            currentQNameId = nextQNameId;
        }
        else
        {
            // There is no next value to process
            break;
        }
    }
    // Done
    return propertyMap;
}
/**
 * Collapses persisted property values that are expected to share a single property
 * QName but may differ by list index and locale.  Entries with the same list index
 * are first merged via {@link #collapsePropertiesWithSameQNameAndListIndex(PropertyDefinition, Map)}
 * (typically producing MLText), and when more than one list index is present the
 * per-index results are gathered into a collection.
 *
 * @param propertyDef           the dictionary definition of the property - may be <tt>null</tt>
 * @param sortedPropertyValues  the raw persisted values, sorted so that entries with
 *                              equal list indexes are adjacent
 * @return                      the collapsed value: a single <tt>Serializable</tt>, a
 *                              collection of them, or <tt>null</tt> if there were no entries
 */
private Serializable collapsePropertiesWithSameQName(
    PropertyDefinition propertyDef,
    SortedMap<NodePropertyKey, NodePropertyValue> sortedPropertyValues)
{
    Serializable result = null;
    // Explicitly tracked so we can tell "collection we built here" apart from a
    // collapsed value that happens to be a collection itself (see ETHREEOH-2064 below)
    Collection<Serializable> collectionResult = null;
    // A working map.  Ordering is not important for this map.
    Map<NodePropertyKey, NodePropertyValue> scratch = new HashMap<NodePropertyKey, NodePropertyValue>(3);
    // Iterate (sorted) over the map entries and extract values with the same list index.
    // MIN_VALUE is a safe initial marker: real list indexes are compared via equals().
    Integer currentListIndex = Integer.MIN_VALUE;
    Iterator<Map.Entry<NodePropertyKey, NodePropertyValue>> iterator = sortedPropertyValues.entrySet().iterator();
    while (true)
    {
        // 'null' doubles as the end-of-input sentinel: it forces the final group
        // to be flushed and then terminates the loop at the bottom.
        Integer nextListIndex = null;
        NodePropertyKey nextPropertyKey = null;
        NodePropertyValue nextPropertyValue = null;
        // Record the next entry's values
        if (iterator.hasNext())
        {
            Map.Entry<NodePropertyKey, NodePropertyValue> entry = iterator.next();
            nextPropertyKey = entry.getKey();
            nextPropertyValue = entry.getValue();
            nextListIndex = nextPropertyKey.getListIndex();
        }
        // If the list index is going to change, and we have some entries to process, then process them.
        if (scratch.size() > 0 && (nextListIndex == null || !nextListIndex.equals(currentListIndex)))
        {
            // We have added something to the scratch properties but the index has just changed
            Serializable collapsedValue = collapsePropertiesWithSameQNameAndListIndex(propertyDef, scratch);
            // Store.  If there is a value already, then we must build a collection.
            if (result == null)
            {
                // First group seen: keep it as a plain value for now
                result = collapsedValue;
            }
            else if (collectionResult != null)
            {
                // We have started a collection, so just add the value to it.
                collectionResult.add(collapsedValue);
            }
            else
            {
                // We already had a result, and now have another.  A collection has not been
                // started.  We start a collection and explicitly keep track of it so that
                // we don't get mixed up with collections of collections (ETHREEOH-2064).
                collectionResult = new ArrayList<Serializable>(20);
                collectionResult.add(result); // Add the first result
                collectionResult.add(collapsedValue); // Add the new value
                result = (Serializable) collectionResult;
            }
            // Reset
            scratch.clear();
        }
        if (nextListIndex != null)
        {
            // Add to the current entries
            scratch.put(nextPropertyKey, nextPropertyValue);
            currentListIndex = nextListIndex;
        }
        else
        {
            // There is no next value to process
            break;
        }
    }
    // Make sure that multi-valued properties are returned as a collection
    if (propertyDef != null && propertyDef.isMultiValued() && result != null && !(result instanceof Collection<?>))
    {
        // Can't use Collections.singletonList: ETHREEOH-1172
        ArrayList<Serializable> collection = new ArrayList<Serializable>(1);
        collection.add(result);
        result = collection;
    }
    // Done
    return result;
}
/**
 * At this level, the properties have the same qname and list index.  They can only be separated by locale.
 * Typically, MLText will fall into this category as only.
 * <p>
 * If there are multiple values then they can only be separated by locale.  If they are separated by locale, then
 * they have to be text-based.  This means that the only way to store them is via MLText.  Any other multi-locale
 * properties cannot be deserialized.
 *
 * @param propertyDef       the dictionary definition of the property - may be <tt>null</tt>
 * @param propertyValues    the persisted values, all sharing one qname and list index,
 *                          keyed by locale-bearing property keys
 * @return                  a single converted value, an {@link MLText} instance when
 *                          multiple locales (or an MLTEXT definition) are involved,
 *                          or <tt>null</tt> if there were no entries
 */
private Serializable collapsePropertiesWithSameQNameAndListIndex(
    PropertyDefinition propertyDef,
    Map<NodePropertyKey, NodePropertyValue> propertyValues)
{
    int propertyValuesSize = propertyValues.size();
    if (propertyValuesSize == 0)
    {
        // Nothing to collapse
        return null;
    }
    Serializable value = null;
    for (Map.Entry<NodePropertyKey, NodePropertyValue> entry : propertyValues.entrySet())
    {
        NodePropertyKey propertyKey = entry.getKey();
        NodePropertyValue propertyValue = entry.getValue();
        if (propertyValuesSize == 1
                && (propertyDef == null || !propertyDef.getDataType().getName().equals(DataTypeDefinition.MLTEXT)))
        {
            // This is the only value and it is NOT to be converted to MLText
            value = makeSerializableValue(propertyDef, propertyValue);
        }
        else
        {
            // There are multiple values (or the type is MLTEXT), so accumulate them into MLText,
            // reusing the instance built by earlier iterations
            MLText mltext = (value == null) ? new MLText() : (MLText) value;
            try
            {
                String mlString = (String) propertyValue.getValue(DataTypeDefinition.TEXT);
                // Get the locale
                Long localeId = propertyKey.getLocaleId();
                Locale locale = localeDAO.getLocalePair(localeId).getSecond();
                // Add to the MLText object
                mltext.addValue(locale, mlString);
            }
            catch (TypeConversionException e)
            {
                // Best-effort: skip the unconvertible value but keep the cause in the log
                logger.warn("Unable to add property value to MLText instance: " + propertyValue, e);
            }
            value = mltext;
        }
    }
    // Done
    return value;
}
/**
 * Extracts the externally-visible property from the persistable value.
 *
 * @param propertyDef the model property definition - may be <tt>null</tt>
 * @param propertyValue the persisted property
 * @return Returns the value of the property in the format dictated by the property definition,
 * or null if the property value is null
 * @throws TypeConversionException if the persisted value cannot be converted to the
 *         type declared by the property definition
 */
public Serializable makeSerializableValue(PropertyDefinition propertyDef, NodePropertyValue propertyValue)
{
    if (propertyValue == null)
    {
        return null;
    }
    // Determine the target conversion type from the model, falling back to ANY
    // when the property is not defined in the dictionary
    final QName propertyTypeQName;
    if (propertyDef == null)
    {
        // allow this for now
        propertyTypeQName = DataTypeDefinition.ANY;
    }
    else
    {
        propertyTypeQName = propertyDef.getDataType().getName();
    }
    try
    {
        Serializable value = propertyValue.getValue(propertyTypeQName);
        // Handle conversions to and from ContentData
        if (value instanceof ContentDataId)
        {
            // ContentData used to be persisted as a String and then as a Long.
            // Now it has a special type to denote the ID
            value = loadContentData(((ContentDataId) value).getId());
        }
        else if (propertyTypeQName.equals(DataTypeDefinition.CONTENT) && (value instanceof Long))
        {
            // ContentData used to be persisted as a raw Long ID
            value = loadContentData((Long) value);
        }
        // done
        return value;
    }
    catch (TypeConversionException e)
    {
        throw new TypeConversionException(
                "The property value is not compatible with the type defined for the property: \n" +
                " property: " + (propertyDef == null ? "unknown" : propertyDef) + "\n" +
                " property value: " + propertyValue, e);
    }
}

/**
 * Resolves a persisted ContentData ID to the full {@link ContentData} instance.
 *
 * @param contentDataId the ID of the persisted ContentData
 * @return the ContentData, or <tt>null</tt> if the ID no longer resolves (invalid reference)
 */
private Serializable loadContentData(Long contentDataId)
{
    Pair<Long, ContentData> contentDataPair = contentDataDAO.getContentData(contentDataId);
    if (contentDataPair == null)
    {
        // It is invalid
        return null;
    }
    return contentDataPair.getSecond();
}
}