Fixed ALF-4512: MLText and NULL storage problems

- Synchronizing the cached values with the low-level DB keys had problems for some use-cases
   - Switching from null ML value in default locale (real null) to null value in a specific locale
   - Switching from d:any empty array to d:any empty array of empty arrays
   - other odd cases
 - Refactored the differencing code to do high-level differences
   - This makes it easier to take care of the switches between properties states
   - Sacrifices some performance when dealing with 100K multivalued properties


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@22059 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
This commit is contained in:
Derek Hulley
2010-08-29 20:38:39 +00:00
parent 8651121fce
commit 49e20b142d
4 changed files with 113 additions and 135 deletions

View File

@@ -1631,34 +1631,36 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
// Remove sys:referenceable // Remove sys:referenceable
ReferenceablePropertiesEntity.removeReferenceableProperties(node, newProps); ReferenceablePropertiesEntity.removeReferenceableProperties(node, newProps);
// Get the cached properties // Load the current properties.
Map<QName, Serializable> oldPropsCached = getNodePropertiesCached(nodeId); // This means that we have to go to the DB during cold-write operations,
// but usually a write occurs after a node has been fetched or viewed in
// some way by the client code. Loading the existing properties has the
// advantage that the differencing code can eliminate unnecessary writes
// completely.
Map<QName, Serializable> oldPropsCached = getNodePropertiesCached(nodeId); // Keep pristine for caching
Map<QName, Serializable> oldProps = new HashMap<QName, Serializable>(oldPropsCached); Map<QName, Serializable> oldProps = new HashMap<QName, Serializable>(oldPropsCached);
// If this is an add-only operation, remove any properties we are not interested in // If we're adding, remove current properties that are not of interest
if (isAddOnly) if (isAddOnly)
{ {
oldProps.keySet().retainAll(newProps.keySet()); oldProps.keySet().retainAll(newProps.keySet());
} }
// Convert to a raw format for comparison // We need to convert the new properties to our internally-used format,
Map<NodePropertyKey, NodePropertyValue> oldPropsRaw = nodePropertyHelper.convertToPersistentProperties(oldProps); // which is compatible with model i.e. people may have passed in data
// which needs to be converted to a model-compliant format. We do this
// Get new property raw values // before comparisons to avoid false negatives.
Map<NodePropertyKey, NodePropertyValue> newPropsRaw = nodePropertyHelper.convertToPersistentProperties(newProps); Map<NodePropertyKey, NodePropertyValue> newPropsRaw = nodePropertyHelper.convertToPersistentProperties(newProps);
newProps = nodePropertyHelper.convertToPublicProperties(newPropsRaw);
// Copy for modification // Now find out what's changed
Map<NodePropertyKey, NodePropertyValue> propsToDelete = new HashMap<NodePropertyKey, NodePropertyValue>(oldPropsRaw); Map<QName, MapValueComparison> diff = EqualsHelper.getMapComparison(
Map<NodePropertyKey, NodePropertyValue> propsToAdd = new HashMap<NodePropertyKey, NodePropertyValue>(newPropsRaw); oldProps,
newProps);
// Compare these fine-grained properties // Keep track of properties to delete and add
Map<NodePropertyKey, MapValueComparison> persistableDiff = EqualsHelper.getMapComparison( Set<QName> propsToDelete = new HashSet<QName>(oldProps.size()*2);
propsToDelete, Map<QName, Serializable> propsToAdd = new HashMap<QName, Serializable>(newProps.size() * 2);
propsToAdd); Set<QName> contentQNamesToDelete = new HashSet<QName>(5);
// Add or remove properties as we go for (Map.Entry<QName, MapValueComparison> entry : diff.entrySet())
for (Map.Entry<NodePropertyKey, MapValueComparison> entry : persistableDiff.entrySet())
{ {
NodePropertyKey key = entry.getKey(); QName qname = entry.getKey();
QName qname = qnameDAO.getQName(key.getQnameId()).getSecond();
PropertyDefinition removePropDef = dictionaryService.getProperty(qname); PropertyDefinition removePropDef = dictionaryService.getProperty(qname);
boolean isContent = (removePropDef != null && boolean isContent = (removePropDef != null &&
@@ -1666,135 +1668,102 @@ public abstract class AbstractNodeDAOImpl implements NodeDAO, BatchingDAO
switch (entry.getValue()) switch (entry.getValue())
{ {
case NULL: case EQUAL:
case EQUAL: // Ignore
// The entries are the same break;
propsToDelete.remove(key); case LEFT_ONLY:
propsToAdd.remove(key); // Not in the new properties
continue; propsToDelete.add(qname);
case RIGHT_ONLY: if (isContent)
// Only in new props: add
propsToDelete.remove(key);
// Handle new content
if (isContent)
{
// The new value needs conversion to the ID-based ContentData reference
NodePropertyValue newPropValue = propsToAdd.get(key);
ContentData newContentData = (ContentData) newPropValue.getValue(DataTypeDefinition.CONTENT);
if (newContentData != null)
{ {
contentQNamesToDelete.add(qname);
}
break;
case NOT_EQUAL:
// Must remove from the LHS
propsToDelete.add(qname);
if (isContent)
{
contentQNamesToDelete.add(qname);
}
// Fall through to load up the RHS
case RIGHT_ONLY:
// We're adding this
Serializable value = newProps.get(qname);
if (isContent && value != null)
{
ContentData newContentData = (ContentData) value;
Long newContentDataId = contentDataDAO.createContentData(newContentData).getFirst(); Long newContentDataId = contentDataDAO.createContentData(newContentData).getFirst();
newPropValue = new NodePropertyValue(DataTypeDefinition.CONTENT, new ContentDataId(newContentDataId)); value = new ContentDataWithId(newContentData, newContentDataId);
propsToAdd.put(key, newPropValue);
newPropsRaw.put(
key,
new NodePropertyValue(
DataTypeDefinition.CONTENT,
new ContentDataWithId(newContentData, newContentDataId)));
} }
} propsToAdd.put(qname, value);
continue; break;
case LEFT_ONLY: default:
// Only present in old props: must not be added throw new IllegalStateException("Unknown MapValueComparison: " + entry.getValue());
propsToAdd.remove(key);
// Handle deleted content
if (isContent)
{
// The old values will be an ID-based ContentData reference
NodePropertyValue valueToDelete = propsToDelete.get(key);
ContentDataWithId contentDataWithId = (ContentDataWithId) valueToDelete.getValue(DataTypeDefinition.CONTENT);
if (contentDataWithId != null)
{
Long contentDataId = contentDataWithId.getId();
contentDataDAO.deleteContentData(contentDataId);
}
}
continue;
case NOT_EQUAL:
// Value has changed: remove and add
if (isContent)
{
// The old values will be an ID-based ContentData reference
NodePropertyValue valueToDelete = propsToDelete.get(key);
ContentDataWithId contentDataWithId = (ContentDataWithId) valueToDelete.getValue(DataTypeDefinition.CONTENT);
if (contentDataWithId != null)
{
Long contentDataId = contentDataWithId.getId();
contentDataDAO.deleteContentData(contentDataId);
}
// The new value needs conversion to the ID-based ContentData reference
NodePropertyValue newPropValue = propsToAdd.get(key);
ContentData newContentData = (ContentData) newPropValue.getValue(DataTypeDefinition.CONTENT);
if (newContentData != null)
{
Long newContentDataId = contentDataDAO.createContentData(newContentData).getFirst();
newPropValue = new NodePropertyValue(DataTypeDefinition.CONTENT, new ContentDataId(newContentDataId));
propsToAdd.put(key, newPropValue);
newPropsRaw.put(
key,
new NodePropertyValue(
DataTypeDefinition.CONTENT,
new ContentDataWithId(newContentData, newContentDataId)));
}
}
continue;
default:
throw new IllegalStateException("Unknown MapValueComparison: " + entry.getValue());
} }
} }
try
{
// Remove by key
List<NodePropertyKey> propKeysToDeleteList = new ArrayList<NodePropertyKey>(propsToDelete.keySet());
int deleted = deleteNodeProperties(nodeId, propKeysToDeleteList);
if (deleted != propKeysToDeleteList.size())
{
// The translation of the left-hand map didn't match the database
// This happens when we use d:any and switch from a simple value to a collection: an edge case!
throw new DataIntegrityViolationException("Only deleted " + deleted + "/" + propKeysToDeleteList.size());
}
// Add by key-value
insertNodeProperties(nodeId, propsToAdd);
}
catch (RuntimeException e)
{
// Don't trust the properties cache for the node
propertiesCache.removeByKey(nodeId);
// Focused error
throw new AlfrescoRuntimeException(
"Failed to write property deltas: \n" +
" Node: " + nodeId + "\n" +
" Old: " + oldProps + "\n" +
" New: " + newProps + "\n" +
" Old Raw: " + oldPropsRaw + "\n" +
" New Raw: " + newPropsRaw + "\n" +
" Diff: " + persistableDiff + "\n" +
" Delete Tried: " + propsToDelete.keySet() + "\n" +
" Add Tried: " + propsToAdd,
e);
}
boolean updated = propsToDelete.size() > 0 || propsToAdd.size() > 0; boolean updated = propsToDelete.size() > 0 || propsToAdd.size() > 0;
// Touch to bring into current txn // Touch to bring into current txn
if (updated) if (updated)
{ {
// Fix properties up w.r.t. types, etc // Clean up content properties
newProps = nodePropertyHelper.convertToPublicProperties(newPropsRaw); try
{
if (contentQNamesToDelete.size() > 0)
{
Set<Long> contentQNameIdsToDelete = qnameDAO.convertQNamesToIds(contentQNamesToDelete, false);
contentDataDAO.deleteContentDataForNode(nodeId, contentQNameIdsToDelete);
}
}
catch (Throwable e)
{
throw new AlfrescoRuntimeException(
"Failed to delete content properties: \n" +
" Node: " + nodeId + "\n" +
" Delete Tried: " + contentQNamesToDelete,
e);
}
try
{
// Apply deletes
Set<Long> propQNameIdsToDelete = qnameDAO.convertQNamesToIds(propsToDelete, true);
deleteNodeProperties(nodeId, propQNameIdsToDelete);
// Now create the raw properties for adding
newPropsRaw = nodePropertyHelper.convertToPersistentProperties(propsToAdd);
insertNodeProperties(nodeId, newPropsRaw);
}
catch (Throwable e)
{
// Don't trust the properties cache for the node
propertiesCache.removeByKey(nodeId);
// Focused error
throw new AlfrescoRuntimeException(
"Failed to write property deltas: \n" +
" Node: " + nodeId + "\n" +
" Old: " + oldProps + "\n" +
" New: " + newProps + "\n" +
" Diff: " + diff + "\n" +
" Delete Tried: " + propsToDelete + "\n" +
" Add Tried: " + propsToAdd,
e);
}
// Build the properties to cache based on whether this is an append or replace // Build the properties to cache based on whether this is an append or replace
Map<QName, Serializable> propsToCache = null; Map<QName, Serializable> propsToCache = null;
if (isAddOnly) if (isAddOnly)
{ {
// Combine the old and new properties // Combine the old and new properties
propsToCache = oldPropsCached; propsToCache = oldPropsCached;
propsToCache.putAll(newProps); propsToCache.putAll(propsToAdd);
} }
else else
{ {
// Replace old properties // Replace old properties
propsToCache = newProps; propsToCache = newProps;
propsToCache.putAll(propsToAdd); // Ensure correct types
} }
// Update cache // Update cache
setNodePropertiesCached(nodeId, propsToCache); setNodePropertiesCached(nodeId, propsToCache);

View File

@@ -578,8 +578,9 @@ public class NodePropertyHelper
// Get the local entry value // Get the local entry value
Serializable entryValue = makeSerializableValue(propertyDef, propertyValue); Serializable entryValue = makeSerializableValue(propertyDef, propertyValue);
// A default locale indicates a simple value i.e. the entry represents the whole value. // A default locale indicates a simple value i.e. the entry represents the whole value,
if (isDefaultLocale) // unless the dictionary specifically declares it to be d:mltext
if (isDefaultLocale && !isMLText)
{ {
// Check and warn if there are other values // Check and warn if there are other values
if (propertyValuesSize > 1) if (propertyValuesSize > 1)

View File

@@ -142,6 +142,10 @@ public class NodePropertyValue implements Cloneable, Serializable
{ {
return ((ContentDataId)value).getId(); return ((ContentDataId)value).getId();
} }
else if (value instanceof ContentDataWithId)
{
return ((ContentDataWithId)value).getId();
}
else else
{ {
return DefaultTypeConverter.INSTANCE.convert(Long.class, value); return DefaultTypeConverter.INSTANCE.convert(Long.class, value);
@@ -592,10 +596,6 @@ public class NodePropertyValue implements Cloneable, Serializable
{ {
return ValueType.DATE; return ValueType.DATE;
} }
else if (value instanceof ContentData)
{
return ValueType.CONTENT;
}
else if (value instanceof NodeRef) else if (value instanceof NodeRef)
{ {
return ValueType.NODEREF; return ValueType.NODEREF;
@@ -632,6 +632,14 @@ public class NodePropertyValue implements Cloneable, Serializable
{ {
return ValueType.CONTENT_DATA_ID; return ValueType.CONTENT_DATA_ID;
} }
else if (value instanceof ContentDataWithId)
{
return ValueType.CONTENT_DATA_ID;
}
else if (value instanceof ContentData)
{
return ValueType.CONTENT;
}
else else
{ {
// type is not recognised as belonging to any particular slot // type is not recognised as belonging to any particular slot

View File

@@ -1909,7 +1909,7 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest
QName.createQName("pathA"), QName.createQName("pathA"),
TYPE_QNAME_TEST_MANY_PROPERTIES).getChildRef(); TYPE_QNAME_TEST_MANY_PROPERTIES).getChildRef();
for (int inc = 0; inc < 3; inc++) for (int inc = 0; inc < 5; inc++)
{ {
System.out.println("----------------------------------------------"); System.out.println("----------------------------------------------");
int collectionSize = (int) Math.pow(10, inc); int collectionSize = (int) Math.pow(10, inc);