Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-07-24 17:32:48 +00:00)
Merged V2.1 to HEAD
6944: More hibernate session cache taming.
6945: Times for commits are close to linear in the number of items submitted.
6946: Missing break statement. (Courtesy of Jan).
6948: Fixed session cache eviction problem triggered by resetLayer().
6956: Wrapped AVMService and AttributeService in TransactionResourceInterceptor.
Reverted log4j.properties

git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@7368 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
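Most of the hunks below apply one idiom: after a node or attribute has been used, the DAO flushes the Hibernate session and then evicts the entity, so the first-level (session) cache does not grow without bound during long transactions. A minimal sketch of that idiom, assuming only Spring's HibernateDaoSupport and the Hibernate 3 Session API; the DAO and method names here are illustrative and are not part of the Alfresco code:

import org.springframework.orm.hibernate3.support.HibernateDaoSupport;

// Hypothetical DAO illustrating the flush-then-evict pattern used throughout this commit.
public class EvictingDaoSketch extends HibernateDaoSupport
{
    public Object loadAndRelease(Class<?> entityClass, long id)
    {
        // Load the entity into the session (first-level) cache.
        Object entity = getSession().get(entityClass, id);
        // Flush first so any pending changes reach the database;
        // evicting a dirty instance would silently lose its updates.
        getSession().flush();
        // Detach the instance so the session cache stays small.
        getSession().evict(entity);
        return entity;
    }
}

The real DAOs reach the session the same way, e.g. getSession().flush() and getSession().evict(node) in the AttributeDAOHibernate and AVMNodeDAOHibernate hunks below.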
@@ -15,17 +15,20 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing
*/

package org.alfresco.repo.attributes;

import java.util.Map;

import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.repo.avm.AVMDAOs;

/**
* Handles conversions between persistent and value based Attributes.
@@ -34,7 +37,7 @@ import org.alfresco.error.AlfrescoRuntimeException;
public class AttributeConverter
{
/**
* Convert an Attribute (recursively) to a persistent attribute. This persists
* Convert an Attribute (recursively) to a persistent attribute. This persists
* the newly created Attribute immediately.
* @param from The Attribute to clone.
* @return The cloned persistent Attribute.
@@ -96,56 +99,78 @@ public class AttributeConverter
public Attribute toValue(Attribute from)
{
Attribute ret = null;
switch (from.getType())
{
case BOOLEAN :
{
return new BooleanAttributeValue((BooleanAttribute)from);
ret = new BooleanAttributeValue((BooleanAttribute)from);
break;
}
case BYTE :
{
return new ByteAttributeValue((ByteAttribute)from);
ret = new ByteAttributeValue((ByteAttribute)from);
break;
}
case SHORT :
{
return new ShortAttributeValue((ShortAttribute)from);
ret = new ShortAttributeValue((ShortAttribute)from);
break;
}
case INT :
{
return new IntAttributeValue((IntAttribute)from);
ret = new IntAttributeValue((IntAttribute)from);
break;
}
case LONG :
{
return new LongAttributeValue((LongAttribute)from);
ret = new LongAttributeValue((LongAttribute)from);
break;
}
case FLOAT :
{
return new FloatAttributeValue((FloatAttribute)from);
ret = new FloatAttributeValue((FloatAttribute)from);
break;
}
case DOUBLE :
{
return new DoubleAttributeValue((DoubleAttribute)from);
ret = new DoubleAttributeValue((DoubleAttribute)from);
break;
}
case STRING :
{
return new StringAttributeValue((StringAttribute)from);
ret = new StringAttributeValue((StringAttribute)from);
break;
}
case SERIALIZABLE :
{
return new SerializableAttributeValue((SerializableAttribute)from);
ret = new SerializableAttributeValue((SerializableAttribute)from);
break;
}
case MAP :
{
return new MapAttributeValue((MapAttribute)from);
ret = new MapAttributeValue();
for (Map.Entry<String, Attribute> entry : from.entrySet())
{
ret.put(entry.getKey(), toValue(entry.getValue()));
}
break;
}
case LIST :
{
return new ListAttributeValue((ListAttribute)from);
ret = new ListAttributeValue();
for (Attribute child : from)
{
ret.add(toValue(child));
}
break;
}
default :
{
throw new AlfrescoRuntimeException("Invalid Attribute Type: " + from.getType());
}
}
AVMDAOs.Instance().fAttributeDAO.evictFlat(from);
return ret;
}
}
@@ -68,4 +68,15 @@ public interface AttributeDAO
* @param attr
*/
public void evict(Attribute attr);

/**
* Evict an Attribute non-recursively.
* @param attr
*/
public void evictFlat(Attribute attr);

/**
* Force a flush.
*/
public void flush();
}
@@ -193,7 +193,6 @@ public class AttributeServiceImpl implements AttributeService
|
||||
return null;
|
||||
}
|
||||
Attribute converted = fAttributeConverter.toValue(found);
|
||||
fAttributeDAO.evict(found);
|
||||
return converted;
|
||||
}
|
||||
|
||||
@@ -255,7 +254,6 @@ public class AttributeServiceImpl implements AttributeService
|
||||
}
|
||||
Attribute converted = fAttributeConverter.toPersistent(value);
|
||||
found.put(name, converted);
|
||||
fAttributeDAO.evict(converted);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -316,6 +314,8 @@ public class AttributeServiceImpl implements AttributeService
|
||||
throw new AVMWrongTypeException("Attribute Not Map: " + keys);
|
||||
}
|
||||
found.remove(name);
|
||||
fAttributeDAO.flush();
|
||||
fAttributeDAO.evictFlat(found);
|
||||
}
|
||||
|
||||
private Attribute getAttributeFromPath(List<String> keys)
|
||||
@@ -330,11 +330,15 @@ public class AttributeServiceImpl implements AttributeService
|
||||
{
|
||||
if (current.getType() == Type.MAP)
|
||||
{
|
||||
current = current.get(keys.get(i));
|
||||
Attribute newCurrent = current.get(keys.get(i));
|
||||
fAttributeDAO.evictFlat(current);
|
||||
current = newCurrent;
|
||||
}
|
||||
else if (current.getType() == Type.LIST)
|
||||
{
|
||||
current = current.get(Integer.parseInt(keys.get(i)));
|
||||
Attribute newCurrent = current.get(Integer.parseInt(keys.get(i)));
|
||||
fAttributeDAO.evictFlat(current);
|
||||
current = newCurrent;
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -486,7 +490,6 @@ public class AttributeServiceImpl implements AttributeService
|
||||
}
|
||||
Attribute converted = fAttributeConverter.toPersistent(value);
|
||||
found.set(index, fAttributeConverter.toPersistent(value));
|
||||
fAttributeDAO.evict(converted);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -515,7 +518,13 @@ public class AttributeServiceImpl implements AttributeService
|
||||
{
|
||||
throw new AVMBadArgumentException("Illegal zero length keys list.");
|
||||
}
|
||||
return getAttributeFromPath(keys) != null;
|
||||
Attribute attr = getAttributeFromPath(keys);
|
||||
if (attr != null)
|
||||
{
|
||||
fAttributeDAO.evictFlat(attr);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
|
@@ -15,11 +15,11 @@
|
||||
* along with this program; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
* As a special exception to the terms and conditions of version 2.0 of
|
||||
* the GPL, you may redistribute this Program in connection with Free/Libre
|
||||
* and Open Source Software ("FLOSS") applications as described in Alfresco's
|
||||
* FLOSS exception. You should have received a copy of the text describing
|
||||
* the FLOSS exception, and it is also available here:
|
||||
* As a special exception to the terms and conditions of version 2.0 of
|
||||
* the GPL, you may redistribute this Program in connection with Free/Libre
|
||||
* and Open Source Software ("FLOSS") applications as described in Alfresco's
|
||||
* FLOSS exception. You should have received a copy of the text describing
|
||||
* the FLOSS exception, and it is also available here:
|
||||
* http://www.alfresco.com/legal/licensing
|
||||
*/
|
||||
|
||||
@@ -49,7 +49,7 @@ public class MapAttributeImpl extends AttributeImpl implements MapAttribute
|
||||
public MapAttributeImpl()
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
public MapAttributeImpl(MapAttribute attr)
|
||||
{
|
||||
super(attr.getAcl());
|
||||
@@ -125,7 +125,7 @@ public class MapAttributeImpl extends AttributeImpl implements MapAttribute
|
||||
AVMDAOs.Instance().fMapEntryDAO.save(mapEntry);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.alfresco.repo.attributes.Attribute#getType()
|
||||
*/
|
||||
@@ -170,7 +170,8 @@ public class MapAttributeImpl extends AttributeImpl implements MapAttribute
|
||||
{
|
||||
return null;
|
||||
}
|
||||
return entry.getAttribute();
|
||||
Attribute attr = entry.getAttribute();
|
||||
return attr;
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
|
@@ -3,7 +3,7 @@ package org.alfresco.repo.attributes;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Interface for MapEntry persistence.
|
||||
* Interface for MapEntry persistence.
|
||||
* @author britt
|
||||
*/
|
||||
public interface MapEntryDAO
|
||||
@@ -13,37 +13,43 @@ public interface MapEntryDAO
|
||||
* @param entry To save.
|
||||
*/
|
||||
public void save(MapEntry entry);
|
||||
|
||||
|
||||
/**
|
||||
* Delete a MapEntry.
|
||||
* @param entry
|
||||
*/
|
||||
public void delete(MapEntry entry);
|
||||
|
||||
|
||||
/**
|
||||
* Delete all entries for a map.
|
||||
* @param mapAttr The map to purge.
|
||||
*/
|
||||
public void delete(MapAttribute mapAttr);
|
||||
|
||||
|
||||
/**
|
||||
* Get an entry by name.
|
||||
* @param key The key of the entry.
|
||||
* @return A MapEntry or null.
|
||||
*/
|
||||
public MapEntry get(MapEntryKey key);
|
||||
|
||||
|
||||
/**
|
||||
* Retrieve all the entries in a map.
|
||||
* @param mapAttr
|
||||
* @return A List of all entries in the given map.
|
||||
*/
|
||||
public List<MapEntry> get(MapAttribute mapAttr);
|
||||
|
||||
|
||||
/**
|
||||
* Get the number of entries in a MapAttribute.
|
||||
* @param mapAttr The MapAttribute.
|
||||
* @return The number of entries.
|
||||
*/
|
||||
public int size(MapAttribute mapAttr);
|
||||
|
||||
/**
|
||||
* Evict an entry.
|
||||
* @param entry
|
||||
*/
|
||||
public void evict(MapEntry entry);
|
||||
}
|
||||
|
@@ -27,8 +27,10 @@ package org.alfresco.repo.attributes.hibernate;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.alfresco.repo.attributes.AttrQueryHelperImpl;
|
||||
import org.alfresco.repo.attributes.Attribute;
|
||||
@@ -40,12 +42,14 @@ import org.alfresco.repo.attributes.MapAttribute;
|
||||
import org.alfresco.repo.attributes.MapEntry;
|
||||
import org.alfresco.repo.attributes.MapEntryDAO;
|
||||
import org.alfresco.repo.attributes.Attribute.Type;
|
||||
import org.alfresco.repo.avm.hibernate.SessionCacheChecker;
|
||||
import org.alfresco.service.cmr.attributes.AttrQuery;
|
||||
import org.alfresco.service.cmr.attributes.AttrQueryHelper;
|
||||
import org.alfresco.util.Pair;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.hibernate.Query;
|
||||
import org.hibernate.engine.EntityKey;
|
||||
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
|
||||
|
||||
/**
|
||||
@@ -87,7 +91,6 @@ public class AttributeDAOHibernate extends HibernateDaoSupport implements
|
||||
for (MapEntry entry : mapEntries)
|
||||
{
|
||||
Attribute subAttr = entry.getAttribute();
|
||||
getSession().evict(entry);
|
||||
fMapEntryDAO.delete(entry);
|
||||
delete(subAttr);
|
||||
}
|
||||
@@ -99,7 +102,6 @@ public class AttributeDAOHibernate extends HibernateDaoSupport implements
|
||||
for (ListEntry entry : listEntries)
|
||||
{
|
||||
Attribute subAttr = entry.getAttribute();
|
||||
getSession().evict(entry);
|
||||
fListEntryDAO.delete(entry);
|
||||
delete(subAttr);
|
||||
}
|
||||
@@ -108,7 +110,6 @@ public class AttributeDAOHibernate extends HibernateDaoSupport implements
|
||||
{
|
||||
fgLogger.debug("Entities: " + getSession().getStatistics().getEntityCount());
|
||||
}
|
||||
getSession().evict(attr);
|
||||
getSession().delete(attr);
|
||||
}
|
||||
|
||||
@@ -161,20 +162,20 @@ public class AttributeDAOHibernate extends HibernateDaoSupport implements
|
||||
*/
|
||||
public void evict(Attribute attr)
|
||||
{
|
||||
if (attr.getType() == Attribute.Type.MAP)
|
||||
{
|
||||
for (Attribute child : attr.values())
|
||||
{
|
||||
evict(child);
|
||||
}
|
||||
}
|
||||
if (attr.getType() == Attribute.Type.LIST)
|
||||
{
|
||||
for (Attribute child : attr)
|
||||
{
|
||||
evict(child);
|
||||
}
|
||||
}
|
||||
getSession().evict(attr);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.alfresco.repo.attributes.AttributeDAO#flush()
|
||||
*/
|
||||
public void flush()
|
||||
{
|
||||
getSession().flush();
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.alfresco.repo.attributes.AttributeDAO#evictFlat(org.alfresco.repo.attributes.Attribute)
|
||||
*/
|
||||
public void evictFlat(Attribute attr)
|
||||
{
|
||||
}
|
||||
}
|
||||
|
@@ -15,11 +15,11 @@
|
||||
* along with this program; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
* As a special exception to the terms and conditions of version 2.0 of
|
||||
* the GPL, you may redistribute this Program in connection with Free/Libre
|
||||
* and Open Source Software ("FLOSS") applications as described in Alfresco's
|
||||
* FLOSS exception. You should have received a copy of the text describing
|
||||
* the FLOSS exception, and it is also available here:
|
||||
* As a special exception to the terms and conditions of version 2.0 of
|
||||
* the GPL, you may redistribute this Program in connection with Free/Libre
|
||||
* and Open Source Software ("FLOSS") applications as described in Alfresco's
|
||||
* FLOSS exception. You should have received a copy of the text describing
|
||||
* the FLOSS exception, and it is also available here:
|
||||
* http://www.alfresco.com/legal/licensing
|
||||
*/
|
||||
|
||||
@@ -96,4 +96,11 @@ public class MapEntryDAOHibernate extends HibernateDaoSupport implements
|
||||
query.setEntity("map", mapAttr);
|
||||
return ((Long)query.uniqueResult()).intValue();
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.alfresco.repo.attributes.MapEntryDAO#evict(org.alfresco.repo.attributes.MapEntry)
|
||||
*/
|
||||
public void evict(MapEntry entry)
|
||||
{
|
||||
}
|
||||
}
|
||||
|
@@ -340,8 +340,6 @@ public class AVMRepository
|
||||
dir.putChild(name, child);
|
||||
fLookupCache.onWrite(pathParts[0]);
|
||||
AVMNodeDescriptor desc = child.getDescriptor(parent.getPath(), name, parent.getIndirection(), parent.getIndirectionVersion());
|
||||
fAVMNodeDAO.flush();
|
||||
fAVMNodeDAO.evict(child);
|
||||
return desc;
|
||||
}
|
||||
|
||||
@@ -1046,7 +1044,8 @@ public class AVMRepository
|
||||
throw new AVMWrongTypeException("Not a directory.");
|
||||
}
|
||||
DirectoryNode dirNode = (DirectoryNode)node;
|
||||
return dirNode.getListing(dir, includeDeleted);
|
||||
SortedMap<String, AVMNodeDescriptor> listing = dirNode.getListing(dir, includeDeleted);
|
||||
return listing;
|
||||
}
|
||||
finally
|
||||
{
|
||||
@@ -2483,8 +2482,6 @@ public class AVMRepository
|
||||
}
|
||||
LayeredDirectoryNode dir = (LayeredDirectoryNode)node;
|
||||
dir.flatten(name);
|
||||
fAVMNodeDAO.flush();
|
||||
fAVMNodeDAO.evict(dir);
|
||||
}
|
||||
finally
|
||||
{
|
||||
@@ -2516,8 +2513,6 @@ public class AVMRepository
|
||||
}
|
||||
AVMNode node = lPath.getCurrentNode();
|
||||
AVMNodeDescriptor desc = node.getDescriptor(lPath);
|
||||
fAVMNodeDAO.flush();
|
||||
fAVMNodeDAO.evict(node);
|
||||
return desc;
|
||||
}
|
||||
finally
|
||||
@@ -2738,7 +2733,6 @@ public class AVMRepository
|
||||
throw new AVMNotFoundException("Node not found: " + desc);
|
||||
}
|
||||
Set<QName> aspects = node.getAspects();
|
||||
fAVMNodeDAO.evict(node);
|
||||
return aspects;
|
||||
}
|
||||
}
|
||||
|
@@ -361,8 +361,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
{
|
||||
newDir.getProperties().putAll(properties);
|
||||
}
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(newDir);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -407,8 +405,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
dir.updateModTime();
|
||||
dir.putChild(name, newDir);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(newDir);
|
||||
// newDir.setVersionID(getNextVersionID());
|
||||
}
|
||||
|
||||
@@ -445,8 +441,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
-1,
|
||||
"UTF-8"));
|
||||
ContentWriter writer = createContentWriter(AVMNodeConverter.ExtendAVMPath(path, name));
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(file);
|
||||
return writer.getContentOutputStream();
|
||||
}
|
||||
|
||||
@@ -490,10 +484,9 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
{
|
||||
file.getProperties().putAll(properties);
|
||||
}
|
||||
ContentWriter writer = createContentWriter(AVMNodeConverter.ExtendAVMPath(path, name));
|
||||
// Yet another flush.
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(file);
|
||||
ContentWriter writer = createContentWriter(AVMNodeConverter.ExtendAVMPath(path, name));
|
||||
writer.putContent(data);
|
||||
}
|
||||
|
||||
@@ -526,8 +519,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
dir.updateModTime();
|
||||
dir.putChild(name, newFile);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(newFile);
|
||||
// newFile.setVersionID(getNextVersionID());
|
||||
}
|
||||
|
||||
@@ -633,7 +624,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
AVMNode child = AVMNodeUnwrapper.Unwrap(listing.get(name));
|
||||
AVMNodeDescriptor desc = child.getDescriptor(lPath, name);
|
||||
results.put(name, desc);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(child);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
@@ -653,8 +643,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
DirectoryNode dir = (DirectoryNode)lPath.getCurrentNode();
|
||||
List<String> deleted = dir.getDeletedNames();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(dir);
|
||||
return deleted;
|
||||
}
|
||||
|
||||
@@ -688,8 +676,8 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
dir.removeChild(lPath, name);
|
||||
dir.updateModTime();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(dir);
|
||||
// AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
// AVMDAOs.Instance().fAVMNodeDAO.evict(dir);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -711,8 +699,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
((LayeredDirectoryNode)node).uncover(lPath, name);
|
||||
node.updateModTime();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
}
|
||||
|
||||
// TODO This is problematic. As time goes on this returns
|
||||
@@ -882,8 +868,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
dir.turnPrimary(lPath);
|
||||
dir.updateModTime();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(dir);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -905,8 +889,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
dir.retarget(lPath, target);
|
||||
dir.updateModTime();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(dir);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1080,8 +1062,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
((LayeredDirectoryNode)node).setOpacity(opacity);
|
||||
node.updateModTime();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
}
|
||||
|
||||
// TODO Does it make sense to set properties on DeletedNodes?
|
||||
@@ -1101,8 +1081,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
AVMNode node = lPath.getCurrentNode();
|
||||
node.setProperty(name, value);
|
||||
node.setGuid(GUID.generate());
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1120,8 +1098,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
AVMNode node = lPath.getCurrentNode();
|
||||
node.addProperties(properties);
|
||||
node.setGuid(GUID.generate());
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1140,8 +1116,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
AVMNode node = lPath.getCurrentNode();
|
||||
PropertyValue prop = node.getProperty(name);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
return prop;
|
||||
}
|
||||
|
||||
@@ -1160,8 +1134,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
AVMNode node = lPath.getCurrentNode();
|
||||
Map<QName, PropertyValue> props = node.getProperties();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
return props;
|
||||
}
|
||||
|
||||
@@ -1180,8 +1152,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
AVMNode node = lPath.getCurrentNode();
|
||||
node.setGuid(GUID.generate());
|
||||
node.deleteProperty(name);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1198,8 +1168,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
AVMNode node = lPath.getCurrentNode();
|
||||
node.setGuid(GUID.generate());
|
||||
node.deleteProperties();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1287,8 +1255,8 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
throw new AVMWrongTypeException("File Expected.");
|
||||
}
|
||||
ContentData content = ((FileNode)node).getContentData(lPath);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
// AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
// AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
return content;
|
||||
}
|
||||
|
||||
@@ -1312,8 +1280,8 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
node.updateModTime();
|
||||
node.setGuid(GUID.generate());
|
||||
ContentData content = ((FileNode)node).getContentData(lPath);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
// AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
// AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
return content;
|
||||
}
|
||||
|
||||
@@ -1335,8 +1303,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
throw new AVMWrongTypeException("File Expected.");
|
||||
}
|
||||
((FileNode)node).setContentData(data);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1354,8 +1320,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
AVMNode node = lPath.getCurrentNode();
|
||||
node.copyMetaDataFrom(from);
|
||||
node.setGuid(GUID.generate());
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1373,8 +1337,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
AVMNode node = lPath.getCurrentNode();
|
||||
node.getAspects().add(aspectName);
|
||||
node.setGuid(GUID.generate());
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1392,8 +1354,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
AVMNode node = lPath.getCurrentNode();
|
||||
Set<QName> aspects = node.getAspects();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
return aspects;
|
||||
}
|
||||
|
||||
@@ -1419,8 +1379,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
node.getProperties().remove(name);
|
||||
}
|
||||
node.setGuid(GUID.generate());
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1439,7 +1397,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
AVMNode node = lPath.getCurrentNode();
|
||||
boolean has = node.getAspects().contains(aspectName);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
return has;
|
||||
}
|
||||
|
||||
@@ -1458,8 +1415,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
AVMNode node = lPath.getCurrentNode();
|
||||
node.setAcl(acl);
|
||||
node.setGuid(GUID.generate());
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1532,9 +1487,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
toLink.getAspects().add(WCMModel.ASPECT_REVERTED);
|
||||
PropertyValue value = new PropertyValue(null, toRevertTo.getId());
|
||||
toLink.setProperty(WCMModel.PROP_REVERTED_ID, value);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(dir);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(toLink);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -1549,8 +1501,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
AVMNode node = lPath.getCurrentNode();
|
||||
node.setGuid(guid);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -1570,8 +1520,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
PlainFileNode file = (PlainFileNode)node;
|
||||
file.setEncoding(encoding);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(file);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -1591,7 +1539,5 @@ public class AVMStoreImpl implements AVMStore, Serializable
|
||||
}
|
||||
PlainFileNode file = (PlainFileNode)node;
|
||||
file.setMimeType(mimeType);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(file);
|
||||
}
|
||||
}
|
||||
|
@@ -50,7 +50,7 @@ import org.apache.commons.logging.LogFactory;
|
||||
public class AVMSyncServiceImpl implements AVMSyncService
|
||||
{
|
||||
private static Log fgLogger = LogFactory.getLog(AVMSyncServiceImpl.class);
|
||||
|
||||
|
||||
/**
|
||||
* The AVMService.
|
||||
*/
|
||||
@@ -60,7 +60,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
* The AVMRepository.
|
||||
*/
|
||||
private AVMRepository fAVMRepository;
|
||||
|
||||
|
||||
/**
|
||||
* Do nothing constructor.
|
||||
*/
|
||||
@@ -69,19 +69,19 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the AVM Service. For Spring.
|
||||
* Set the AVM Service. For Spring.
|
||||
* @param avmService The AVMService reference.
|
||||
*/
|
||||
public void setAvmService(AVMService avmService)
|
||||
{
|
||||
fAVMService = avmService;
|
||||
}
|
||||
|
||||
|
||||
public void setAvmRepository(AVMRepository avmRepository)
|
||||
{
|
||||
fAVMRepository = avmRepository;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Get a difference list between two corresponding node trees.
|
||||
* @param srcVersion The version id for the source tree.
|
||||
@@ -92,7 +92,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
* @return A List of AVMDifference structs which can be used for
|
||||
* the update operation.
|
||||
*/
|
||||
public List<AVMDifference> compare(int srcVersion, String srcPath,
|
||||
public List<AVMDifference> compare(int srcVersion, String srcPath,
|
||||
int dstVersion, String dstPath,
|
||||
NameMatcher excluder)
|
||||
{
|
||||
@@ -133,7 +133,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Internal recursive implementation of compare.
|
||||
* @param srcVersion The version of the source tree.
|
||||
@@ -171,9 +171,9 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
}
|
||||
case AVMDifference.DIRECTORY :
|
||||
{
|
||||
// First special case: source is a layered directory which points to
|
||||
// First special case: source is a layered directory which points to
|
||||
// the destinations path, and we are comparing 'head' versions.
|
||||
if (srcDesc.isLayeredDirectory() &&
|
||||
if (srcDesc.isLayeredDirectory() &&
|
||||
srcDesc.getIndirection().equals(dstDesc.getPath()) && srcVersion < 0 && dstVersion < 0)
|
||||
{
|
||||
// Get only a direct listing, since that's all that can be different.
|
||||
@@ -202,7 +202,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
{
|
||||
// A missing destination child means the source is NEWER.
|
||||
result.add(new AVMDifference(srcVersion, srcChild.getPath(),
|
||||
dstVersion,
|
||||
dstVersion,
|
||||
dstPath,
|
||||
AVMDifference.NEWER));
|
||||
continue;
|
||||
@@ -242,14 +242,14 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
if (srcChild == null)
|
||||
{
|
||||
// Missing means the source is older.
|
||||
result.add(new AVMDifference(srcVersion,
|
||||
result.add(new AVMDifference(srcVersion,
|
||||
srcPath,
|
||||
dstVersion, dstChild.getPath(),
|
||||
AVMDifference.OLDER));
|
||||
continue;
|
||||
}
|
||||
// Otherwise, recursively invoke.
|
||||
compare(srcVersion, srcChild,
|
||||
compare(srcVersion, srcChild,
|
||||
dstVersion, dstChild,
|
||||
result, excluder);
|
||||
}
|
||||
@@ -294,7 +294,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
}
|
||||
AVMNodeDescriptor dstChild = dstList.get(name);
|
||||
String srcPath = AVMNodeConverter.ExtendAVMPath(srcDesc.getPath(), name);
|
||||
if (excluder != null && (excluder.matches(srcPath) ||
|
||||
if (excluder != null && (excluder.matches(srcPath) ||
|
||||
excluder.matches(dstChild.getPath())))
|
||||
{
|
||||
continue;
|
||||
@@ -313,7 +313,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Updates the destination nodes in the AVMDifferences
|
||||
* with the source nodes. Normally any conflicts or cases in
|
||||
@@ -321,10 +321,10 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
* will cause the transaction to roll back.
|
||||
* @param diffList A List of AVMDifference structs.
|
||||
* @param excluder A possibly null name matcher to exclude unwanted updates.
|
||||
* @param ignoreConflicts If this is true the update will skip those
|
||||
* AVMDifferences which are in conflict with
|
||||
* @param ignoreConflicts If this is true the update will skip those
|
||||
* AVMDifferences which are in conflict with
|
||||
* the destination.
|
||||
* @param ignoreOlder If this is true the update will skip those
|
||||
* @param ignoreOlder If this is true the update will skip those
|
||||
* AVMDifferences which have the source older than the destination.
|
||||
* @param overrideConflicts If this is true the update will override conflicting
|
||||
* AVMDifferences and replace the destination with the conflicting source.
|
||||
@@ -333,10 +333,11 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
* @param description Full update blurb.
|
||||
* in which the source is older than the destination and overwrite the destination.
|
||||
*/
|
||||
public void update(List<AVMDifference> diffList,
|
||||
public void update(List<AVMDifference> diffList,
|
||||
NameMatcher excluder, boolean ignoreConflicts, boolean ignoreOlder,
|
||||
boolean overrideConflicts, boolean overrideOlder, String tag, String description)
|
||||
{
|
||||
long start = System.currentTimeMillis();
|
||||
if (fgLogger.isDebugEnabled())
|
||||
{
|
||||
try
|
||||
@@ -398,7 +399,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
// The default is that the source is newer in the case where
|
||||
// the destination doesn't exist.
|
||||
int diffCode = AVMDifference.NEWER;
|
||||
if (dstDesc != null)
|
||||
if (dstDesc != null)
|
||||
{
|
||||
diffCode = compareOne(srcDesc, dstDesc);
|
||||
}
|
||||
@@ -472,6 +473,10 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
{
|
||||
fAVMService.createSnapshot(storeName, tag, description);
|
||||
}
|
||||
if (fgLogger.isDebugEnabled())
|
||||
{
|
||||
fgLogger.debug("Raw Update: " + (System.currentTimeMillis() - start));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -501,7 +506,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
}
|
||||
fAVMService.link(parentPath, name, toLink);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Recursively copy a node into the given position.
|
||||
* @param parentPath The place to put it.
|
||||
@@ -521,7 +526,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
recursiveCopy(parentDesc, entry.getKey(), entry.getValue(), excluder);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Shortcutting helper that uses an AVMNodeDescriptor parent.
|
||||
* @param parent The parent we are linking into.
|
||||
@@ -531,7 +536,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
private void recursiveCopy(AVMNodeDescriptor parent, String name, AVMNodeDescriptor toCopy, NameMatcher excluder)
|
||||
{
|
||||
String newPath = AVMNodeConverter.ExtendAVMPath(parent.getPath(), name);
|
||||
if (excluder != null && (excluder.matches(newPath) ||
|
||||
if (excluder != null && (excluder.matches(newPath) ||
|
||||
excluder.matches(toCopy.getPath())))
|
||||
{
|
||||
return;
|
||||
@@ -546,17 +551,17 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
// children into it.
|
||||
AVMNodeDescriptor newParentDesc = fAVMRepository.createDirectory(parent, name);
|
||||
fAVMService.setMetaDataFrom(newParentDesc.getPath(), toCopy);
|
||||
Map<String, AVMNodeDescriptor> children =
|
||||
Map<String, AVMNodeDescriptor> children =
|
||||
fAVMService.getDirectoryListing(toCopy, true);
|
||||
for (Map.Entry<String, AVMNodeDescriptor> entry : children.entrySet())
|
||||
{
|
||||
recursiveCopy(newParentDesc, entry.getKey(), entry.getValue(), excluder);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The workhorse of comparison and updating. Determine the versioning relationship
|
||||
* of two nodes.
|
||||
* of two nodes.
|
||||
* @param srcDesc Descriptor for the source node.
|
||||
* @param dstDesc Descriptor for the destination node.
|
||||
* @return One of SAME, OLDER, NEWER, CONFLICT, DIRECTORY
|
||||
@@ -631,7 +636,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
}
|
||||
if (common.getId() == dstDesc.getId())
|
||||
{
|
||||
return AVMDifference.NEWER;
|
||||
return AVMDifference.NEWER;
|
||||
}
|
||||
// Finally we know they are in conflict.
|
||||
return AVMDifference.CONFLICT;
|
||||
@@ -666,12 +671,12 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
// They must, finally, be in conflict.
|
||||
return AVMDifference.CONFLICT;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Flattens a layer so that all nodes under and including
|
||||
* <code>layerPath</code> become translucent to any nodes in the
|
||||
* <code>layerPath</code> become translucent to any nodes in the
|
||||
* corresponding location under and including <code>underlyingPath</code>
|
||||
* that are the same version.
|
||||
* that are the same version.
|
||||
* @param layerPath The overlying layer path.
|
||||
* @param underlyingPath The underlying path.
|
||||
*/
|
||||
@@ -705,7 +710,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
}
|
||||
flatten(layerNode, underlyingNode);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* This is the implementation of flatten.
|
||||
* @param layer The on top node.
|
||||
@@ -747,7 +752,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
}
|
||||
return true;
|
||||
}
|
||||
// Grab the listing
|
||||
// Grab the listing
|
||||
Map<String, AVMNodeDescriptor> underListing =
|
||||
fAVMService.getDirectoryListing(underlying, true);
|
||||
boolean flattened = true;
|
||||
@@ -783,10 +788,10 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
}
|
||||
return flattened;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Takes a layer, deletes it and recreates it pointing at the same underlying
|
||||
* node. Any changes in the layer are lost (except to history if the layer has been
|
||||
* node. Any changes in the layer are lost (except to history if the layer has been
|
||||
* snapshotted.)
|
||||
* @param layerPath
|
||||
*/
|
||||
@@ -801,7 +806,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
|
||||
fAVMService.removeNode(parts[0], parts[1]);
|
||||
fAVMService.createLayeredDirectory(desc.getIndirection(), parts[0], parts[1]);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Make sure this entire directory path exists.
|
||||
* @param path
|
||||
|
@@ -39,7 +39,7 @@ abstract class DirectoryNodeImpl extends AVMNodeImpl implements DirectoryNode
|
||||
protected DirectoryNodeImpl()
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* A pass through constructor. Called when a new concrete subclass
|
||||
* instance is created.
|
||||
@@ -50,7 +50,7 @@ abstract class DirectoryNodeImpl extends AVMNodeImpl implements DirectoryNode
|
||||
{
|
||||
super(id, repo);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Dangerous version of link.
|
||||
* @param name The name to give the child.
|
||||
@@ -72,5 +72,8 @@ abstract class DirectoryNodeImpl extends AVMNodeImpl implements DirectoryNode
|
||||
ChildKey key = new ChildKey(this, name);
|
||||
ChildEntry newChild = new ChildEntryImpl(key, node);
|
||||
AVMDAOs.Instance().fChildEntryDAO.save(newChild);
|
||||
}
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
AVMDAOs.Instance().fChildEntryDAO.evict(newChild);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
|
||||
}
|
||||
}
|
||||
|
@@ -402,7 +402,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
|
||||
{
|
||||
listing.put(entry.getKey().getName(), entry.getChild());
|
||||
}
|
||||
AVMDAOs.Instance().fChildEntryDAO.evict(entry);
|
||||
}
|
||||
return listing;
|
||||
}
|
||||
@@ -421,7 +420,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
|
||||
{
|
||||
listing.put(entry.getKey().getName(), entry.getChild());
|
||||
}
|
||||
AVMDAOs.Instance().fChildEntryDAO.evict(entry);
|
||||
}
|
||||
return listing;
|
||||
}
|
||||
@@ -447,8 +445,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
|
||||
AVMNodeDescriptor childDesc =
|
||||
childNode.getDescriptor(dir.getPath(), child.getKey().getName(), dir.getIndirection(), dir.getIndirectionVersion());
|
||||
listing.put(child.getKey().getName(), childDesc);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(childNode);
|
||||
AVMDAOs.Instance().fChildEntryDAO.evict(child);
|
||||
}
|
||||
return listing;
|
||||
}
|
||||
@@ -481,7 +477,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
|
||||
listing.get(name).getDescriptor(dir.getPath(), name,
|
||||
lookup.getCurrentIndirection(),
|
||||
lookup.getCurrentIndirectionVersion()));
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(listing.get(name));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -499,8 +494,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
|
||||
child.getKey().getName(),
|
||||
dir.getIndirection(),
|
||||
dir.getIndirectionVersion()));
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(child.getChild());
|
||||
AVMDAOs.Instance().fChildEntryDAO.evict(child);
|
||||
}
|
||||
}
|
||||
return baseListing;
|
||||
@@ -543,7 +536,8 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
|
||||
{
|
||||
return null;
|
||||
}
|
||||
return new Pair<AVMNode, Boolean>(AVMNodeUnwrapper.Unwrap(entry.getChild()), true);
|
||||
Pair<AVMNode, Boolean> result = new Pair<AVMNode, Boolean>(AVMNodeUnwrapper.Unwrap(entry.getChild()), true);
|
||||
return result;
|
||||
}
|
||||
// Don't check our underlying directory if we are opaque.
|
||||
if (fOpacity)
|
||||
@@ -593,8 +587,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
|
||||
name,
|
||||
mine.getIndirection(),
|
||||
mine.getIndirectionVersion());
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(entry.getChild());
|
||||
AVMDAOs.Instance().fChildEntryDAO.evict(entry);
|
||||
return desc;
|
||||
}
|
||||
// If we are opaque don't check underneath.
|
||||
@@ -612,7 +604,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
|
||||
return null;
|
||||
}
|
||||
AVMNodeDescriptor desc = child.getFirst().getDescriptor(lookup);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(child.getFirst());
|
||||
return desc;
|
||||
}
|
||||
else
|
||||
@@ -952,7 +943,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
|
||||
{
|
||||
ChildEntry entry = AVMDAOs.Instance().fChildEntryDAO.get(key);
|
||||
AVMDAOs.Instance().fChildEntryDAO.delete(entry);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.flush();
|
||||
}
|
||||
}
|
||||
// Make the new ChildEntry and save.
|
||||
|
@@ -115,7 +115,6 @@ class PlainDirectoryNodeImpl extends DirectoryNodeImpl implements PlainDirectory
|
||||
continue;
|
||||
}
|
||||
result.put(child.getKey().getName(), AVMNodeUnwrapper.Unwrap(child.getChild()));
|
||||
AVMDAOs.Instance().fChildEntryDAO.evict(child);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
@@ -166,8 +165,6 @@ class PlainDirectoryNodeImpl extends DirectoryNodeImpl implements PlainDirectory
|
||||
child.getKey().getName(),
|
||||
dir.getIndirection(),
|
||||
dir.getIndirectionVersion()));
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(child.getChild());
|
||||
AVMDAOs.Instance().fChildEntryDAO.evict(child);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
@@ -193,14 +190,19 @@ class PlainDirectoryNodeImpl extends DirectoryNodeImpl implements PlainDirectory
|
||||
{
|
||||
ChildKey key = new ChildKey(this, name);
|
||||
ChildEntry entry = AVMDAOs.Instance().fChildEntryDAO.get(key);
|
||||
if (entry == null ||
|
||||
(!includeDeleted && entry.getChild().getType() == AVMNodeType.DELETED_NODE))
|
||||
if (entry == null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
if (!includeDeleted && entry.getChild().getType() == AVMNodeType.DELETED_NODE)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
// We're doing the hand unrolling of the proxy because
|
||||
// Hibernate/CGLIB proxies are broken.
|
||||
return new Pair<AVMNode, Boolean>(AVMNodeUnwrapper.Unwrap(entry.getChild()), true);
|
||||
|
||||
Pair<AVMNode, Boolean> result = new Pair<AVMNode, Boolean>(AVMNodeUnwrapper.Unwrap(entry.getChild()), true);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -223,8 +225,6 @@ class PlainDirectoryNodeImpl extends DirectoryNodeImpl implements PlainDirectory
|
||||
return null;
|
||||
}
|
||||
AVMNodeDescriptor desc = entry.getChild().getDescriptor(mine.getPath(), name, (String)null, -1);
|
||||
AVMDAOs.Instance().fAVMNodeDAO.evict(entry.getChild());
|
||||
AVMDAOs.Instance().fChildEntryDAO.evict(entry);
|
||||
return desc;
|
||||
}
|
||||
|
||||
|
@@ -57,6 +57,7 @@ class AVMNodeDAOHibernate extends HibernateDaoSupport implements
|
||||
public void save(AVMNode node)
|
||||
{
|
||||
getSession().save(node);
|
||||
SessionCacheChecker.instance.check();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -66,6 +67,7 @@ class AVMNodeDAOHibernate extends HibernateDaoSupport implements
|
||||
public void delete(AVMNode node)
|
||||
{
|
||||
getSession().delete(node);
|
||||
SessionCacheChecker.instance.check();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -74,6 +76,7 @@ class AVMNodeDAOHibernate extends HibernateDaoSupport implements
|
||||
*/
|
||||
public AVMNode getByID(long id)
|
||||
{
|
||||
SessionCacheChecker.instance.check();
|
||||
return AVMNodeUnwrapper.Unwrap((AVMNode)getSession().get(AVMNodeImpl.class, id));
|
||||
}
|
||||
|
||||
@@ -215,6 +218,5 @@ class AVMNodeDAOHibernate extends HibernateDaoSupport implements
|
||||
*/
|
||||
public void evict(AVMNode node)
|
||||
{
|
||||
getSession().evict(node);
|
||||
}
|
||||
}
|
||||
|
@@ -66,6 +66,7 @@ class ChildEntryDAOHibernate extends HibernateDaoSupport implements
|
||||
*/
|
||||
public ChildEntry get(ChildKey key)
|
||||
{
|
||||
SessionCacheChecker.instance.check();
|
||||
return (ChildEntry)getSession().get(ChildEntryImpl.class, key);
|
||||
}
|
||||
|
||||
@@ -96,6 +97,7 @@ class ChildEntryDAOHibernate extends HibernateDaoSupport implements
|
||||
"and ce.child = :child");
|
||||
query.setEntity("parent", parent);
|
||||
query.setEntity("child", child);
|
||||
SessionCacheChecker.instance.check();
|
||||
return (ChildEntry)query.uniqueResult();
|
||||
}
|
||||
|
||||
@@ -148,6 +150,5 @@ class ChildEntryDAOHibernate extends HibernateDaoSupport implements
|
||||
*/
|
||||
public void evict(ChildEntry entry)
|
||||
{
|
||||
getSession().evict(entry);
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,62 @@
/**
*
*/
package org.alfresco.repo.avm.hibernate;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.engine.EntityKey;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;

/**
* @author britt
*/
public class SessionCacheChecker extends HibernateDaoSupport
{
public static SessionCacheChecker instance = null;

private static Log fgLogger = LogFactory.getLog(SessionCacheChecker.class);

private int fCount = 0;

public SessionCacheChecker()
{
instance = this;
}

public void check()
{
if (!fgLogger.isDebugEnabled())
{
return;
}
if (fCount % 1000 == 0)
{
Map<String, Integer> types = new HashMap<String, Integer>();
Set<EntityKey> keys = (Set<EntityKey>)getSession().getStatistics().getEntityKeys();
if (keys.size() > 200)
{
for (EntityKey key : keys)
{
String name = key.getEntityName();
if (!types.containsKey(name))
{
types.put(name, 0);
}
types.put(name, types.get(name) + 1);
}
fgLogger.debug(types);
// for (Object it : Thread.currentThread().getStackTrace())
// {
// fgLogger.debug(it);
// }
// fCount = 0;
}
}
fCount++;
}
}
@@ -15,11 +15,11 @@
|
||||
* along with this program; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
* As a special exception to the terms and conditions of version 2.0 of
|
||||
* the GPL, you may redistribute this Program in connection with Free/Libre
|
||||
* and Open Source Software ("FLOSS") applications as described in Alfresco's
|
||||
* FLOSS exception. You should have received a copy of the text describing
|
||||
* the FLOSS exception, and it is also available here:
|
||||
* As a special exception to the terms and conditions of version 2.0 of
|
||||
* the GPL, you may redistribute this Program in connection with Free/Libre
|
||||
* and Open Source Software ("FLOSS") applications as described in Alfresco's
|
||||
* FLOSS exception. You should have received a copy of the text describing
|
||||
* the FLOSS exception, and it is also available here:
|
||||
* http://www.alfresco.com/legal/licensing
|
||||
*/
|
||||
package org.alfresco.repo.domain.hibernate;
|
||||
@@ -27,6 +27,7 @@ package org.alfresco.repo.domain.hibernate;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.Map;
|
||||
|
||||
import org.alfresco.repo.avm.hibernate.SessionCacheChecker;
|
||||
import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
|
||||
import org.alfresco.util.resource.MethodResourceManager;
|
||||
import org.apache.commons.logging.Log;
|
||||
@@ -44,9 +45,9 @@ import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
|
||||
* passes stateful objects back and forth. There must be no <code>Session</code>-linked
|
||||
* objects up the stack from where this instance resides. Failure to observe this will
|
||||
* most likely result in data loss of a sporadic nature.
|
||||
*
|
||||
*
|
||||
* @see org.alfresco.repo.domain.hibernate.HibernateNodeTest#testPostCommitClearIssue()
|
||||
*
|
||||
*
|
||||
* @author Derek Hulley
|
||||
*/
|
||||
public class SessionSizeResourceManager extends HibernateDaoSupport implements MethodResourceManager
|
||||
@@ -55,7 +56,7 @@ public class SessionSizeResourceManager extends HibernateDaoSupport implements M
|
||||
private static final String KEY_DISABLE_IN_TRANSACTION = "SessionSizeResourceManager.DisableInTransaction";
|
||||
|
||||
private static Log logger = LogFactory.getLog(SessionSizeResourceManager.class);
|
||||
|
||||
|
||||
/** Default 1000 */
|
||||
private int threshold;
|
||||
|
||||
@@ -71,7 +72,7 @@ public class SessionSizeResourceManager extends HibernateDaoSupport implements M
|
||||
/**
|
||||
* @return Returns true if the resource management must be ignored in the current transaction.
|
||||
* If <code>false</code>, the global setting will take effect.
|
||||
*
|
||||
*
|
||||
* @see #setDisableInTransaction()
|
||||
*/
|
||||
public static boolean isDisableInTransaction()
|
||||
@@ -86,7 +87,7 @@ public class SessionSizeResourceManager extends HibernateDaoSupport implements M
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Default public constructor required for bean instantiation.
|
||||
*/
|
||||
@@ -94,14 +95,14 @@ public class SessionSizeResourceManager extends HibernateDaoSupport implements M
|
||||
{
|
||||
this.threshold = 1000;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Set the {@link Session#clear()} threshold. If the number of entities and collections in the
|
||||
* current session exceeds this number, then the session will be cleared. Have you read the
|
||||
* disclaimer?
|
||||
*
|
||||
*
|
||||
* @param threshold the maximum number of entities and associations to keep in memory
|
||||
*
|
||||
*
|
||||
* @see #threshold
|
||||
*/
|
||||
public void setThreshold(int threshold)
|
||||
@@ -114,6 +115,11 @@ public class SessionSizeResourceManager extends HibernateDaoSupport implements M
|
||||
long transactionElapsedTimeNs,
|
||||
Method currentMethod)
|
||||
{
|
||||
if (logger.isDebugEnabled())
|
||||
{
|
||||
logger.debug("Session Size Manager Invoked.");
|
||||
SessionCacheChecker.instance.check();
|
||||
}
|
||||
if (isDisableInTransaction())
|
||||
{
|
||||
// Don't do anything
|
||||
|
@@ -1,5 +1,5 @@
|
||||
/**
|
||||
*
|
||||
*
|
||||
*/
|
||||
package org.alfresco.repo.transaction;
|
||||
|
||||
@@ -10,6 +10,7 @@ import org.aopalliance.intercept.MethodInvocation;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.hibernate.StaleObjectStateException;
|
||||
import org.hibernate.StaleStateException;
|
||||
import org.hibernate.exception.LockAcquisitionException;
|
||||
import org.springframework.aop.framework.ReflectiveMethodInvocation;
|
||||
import org.springframework.dao.ConcurrencyFailureException;
|
||||
@@ -19,38 +20,38 @@ import org.springframework.transaction.TransactionDefinition;
|
||||
import org.springframework.transaction.TransactionStatus;
|
||||
|
||||
/**
|
||||
*
|
||||
*
|
||||
* @author britt
|
||||
*/
|
||||
public class RetryingTransactionAdvice implements MethodInterceptor
|
||||
public class RetryingTransactionAdvice implements MethodInterceptor
|
||||
{
|
||||
private static Log fgLogger = LogFactory.getLog(RetryingTransactionAdvice.class);
|
||||
|
||||
|
||||
/**
|
||||
* The transaction manager instance.
|
||||
*/
|
||||
private PlatformTransactionManager fTxnManager;
|
||||
|
||||
|
||||
/**
|
||||
* The TransactionDefinition.
|
||||
*/
|
||||
private TransactionDefinition fDefinition;
|
||||
|
||||
|
||||
/**
|
||||
* The maximum number of retries.
|
||||
*/
|
||||
private int fMaxRetries;
|
||||
|
||||
|
||||
/**
|
||||
* A Random number generator for getting retry intervals.
|
||||
*/
|
||||
private Random fRandom;
|
||||
|
||||
|
||||
public RetryingTransactionAdvice()
|
||||
{
|
||||
fRandom = new Random(System.currentTimeMillis());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Setter.
|
||||
*/
|
||||
@@ -66,7 +67,7 @@ public class RetryingTransactionAdvice implements MethodInterceptor
|
||||
{
|
||||
fDefinition = def;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Setter.
|
||||
*/
|
||||
@@ -74,11 +75,11 @@ public class RetryingTransactionAdvice implements MethodInterceptor
|
||||
{
|
||||
fMaxRetries = maxRetries;
|
||||
}
|
||||
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.aopalliance.intercept.MethodInterceptor#invoke(org.aopalliance.intercept.MethodInvocation)
|
||||
*/
|
||||
public Object invoke(MethodInvocation methodInvocation) throws Throwable
|
||||
public Object invoke(MethodInvocation methodInvocation) throws Throwable
|
||||
{
|
||||
RuntimeException lastException = null;
|
||||
for (int count = 0; fMaxRetries < -1 || count < fMaxRetries; count++)
|
||||
@@ -101,9 +102,9 @@ public class RetryingTransactionAdvice implements MethodInterceptor
|
||||
{
|
||||
fgLogger.debug("Transaction succeeded after " + count + " retries.");
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
catch (RuntimeException e)
|
||||
{
|
||||
if (txn != null && isNewTxn && !txn.isCompleted())
|
||||
@@ -122,7 +123,8 @@ public class RetryingTransactionAdvice implements MethodInterceptor
|
||||
if (t instanceof ConcurrencyFailureException ||
|
||||
t instanceof DeadlockLoserDataAccessException ||
|
||||
t instanceof StaleObjectStateException ||
|
||||
t instanceof LockAcquisitionException)
|
||||
t instanceof LockAcquisitionException ||
|
||||
t instanceof StaleStateException)
|
||||
{
|
||||
shouldRetry = true;
|
||||
try
|
||||
@@ -131,7 +133,7 @@ public class RetryingTransactionAdvice implements MethodInterceptor
|
||||
}
|
||||
catch (InterruptedException ie)
|
||||
{
|
||||
// Do nothing.
|
||||
// Do nothing.
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
@@ -15,11 +15,11 @@
|
||||
* along with this program; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
* As a special exception to the terms and conditions of version 2.0 of
|
||||
* the GPL, you may redistribute this Program in connection with Free/Libre
|
||||
* and Open Source Software ("FLOSS") applications as described in Alfresco's
|
||||
* FLOSS exception. You should have received a copy of the text describing
|
||||
* the FLOSS exception, and it is also available here:
|
||||
* As a special exception to the terms and conditions of version 2.0 of
|
||||
* the GPL, you may redistribute this Program in connection with Free/Libre
|
||||
* and Open Source Software ("FLOSS") applications as described in Alfresco's
|
||||
* FLOSS exception. You should have received a copy of the text describing
|
||||
* the FLOSS exception, and it is also available here:
|
||||
* http://www.alfresco.com/legal/licensing"
|
||||
*/
|
||||
package org.alfresco.repo.transaction;
|
||||
@@ -39,6 +39,7 @@ import org.alfresco.service.transaction.TransactionService;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.hibernate.StaleObjectStateException;
|
||||
import org.hibernate.StaleStateException;
|
||||
import org.hibernate.exception.LockAcquisitionException;
|
||||
import org.springframework.dao.ConcurrencyFailureException;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
@@ -46,17 +47,17 @@ import org.springframework.dao.DeadlockLoserDataAccessException;
|
||||
import org.springframework.jdbc.UncategorizedSQLException;
|
||||
|
||||
/**
* A helper that runs a unit of work inside a UserTransaction,
* transparently retrying the unit of work if the cause of
* A helper that runs a unit of work inside a UserTransaction,
* transparently retrying the unit of work if the cause of
* failure is an optimistic locking or deadlock condition.
*
*
* @author britt
*/
public class RetryingTransactionHelper
{
private static final String MSG_READ_ONLY = "permissions.err_read_only";
private static Log logger = LogFactory.getLog(RetryingTransactionHelper.class);

/**
* Exceptions that trigger retries.
*/
@@ -69,30 +70,31 @@ public class RetryingTransactionHelper
StaleObjectStateException.class,
LockAcquisitionException.class,
BatchUpdateException.class,
DataIntegrityViolationException.class
DataIntegrityViolationException.class,
StaleStateException.class
};
}
/**
* Reference to the TransactionService instance.
*/
private TransactionService txnService;

/**
* The maximum number of retries. -1 for infinity.
*/
private int maxRetries;

/**
* Whether the the transactions may only be reads
*/
private boolean readOnly;

/**
* Random number generator for retry delays.
*/
private Random random;

/**
* Callback interface
* @author britt
@@ -101,7 +103,7 @@ public class RetryingTransactionHelper
{
/**
* Perform a unit of transactional work.
*
*
* @return Return the result of the unit of work
* @throws Throwable This can be anything and will guarantee either a retry or a rollback
*/
@@ -115,7 +117,7 @@ public class RetryingTransactionHelper
{
this.random = new Random(System.currentTimeMillis());
}
// Setters.
/**
* Set the TransactionService.
@@ -124,7 +126,7 @@ public class RetryingTransactionHelper
{
this.txnService = service;
}

/**
* Set the maximimum number of retries. -1 for infinity.
*/
@@ -132,7 +134,7 @@ public class RetryingTransactionHelper
{
this.maxRetries = maxRetries;
}

/**
* Set whether this helper only supports read transactions.
*/
@@ -142,15 +144,15 @@ public class RetryingTransactionHelper
}
/**
* Execute a callback in a transaction until it succeeds, fails
* Execute a callback in a transaction until it succeeds, fails
* because of an error not the result of an optimistic locking failure,
* or a deadlock loser failure, or until a maximum number of retries have
* been attempted.
* been attempted.
* <p>
* If there is already an active transaction, then the callback is merely
* executed and any retry logic is left to the caller. The transaction
* will attempt to be read-write.
*
*
* @param cb The callback containing the unit of work.
* @return Returns the result of the unit of work.
* @throws RuntimeException all checked exceptions are converted
@@ -159,16 +161,16 @@ public class RetryingTransactionHelper
{
return doInTransaction(cb, false, false);
}
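
The Javadoc above describes handing a unit of work to the helper as a callback and letting it retry on optimistic locking or deadlock failures. A usage sketch follows; the nested RetryingTransactionCallback type, its execute() method and the setter names are assumptions, since only the doInTransaction overloads and the fields they populate appear in these hunks:

import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.service.transaction.TransactionService;

// Usage sketch only. The callback interface, execute() method and setter names are assumed.
class RetryingTransactionHelperUsageSketch
{
    Object runUnitOfWork(TransactionService transactionService)
    {
        RetryingTransactionHelper helper = new RetryingTransactionHelper();
        helper.setTransactionService(transactionService); // assumed setter name
        helper.setMaxRetries(20);                         // assumed setter name; -1 would mean retry forever

        // Read-write transaction, retried on optimistic locking and deadlock failures.
        return helper.doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback()
        {
            public Object execute() throws Throwable
            {
                // The unit of work: anything thrown here leads to a retry or a rollback.
                return "done";
            }
        });
    }
}
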
/**
* Execute a callback in a transaction until it succeeds, fails
* Execute a callback in a transaction until it succeeds, fails
* because of an error not the result of an optimistic locking failure,
* or a deadlock loser failure, or until a maximum number of retries have
* been attempted.
* been attempted.
* <p>
* If there is already an active transaction, then the callback is merely
* executed and any retry logic is left to the caller.
*
*
* @param cb The callback containing the unit of work.
* @param readOnly Whether this is a read only transaction.
* @return Returns the result of the unit of work.
@@ -178,16 +180,16 @@ public class RetryingTransactionHelper
{
return doInTransaction(cb, readOnly, false);
}
/**
* Execute a callback in a transaction until it succeeds, fails
* Execute a callback in a transaction until it succeeds, fails
* because of an error not the result of an optimistic locking failure,
* or a deadlock loser failure, or until a maximum number of retries have
* been attempted.
* been attempted.
* <p>
* It is possible to force a new transaction to be created or to partake in
* any existing transaction.
*
*
* @param cb The callback containing the unit of work.
* @param readOnly Whether this is a read only transaction.
* @param requiresNew <tt>true</tt> to force a new transaction or
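
For the three-argument overload documented above, both flags can be pinned explicitly. A short continuation of the previous sketch, with the same assumed callback type:

import org.alfresco.repo.transaction.RetryingTransactionHelper;

// Continuation of the usage sketch above. Only doInTransaction(cb, readOnly, requiresNew)
// is visible in these hunks; the nested callback type name remains an assumption.
class ForcedNewTransactionSketch
{
    Object readInNewTransaction(RetryingTransactionHelper helper,
                                RetryingTransactionHelper.RetryingTransactionCallback readOnlyWork)
    {
        // readOnly = true, requiresNew = true: run the work read-only in its own
        // transaction, even if the caller is already inside one.
        return helper.doInTransaction(readOnlyWork, true, true);
    }
}
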
@@ -274,7 +276,7 @@ public class RetryingTransactionHelper
// Rollback if we can.
if (txn != null)
{
try
try
{
int txnStatus = txn.getStatus();
// We can only rollback if a transaction was started (NOT NO_TRANSACTION) and
@@ -285,18 +287,18 @@ public class RetryingTransactionHelper
{
txn.rollback();
}
}
catch (IllegalStateException e1)
{
logger.error(e);
throw new AlfrescoRuntimeException("Failure during rollback: " + cb, e1);
}
catch (SecurityException e1)
}
catch (IllegalStateException e1)
{
logger.error(e);
throw new AlfrescoRuntimeException("Failure during rollback: " + cb, e1);
}
catch (SystemException e1)
catch (SecurityException e1)
{
logger.error(e);
throw new AlfrescoRuntimeException("Failure during rollback: " + cb, e1);
}
catch (SystemException e1)
{
logger.error(e);
throw new AlfrescoRuntimeException("Failure during rollback: " + cb, e1);
@@ -309,7 +311,7 @@ public class RetryingTransactionHelper
}
else
{
lastException = (e instanceof RuntimeException) ?
lastException = (e instanceof RuntimeException) ?
(RuntimeException)e : new AlfrescoRuntimeException("Exception in Transaction.", e);
}
// Check if there is a cause for retrying
@@ -324,7 +326,7 @@ public class RetryingTransactionHelper
}
catch (InterruptedException ie)
{
// Do nothing.
// Do nothing.
}
// Try again
continue;
@@ -340,10 +342,10 @@ public class RetryingTransactionHelper
// So, fail.
throw lastException;
}
/**
* Sometimes, the exception means retry and sometimes not.
*
*
* @param cause the cause to examine
* @return Returns the original cause if it is a valid retry cause, otherwise <tt>null</tt>
*/
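
Read together with the RETRY_EXCEPTIONS additions earlier in this commit, the Javadoc above implies a walk down the cause chain that returns the first retryable throwable, or null when nothing in the chain warrants a retry. A sketch of that idea, not the committed implementation:

import java.sql.BatchUpdateException;
import org.hibernate.StaleObjectStateException;
import org.hibernate.StaleStateException;
import org.hibernate.exception.LockAcquisitionException;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.dao.DeadlockLoserDataAccessException;

// Sketch only: walk the cause chain and return the first throwable whose type appears in a
// retry list like RETRY_EXCEPTIONS; otherwise return null so the caller fails instead of retrying.
class RetryCauseSketch
{
    private static final Class<?>[] RETRY_EXCEPTIONS = new Class<?>[]
    {
        ConcurrencyFailureException.class,
        DeadlockLoserDataAccessException.class,
        StaleObjectStateException.class,
        StaleStateException.class,
        LockAcquisitionException.class,
        BatchUpdateException.class,
        DataIntegrityViolationException.class
    };

    static Throwable extractRetryCause(Throwable cause)
    {
        for (Throwable t = cause; t != null; t = t.getCause())
        {
            for (Class<?> retryType : RETRY_EXCEPTIONS)
            {
                if (retryType.isInstance(t))
                {
                    // A known optimistic-lock/deadlock style failure: worth retrying.
                    return t;
                }
            }
        }
        return null;
    }
}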