Merged V2.1 to HEAD

6944: More hibernate session cache taming.
   6945: Times for commits are close to linear in the number of items submitted.
   6946: Missing break statement. (Courtesy of Jan).
   6948: Fixed session cache eviction problem triggered by resetLayer().
   6956: Wrapped AVMService and AttributeService in TransactionResourceInterceptor.
   Reverted log4j.properties


git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@7368 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
Derek Hulley
2007-11-12 23:18:09 +00:00
parent e3d5cececb
commit 209dd85a0d
23 changed files with 416 additions and 306 deletions
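Note on item 6956 above: the public avmService and attributeService bean ids become Spring ProxyFactoryBean proxies whose interceptor chain is the bean registered as sessionSizeResourceInterceptor, while the original implementation classes move to avmServiceBase and attributeServiceBase. A minimal sketch, condensed from the bean definitions in the diffs below (only the avmService wiring is shown; the attributeService change is analogous):

<bean id="avmService" class="org.springframework.aop.framework.ProxyFactoryBean">
    <property name="proxyInterfaces">
        <value>org.alfresco.service.cmr.avm.AVMService</value>
    </property>
    <property name="target">
        <ref bean="avmServiceBase"/>
    </property>
    <property name="interceptorNames">
        <list>
            <value>sessionSizeResourceInterceptor</value>
        </list>
    </property>
</bean>
<!-- Sketch: the original implementation keeps its existing properties under the new id. -->
<bean id="avmServiceBase" class="org.alfresco.repo.avm.AVMServiceImpl">
    <property name="avmRepository">
        <ref bean="avmRepository"/>
    </property>
</bean>

The related session-cache tuning in the node services context (also below) lowers resourceManagerCallFrequencyMillis from 5000 to 1000 and the SessionSizeResourceManager threshold from 5000 to 100.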

View File

@@ -1,10 +1,25 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!DOCTYPE beans PUBLIC "-//SPRING/DTD BEAN//EN"
 "http://www.springframework.org/dtd/spring-beans.dtd">
 <beans>
-<bean id="attributeService" class="org.alfresco.repo.attributes.AttributeServiceImpl">
+<bean id="attributeService" class="org.springframework.aop.framework.ProxyFactoryBean">
+<property name="proxyInterfaces">
+<value>org.alfresco.service.cmr.attributes.AttributeService</value>
+</property>
+<property name="target">
+<ref bean="attributeServiceBase"/>
+</property>
+<property name="interceptorNames">
+<list>
+<value>sessionSizeResourceInterceptor</value>
+</list>
+</property>
+</bean>
+<bean id="attributeServiceBase" class="org.alfresco.repo.attributes.AttributeServiceImpl">
 <property name="attributeConverter">
 <ref bean="attributeConverter"/>
 </property>
@@ -15,9 +30,9 @@
 <ref bean="attributeDAO"/>
 </property>
 </bean>
 <bean id="attributeConverter" class="org.alfresco.repo.attributes.AttributeConverter"/>
 <bean id="attributeDAO" class="org.alfresco.repo.attributes.hibernate.AttributeDAOHibernate">
 <property name="sessionFactory">
 <ref bean="sessionFactory"/>

View File

@@ -5,7 +5,7 @@
 <beans>
 <!-- ID Issuers. -->
 <bean id="nodeIssuer" class="org.alfresco.repo.avm.Issuer" depends-on="avmDAOs" >
 <property name="name">
 <value>node</value>
@@ -25,21 +25,21 @@
 </bean>
 <!-- DAOs for persistent data types -->
 <!-- Issuers are not actual entities. More like pseudo entities. -->
 <bean id="issuerDAO" class="org.alfresco.repo.avm.hibernate.IssuerDAOHibernate">
 <property name="sessionFactory">
 <ref bean="sessionFactory"/>
 </property>
 </bean>
 <bean id="avmNodeDAO" class="org.alfresco.repo.avm.hibernate.AVMNodeDAOHibernate">
 <property name="sessionFactory">
 <ref bean="sessionFactory"/>
 </property>
 </bean>
 <bean id="avmStoreDAO" class="org.alfresco.repo.avm.hibernate.AVMStoreDAOHibernate">
 <property name="sessionFactory">
 <ref bean="sessionFactory"/>
@@ -54,13 +54,13 @@
 <ref bean="sessionFactory"/>
 </property>
 </bean>
 <bean id="childEntryDAO" class="org.alfresco.repo.avm.hibernate.ChildEntryDAOHibernate">
 <property name="sessionFactory">
 <ref bean="sessionFactory"/>
 </property>
 </bean>
 <bean id="historyLinkDAO" class="org.alfresco.repo.avm.hibernate.HistoryLinkDAOHibernate">
 <property name="sessionFactory">
 <ref bean="sessionFactory"/>
@@ -78,25 +78,31 @@
 <ref bean="sessionFactory"/>
 </property>
 </bean>
 <bean id="avmStorePropertyDAO" class="org.alfresco.repo.avm.hibernate.AVMStorePropertyDAOHibernate">
 <property name="sessionFactory">
 <ref bean="sessionFactory"/>
 </property>
 </bean>
 <bean id="avmAspectNameDAO" class="org.alfresco.repo.avm.hibernate.AVMAspectNameDAOHibernate">
 <property name="sessionFactory">
 <ref bean="sessionFactory"/>
 </property>
 </bean>
 <bean id="versionLayeredNodeEntryDAO" class="org.alfresco.repo.avm.hibernate.VersionLayeredNodeEntryDAOHibernate">
 <property name="sessionFactory">
 <ref bean="sessionFactory"/>
 </property>
 </bean>
+<bean id="sessionCacheChecker" class="org.alfresco.repo.avm.hibernate.SessionCacheChecker">
+<property name="sessionFactory">
+<ref bean="sessionFactory"/>
+</property>
+</bean>
 <bean id="avmDAOs" class="org.alfresco.repo.avm.AVMDAOs">
 <property name="issuerDAO">
 <ref bean="issuerDAO"/>
@@ -141,7 +147,7 @@
 <ref bean="versionLayeredNodeEntryDAO"/>
 </property>
 </bean>
 <bean id="lookupCache" class="org.alfresco.repo.avm.LookupCache">
 <property name="avmNodeDAO">
 <ref bean="avmNodeDAO"/>
@@ -153,9 +159,9 @@
 <ref bean="avmLookupCache"/>
 </property>
 </bean>
 <bean id="rawServices" class="org.alfresco.repo.avm.util.RawServices"/>
 <bean id="orphanReaper" class="org.alfresco.repo.avm.OrphanReaper"
 depends-on="AVMService" destroy-method="shutDown">
 <property name="activeBaseSleep">
@@ -176,11 +182,11 @@
 </bean>
 <bean id="createStoreTxnListener" class="org.alfresco.repo.avm.CreateStoreTxnListener"/>
 <bean id="purgeStoreTxnListener" class="org.alfresco.repo.avm.PurgeStoreTxnListener"/>
 <bean id="createVersionTxnListener" class="org.alfresco.repo.avm.CreateVersionTxnListener"/>
 <bean id="purgeVersionTxnListener" class="org.alfresco.repo.avm.PurgeVersionTxnListener"/>
 <bean id="avmRepository" class="org.alfresco.repo.avm.AVMRepository">
@@ -237,14 +243,14 @@
 <!-- Used to notify virtualization server occur after commit/rollback -->
 <bean id="AVMSubmitTransactionListener"
 class="org.alfresco.repo.avm.wf.AVMSubmitTransactionListener"/>
 <!-- NameMatcher beans for filtering what shows up as different in compares. -->
 <bean id="excludeRegexMatcher" class="org.alfresco.util.RegexNameMatcher">
 <property name="patterns">
 <!--
 NOTE: Regexes are implicitly anchored with ^ and $ in this context.
 -->
 <list>
@@ -252,7 +258,7 @@
 </list>
 </property>
 </bean>
 <bean id="excludeExtensionMatcher" class="org.alfresco.repo.avm.util.FileExtensionNameMatcher">
 <property name="extensions">
 <list>
@@ -262,8 +268,8 @@
 <value>~</value>
 </list>
 </property>
 </bean>
 <bean id="globalPathExcluder" class="org.alfresco.util.OrCompositeNameMatcher">
 <property name="matchers">
 <list>
@@ -272,7 +278,7 @@
 </list>
 </property>
 </bean>
 <!-- Bean used to process content that has expired in AVM staging areas -->
 <bean id="avmExpiredContentProcessor" class="org.alfresco.repo.avm.AVMExpiredContentProcessor">
 <property name="adminUserName">
@@ -317,13 +323,13 @@
 </bean>
 <!-- AVM Locking. -->
 <bean id="avmLockingService" class="org.alfresco.repo.avm.locking.AVMLockingServiceImpl">
 <property name="attributeService">
 <ref bean="attributeService"/>
 </property>
 <property name="authorityService">
 <ref bean="authorityService"/>
 </property>
 <property name="personService">
 <ref bean="personService"/>

View File

@@ -1,9 +1,9 @@
 <?xml version='1.0' encoding='UTF-8'?>
 <!DOCTYPE beans PUBLIC '-//SPRING//DTD BEAN//EN' 'http://www.springframework.org/dtd/spring-beans.dtd'>
 <!-- Beans pertinent to node persistence and services -->
 <beans>
 <bean id="mlPropertyInterceptor" class="org.alfresco.repo.node.MLPropertyInterceptor">
 <property name="nodeService">
 <ref bean="mlAwareNodeService" />
@@ -15,7 +15,7 @@
 <ref bean="dictionaryService" />
 </property>
 </bean>
 <bean id="nodeRefPropertyInterceptor" class="org.alfresco.repo.node.NodeRefPropertyMethodInterceptor">
 <property name="nodeService">
 <ref bean="mlAwareNodeService" />
@@ -24,7 +24,7 @@
 <ref bean="dictionaryService" />
 </property>
 </bean>
 <bean id="nodeService" class="org.springframework.aop.framework.ProxyFactoryBean" >
 <property name="targetName">
 <value>mlAwareNodeService</value>
@@ -57,7 +57,7 @@
 </map>
 </property>
 </bean>
 <!-- Map stores to archive stores -->
 <bean id="storeArchiveMap" class="org.alfresco.repo.node.StoreArchiveMap">
 <property name="archiveMap">
@@ -66,7 +66,7 @@
 </map>
 </property>
 </bean>
 <!-- Wrapper component to handle restore and purge of archived nodes -->
 <bean id="nodeArchiveService" class="org.alfresco.repo.node.archive.NodeArchiveServiceImpl" >
 <property name="nodeService">
@@ -92,7 +92,7 @@
 <ref bean="policyComponent"/>
 </property>
 </bean>
 <!-- Handles policy callbacks to ensure that node hierarchy gets indexed -->
 <bean id="nodeIndexer" class="org.alfresco.repo.node.index.NodeIndexer" init-method="init">
 <property name="policyComponent">
@@ -103,7 +103,7 @@
 </property>
 </bean>
 <!-- ensures model-compliance of node structures -->
 <bean id="integrityChecker" class="org.alfresco.repo.node.integrity.IntegrityChecker" init-method="init">
 <property name="policyComponent">
 <ref bean="policyComponent"/>
@@ -128,7 +128,7 @@
 </property>
 </bean>
 <!-- tags nodes that are incomplete w.r.t. properties-->
 <bean id="incompleteNodeTagger" class="org.alfresco.repo.node.integrity.IncompleteNodeTagger" init-method="init">
 <property name="policyComponent">
 <ref bean="policyComponent"/>
@@ -185,7 +185,8 @@
 <value>10000</value>
 </property>
 <property name="resourceManagerCallFrequencyMillis">
-<value>5000</value>
+<!-- Was 5000 -->
+<value>1000</value>
 </property>
 </bean>
 <bean id="sessionSizeResourceManager" class="org.alfresco.repo.domain.hibernate.SessionSizeResourceManager">
@@ -193,8 +194,9 @@
 <ref bean="sessionFactory" />
 </property>
 <property name="threshold">
-<value>5000</value>
+<!-- Was 5000 -->
+<value>100</value>
 </property>
 </bean>
 </beans>

View File

@@ -749,7 +749,7 @@
 </bean>
 <!-- The AVMLockingAwareService -->
 <bean id="avmLockingAwareService" class="org.alfresco.repo.avm.AVMLockingAwareService">
 <!-- Because of circular dependendencies, this bean's dependencies
 are grabbed at bootstrap time.x -->
@@ -791,7 +791,21 @@
 <!-- The AVMService -->
-<bean id="avmService" class="org.alfresco.repo.avm.AVMServiceImpl">
+<bean id="avmService" class="org.springframework.aop.framework.ProxyFactoryBean">
+<property name="proxyInterfaces">
+<value>org.alfresco.service.cmr.avm.AVMService</value>
+</property>
+<property name="target">
+<ref bean="avmServiceBase"/>
+</property>
+<property name="interceptorNames">
+<list>
+<value>sessionSizeResourceInterceptor</value>
+</list>
+</property>
+</bean>
+<bean id="avmServiceBase" class="org.alfresco.repo.avm.AVMServiceImpl">
 <property name="avmRepository">
 <ref bean="avmRepository"/>
 </property>
@@ -1364,7 +1378,7 @@
 <!-- LinkValidationService -->
 <!-- Read transaction advisor for link validation service. -->
 <bean id="linkValidationServiceReadTxnAdvisor"
 class="org.springframework.aop.support.NameMatchMethodPointcutAdvisor">
 <property name="advice">
 <ref bean="retryingReadTxnAdvice"/>
@@ -1382,7 +1396,7 @@
 </bean>
 <!-- Write transaction advisor for link validation service. -->
 <bean id="linkValidationServiceWriteTxnAdvisor"
 class="org.springframework.aop.support.NameMatchMethodPointcutAdvisor">
 <property name="advice">
 <ref bean="retryingWriteTxnAdvice"/>
@@ -1394,7 +1408,7 @@
 </property>
 </bean>
 <bean id="LinkValidationService"
 class="org.springframework.aop.framework.ProxyFactoryBean"
 lazy-init="true">
 <property name="proxyInterfaces">

View File

@@ -15,17 +15,20 @@
  * along with this program; if not, write to the Free Software
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  * As a special exception to the terms and conditions of version 2.0 of
  * the GPL, you may redistribute this Program in connection with Free/Libre
  * and Open Source Software ("FLOSS") applications as described in Alfresco's
  * FLOSS exception. You should have recieved a copy of the text describing
  * the FLOSS exception, and it is also available here:
  * http://www.alfresco.com/legal/licensing
  */
 package org.alfresco.repo.attributes;
+import java.util.Map;
 import org.alfresco.error.AlfrescoRuntimeException;
+import org.alfresco.repo.avm.AVMDAOs;
 /**
  * Handles conversions between persistent and value based Attributes.
@@ -34,7 +37,7 @@ import org.alfresco.error.AlfrescoRuntimeException;
 public class AttributeConverter
 {
 /**
  * Convert an Attribute (recursively) to a persistent attribute. This persists
  * the newly created Attribute immediately.
  * @param from The Attribute to clone.
  * @return The cloned persistent Attribute.
@@ -96,56 +99,78 @@ public class AttributeConverter
 public Attribute toValue(Attribute from)
 {
+Attribute ret = null;
 switch (from.getType())
 {
 case BOOLEAN :
 {
-return new BooleanAttributeValue((BooleanAttribute)from);
+ret = new BooleanAttributeValue((BooleanAttribute)from);
+break;
 }
 case BYTE :
 {
-return new ByteAttributeValue((ByteAttribute)from);
+ret = new ByteAttributeValue((ByteAttribute)from);
+break;
 }
 case SHORT :
 {
-return new ShortAttributeValue((ShortAttribute)from);
+ret = new ShortAttributeValue((ShortAttribute)from);
+break;
 }
 case INT :
 {
-return new IntAttributeValue((IntAttribute)from);
+ret = new IntAttributeValue((IntAttribute)from);
+break;
 }
 case LONG :
 {
-return new LongAttributeValue((LongAttribute)from);
+ret = new LongAttributeValue((LongAttribute)from);
+break;
 }
 case FLOAT :
 {
-return new FloatAttributeValue((FloatAttribute)from);
+ret = new FloatAttributeValue((FloatAttribute)from);
+break;
 }
 case DOUBLE :
 {
-return new DoubleAttributeValue((DoubleAttribute)from);
+ret = new DoubleAttributeValue((DoubleAttribute)from);
+break;
 }
 case STRING :
 {
-return new StringAttributeValue((StringAttribute)from);
+ret = new StringAttributeValue((StringAttribute)from);
+break;
 }
 case SERIALIZABLE :
 {
-return new SerializableAttributeValue((SerializableAttribute)from);
+ret = new SerializableAttributeValue((SerializableAttribute)from);
+break;
 }
 case MAP :
 {
-return new MapAttributeValue((MapAttribute)from);
+ret = new MapAttributeValue();
+for (Map.Entry<String, Attribute> entry : from.entrySet())
+{
+ret.put(entry.getKey(), toValue(entry.getValue()));
+}
+break;
 }
 case LIST :
 {
-return new ListAttributeValue((ListAttribute)from);
+ret = new ListAttributeValue();
+for (Attribute child : from)
+{
+ret.add(toValue(child));
+}
+break;
 }
 default :
 {
 throw new AlfrescoRuntimeException("Invalid Attribute Type: " + from.getType());
 }
 }
+AVMDAOs.Instance().fAttributeDAO.evictFlat(from);
+return ret;
 }
 }

View File

@@ -68,4 +68,15 @@ public interface AttributeDAO
  * @param attr
  */
 public void evict(Attribute attr);
+/**
+ * Evict an Attribute non-recursively.
+ * @param attr
+ */
+public void evictFlat(Attribute attr);
+/**
+ * Force a flush.
+ */
+public void flush();
 }

View File

@@ -193,7 +193,6 @@ public class AttributeServiceImpl implements AttributeService
 return null;
 }
 Attribute converted = fAttributeConverter.toValue(found);
-fAttributeDAO.evict(found);
 return converted;
 }
@@ -255,7 +254,6 @@
 }
 Attribute converted = fAttributeConverter.toPersistent(value);
 found.put(name, converted);
-fAttributeDAO.evict(converted);
 }
 /* (non-Javadoc)
@@ -316,6 +314,8 @@
 throw new AVMWrongTypeException("Attribute Not Map: " + keys);
 }
 found.remove(name);
+fAttributeDAO.flush();
+fAttributeDAO.evictFlat(found);
 }
 private Attribute getAttributeFromPath(List<String> keys)
@@ -330,11 +330,15 @@
 {
 if (current.getType() == Type.MAP)
 {
-current = current.get(keys.get(i));
+Attribute newCurrent = current.get(keys.get(i));
+fAttributeDAO.evictFlat(current);
+current = newCurrent;
 }
 else if (current.getType() == Type.LIST)
 {
-current = current.get(Integer.parseInt(keys.get(i)));
+Attribute newCurrent = current.get(Integer.parseInt(keys.get(i)));
+fAttributeDAO.evictFlat(current);
+current = newCurrent;
 }
 else
 {
@@ -486,7 +490,6 @@
 }
 Attribute converted = fAttributeConverter.toPersistent(value);
 found.set(index, fAttributeConverter.toPersistent(value));
-fAttributeDAO.evict(converted);
 }
 /* (non-Javadoc)
@@ -515,7 +518,13 @@
 {
 throw new AVMBadArgumentException("Illegal zero length keys list.");
 }
-return getAttributeFromPath(keys) != null;
+Attribute attr = getAttributeFromPath(keys);
+if (attr != null)
+{
+fAttributeDAO.evictFlat(attr);
+return true;
+}
+return false;
 }
 /* (non-Javadoc)

View File

@@ -15,11 +15,11 @@
  * along with this program; if not, write to the Free Software
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  * As a special exception to the terms and conditions of version 2.0 of
  * the GPL, you may redistribute this Program in connection with Free/Libre
  * and Open Source Software ("FLOSS") applications as described in Alfresco's
  * FLOSS exception. You should have recieved a copy of the text describing
  * the FLOSS exception, and it is also available here:
  * http://www.alfresco.com/legal/licensing
  */
@@ -49,7 +49,7 @@ public class MapAttributeImpl extends AttributeImpl implements MapAttribute
 public MapAttributeImpl()
 {
 }
 public MapAttributeImpl(MapAttribute attr)
 {
 super(attr.getAcl());
@@ -125,7 +125,7 @@
 AVMDAOs.Instance().fMapEntryDAO.save(mapEntry);
 }
 }
 /* (non-Javadoc)
  * @see org.alfresco.repo.attributes.Attribute#getType()
  */
@@ -170,7 +170,8 @@ public class MapAttributeImpl extends AttributeImpl implements MapAttribute
 {
 return null;
 }
-return entry.getAttribute();
+Attribute attr = entry.getAttribute();
+return attr;
 }
 /* (non-Javadoc)

View File

@@ -3,7 +3,7 @@ package org.alfresco.repo.attributes;
 import java.util.List;
 /**
  * Interface for MapEntry persistence.
  * @author britt
  */
 public interface MapEntryDAO
@@ -13,37 +13,43 @@ public interface MapEntryDAO
  * @param entry To save.
  */
 public void save(MapEntry entry);
 /**
  * Delete a MapEntry.
  * @param entry
  */
 public void delete(MapEntry entry);
 /**
  * Delete all entries for a map.
  * @param mapAttr The map to purge.
  */
 public void delete(MapAttribute mapAttr);
 /**
  * Get an entry by name.
  * @param key The key of the entry.
  * @return A MapEntry or null.
  */
 public MapEntry get(MapEntryKey key);
 /**
  * Retrieve all the entries in a map.
  * @param mapAttr
  * @return A List of all entries in the given map.
  */
 public List<MapEntry> get(MapAttribute mapAttr);
 /**
  * Get the number of entries in a MapAttribute.
  * @param mapAttr The MapAttribute/
  * @return The number of entries.
  */
 public int size(MapAttribute mapAttr);
+/**
+ * Evict an entry.
+ * @param entry
+ */
+public void evict(MapEntry entry);
 }

View File

@@ -27,8 +27,10 @@ package org.alfresco.repo.attributes.hibernate;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import org.alfresco.repo.attributes.AttrQueryHelperImpl;
 import org.alfresco.repo.attributes.Attribute;
@@ -40,12 +42,14 @@ import org.alfresco.repo.attributes.MapAttribute;
 import org.alfresco.repo.attributes.MapEntry;
 import org.alfresco.repo.attributes.MapEntryDAO;
 import org.alfresco.repo.attributes.Attribute.Type;
+import org.alfresco.repo.avm.hibernate.SessionCacheChecker;
 import org.alfresco.service.cmr.attributes.AttrQuery;
 import org.alfresco.service.cmr.attributes.AttrQueryHelper;
 import org.alfresco.util.Pair;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.hibernate.Query;
+import org.hibernate.engine.EntityKey;
 import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
 /**
@@ -87,7 +91,6 @@ public class AttributeDAOHibernate extends HibernateDaoSupport implements
 for (MapEntry entry : mapEntries)
 {
 Attribute subAttr = entry.getAttribute();
-getSession().evict(entry);
 fMapEntryDAO.delete(entry);
 delete(subAttr);
 }
@@ -99,7 +102,6 @@
 for (ListEntry entry : listEntries)
 {
 Attribute subAttr = entry.getAttribute();
-getSession().evict(entry);
 fListEntryDAO.delete(entry);
 delete(subAttr);
 }
@@ -108,7 +110,6 @@
 {
 fgLogger.debug("Entities: " + getSession().getStatistics().getEntityCount());
 }
-getSession().evict(attr);
 getSession().delete(attr);
 }
@@ -161,20 +162,20 @@
  */
 public void evict(Attribute attr)
 {
-if (attr.getType() == Attribute.Type.MAP)
-{
-for (Attribute child : attr.values())
-{
-evict(child);
-}
-if (attr.getType() == Attribute.Type.LIST)
-{
-for (Attribute child : attr)
-{
-evict(child);
-}
-}
-getSession().evict(attr);
-}
-}
-}
+}
+
+/* (non-Javadoc)
+ * @see org.alfresco.repo.attributes.AttributeDAO#flush()
+ */
+public void flush()
+{
+getSession().flush();
+}
+
+/* (non-Javadoc)
+ * @see org.alfresco.repo.attributes.AttributeDAO#evictFlat(org.alfresco.repo.attributes.Attribute)
+ */
+public void evictFlat(Attribute attr)
+{
+}
+}

View File

@@ -15,11 +15,11 @@
  * along with this program; if not, write to the Free Software
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  * As a special exception to the terms and conditions of version 2.0 of
  * the GPL, you may redistribute this Program in connection with Free/Libre
  * and Open Source Software ("FLOSS") applications as described in Alfresco's
  * FLOSS exception. You should have recieved a copy of the text describing
  * the FLOSS exception, and it is also available here:
  * http://www.alfresco.com/legal/licensing
  */
@@ -96,4 +96,11 @@ public class MapEntryDAOHibernate extends HibernateDaoSupport implements
 query.setEntity("map", mapAttr);
 return ((Long)query.uniqueResult()).intValue();
 }
+/* (non-Javadoc)
+ * @see org.alfresco.repo.attributes.MapEntryDAO#evict(org.alfresco.repo.attributes.MapEntry)
+ */
+public void evict(MapEntry entry)
+{
+}
 }

View File

@@ -340,8 +340,6 @@ public class AVMRepository
 dir.putChild(name, child);
 fLookupCache.onWrite(pathParts[0]);
 AVMNodeDescriptor desc = child.getDescriptor(parent.getPath(), name, parent.getIndirection(), parent.getIndirectionVersion());
-fAVMNodeDAO.flush();
-fAVMNodeDAO.evict(child);
 return desc;
 }
@@ -1046,7 +1044,8 @@
 throw new AVMWrongTypeException("Not a directory.");
 }
 DirectoryNode dirNode = (DirectoryNode)node;
-return dirNode.getListing(dir, includeDeleted);
+SortedMap<String, AVMNodeDescriptor> listing = dirNode.getListing(dir, includeDeleted);
+return listing;
 }
 finally
 {
@@ -2483,8 +2482,6 @@
 }
 LayeredDirectoryNode dir = (LayeredDirectoryNode)node;
 dir.flatten(name);
-fAVMNodeDAO.flush();
-fAVMNodeDAO.evict(dir);
 }
 finally
 {
@@ -2516,8 +2513,6 @@
 }
 AVMNode node = lPath.getCurrentNode();
 AVMNodeDescriptor desc = node.getDescriptor(lPath);
-fAVMNodeDAO.flush();
-fAVMNodeDAO.evict(node);
 return desc;
 }
 finally
@@ -2738,7 +2733,6 @@
 throw new AVMNotFoundException("Node not found: " + desc);
 }
 Set<QName> aspects = node.getAspects();
-fAVMNodeDAO.evict(node);
 return aspects;
 }
 }

View File

@@ -361,8 +361,6 @@ public class AVMStoreImpl implements AVMStore, Serializable
 {
 newDir.getProperties().putAll(properties);
 }
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(newDir);
 }
 /**
@@ -407,8 +405,6 @@
 }
 dir.updateModTime();
 dir.putChild(name, newDir);
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(newDir);
 // newDir.setVersionID(getNextVersionID());
 }
@@ -445,8 +441,6 @@
 -1,
 "UTF-8"));
 ContentWriter writer = createContentWriter(AVMNodeConverter.ExtendAVMPath(path, name));
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(file);
 return writer.getContentOutputStream();
 }
@@ -490,10 +484,9 @@
 {
 file.getProperties().putAll(properties);
 }
-ContentWriter writer = createContentWriter(AVMNodeConverter.ExtendAVMPath(path, name));
 // Yet another flush.
 AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(file);
+ContentWriter writer = createContentWriter(AVMNodeConverter.ExtendAVMPath(path, name));
 writer.putContent(data);
 }
@@ -526,8 +519,6 @@
 }
 dir.updateModTime();
 dir.putChild(name, newFile);
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(newFile);
 // newFile.setVersionID(getNextVersionID());
 }
@@ -633,7 +624,6 @@
 AVMNode child = AVMNodeUnwrapper.Unwrap(listing.get(name));
 AVMNodeDescriptor desc = child.getDescriptor(lPath, name);
 results.put(name, desc);
-AVMDAOs.Instance().fAVMNodeDAO.evict(child);
 }
 return results;
 }
@@ -653,8 +643,6 @@
 }
 DirectoryNode dir = (DirectoryNode)lPath.getCurrentNode();
 List<String> deleted = dir.getDeletedNames();
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(dir);
 return deleted;
 }
@@ -688,8 +676,8 @@
 }
 dir.removeChild(lPath, name);
 dir.updateModTime();
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(dir);
+// AVMDAOs.Instance().fAVMNodeDAO.flush();
+// AVMDAOs.Instance().fAVMNodeDAO.evict(dir);
 }
 /**
@@ -711,8 +699,6 @@
 }
 ((LayeredDirectoryNode)node).uncover(lPath, name);
 node.updateModTime();
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 }
 // TODO This is problematic. As time goes on this returns
@@ -882,8 +868,6 @@
 }
 dir.turnPrimary(lPath);
 dir.updateModTime();
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(dir);
 }
 /**
@@ -905,8 +889,6 @@
 }
 dir.retarget(lPath, target);
 dir.updateModTime();
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(dir);
 }
 /**
@@ -1080,8 +1062,6 @@
 }
 ((LayeredDirectoryNode)node).setOpacity(opacity);
 node.updateModTime();
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 }
 // TODO Does it make sense to set properties on DeletedNodes?
@@ -1101,8 +1081,6 @@
 AVMNode node = lPath.getCurrentNode();
 node.setProperty(name, value);
 node.setGuid(GUID.generate());
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 }
 /**
@@ -1120,8 +1098,6 @@
 AVMNode node = lPath.getCurrentNode();
 node.addProperties(properties);
 node.setGuid(GUID.generate());
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 }
 /**
@@ -1140,8 +1116,6 @@
 }
 AVMNode node = lPath.getCurrentNode();
 PropertyValue prop = node.getProperty(name);
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 return prop;
 }
@@ -1160,8 +1134,6 @@
 }
 AVMNode node = lPath.getCurrentNode();
 Map<QName, PropertyValue> props = node.getProperties();
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 return props;
 }
@@ -1180,8 +1152,6 @@
 AVMNode node = lPath.getCurrentNode();
 node.setGuid(GUID.generate());
 node.deleteProperty(name);
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 }
 /**
@@ -1198,8 +1168,6 @@
 AVMNode node = lPath.getCurrentNode();
 node.setGuid(GUID.generate());
 node.deleteProperties();
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 }
 /**
@@ -1287,8 +1255,8 @@
 throw new AVMWrongTypeException("File Expected.");
 }
 ContentData content = ((FileNode)node).getContentData(lPath);
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
+// AVMDAOs.Instance().fAVMNodeDAO.flush();
+// AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 return content;
 }
@@ -1312,8 +1280,8 @@
 node.updateModTime();
 node.setGuid(GUID.generate());
 ContentData content = ((FileNode)node).getContentData(lPath);
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
+// AVMDAOs.Instance().fAVMNodeDAO.flush();
+// AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 return content;
 }
@@ -1335,8 +1303,6 @@
 throw new AVMWrongTypeException("File Expected.");
 }
 ((FileNode)node).setContentData(data);
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 }
 /**
@@ -1354,8 +1320,6 @@
 AVMNode node = lPath.getCurrentNode();
 node.copyMetaDataFrom(from);
 node.setGuid(GUID.generate());
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 }
 /**
@@ -1373,8 +1337,6 @@
 AVMNode node = lPath.getCurrentNode();
 node.getAspects().add(aspectName);
 node.setGuid(GUID.generate());
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 }
 /**
@@ -1392,8 +1354,6 @@
 }
 AVMNode node = lPath.getCurrentNode();
 Set<QName> aspects = node.getAspects();
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 return aspects;
 }
@@ -1419,8 +1379,6 @@
 node.getProperties().remove(name);
 }
 node.setGuid(GUID.generate());
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 }
 /**
@@ -1439,7 +1397,6 @@
 }
 AVMNode node = lPath.getCurrentNode();
 boolean has = node.getAspects().contains(aspectName);
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 return has;
 }
@@ -1458,8 +1415,6 @@
 AVMNode node = lPath.getCurrentNode();
 node.setAcl(acl);
 node.setGuid(GUID.generate());
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 }
 /**
@@ -1532,9 +1487,6 @@
 toLink.getAspects().add(WCMModel.ASPECT_REVERTED);
 PropertyValue value = new PropertyValue(null, toRevertTo.getId());
 toLink.setProperty(WCMModel.PROP_REVERTED_ID, value);
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(dir);
-AVMDAOs.Instance().fAVMNodeDAO.evict(toLink);
 }
 /* (non-Javadoc)
@@ -1549,8 +1501,6 @@
 }
 AVMNode node = lPath.getCurrentNode();
 node.setGuid(guid);
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(node);
 }
 /* (non-Javadoc)
@@ -1570,8 +1520,6 @@
 }
 PlainFileNode file = (PlainFileNode)node;
 file.setEncoding(encoding);
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(file);
 }
 /* (non-Javadoc)
@@ -1591,7 +1539,5 @@
 }
 PlainFileNode file = (PlainFileNode)node;
 file.setMimeType(mimeType);
-AVMDAOs.Instance().fAVMNodeDAO.flush();
-AVMDAOs.Instance().fAVMNodeDAO.evict(file);
 }
 }

View File

@@ -50,7 +50,7 @@ import org.apache.commons.logging.LogFactory;
 public class AVMSyncServiceImpl implements AVMSyncService
 {
 private static Log fgLogger = LogFactory.getLog(AVMSyncServiceImpl.class);
 /**
  * The AVMService.
  */
@@ -60,7 +60,7 @@
  * The AVMRepository.
  */
 private AVMRepository fAVMRepository;
 /**
  * Do nothing constructor.
  */
@@ -69,19 +69,19 @@
 }
 /**
  * Set the AVM Service. For Spring.
  * @param avmService The AVMService reference.
  */
 public void setAvmService(AVMService avmService)
 {
 fAVMService = avmService;
 }
 public void setAvmRepository(AVMRepository avmRepository)
 {
 fAVMRepository = avmRepository;
 }
 /**
  * Get a difference list between two corresponding node trees.
  * @param srcVersion The version id for the source tree.
@@ -92,7 +92,7 @@
  * @return A List of AVMDifference structs which can be used for
  * the update operation.
  */
 public List<AVMDifference> compare(int srcVersion, String srcPath,
 int dstVersion, String dstPath,
 NameMatcher excluder)
 {
@@ -133,7 +133,7 @@
 }
 return result;
 }
 /**
  * Internal recursive implementation of compare.
  * @param srcVersion The version of the source tree.
@@ -171,9 +171,9 @@
 }
 case AVMDifference.DIRECTORY :
 {
 // First special case: source is a layered directory which points to
 // the destinations path, and we are comparing 'head' versions.
 if (srcDesc.isLayeredDirectory() &&
 srcDesc.getIndirection().equals(dstDesc.getPath()) && srcVersion < 0 && dstVersion < 0)
 {
 // Get only a direct listing, since that's all that can be different.
@@ -202,7 +202,7 @@
 {
 // A missing destination child means the source is NEWER.
 result.add(new AVMDifference(srcVersion, srcChild.getPath(),
 dstVersion,
 dstPath,
 AVMDifference.NEWER));
 continue;
@@ -242,14 +242,14 @@
 if (srcChild == null)
 {
 // Missing means the source is older.
 result.add(new AVMDifference(srcVersion,
 srcPath,
 dstVersion, dstChild.getPath(),
 AVMDifference.OLDER));
 continue;
 }
 // Otherwise, recursively invoke.
 compare(srcVersion, srcChild,
 dstVersion, dstChild,
 result, excluder);
 }
@@ -294,7 +294,7 @@
 }
 AVMNodeDescriptor dstChild = dstList.get(name);
 String srcPath = AVMNodeConverter.ExtendAVMPath(srcDesc.getPath(), name);
 if (excluder != null && (excluder.matches(srcPath) ||
 excluder.matches(dstChild.getPath())))
 {
 continue;
@@ -313,7 +313,7 @@
 }
 }
 }
 /**
  * Updates the destination nodes in the AVMDifferences
  * with the source nodes. Normally any conflicts or cases in
@@ -321,10 +321,10 @@
  * will cause the transaction to roll back.
  * @param diffList A List of AVMDifference structs.
  * @param excluder A possibly null name matcher to exclude unwanted updates.
  * @param ignoreConflicts If this is true the update will skip those
  * AVMDifferences which are in conflict with
  * the destination.
  * @param ignoreOlder If this is true the update will skip those
  * AVMDifferences which have the source older than the destination.
  * @param overrideConflicts If this is true the update will override conflicting
  * AVMDifferences and replace the destination with the conflicting source.
@@ -333,10 +333,11 @@
  * @param description Full update blurb.
  * in which the source is older than the destination and overwrite the destination.
  */
 public void update(List<AVMDifference> diffList,
 NameMatcher excluder, boolean ignoreConflicts, boolean ignoreOlder,
 boolean overrideConflicts, boolean overrideOlder, String tag, String description)
 {
+long start = System.currentTimeMillis();
 if (fgLogger.isDebugEnabled())
 {
 try
@@ -398,7 +399,7 @@
 // The default is that the source is newer in the case where
 // the destination doesn't exist.
 int diffCode = AVMDifference.NEWER;
 if (dstDesc != null)
 {
 diffCode = compareOne(srcDesc, dstDesc);
 }
@@ -472,6 +473,10 @@
 {
 fAVMService.createSnapshot(storeName, tag, description);
 }
+if (fgLogger.isDebugEnabled())
+{
+fgLogger.debug("Raw Update: " + (System.currentTimeMillis() - start));
+}
 }
 /**
@@ -501,7 +506,7 @@
 }
 fAVMService.link(parentPath, name, toLink);
 }
 /**
  * Recursively copy a node into the given position.
  * @param parentPath The place to put it.
@@ -521,7 +526,7 @@
 recursiveCopy(parentDesc, entry.getKey(), entry.getValue(), excluder);
 }
 }
 /**
  * Shortcutting helper that uses an AVMNodeDescriptor parent.
  * @param parent The parent we are linking into.
@@ -531,7 +536,7 @@
 private void recursiveCopy(AVMNodeDescriptor parent, String name, AVMNodeDescriptor toCopy, NameMatcher excluder)
 {
 String newPath = AVMNodeConverter.ExtendAVMPath(parent.getPath(), name);
 if (excluder != null && (excluder.matches(newPath) ||
 excluder.matches(toCopy.getPath())))
 {
 return;
@@ -546,17 +551,17 @@
 // children into it.
 AVMNodeDescriptor newParentDesc = fAVMRepository.createDirectory(parent, name);
 fAVMService.setMetaDataFrom(newParentDesc.getPath(), toCopy);
 Map<String, AVMNodeDescriptor> children =
 fAVMService.getDirectoryListing(toCopy, true);
 for (Map.Entry<String, AVMNodeDescriptor> entry : children.entrySet())
 {
 recursiveCopy(newParentDesc, entry.getKey(), entry.getValue(), excluder);
 }
 }
 /**
  * The workhorse of comparison and updating. Determine the versioning relationship
  * of two nodes.
  * @param srcDesc Descriptor for the source node.
  * @param dstDesc Descriptor for the destination node.
  * @return One of SAME, OLDER, NEWER, CONFLICT, DIRECTORY
@@ -631,7 +636,7 @@
 }
 if (common.getId() == dstDesc.getId())
 {
 return AVMDifference.NEWER;
 }
 // Finally we know they are in conflict.
 return AVMDifference.CONFLICT;
@@ -666,12 +671,12 @@
 // The must, finally, be in conflict.
 return AVMDifference.CONFLICT;
 }
 /**
  * Flattens a layer so that all all nodes under and including
  * <code>layerPath</code> become translucent to any nodes in the
  * corresponding location under and including <code>underlyingPath</code>
  * that are the same version.
  * @param layerPath The overlying layer path.
  * @param underlyingPath The underlying path.
  */
@@ -705,7 +710,7 @@
 }
 flatten(layerNode, underlyingNode);
} }
/** /**
* This is the implementation of flatten. * This is the implementation of flatten.
* @param layer The on top node. * @param layer The on top node.
@@ -747,7 +752,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
} }
return true; return true;
} }
// Grab the listing // Grab the listing
Map<String, AVMNodeDescriptor> underListing = Map<String, AVMNodeDescriptor> underListing =
fAVMService.getDirectoryListing(underlying, true); fAVMService.getDirectoryListing(underlying, true);
boolean flattened = true; boolean flattened = true;
@@ -783,10 +788,10 @@ public class AVMSyncServiceImpl implements AVMSyncService
} }
return flattened; return flattened;
} }
/** /**
* Takes a layer, deletes it and recreates it pointing at the same underlying * Takes a layer, deletes it and recreates it pointing at the same underlying
* node. Any changes in the layer are lost (except to history if the layer has been * node. Any changes in the layer are lost (except to history if the layer has been
* snapshotted.) * snapshotted.)
* @param layerPath * @param layerPath
*/ */
@@ -801,7 +806,7 @@ public class AVMSyncServiceImpl implements AVMSyncService
fAVMService.removeNode(parts[0], parts[1]); fAVMService.removeNode(parts[0], parts[1]);
fAVMService.createLayeredDirectory(desc.getIndirection(), parts[0], parts[1]); fAVMService.createLayeredDirectory(desc.getIndirection(), parts[0], parts[1]);
} }
/** /**
* Make sure this entire directory path exists. * Make sure this entire directory path exists.
* @param path * @param path

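The javadoc above spells out the update() flags and the flatten() contract. As a hedged illustration of how the service is normally driven, the sketch below runs the usual compare / update / flatten sequence; the compare() signature and the store paths are assumptions (they do not appear in this hunk), while update() and flatten() follow the signatures documented above.

import java.util.List;

import org.alfresco.service.cmr.avmsync.AVMDifference;
import org.alfresco.service.cmr.avmsync.AVMSyncService;

public class SubmitSketch
{
    // Store names and paths are purely illustrative.
    public void submit(AVMSyncService syncService)
    {
        String srcPath = "mysite--admin:/www/avm_webapps";
        String dstPath = "mysite:/www/avm_webapps";

        // Diff the user layer against staging; a null excluder filters nothing out.
        List<AVMDifference> diffs = syncService.compare(-1, srcPath, -1, dstPath, null);

        // Apply the differences: conflicts and older sources are neither skipped nor
        // overridden here, so they surface as errors instead of being silently dropped.
        syncService.update(diffs, null, false, false, false, false,
                           "submit", "Example submit of a user sandbox");

        // Make the updated items transparent to the underlying store again.
        syncService.flatten(srcPath, dstPath);
    }
}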

@@ -39,7 +39,7 @@ abstract class DirectoryNodeImpl extends AVMNodeImpl implements DirectoryNode
protected DirectoryNodeImpl() protected DirectoryNodeImpl()
{ {
} }
/** /**
* A pass through constructor. Called when a new concrete subclass * A pass through constructor. Called when a new concrete subclass
* instance is created. * instance is created.
@@ -50,7 +50,7 @@ abstract class DirectoryNodeImpl extends AVMNodeImpl implements DirectoryNode
{ {
super(id, repo); super(id, repo);
} }
/** /**
* Dangerous version of link. * Dangerous version of link.
* @param name The name to give the child. * @param name The name to give the child.
@@ -72,5 +72,8 @@ abstract class DirectoryNodeImpl extends AVMNodeImpl implements DirectoryNode
ChildKey key = new ChildKey(this, name); ChildKey key = new ChildKey(this, name);
ChildEntry newChild = new ChildEntryImpl(key, node); ChildEntry newChild = new ChildEntryImpl(key, node);
AVMDAOs.Instance().fChildEntryDAO.save(newChild); AVMDAOs.Instance().fChildEntryDAO.save(newChild);
} AVMDAOs.Instance().fAVMNodeDAO.flush();
AVMDAOs.Instance().fChildEntryDAO.evict(newChild);
AVMDAOs.Instance().fAVMNodeDAO.evict(node);
}
} }

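The change to link() above saves the new ChildEntry and then immediately flushes and evicts it (and the child node), so bulk operations do not let the Hibernate first-level cache grow without bound. Outside the AVM DAO layer the same pattern looks like this; the class and method names are illustrative, only the Session calls are standard Hibernate API.

import org.hibernate.Session;

public class WriteAndForget
{
    /**
     * Persist an entity without letting it accumulate in the session cache:
     * push the pending INSERT to the database, then detach the instance.
     */
    public void saveAndEvict(Session session, Object entity)
    {
        session.save(entity);   // schedule the INSERT
        session.flush();        // execute pending SQL so eviction is safe
        session.evict(entity);  // drop the instance from the first-level cache
    }
}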

@@ -402,7 +402,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
{ {
listing.put(entry.getKey().getName(), entry.getChild()); listing.put(entry.getKey().getName(), entry.getChild());
} }
AVMDAOs.Instance().fChildEntryDAO.evict(entry);
} }
return listing; return listing;
} }
@@ -421,7 +420,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
{ {
listing.put(entry.getKey().getName(), entry.getChild()); listing.put(entry.getKey().getName(), entry.getChild());
} }
AVMDAOs.Instance().fChildEntryDAO.evict(entry);
} }
return listing; return listing;
} }
@@ -447,8 +445,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
AVMNodeDescriptor childDesc = AVMNodeDescriptor childDesc =
childNode.getDescriptor(dir.getPath(), child.getKey().getName(), dir.getIndirection(), dir.getIndirectionVersion()); childNode.getDescriptor(dir.getPath(), child.getKey().getName(), dir.getIndirection(), dir.getIndirectionVersion());
listing.put(child.getKey().getName(), childDesc); listing.put(child.getKey().getName(), childDesc);
AVMDAOs.Instance().fAVMNodeDAO.evict(childNode);
AVMDAOs.Instance().fChildEntryDAO.evict(child);
} }
return listing; return listing;
} }
@@ -481,7 +477,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
listing.get(name).getDescriptor(dir.getPath(), name, listing.get(name).getDescriptor(dir.getPath(), name,
lookup.getCurrentIndirection(), lookup.getCurrentIndirection(),
lookup.getCurrentIndirectionVersion())); lookup.getCurrentIndirectionVersion()));
AVMDAOs.Instance().fAVMNodeDAO.evict(listing.get(name));
} }
} }
} }
@@ -499,8 +494,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
child.getKey().getName(), child.getKey().getName(),
dir.getIndirection(), dir.getIndirection(),
dir.getIndirectionVersion())); dir.getIndirectionVersion()));
AVMDAOs.Instance().fAVMNodeDAO.evict(child.getChild());
AVMDAOs.Instance().fChildEntryDAO.evict(child);
} }
} }
return baseListing; return baseListing;
@@ -543,7 +536,8 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
{ {
return null; return null;
} }
return new Pair<AVMNode, Boolean>(AVMNodeUnwrapper.Unwrap(entry.getChild()), true); Pair<AVMNode, Boolean> result = new Pair<AVMNode, Boolean>(AVMNodeUnwrapper.Unwrap(entry.getChild()), true);
return result;
} }
// Don't check our underlying directory if we are opaque. // Don't check our underlying directory if we are opaque.
if (fOpacity) if (fOpacity)
@@ -593,8 +587,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
name, name,
mine.getIndirection(), mine.getIndirection(),
mine.getIndirectionVersion()); mine.getIndirectionVersion());
AVMDAOs.Instance().fAVMNodeDAO.evict(entry.getChild());
AVMDAOs.Instance().fChildEntryDAO.evict(entry);
return desc; return desc;
} }
// If we are opaque don't check underneath. // If we are opaque don't check underneath.
@@ -612,7 +604,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
return null; return null;
} }
AVMNodeDescriptor desc = child.getFirst().getDescriptor(lookup); AVMNodeDescriptor desc = child.getFirst().getDescriptor(lookup);
AVMDAOs.Instance().fAVMNodeDAO.evict(child.getFirst());
return desc; return desc;
} }
else else
@@ -952,7 +943,6 @@ class LayeredDirectoryNodeImpl extends DirectoryNodeImpl implements LayeredDirec
{ {
ChildEntry entry = AVMDAOs.Instance().fChildEntryDAO.get(key); ChildEntry entry = AVMDAOs.Instance().fChildEntryDAO.get(key);
AVMDAOs.Instance().fChildEntryDAO.delete(entry); AVMDAOs.Instance().fChildEntryDAO.delete(entry);
AVMDAOs.Instance().fAVMNodeDAO.flush();
} }
} }
// Make the new ChildEntry and save. // Make the new ChildEntry and save.


@@ -115,7 +115,6 @@ class PlainDirectoryNodeImpl extends DirectoryNodeImpl implements PlainDirectory
continue; continue;
} }
result.put(child.getKey().getName(), AVMNodeUnwrapper.Unwrap(child.getChild())); result.put(child.getKey().getName(), AVMNodeUnwrapper.Unwrap(child.getChild()));
AVMDAOs.Instance().fChildEntryDAO.evict(child);
} }
return result; return result;
} }
@@ -166,8 +165,6 @@ class PlainDirectoryNodeImpl extends DirectoryNodeImpl implements PlainDirectory
child.getKey().getName(), child.getKey().getName(),
dir.getIndirection(), dir.getIndirection(),
dir.getIndirectionVersion())); dir.getIndirectionVersion()));
AVMDAOs.Instance().fAVMNodeDAO.evict(child.getChild());
AVMDAOs.Instance().fChildEntryDAO.evict(child);
} }
return result; return result;
} }
@@ -193,14 +190,19 @@ class PlainDirectoryNodeImpl extends DirectoryNodeImpl implements PlainDirectory
{ {
ChildKey key = new ChildKey(this, name); ChildKey key = new ChildKey(this, name);
ChildEntry entry = AVMDAOs.Instance().fChildEntryDAO.get(key); ChildEntry entry = AVMDAOs.Instance().fChildEntryDAO.get(key);
if (entry == null || if (entry == null)
(!includeDeleted && entry.getChild().getType() == AVMNodeType.DELETED_NODE)) {
return null;
}
if (!includeDeleted && entry.getChild().getType() == AVMNodeType.DELETED_NODE)
{ {
return null; return null;
} }
// We're doing the hand unrolling of the proxy because // We're doing the hand unrolling of the proxy because
// Hibernate/CGLIB proxies are broken. // Hibernate/CGLIB proxies are broken.
return new Pair<AVMNode, Boolean>(AVMNodeUnwrapper.Unwrap(entry.getChild()), true);
Pair<AVMNode, Boolean> result = new Pair<AVMNode, Boolean>(AVMNodeUnwrapper.Unwrap(entry.getChild()), true);
return result;
} }
/** /**
@@ -223,8 +225,6 @@ class PlainDirectoryNodeImpl extends DirectoryNodeImpl implements PlainDirectory
return null; return null;
} }
AVMNodeDescriptor desc = entry.getChild().getDescriptor(mine.getPath(), name, (String)null, -1); AVMNodeDescriptor desc = entry.getChild().getDescriptor(mine.getPath(), name, (String)null, -1);
AVMDAOs.Instance().fAVMNodeDAO.evict(entry.getChild());
AVMDAOs.Instance().fChildEntryDAO.evict(entry);
return desc; return desc;
} }


@@ -57,6 +57,7 @@ class AVMNodeDAOHibernate extends HibernateDaoSupport implements
public void save(AVMNode node) public void save(AVMNode node)
{ {
getSession().save(node); getSession().save(node);
SessionCacheChecker.instance.check();
} }
/** /**
@@ -66,6 +67,7 @@ class AVMNodeDAOHibernate extends HibernateDaoSupport implements
public void delete(AVMNode node) public void delete(AVMNode node)
{ {
getSession().delete(node); getSession().delete(node);
SessionCacheChecker.instance.check();
} }
/** /**
@@ -74,6 +76,7 @@ class AVMNodeDAOHibernate extends HibernateDaoSupport implements
*/ */
public AVMNode getByID(long id) public AVMNode getByID(long id)
{ {
SessionCacheChecker.instance.check();
return AVMNodeUnwrapper.Unwrap((AVMNode)getSession().get(AVMNodeImpl.class, id)); return AVMNodeUnwrapper.Unwrap((AVMNode)getSession().get(AVMNodeImpl.class, id));
} }
@@ -215,6 +218,5 @@ class AVMNodeDAOHibernate extends HibernateDaoSupport implements
*/ */
public void evict(AVMNode node) public void evict(AVMNode node)
{ {
getSession().evict(node);
} }
} }


@@ -66,6 +66,7 @@ class ChildEntryDAOHibernate extends HibernateDaoSupport implements
*/ */
public ChildEntry get(ChildKey key) public ChildEntry get(ChildKey key)
{ {
SessionCacheChecker.instance.check();
return (ChildEntry)getSession().get(ChildEntryImpl.class, key); return (ChildEntry)getSession().get(ChildEntryImpl.class, key);
} }
@@ -96,6 +97,7 @@ class ChildEntryDAOHibernate extends HibernateDaoSupport implements
"and ce.child = :child"); "and ce.child = :child");
query.setEntity("parent", parent); query.setEntity("parent", parent);
query.setEntity("child", child); query.setEntity("child", child);
SessionCacheChecker.instance.check();
return (ChildEntry)query.uniqueResult(); return (ChildEntry)query.uniqueResult();
} }
@@ -148,6 +150,5 @@ class ChildEntryDAOHibernate extends HibernateDaoSupport implements
*/ */
public void evict(ChildEntry entry) public void evict(ChildEntry entry)
{ {
getSession().evict(entry);
} }
} }


@@ -0,0 +1,62 @@
/**
*
*/
package org.alfresco.repo.avm.hibernate;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.engine.EntityKey;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
/**
* @author britt
*/
public class SessionCacheChecker extends HibernateDaoSupport
{
public static SessionCacheChecker instance = null;
private static Log fgLogger = LogFactory.getLog(SessionCacheChecker.class);
private int fCount = 0;
public SessionCacheChecker()
{
instance = this;
}
public void check()
{
if (!fgLogger.isDebugEnabled())
{
return;
}
if (fCount % 1000 == 0)
{
Map<String, Integer> types = new HashMap<String, Integer>();
Set<EntityKey> keys = (Set<EntityKey>)getSession().getStatistics().getEntityKeys();
if (keys.size() > 200)
{
for (EntityKey key : keys)
{
String name = key.getEntityName();
if (!types.containsKey(name))
{
types.put(name, 0);
}
types.put(name, types.get(name) + 1);
}
fgLogger.debug(types);
// for (Object it : Thread.currentThread().getStackTrace())
// {
// fgLogger.debug(it);
// }
// fCount = 0;
}
}
fCount++;
}
}

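SessionCacheChecker publishes itself through a static instance field assigned in its constructor, so the DAOs above can reach it without extra wiring; that only works once Spring has created the bean. A small, purely illustrative guard for code paths that might run before then:

package org.alfresco.repo.avm.hibernate;

// Hypothetical helper, not part of this commit.
public final class SessionCacheDebug
{
    private SessionCacheDebug()
    {
    }

    /** Sample the Hibernate session cache only if the checker bean has been created. */
    public static void sample()
    {
        SessionCacheChecker checker = SessionCacheChecker.instance;
        if (checker != null)
        {
            checker.check();
        }
    }
}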

@@ -15,11 +15,11 @@
* along with this program; if not, write to the Free Software * along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of * As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre * the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's * and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have recieved a copy of the text describing * FLOSS exception. You should have recieved a copy of the text describing
* the FLOSS exception, and it is also available here: * the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing * http://www.alfresco.com/legal/licensing
*/ */
package org.alfresco.repo.domain.hibernate; package org.alfresco.repo.domain.hibernate;
@@ -27,6 +27,7 @@ package org.alfresco.repo.domain.hibernate;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.Map; import java.util.Map;
import org.alfresco.repo.avm.hibernate.SessionCacheChecker;
import org.alfresco.repo.transaction.AlfrescoTransactionSupport; import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
import org.alfresco.util.resource.MethodResourceManager; import org.alfresco.util.resource.MethodResourceManager;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
@@ -44,9 +45,9 @@ import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
* passes stateful objects back and forth. There must be no <code>Session</code>-linked * passes stateful objects back and forth. There must be no <code>Session</code>-linked
* objects up the stack from where this instance resides. Failure to observe this will * objects up the stack from where this instance resides. Failure to observe this will
* most likely result in data loss of a sporadic nature. * most likely result in data loss of a sporadic nature.
* *
* @see org.alfresco.repo.domain.hibernate.HibernateNodeTest#testPostCommitClearIssue() * @see org.alfresco.repo.domain.hibernate.HibernateNodeTest#testPostCommitClearIssue()
* *
* @author Derek Hulley * @author Derek Hulley
*/ */
public class SessionSizeResourceManager extends HibernateDaoSupport implements MethodResourceManager public class SessionSizeResourceManager extends HibernateDaoSupport implements MethodResourceManager
@@ -55,7 +56,7 @@ public class SessionSizeResourceManager extends HibernateDaoSupport implements M
private static final String KEY_DISABLE_IN_TRANSACTION = "SessionSizeResourceManager.DisableInTransaction"; private static final String KEY_DISABLE_IN_TRANSACTION = "SessionSizeResourceManager.DisableInTransaction";
private static Log logger = LogFactory.getLog(SessionSizeResourceManager.class); private static Log logger = LogFactory.getLog(SessionSizeResourceManager.class);
/** Default 1000 */ /** Default 1000 */
private int threshold; private int threshold;
@@ -71,7 +72,7 @@ public class SessionSizeResourceManager extends HibernateDaoSupport implements M
/** /**
* @return Returns true if the resource management must be ignored in the current transaction. * @return Returns true if the resource management must be ignored in the current transaction.
* If <code>false</code>, the global setting will take effect. * If <code>false</code>, the global setting will take effect.
* *
* @see #setDisableInTransaction() * @see #setDisableInTransaction()
*/ */
public static boolean isDisableInTransaction() public static boolean isDisableInTransaction()
@@ -86,7 +87,7 @@ public class SessionSizeResourceManager extends HibernateDaoSupport implements M
return true; return true;
} }
} }
/** /**
* Default public constructor required for bean instantiation. * Default public constructor required for bean instantiation.
*/ */
@@ -94,14 +95,14 @@ public class SessionSizeResourceManager extends HibernateDaoSupport implements M
{ {
this.threshold = 1000; this.threshold = 1000;
} }
/** /**
* Set the {@link Session#clear()} threshold. If the number of entities and collections in the * Set the {@link Session#clear()} threshold. If the number of entities and collections in the
* current session exceeds this number, then the session will be cleared. Have you read the * current session exceeds this number, then the session will be cleared. Have you read the
* disclaimer? * disclaimer?
* *
* @param threshold the maximum number of entities and associations to keep in memory * @param threshold the maximum number of entities and associations to keep in memory
* *
* @see #threshold * @see #threshold
*/ */
public void setThreshold(int threshold) public void setThreshold(int threshold)
@@ -114,6 +115,11 @@ public class SessionSizeResourceManager extends HibernateDaoSupport implements M
long transactionElapsedTimeNs, long transactionElapsedTimeNs,
Method currentMethod) Method currentMethod)
{ {
if (logger.isDebugEnabled())
{
logger.debug("Session Size Manager Invoked.");
SessionCacheChecker.instance.check();
}
if (isDisableInTransaction()) if (isDisableInTransaction())
{ {
// Don't do anything // Don't do anything

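The javadoc above describes the intended behaviour: once the number of entities and collections held by the current session exceeds the configured threshold, the session is cleared. A minimal sketch of that check follows; the class name, method name, and the flush-before-clear step are assumptions, only Session.clear() and the SessionStatistics calls are standard Hibernate API.

import org.hibernate.Session;
import org.hibernate.stat.SessionStatistics;

public class SessionSizeSketch
{
    private int threshold = 1000; // mirrors the documented default

    /** Clear the session once it holds more objects than the threshold allows. */
    public void manageSessionSize(Session session)
    {
        SessionStatistics stats = session.getStatistics();
        int size = stats.getEntityCount() + stats.getCollectionCount();
        if (size > threshold)
        {
            session.flush(); // assumed: push pending changes before discarding session state
            session.clear();
        }
    }
}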

@@ -1,5 +1,5 @@
/** /**
* *
*/ */
package org.alfresco.repo.transaction; package org.alfresco.repo.transaction;
@@ -10,6 +10,7 @@ import org.aopalliance.intercept.MethodInvocation;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.hibernate.StaleObjectStateException; import org.hibernate.StaleObjectStateException;
import org.hibernate.StaleStateException;
import org.hibernate.exception.LockAcquisitionException; import org.hibernate.exception.LockAcquisitionException;
import org.springframework.aop.framework.ReflectiveMethodInvocation; import org.springframework.aop.framework.ReflectiveMethodInvocation;
import org.springframework.dao.ConcurrencyFailureException; import org.springframework.dao.ConcurrencyFailureException;
@@ -19,38 +20,38 @@ import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.TransactionStatus;
/** /**
* *
* @author britt * @author britt
*/ */
public class RetryingTransactionAdvice implements MethodInterceptor public class RetryingTransactionAdvice implements MethodInterceptor
{ {
private static Log fgLogger = LogFactory.getLog(RetryingTransactionAdvice.class); private static Log fgLogger = LogFactory.getLog(RetryingTransactionAdvice.class);
/** /**
* The transaction manager instance. * The transaction manager instance.
*/ */
private PlatformTransactionManager fTxnManager; private PlatformTransactionManager fTxnManager;
/** /**
* The TransactionDefinition. * The TransactionDefinition.
*/ */
private TransactionDefinition fDefinition; private TransactionDefinition fDefinition;
/** /**
* The maximum number of retries. * The maximum number of retries.
*/ */
private int fMaxRetries; private int fMaxRetries;
/** /**
* A Random number generator for getting retry intervals. * A Random number generator for getting retry intervals.
*/ */
private Random fRandom; private Random fRandom;
public RetryingTransactionAdvice() public RetryingTransactionAdvice()
{ {
fRandom = new Random(System.currentTimeMillis()); fRandom = new Random(System.currentTimeMillis());
} }
/** /**
* Setter. * Setter.
*/ */
@@ -66,7 +67,7 @@ public class RetryingTransactionAdvice implements MethodInterceptor
{ {
fDefinition = def; fDefinition = def;
} }
/** /**
* Setter. * Setter.
*/ */
@@ -74,11 +75,11 @@ public class RetryingTransactionAdvice implements MethodInterceptor
{ {
fMaxRetries = maxRetries; fMaxRetries = maxRetries;
} }
/* (non-Javadoc) /* (non-Javadoc)
* @see org.aopalliance.intercept.MethodInterceptor#invoke(org.aopalliance.intercept.MethodInvocation) * @see org.aopalliance.intercept.MethodInterceptor#invoke(org.aopalliance.intercept.MethodInvocation)
*/ */
public Object invoke(MethodInvocation methodInvocation) throws Throwable public Object invoke(MethodInvocation methodInvocation) throws Throwable
{ {
RuntimeException lastException = null; RuntimeException lastException = null;
for (int count = 0; fMaxRetries < -1 || count < fMaxRetries; count++) for (int count = 0; fMaxRetries < -1 || count < fMaxRetries; count++)
@@ -101,9 +102,9 @@ public class RetryingTransactionAdvice implements MethodInterceptor
{ {
fgLogger.debug("Transaction succeeded after " + count + " retries."); fgLogger.debug("Transaction succeeded after " + count + " retries.");
} }
} }
return result; return result;
} }
catch (RuntimeException e) catch (RuntimeException e)
{ {
if (txn != null && isNewTxn && !txn.isCompleted()) if (txn != null && isNewTxn && !txn.isCompleted())
@@ -122,7 +123,8 @@ public class RetryingTransactionAdvice implements MethodInterceptor
if (t instanceof ConcurrencyFailureException || if (t instanceof ConcurrencyFailureException ||
t instanceof DeadlockLoserDataAccessException || t instanceof DeadlockLoserDataAccessException ||
t instanceof StaleObjectStateException || t instanceof StaleObjectStateException ||
t instanceof LockAcquisitionException) t instanceof LockAcquisitionException ||
t instanceof StaleStateException)
{ {
shouldRetry = true; shouldRetry = true;
try try
@@ -131,7 +133,7 @@ public class RetryingTransactionAdvice implements MethodInterceptor
} }
catch (InterruptedException ie) catch (InterruptedException ie)
{ {
// Do nothing. // Do nothing.
} }
break; break;
} }

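Both this advice and RetryingTransactionHelper below now treat Hibernate's StaleStateException as a retryable failure. The decision is made by inspecting whatever escaped the transaction; a hedged sketch of that classification, walking the cause chain, looks like this (the class and array names are illustrative):

import org.hibernate.StaleObjectStateException;
import org.hibernate.StaleStateException;
import org.hibernate.exception.LockAcquisitionException;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.dao.DeadlockLoserDataAccessException;

public class RetryCheckSketch
{
    // Mirrors the instanceof checks in the diff above; StaleObjectStateException is
    // kept even though it is a subclass of StaleStateException.
    private static final Class<?>[] RETRYABLE = new Class<?>[] {
        ConcurrencyFailureException.class,
        DeadlockLoserDataAccessException.class,
        StaleObjectStateException.class,
        StaleStateException.class,
        LockAcquisitionException.class
    };

    /** Walk the cause chain and report whether any link is a known transient failure. */
    public static boolean shouldRetry(Throwable failure)
    {
        for (Throwable t = failure; t != null; t = t.getCause())
        {
            for (Class<?> retryable : RETRYABLE)
            {
                if (retryable.isInstance(t))
                {
                    return true;
                }
            }
        }
        return false;
    }
}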

@@ -15,11 +15,11 @@
* along with this program; if not, write to the Free Software * along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* As a special exception to the terms and conditions of version 2.0 of * As a special exception to the terms and conditions of version 2.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre * the GPL, you may redistribute this Program in connection with Free/Libre
* and Open Source Software ("FLOSS") applications as described in Alfresco's * and Open Source Software ("FLOSS") applications as described in Alfresco's
* FLOSS exception. You should have recieved a copy of the text describing * FLOSS exception. You should have recieved a copy of the text describing
* the FLOSS exception, and it is also available here: * the FLOSS exception, and it is also available here:
* http://www.alfresco.com/legal/licensing" * http://www.alfresco.com/legal/licensing"
*/ */
package org.alfresco.repo.transaction; package org.alfresco.repo.transaction;
@@ -39,6 +39,7 @@ import org.alfresco.service.transaction.TransactionService;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.hibernate.StaleObjectStateException; import org.hibernate.StaleObjectStateException;
import org.hibernate.StaleStateException;
import org.hibernate.exception.LockAcquisitionException; import org.hibernate.exception.LockAcquisitionException;
import org.springframework.dao.ConcurrencyFailureException; import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.dao.DataIntegrityViolationException; import org.springframework.dao.DataIntegrityViolationException;
@@ -46,17 +47,17 @@ import org.springframework.dao.DeadlockLoserDataAccessException;
import org.springframework.jdbc.UncategorizedSQLException; import org.springframework.jdbc.UncategorizedSQLException;
/** /**
* A helper that runs a unit of work inside a UserTransaction, * A helper that runs a unit of work inside a UserTransaction,
* transparently retrying the unit of work if the cause of * transparently retrying the unit of work if the cause of
* failure is an optimistic locking or deadlock condition. * failure is an optimistic locking or deadlock condition.
* *
* @author britt * @author britt
*/ */
public class RetryingTransactionHelper public class RetryingTransactionHelper
{ {
private static final String MSG_READ_ONLY = "permissions.err_read_only"; private static final String MSG_READ_ONLY = "permissions.err_read_only";
private static Log logger = LogFactory.getLog(RetryingTransactionHelper.class); private static Log logger = LogFactory.getLog(RetryingTransactionHelper.class);
/** /**
* Exceptions that trigger retries. * Exceptions that trigger retries.
*/ */
@@ -69,30 +70,31 @@ public class RetryingTransactionHelper
StaleObjectStateException.class, StaleObjectStateException.class,
LockAcquisitionException.class, LockAcquisitionException.class,
BatchUpdateException.class, BatchUpdateException.class,
DataIntegrityViolationException.class DataIntegrityViolationException.class,
StaleStateException.class
}; };
} }
/** /**
* Reference to the TransactionService instance. * Reference to the TransactionService instance.
*/ */
private TransactionService txnService; private TransactionService txnService;
/** /**
* The maximum number of retries. -1 for infinity. * The maximum number of retries. -1 for infinity.
*/ */
private int maxRetries; private int maxRetries;
/** /**
* Whether the the transactions may only be reads * Whether the the transactions may only be reads
*/ */
private boolean readOnly; private boolean readOnly;
/** /**
* Random number generator for retry delays. * Random number generator for retry delays.
*/ */
private Random random; private Random random;
/** /**
* Callback interface * Callback interface
* @author britt * @author britt
@@ -101,7 +103,7 @@ public class RetryingTransactionHelper
{ {
/** /**
* Perform a unit of transactional work. * Perform a unit of transactional work.
* *
* @return Return the result of the unit of work * @return Return the result of the unit of work
* @throws Throwable This can be anything and will guarantee either a retry or a rollback * @throws Throwable This can be anything and will guarantee either a retry or a rollback
*/ */
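The callback contract documented just above (a single unit of work whose result is returned, and whose failure triggers either a retry or a rollback) is easiest to see in a usage sketch. The nested interface name, the setter names, and the raw Object return type are assumptions about this version of the class and are not taken from the diff.

import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.service.transaction.TransactionService;

public class RetryUsageSketch
{
    public Object writeSomething(TransactionService transactionService)
    {
        RetryingTransactionHelper helper = new RetryingTransactionHelper();
        helper.setTransactionService(transactionService); // assumed setter name
        helper.setMaxRetries(20);                         // assumed setter name; -1 retries forever

        // The whole callback is re-run if a retryable failure (deadlock, stale state,
        // optimistic lock) escapes it; anything else rolls the transaction back.
        return helper.doInTransaction(new RetryingTransactionHelper.RetryingTransactionCallback()
        {
            public Object execute() throws Throwable
            {
                // the unit of transactional work goes here
                return "done";
            }
        });
    }
}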
@@ -115,7 +117,7 @@ public class RetryingTransactionHelper
{ {
this.random = new Random(System.currentTimeMillis()); this.random = new Random(System.currentTimeMillis());
} }
// Setters. // Setters.
/** /**
* Set the TransactionService. * Set the TransactionService.
@@ -124,7 +126,7 @@ public class RetryingTransactionHelper
{ {
this.txnService = service; this.txnService = service;
} }
/** /**
* Set the maximimum number of retries. -1 for infinity. * Set the maximimum number of retries. -1 for infinity.
*/ */
@@ -132,7 +134,7 @@ public class RetryingTransactionHelper
{ {
this.maxRetries = maxRetries; this.maxRetries = maxRetries;
} }
/** /**
* Set whether this helper only supports read transactions. * Set whether this helper only supports read transactions.
*/ */
@@ -142,15 +144,15 @@ public class RetryingTransactionHelper
} }
/** /**
* Execute a callback in a transaction until it succeeds, fails * Execute a callback in a transaction until it succeeds, fails
* because of an error not the result of an optimistic locking failure, * because of an error not the result of an optimistic locking failure,
* or a deadlock loser failure, or until a maximum number of retries have * or a deadlock loser failure, or until a maximum number of retries have
* been attempted. * been attempted.
* <p> * <p>
* If there is already an active transaction, then the callback is merely * If there is already an active transaction, then the callback is merely
* executed and any retry logic is left to the caller. The transaction * executed and any retry logic is left to the caller. The transaction
* will attempt to be read-write. * will attempt to be read-write.
* *
* @param cb The callback containing the unit of work. * @param cb The callback containing the unit of work.
* @return Returns the result of the unit of work. * @return Returns the result of the unit of work.
* @throws RuntimeException all checked exceptions are converted * @throws RuntimeException all checked exceptions are converted
@@ -159,16 +161,16 @@ public class RetryingTransactionHelper
{ {
return doInTransaction(cb, false, false); return doInTransaction(cb, false, false);
} }
/** /**
* Execute a callback in a transaction until it succeeds, fails * Execute a callback in a transaction until it succeeds, fails
* because of an error not the result of an optimistic locking failure, * because of an error not the result of an optimistic locking failure,
* or a deadlock loser failure, or until a maximum number of retries have * or a deadlock loser failure, or until a maximum number of retries have
* been attempted. * been attempted.
* <p> * <p>
* If there is already an active transaction, then the callback is merely * If there is already an active transaction, then the callback is merely
* executed and any retry logic is left to the caller. * executed and any retry logic is left to the caller.
* *
* @param cb The callback containing the unit of work. * @param cb The callback containing the unit of work.
* @param readOnly Whether this is a read only transaction. * @param readOnly Whether this is a read only transaction.
* @return Returns the result of the unit of work. * @return Returns the result of the unit of work.
@@ -178,16 +180,16 @@ public class RetryingTransactionHelper
{ {
return doInTransaction(cb, readOnly, false); return doInTransaction(cb, readOnly, false);
} }
/** /**
* Execute a callback in a transaction until it succeeds, fails * Execute a callback in a transaction until it succeeds, fails
* because of an error not the result of an optimistic locking failure, * because of an error not the result of an optimistic locking failure,
* or a deadlock loser failure, or until a maximum number of retries have * or a deadlock loser failure, or until a maximum number of retries have
* been attempted. * been attempted.
* <p> * <p>
* It is possible to force a new transaction to be created or to partake in * It is possible to force a new transaction to be created or to partake in
* any existing transaction. * any existing transaction.
* *
* @param cb The callback containing the unit of work. * @param cb The callback containing the unit of work.
* @param readOnly Whether this is a read only transaction. * @param readOnly Whether this is a read only transaction.
* @param requiresNew <tt>true</tt> to force a new transaction or * @param requiresNew <tt>true</tt> to force a new transaction or
@@ -274,7 +276,7 @@ public class RetryingTransactionHelper
// Rollback if we can. // Rollback if we can.
if (txn != null) if (txn != null)
{ {
try try
{ {
int txnStatus = txn.getStatus(); int txnStatus = txn.getStatus();
// We can only rollback if a transaction was started (NOT NO_TRANSACTION) and // We can only rollback if a transaction was started (NOT NO_TRANSACTION) and
@@ -285,18 +287,18 @@ public class RetryingTransactionHelper
{ {
txn.rollback(); txn.rollback();
} }
} }
catch (IllegalStateException e1) catch (IllegalStateException e1)
{
logger.error(e);
throw new AlfrescoRuntimeException("Failure during rollback: " + cb, e1);
}
catch (SecurityException e1)
{ {
logger.error(e); logger.error(e);
throw new AlfrescoRuntimeException("Failure during rollback: " + cb, e1); throw new AlfrescoRuntimeException("Failure during rollback: " + cb, e1);
} }
catch (SystemException e1) catch (SecurityException e1)
{
logger.error(e);
throw new AlfrescoRuntimeException("Failure during rollback: " + cb, e1);
}
catch (SystemException e1)
{ {
logger.error(e); logger.error(e);
throw new AlfrescoRuntimeException("Failure during rollback: " + cb, e1); throw new AlfrescoRuntimeException("Failure during rollback: " + cb, e1);
@@ -309,7 +311,7 @@ public class RetryingTransactionHelper
} }
else else
{ {
lastException = (e instanceof RuntimeException) ? lastException = (e instanceof RuntimeException) ?
(RuntimeException)e : new AlfrescoRuntimeException("Exception in Transaction.", e); (RuntimeException)e : new AlfrescoRuntimeException("Exception in Transaction.", e);
} }
// Check if there is a cause for retrying // Check if there is a cause for retrying
@@ -324,7 +326,7 @@ public class RetryingTransactionHelper
} }
catch (InterruptedException ie) catch (InterruptedException ie)
{ {
// Do nothing. // Do nothing.
} }
// Try again // Try again
continue; continue;
@@ -340,10 +342,10 @@ public class RetryingTransactionHelper
// So, fail. // So, fail.
throw lastException; throw lastException;
} }
/** /**
* Sometimes, the exception means retry and sometimes not. * Sometimes, the exception means retry and sometimes not.
* *
* @param cause the cause to examine * @param cause the cause to examine
* @return Returns the original cause if it is a valid retry cause, otherwise <tt>null</tt> * @return Returns the original cause if it is a valid retry cause, otherwise <tt>null</tt>
*/ */