Mirror of https://github.com/Alfresco/alfresco-community-repo.git (synced 2025-07-24 17:32:48 +00:00)
Afternoon merge.
git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/BRANCHES/WCM-DEV2/root@2915 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261
@@ -2,7 +2,7 @@
 # Hibernate configuration
 #
 hibernate.jdbc.use_streams_for_binary=true
-hibernate.dialect=org.hibernate.dialect.MySQLDialect
+hibernate.dialect=org.hibernate.dialect.MySQLInnoDBDialect
 hibernate.show_sql=false
 hibernate.hbm2ddl.auto=update
 hibernate.cache.use_query_cache=true
@@ -1,5 +1,10 @@
+# Sample Hibernate configuration to disnable Hibernate schema updates
+# Values are "validate" or "update" (default)
+#hibernate.hbm2ddl.auto=validate
+
 #
 # Sample Hibernate configuration for changing Database dialect
+# For a full list: http://www.hibernate.org/hib_docs/v3/reference/en/html_single/#configuration-optional-dialects
 #
 
 #
@@ -279,8 +279,8 @@
 <property name="id"><value>patch.forumsIcons</value></property>
 <property name="description"><value>patch.forumsIcons.description</value></property>
 <property name="fixesFromSchema"><value>0</value></property>
-<property name="fixesToSchema"><value>7</value></property>
-<property name="targetSchema"><value>8</value></property>
+<property name="fixesToSchema"><value>12</value></property>
+<property name="targetSchema"><value>13</value></property>
 <property name="importerBootstrap">
 <ref bean="spacesBootstrap" />
 </property>
@@ -289,8 +289,8 @@
 <property name="id"><value>patch.emailTemplatesFolder</value></property>
 <property name="description"><value>patch.emailTemplatesFolder.description</value></property>
 <property name="fixesFromSchema"><value>0</value></property>
-<property name="fixesToSchema"><value>8</value></property>
-<property name="targetSchema"><value>9</value></property>
+<property name="fixesToSchema"><value>12</value></property>
+<property name="targetSchema"><value>13</value></property>
 <!-- helper beans for execution -->
 <property name="importerBootstrap">
 <ref bean="spacesBootstrap" />
@@ -303,8 +303,8 @@
 <property name="id"><value>patch.emailTemplatesContent</value></property>
 <property name="description"><value>patch.emailTemplatesContent.description</value></property>
 <property name="fixesFromSchema"><value>0</value></property>
-<property name="fixesToSchema"><value>9</value></property>
-<property name="targetSchema"><value>10</value></property>
+<property name="fixesToSchema"><value>12</value></property>
+<property name="targetSchema"><value>13</value></property>
 <property name="templatesACP"><value>alfresco/templates/email_templates.acp</value></property>
 <!-- helper beans for execution -->
 <property name="importerBootstrap">
@@ -134,7 +134,7 @@
 </property>
 </bean>
 
-<!-- enable DEBUG for 'org.alfresco.repo.cache.EhCacheTracerJob' to activate -->
+<!-- enable DEBUG for 'org.alfresco.repo.cache.EhCacheTracerJob' and enable scheduler property to activate -->
 <bean id="ehCacheTracerJob" class="org.alfresco.util.TriggerBean">
 <property name="jobDetail">
 <bean id="ehCacheTracerJobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
@@ -143,6 +143,11 @@
 </property>
 </bean>
 </property>
+<!-- enable this to activate bean
+<property name="scheduler">
+<ref bean="schedulerFactory" />
+</property>
+-->
 <property name="startDelay">
 <value>3600000</value><!-- start after an hour -->
 </property>
@@ -82,6 +82,7 @@ public class EhCacheTracerJob implements Job
 
 long maxHeapSize = Runtime.getRuntime().maxMemory();
 long totalSize = 0L;
+double estimatedMaxSize = 0L;
 // get all the caches
 String[] cacheNames = cacheManager.getCacheNames();
 logger.debug("Dumping EHCache info:");
@@ -97,13 +98,18 @@ public class EhCacheTracerJob implements Job
 logger.debug(analysis);
 // get the size
 totalSize += analysis.getSize();
+estimatedMaxSize += Double.isNaN(analysis.getEstimatedMaxSize()) ? 0.0 : analysis.getEstimatedMaxSize();
 }
 // check the size
 double sizePercentage = (double)totalSize / (double)maxHeapSize * 100.0;
+double maxSizePercentage = estimatedMaxSize / (double)maxHeapSize * 100.0;
 String msg = String.format(
-"EHCaches currently consume %5.2f MB or %3.2f percent of system VM size",
+"EHCaches currently consume %5.2f MB or %3.2f percent of system VM size. \n" +
+"The estimated maximum size is %5.2f MB or %3.2f percent of system VM size.",
 (double)totalSize / 1024.0 / 1024.0,
-sizePercentage);
+sizePercentage,
+estimatedMaxSize / 1024.0 / 1024.0,
+maxSizePercentage);
 logger.debug(msg);
 }
 
@@ -111,6 +117,13 @@ public class EhCacheTracerJob implements Job
 {
 private Cache cache;
 private long size = 0L;
+double sizeMB;
+long maxSize;
+long currentSize;
+long hitCount;
+long missCount;
+double percentageFull;
+double estMaxSize;
 
 public CacheAnalysis(Cache cache) throws CacheException
 {
@@ -133,6 +146,11 @@ public class EhCacheTracerJob implements Job
 return size;
 }
 
+public synchronized double getEstimatedMaxSize()
+{
+return estMaxSize;
+}
+
 @SuppressWarnings("unchecked")
 private synchronized void calculateSize() throws CacheException
 {
@@ -143,6 +161,13 @@ public class EhCacheTracerJob implements Job
 Element element = cache.get(key);
 size += getSize(element);
 }
+sizeMB = (double)size/1024.0/1024.0;
+maxSize = cache.getMaxElementsInMemory();
+currentSize = cache.getMemoryStoreSize();
+hitCount = cache.getHitCount();
+missCount = cache.getMissCountNotFound();
+percentageFull = (double)currentSize / (double)maxSize * 100.0;
+estMaxSize = size / (double) currentSize * (double) maxSize;
 }
 
 private long getSize(Serializable obj)
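The CacheAnalysis changes in the EhCacheTracerJob hunks above boil down to proportional scaling: the bytes counted by walking one cache are scaled by maxElementsInMemory / memoryStoreSize to estimate what the cache would occupy when full, and both the measured and estimated figures are reported as a percentage of the JVM's maximum heap. The following is a minimal standalone sketch of that arithmetic; the cache figures are invented for illustration, and only Runtime.maxMemory() is a real call.

public class CacheEstimateSketch
{
    public static void main(String[] args)
    {
        // invented sample figures standing in for one cache's statistics
        long size = 12L * 1024L * 1024L; // bytes found by walking the cached elements
        long currentSize = 3000L;        // elements currently in the memory store
        long maxSize = 10000L;           // configured maxElementsInMemory
        long maxHeapSize = Runtime.getRuntime().maxMemory();

        // the estimate added to calculateSize(): scale observed bytes up to a full cache
        double estMaxSize = size / (double) currentSize * (double) maxSize;

        // the two percentages the tracer job logs
        double sizePercentage = (double) size / (double) maxHeapSize * 100.0;
        double maxSizePercentage = estMaxSize / (double) maxHeapSize * 100.0;

        System.out.println(String.format(
                "EHCaches currently consume %5.2f MB or %3.2f percent of system VM size. \n" +
                "The estimated maximum size is %5.2f MB or %3.2f percent of system VM size.",
                (double) size / 1024.0 / 1024.0,
                sizePercentage,
                estMaxSize / 1024.0 / 1024.0,
                maxSizePercentage));
    }
}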
@@ -30,9 +30,11 @@ public interface DbAccessControlList
 {
 public long getId();
 
-public Node getNode();
-
-public void setNode(Node node);
+/**
+ *
+ * @return Returns the access control entries for this access control list
+ */
+public Set<DbAccessControlEntry> getEntries();
 
 /**
 *
@@ -57,11 +57,6 @@ public class ChildAssocImpl implements ChildAssoc
 // add the forward associations
 this.setParent(parentNode);
 this.setChild(childNode);
-// Force initialization of the inverse collections
-// so that we don't queue additions to them.
-// This can go if we move to set-based collections
-parentNode.getChildAssocs().size();
-childNode.getParentAssocs().size();
 // add the inverse associations
 parentNode.getChildAssocs().add(this);
 childNode.getParentAssocs().add(this);
@@ -26,8 +26,6 @@ import org.alfresco.repo.domain.DbAccessControlList;
 import org.alfresco.repo.domain.DbAuthority;
 import org.alfresco.repo.domain.DbPermission;
 import org.alfresco.repo.domain.DbPermissionKey;
-import org.alfresco.repo.domain.Node;
-import org.alfresco.util.EqualsHelper;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.hibernate.Session;
@@ -42,7 +40,6 @@ public class DbAccessControlListImpl extends LifecycleAdapter implements DbAcces
 private static Log logger = LogFactory.getLog(DbAccessControlListImpl.class);
 
 private long id;
-private Node node;
 private Set<DbAccessControlEntry> entries;
 private boolean inherits;
 
@@ -57,7 +54,6 @@ public class DbAccessControlListImpl extends LifecycleAdapter implements DbAcces
 StringBuilder sb = new StringBuilder(128);
 sb.append("DbAccessControlListImpl")
 .append("[ id=").append(id)
-.append(", node=").append(node)
 .append(", entries=").append(entries.size())
 .append(", inherits=").append(inherits)
 .append("]");
@@ -77,14 +73,13 @@ public class DbAccessControlListImpl extends LifecycleAdapter implements DbAcces
 }
 DbAccessControlList other = (DbAccessControlList) o;
 
-return (this.inherits == other.getInherits())
-&& (EqualsHelper.nullSafeEquals(this.node, other.getNode()));
+return (this.inherits == other.getInherits());
 }
 
 @Override
 public int hashCode()
 {
-return (node == null ? 0 : node.hashCode());
+return (inherits == false ? 0 : 17);
 }
 
 public long getId()
@@ -101,16 +96,6 @@ public class DbAccessControlListImpl extends LifecycleAdapter implements DbAcces
 this.id = id;
 }
 
-public Node getNode()
-{
-return node;
-}
-
-public void setNode(Node node)
-{
-this.node = node;
-}
-
 public Set<DbAccessControlEntry> getEntries()
 {
 return entries;
@@ -28,6 +28,7 @@ import javax.transaction.UserTransaction;
 
 import org.alfresco.model.ContentModel;
 import org.alfresco.repo.domain.ChildAssoc;
+import org.alfresco.repo.domain.DbAccessControlList;
 import org.alfresco.repo.domain.Node;
 import org.alfresco.repo.domain.NodeAssoc;
 import org.alfresco.repo.domain.NodeKey;
@@ -35,12 +36,14 @@ import org.alfresco.repo.domain.NodeStatus;
 import org.alfresco.repo.domain.PropertyValue;
 import org.alfresco.repo.domain.Store;
 import org.alfresco.repo.domain.StoreKey;
+import org.alfresco.repo.transaction.AlfrescoTransactionSupport;
 import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
 import org.alfresco.service.cmr.repository.StoreRef;
 import org.alfresco.service.namespace.QName;
 import org.alfresco.service.transaction.TransactionService;
 import org.alfresco.util.BaseSpringTest;
 import org.alfresco.util.GUID;
+import org.hibernate.CacheMode;
 import org.hibernate.exception.ConstraintViolationException;
 
 /**
@@ -385,6 +388,133 @@ public class HibernateNodeTest extends BaseSpringTest
 {
 txn.rollback();
 }
+}
 
+/**
+ * Create some simple parent-child relationships and flush them. Then read them back in without
+ * using the L2 cache.
+ */
+public void testQueryJoins() throws Exception
+{
+getSession().setCacheMode(CacheMode.IGNORE);
+
+// make a container node
+Node containerNode = new NodeImpl();
+containerNode.setStore(store);
+containerNode.setUuid(GUID.generate());
+containerNode.setTypeQName(ContentModel.TYPE_CONTAINER);
+containerNode.getProperties().put(ContentModel.PROP_AUTHOR, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
+containerNode.getProperties().put(ContentModel.PROP_ARCHIVED_BY, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
+containerNode.getAspects().add(ContentModel.ASPECT_AUDITABLE);
+Serializable containerNodeId = getSession().save(containerNode);
+NodeKey containerNodeKey = new NodeKey(containerNode.getNodeRef());
+NodeStatus containerNodeStatus = new NodeStatusImpl();
+containerNodeStatus.setKey(containerNodeKey);
+containerNodeStatus.setNode(containerNode);
+containerNodeStatus.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId());
+getSession().save(containerNodeStatus);
+// make content node 1
+Node contentNode1 = new NodeImpl();
+contentNode1.setStore(store);
+contentNode1.setUuid(GUID.generate());
+contentNode1.setTypeQName(ContentModel.TYPE_CONTENT);
+contentNode1.getProperties().put(ContentModel.PROP_AUTHOR, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
+contentNode1.getProperties().put(ContentModel.PROP_ARCHIVED_BY, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
+contentNode1.getAspects().add(ContentModel.ASPECT_AUDITABLE);
+Serializable contentNode1Id = getSession().save(contentNode1);
+NodeKey contentNodeKey1 = new NodeKey(contentNode1.getNodeRef());
+NodeStatus contentNodeStatus1 = new NodeStatusImpl();
+contentNodeStatus1.setKey(contentNodeKey1);
+contentNodeStatus1.setNode(contentNode1);
+contentNodeStatus1.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId());
+getSession().save(contentNodeStatus1);
+// make content node 2
+Node contentNode2 = new NodeImpl();
+contentNode2.setStore(store);
+contentNode2.setUuid(GUID.generate());
+contentNode2.setTypeQName(ContentModel.TYPE_CONTENT);
+Serializable contentNode2Id = getSession().save(contentNode2);
+contentNode2.getProperties().put(ContentModel.PROP_AUTHOR, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
+contentNode2.getProperties().put(ContentModel.PROP_ARCHIVED_BY, new PropertyValue(DataTypeDefinition.TEXT, "ABC"));
+contentNode2.getAspects().add(ContentModel.ASPECT_AUDITABLE);
+NodeKey contentNodeKey2 = new NodeKey(contentNode2.getNodeRef());
+NodeStatus contentNodeStatus2 = new NodeStatusImpl();
+contentNodeStatus2.setKey(contentNodeKey2);
+contentNodeStatus2.setNode(contentNode2);
+contentNodeStatus2.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId());
+getSession().save(contentNodeStatus2);
+// create an association to content 1
+ChildAssoc assoc1 = new ChildAssocImpl();
+assoc1.setIsPrimary(true);
+assoc1.setTypeQName(QName.createQName(null, "type1"));
+assoc1.setQname(QName.createQName(null, "number1"));
+assoc1.buildAssociation(containerNode, contentNode1);
+getSession().save(assoc1);
+// create an association to content 2
+ChildAssoc assoc2 = new ChildAssocImpl();
+assoc2.setIsPrimary(true);
+assoc2.setTypeQName(QName.createQName(null, "type2"));
+assoc2.setQname(QName.createQName(null, "number2"));
+assoc2.buildAssociation(containerNode, contentNode2);
+getSession().save(assoc2);
+
+// make sure that there are no entities cached in either L1 or L2
+getSession().flush();
+getSession().clear();
+
+// now read the structure back in from the container down
+containerNodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, containerNodeKey);
+containerNode = containerNodeStatus.getNode();
+Collection<ChildAssoc> assocs = containerNode.getChildAssocs();
+for (ChildAssoc assoc : assocs)
+{
+Node childNode = assoc.getChild();
+Store store = childNode.getStore();
+childNode.getAspects().size();
+childNode.getProperties().size();
+childNode.getParentAssocs().size();
+childNode.getChildAssocs().size();
+childNode.getSourceNodeAssocs().size();
+childNode.getTargetNodeAssocs().size();
+DbAccessControlList acl = childNode.getAccessControlList();
+if (acl != null)
+{
+acl.getEntries().size();
+}
+}
+
+// clear out again
+getSession().clear();
+
+// now remove a property from each child
+containerNodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, containerNodeKey);
+containerNode = containerNodeStatus.getNode();
+assocs = containerNode.getChildAssocs();
+for (ChildAssoc assoc : assocs)
+{
+Node childNode = assoc.getChild();
+PropertyValue removed = childNode.getProperties().remove(ContentModel.PROP_ARCHIVED_BY);
+assertNotNull("Property was not present", removed);
+}
+// expect that just the specific property gets removed in the delete statement
+getSession().flush();
+getSession().clear();
+
+// Create a second association to content 2
+// create an association to content 2
+containerNodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, containerNodeKey);
+containerNode = containerNodeStatus.getNode();
+contentNodeStatus2 = (NodeStatus) getSession().get(NodeStatusImpl.class, contentNodeKey2);
+contentNode2 = contentNodeStatus2.getNode();
+ChildAssoc assoc3 = new ChildAssocImpl();
+assoc3.setIsPrimary(false);
+assoc3.setTypeQName(QName.createQName(null, "type3"));
+assoc3.setQname(QName.createQName(null, "number3"));
+assoc3.buildAssociation(containerNode, contentNode2); // check whether the children are pulled in for this
+getSession().save(assoc3);
+
+// flush it
+getSession().flush();
+getSession().clear();
 }
 }
@@ -38,13 +38,15 @@
 <property name="uuid" column="uuid" type="string" length="36" />
 </natural-id>
 <property name="typeQName" column="type_qname" type="QName" length="255" not-null="true" />
-<!-- inverse assoc to access control list -->
-<one-to-one
+<!-- forward assoc to access control list (optional) -->
+<many-to-one
 name="accessControlList"
 class="org.alfresco.repo.domain.hibernate.DbAccessControlListImpl"
-property-ref="node"
+column="acl_id"
 lazy="false"
 fetch="join"
+unique="false"
+not-null="false"
 cascade="delete" />
 <!-- forward assoc to properties -->
 <map
@@ -87,8 +89,8 @@
 <set
 name="parentAssocs"
 inverse="true"
-lazy="true"
-fetch="select"
+lazy="false"
+fetch="join"
 cascade="none"
 optimistic-lock="true" >
 <key column="child_node_id" />
@@ -98,8 +100,8 @@
 <set
 name="childAssocs"
 inverse="true"
-lazy="true"
-fetch="select"
+lazy="false"
+fetch="join"
 cascade="none"
 optimistic-lock="true" >
 <key column="parent_node_id" />
@@ -173,8 +175,8 @@
 <many-to-one
 name="parent"
 class="org.alfresco.repo.domain.hibernate.NodeImpl"
-lazy="false"
-fetch="join"
+lazy="proxy"
+fetch="select"
 optimistic-lock="true"
 not-null="true" >
 <column name="parent_node_id" />
@@ -182,8 +184,8 @@
 <!-- forward assoc to child node -->
 <many-to-one
 name="child"
-lazy="false"
-fetch="join"
+lazy="proxy"
+fetch="select"
 class="org.alfresco.repo.domain.hibernate.NodeImpl"
 optimistic-lock="true"
 not-null="true" >
@@ -54,11 +54,6 @@ public class NodeAssocImpl implements NodeAssoc
 // add the forward associations
 this.setTarget(targetNode);
 this.setSource(sourceNode);
-// Force initialization of the inverse collections
-// so that we don't queue additions to them.
-// This can go if we move to set-based collections
-sourceNode.getSourceNodeAssocs().size();
-targetNode.getTargetNodeAssocs().size();
 // add the inverse associations
 sourceNode.getTargetNodeAssocs().add(this);
 targetNode.getSourceNodeAssocs().add(this);
@@ -19,13 +19,15 @@
 <generator class="native" />
 </id>
 
-<many-to-one
-name="node"
-class="org.alfresco.repo.domain.hibernate.NodeImpl"
-unique="true"
-not-null="true">
-<column name="node_id" />
-</many-to-one>
+<set name="entries"
+inverse="true"
+lazy="false"
+cascade="delete"
+optimistic-lock="true"
+fetch="join" >
+<key column="acl_id" />
+<one-to-many class="org.alfresco.repo.domain.hibernate.DbAccessControlEntryImpl" />
+</set>
 
 <set name="entries"
 inverse="true"
@@ -39,7 +39,6 @@ import org.alfresco.service.cmr.repository.InvalidNodeRefException;
 import org.alfresco.service.cmr.repository.NodeRef;
 import org.alfresco.service.cmr.security.AccessStatus;
 import org.alfresco.service.namespace.QName;
-import org.alfresco.util.ParameterCheck;
 import org.hibernate.Query;
 import org.hibernate.Session;
 import org.springframework.orm.hibernate3.HibernateCallback;
@@ -98,7 +97,7 @@ public class PermissionsDaoComponentImpl extends HibernateDaoSupport implements
 }
 else
 {
-npe = createSimpleNodePermissionEntry(acl);
+npe = createSimpleNodePermissionEntry(node);
 }
 // done
 if (logger.isDebugEnabled())
@@ -139,7 +138,6 @@ public class PermissionsDaoComponentImpl extends HibernateDaoSupport implements
 private DbAccessControlList createAccessControlList(Node node)
 {
 DbAccessControlList acl = new DbAccessControlListImpl();
-acl.setNode(node);
 acl.setInherits(INHERIT_PERMISSIONS_DEFAULT);
 getHibernateTemplate().save(acl);
 
@@ -184,10 +182,10 @@ public class PermissionsDaoComponentImpl extends HibernateDaoSupport implements
 DbAccessControlList acl = getAccessControlList(node, false);
 if (acl != null)
 {
+// maintain referencial integrity
+node.setAccessControlList(null);
 // delete the access control list - it will cascade to the entries
 getHibernateTemplate().delete(acl);
-// maintain inverse
-node.setAccessControlList(null);
 }
 }
 
@@ -383,10 +381,10 @@ public class PermissionsDaoComponentImpl extends HibernateDaoSupport implements
 DbAccessControlList acl = getAccessControlList(node, false);
 if (acl != null)
 {
+// maintain referencial integrity
+node.setAccessControlList(null);
 // drop the list
 getHibernateTemplate().delete(acl);
-// update node
-node.setAccessControlList(null);
 }
 // create the access control list
 acl = createAccessControlList(node);
@@ -452,25 +450,34 @@ public class PermissionsDaoComponentImpl extends HibernateDaoSupport implements
 // Utility methods to create simple detached objects for the outside world
 // We do not pass out the hibernate objects
 
-private SimpleNodePermissionEntry createSimpleNodePermissionEntry(DbAccessControlList acl)
+private SimpleNodePermissionEntry createSimpleNodePermissionEntry(Node node)
 {
+DbAccessControlList acl = node.getAccessControlList();
 if (acl == null)
 {
-ParameterCheck.mandatory("acl", acl);
+// there isn't an access control list for the node - spoof a null one
+SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry(
+node.getNodeRef(),
+true,
+Collections.<SimplePermissionEntry> emptySet());
+return snpe;
+}
+else
+{
+Set<DbAccessControlEntry> entries = acl.getEntries();
+SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry(
+node.getNodeRef(),
+acl.getInherits(),
+createSimplePermissionEntries(node, entries));
+return snpe;
 }
-Set<DbAccessControlEntry> entries = acl.getEntries();
-SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry(
-acl.getNode().getNodeRef(),
-acl.getInherits(),
-createSimplePermissionEntries(entries));
-return snpe;
 }
 
 /**
 * @param entries access control entries
 * @return Returns a unique set of entries that can be given back to the outside world
 */
-private Set<SimplePermissionEntry> createSimplePermissionEntries(Collection<DbAccessControlEntry> entries)
+private Set<SimplePermissionEntry> createSimplePermissionEntries(Node node, Collection<DbAccessControlEntry> entries)
 {
 if (entries == null)
 {
@@ -481,20 +488,20 @@ public class PermissionsDaoComponentImpl extends HibernateDaoSupport implements
 {
 for (DbAccessControlEntry entry : entries)
 {
-spes.add(createSimplePermissionEntry(entry));
+spes.add(createSimplePermissionEntry(node, entry));
 }
 }
 return spes;
 }
 
-private static SimplePermissionEntry createSimplePermissionEntry(DbAccessControlEntry ace)
+private static SimplePermissionEntry createSimplePermissionEntry(Node node, DbAccessControlEntry ace)
 {
 if (ace == null)
 {
 return null;
 }
 return new SimplePermissionEntry(
-ace.getAccessControlList().getNode().getNodeRef(),
+node.getNodeRef(),
 createSimplePermissionReference(ace.getPermission()),
 ace.getAuthority().getRecipient(),
 ace.isAllowed() ? AccessStatus.ALLOWED : AccessStatus.DENIED);
@@ -159,11 +159,18 @@ public class FileImporterImpl implements FileImporter
 final File file,
 FileFilter filter,
 boolean recurse,
-String containerName) throws Exception
+final String containerName) throws Exception
 {
 if (containerName != null)
 {
-NodeRef newContainer = createDirectory(container, containerName, containerName);
+TransactionWork<NodeRef> createDirectoryWork = new TransactionWork<NodeRef>()
+{
+public NodeRef doWork() throws Exception
+{
+return createDirectory(container, containerName, containerName);
+}
+};
+NodeRef newContainer = TransactionUtil.executeInUserTransaction(transactionService, createDirectoryWork);
 return create(counter, newContainer, file, filter, recurse, null);
 
 }
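The FileImporterImpl hunk above moves the directory creation into a TransactionWork callback that is executed through TransactionUtil.executeInUserTransaction, so the create runs inside its own user transaction. The toy sketch below mimics only the shape of that callback pattern so it stays self-contained and runnable: the interface and helper defined here are stand-ins, not the Alfresco classes, and the real TransactionUtil wraps doWork() in begin/commit/rollback rather than the bare try/catch used here.

public class CallbackSketch
{
    // stand-in for org.alfresco.repo.transaction.TransactionUtil.TransactionWork<R>
    interface Work<R>
    {
        R doWork() throws Exception;
    }

    // stand-in for TransactionUtil.executeInUserTransaction: run the callback,
    // commit on success, roll back on failure (both elided in this toy version)
    static <R> R executeInUserTransaction(Work<R> work)
    {
        try
        {
            return work.doWork();
        }
        catch (Exception e)
        {
            throw new RuntimeException("rolled back", e);
        }
    }

    public static void main(String[] args)
    {
        final String containerName = "imported-files";
        String newContainer = executeInUserTransaction(new Work<String>()
        {
            public String doWork() throws Exception
            {
                // in the real code this is createDirectory(container, containerName, containerName)
                return "created " + containerName;
            }
        });
        System.out.println(newContainer);
    }
}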
@@ -40,8 +40,15 @@ public class ScriptableHashMap<K,V> extends HashMap implements Scriptable
 */
 public Object get(String name, Scriptable start)
 {
-// get the property from the underlying map
-return get(name);
+// get the property from the underlying QName map
+if ("length".equals(name))
+{
+return this.size();
+}
+else
+{
+return get(name);
+}
 }
 
 /**
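The effect of the ScriptableHashMap change above is that a script asking the map for "length" now gets the entry count instead of an ordinary missing-key lookup, while every other name still falls through to the underlying map. The following is a self-contained sketch of that lookup rule, using a plain HashMap in place of ScriptableHashMap; the map contents are invented for illustration.

import java.util.HashMap;
import java.util.Map;

public class LengthLookupSketch
{
    // same branch the patch adds to ScriptableHashMap.get(String, Scriptable)
    static Object get(Map<String, Object> map, String name)
    {
        if ("length".equals(name))
        {
            return map.size();
        }
        else
        {
            return map.get(name);
        }
    }

    public static void main(String[] args)
    {
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("title", "readme.txt");
        System.out.println(get(map, "title"));  // readme.txt
        System.out.println(get(map, "length")); // 1 - the number of entries
    }
}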
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2005 Alfresco, Inc.
+ *
+ * Licensed under the Mozilla Public License version 1.1
+ * with a permitted attribution clause. You may obtain a
+ * copy of the License at
+ *
+ * http://www.alfresco.org/legal/license.txt
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific
+ * language governing permissions and limitations under the
+ * License.
+ */
+package org.alfresco.repo.model.filefolder;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.alfresco.model.ContentModel;
+import org.alfresco.repo.content.transform.AbstractContentTransformerTest;
+import org.alfresco.repo.transaction.TransactionUtil;
+import org.alfresco.repo.transaction.TransactionUtil.TransactionWork;
+import org.alfresco.service.ServiceRegistry;
+import org.alfresco.service.cmr.model.FileFolderService;
+import org.alfresco.service.cmr.model.FileInfo;
+import org.alfresco.service.cmr.repository.ContentWriter;
+import org.alfresco.service.cmr.repository.NodeRef;
+import org.alfresco.service.cmr.repository.NodeService;
+import org.alfresco.service.cmr.repository.StoreRef;
+import org.alfresco.service.namespace.NamespaceService;
+import org.alfresco.service.namespace.QName;
+import org.alfresco.service.transaction.TransactionService;
+import org.alfresco.util.ApplicationContextHelper;
+import org.alfresco.util.GUID;
+import org.springframework.context.ApplicationContext;
+
+/**
+ * Tests around some of the data structures that lead to performance
+ * degradation. We use the {@link org.alfresco.service.cmr.model.FileFolderService FileFolderService}
+ * as it provides the most convenient and most common test scenarios.
+ * <p>
+ * Note that this test is not designed to validate performance figures, but is
+ * rather a handy tool for doing benchmarking. It is therefore not named <i>*Test</i> as is the
+ * pattern for getting tests run by the continuous build.
+ *
+ * @author Derek Hulley
+ */
+public class FileFolderPerformanceTester extends TestCase
+{
+private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();
+
+private TransactionService transactionService;
+private NodeService nodeService;
+private FileFolderService fileFolderService;
+private StoreRef storeRef;
+private NodeRef rootFolderRef;
+private File dataFile;
+
+@Override
+public void setUp() throws Exception
+{
+ServiceRegistry serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY);
+transactionService = serviceRegistry.getTransactionService();
+nodeService = serviceRegistry.getNodeService();
+fileFolderService = serviceRegistry.getFileFolderService();
+
+// create a folder root to work in
+storeRef = nodeService.createStore(StoreRef.PROTOCOL_WORKSPACE, getName() + "_" + System.currentTimeMillis());
+NodeRef rootNodeRef = nodeService.getRootNode(storeRef);
+rootFolderRef = nodeService.createNode(
+rootNodeRef,
+ContentModel.ASSOC_CHILDREN,
+QName.createQName(NamespaceService.ALFRESCO_URI, getName()),
+ContentModel.TYPE_FOLDER).getChildRef();
+dataFile = AbstractContentTransformerTest.loadQuickTestFile("txt");
+}
+
+public void testSetUp() throws Exception
+{
+assertNotNull(dataFile);
+}
+
+/**
+ * Creates <code>folderCount</code> folders below the given parent and populates each folder with
+ * <code>fileCount</code> files. The folders will be created as siblings in one go, but the files
+ * are added one to each folder until each folder has the presribed number of files within it.
+ * This can therefore be used to test the performance when the L2 cache sizes are exceeded.
+ * <p>
+ * Each creation (file or folder) uses the <b>REQUIRES_NEW</b> transaction declaration.
+ *
+ * @param parentNodeRef the level zero parent
+ * @return Returns the average time (ms) to create the <b>files only</b>
+ */
+private double buildStructure(final NodeRef parentNodeRef, final int folderCount, final int fileCount)
+{
+List<NodeRef> folders = new ArrayList<NodeRef>(folderCount);
+for (int i = 0; i < folderCount; i++)
+{
+TransactionWork<FileInfo> createFolderWork = new TransactionWork<FileInfo>()
+{
+public FileInfo doWork() throws Exception
+{
+FileInfo folderInfo = fileFolderService.create(
+parentNodeRef,
+GUID.generate(),
+ContentModel.TYPE_FOLDER);
+// done
+return folderInfo;
+}
+};
+FileInfo folderInfo = TransactionUtil.executeInUserTransaction(transactionService, createFolderWork);
+// keep the reference
+folders.add(folderInfo.getNodeRef());
+}
+// now progress around the folders until they have been populated
+long start = System.currentTimeMillis();
+for (int i = 0; i < fileCount; i++)
+{
+for (final NodeRef folderRef : folders)
+{
+TransactionWork<FileInfo> createFileWork = new TransactionWork<FileInfo>()
+{
+public FileInfo doWork() throws Exception
+{
+FileInfo fileInfo = fileFolderService.create(
+folderRef,
+GUID.generate(),
+ContentModel.TYPE_CONTENT);
+NodeRef nodeRef = fileInfo.getNodeRef();
+// write the content
+ContentWriter writer = fileFolderService.getWriter(nodeRef);
+writer.putContent(dataFile);
+// done
+return fileInfo;
+}
+};
+TransactionUtil.executeInUserTransaction(transactionService, createFileWork);
+}
+}
+long end = System.currentTimeMillis();
+long time = (end - start);
+double average = (double) time / (double) (folderCount * fileCount);
+// done
+return average;
+}
+
+private void timeBuildStructure(NodeRef parentNodeRef, int folderCount, int fileCount)
+{
+System.out.println("Starting load of " + fileCount + " files in each of " + folderCount + " folders");
+double average = buildStructure(parentNodeRef, folderCount, fileCount);
+System.out.println(
+"[" + getName() + "] \n" +
+"   Created " + fileCount + " files in each of " + folderCount + " folders: \n" +
+"   Average: " + String.format("%10.2f", average) + "ms per file \n" +
+"   Average: " + String.format("%10.2f", 1000.0/average) + " files per second");
+}
+
+public void test1Folder10Children() throws Exception
+{
+timeBuildStructure(rootFolderRef, 1, 10);
+}
+
+public void test10Folders100ChildrenMultiTxn() throws Exception
+{
+timeBuildStructure(rootFolderRef, 10, 100);
+}
+//
+// public void test100Folders1Child() throws Exception
+// {
+// timeBuildStructure(rootFolderRef, 100, 1);
+// }
+//
+// public void test1000Folders10Children() throws Exception
+// {
+// timeBuildStructure(rootFolderRef, 1000, 10);
+// }
+//
+// public void test1000Folders100Children() throws Exception
+// {
+// timeBuildStructure(rootFolderRef, 5, 100);
+// }
+//
+// public void test1000Folders1000Children() throws Exception
+// {
+// timeBuildStructure(rootFolderRef, 1000, 1000);
+// }
+}
@@ -87,10 +87,9 @@ public class HibernatePermissionTest extends BaseSpringTest
 {
 // create a new Node
 DbAccessControlList accessControlList = new DbAccessControlListImpl();
-accessControlList.setNode(node);
 accessControlList.setInherits(true);
 
 Serializable id = getSession().save(accessControlList);
-
+node.setAccessControlList(accessControlList);
 // throw the reference away and get the a new one for the id
 accessControlList = (DbAccessControlList) getSession().load(DbAccessControlListImpl.class, id);
@@ -166,9 +165,9 @@ public class HibernatePermissionTest extends BaseSpringTest
 {
 // create a new access control list for the node
 DbAccessControlList accessControlList = new DbAccessControlListImpl();
-accessControlList.setNode(node);
 accessControlList.setInherits(true);
 Serializable nodeAclId = getSession().save(accessControlList);
+node.setAccessControlList(accessControlList);
 
 DbAuthority recipient = new DbAuthorityImpl();
 recipient.setRecipient("Test");
@@ -196,6 +195,7 @@ public class HibernatePermissionTest extends BaseSpringTest
 assertEquals(1, accessControlEntry.getAuthority().getExternalKeys().size());
 
 // Check that deletion of the list cascades
+node.setAccessControlList(null);
 getSession().delete(accessControlList);
 try
 {