From bfd0d2a337a7718130ff1c36b0b7ca490c47ccc6 Mon Sep 17 00:00:00 2001 From: Britt Park Date: Wed, 17 May 2006 22:42:03 +0000 Subject: [PATCH] Afternoon merge. git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/BRANCHES/WCM-DEV2/root@2915 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261 --- .../alfresco/domain/hibernate-cfg.properties | 2 +- ...custom-hibernate-dialect.properties.sample | 5 + .../alfresco/patch/patch-services-context.xml | 12 +- config/alfresco/scheduled-jobs-context.xml | 7 +- .../alfresco/repo/cache/EhCacheTracerJob.java | 29 ++- .../repo/domain/DbAccessControlList.java | 8 +- .../repo/domain/hibernate/ChildAssocImpl.java | 5 - .../hibernate/DbAccessControlListImpl.java | 19 +- .../domain/hibernate/HibernateNodeTest.java | 130 ++++++++++++ .../repo/domain/hibernate/Node.hbm.xml | 26 +-- .../repo/domain/hibernate/NodeAssocImpl.java | 5 - .../repo/domain/hibernate/Permission.hbm.xml | 16 +- .../PermissionsDaoComponentImpl.java | 45 ++-- .../repo/importer/FileImporterImpl.java | 11 +- .../repo/jscript/ScriptableHashMap.java | 11 +- .../FileFolderPerformanceTester.java | 193 ++++++++++++++++++ .../hibernate/HibernatePermissionTest.java | 6 +- 17 files changed, 445 insertions(+), 85 deletions(-) create mode 100644 source/java/org/alfresco/repo/model/filefolder/FileFolderPerformanceTester.java diff --git a/config/alfresco/domain/hibernate-cfg.properties b/config/alfresco/domain/hibernate-cfg.properties index 7fb803806a..8c37e5334d 100644 --- a/config/alfresco/domain/hibernate-cfg.properties +++ b/config/alfresco/domain/hibernate-cfg.properties @@ -2,7 +2,7 @@ # Hibernate configuration # hibernate.jdbc.use_streams_for_binary=true -hibernate.dialect=org.hibernate.dialect.MySQLDialect +hibernate.dialect=org.hibernate.dialect.MySQLInnoDBDialect hibernate.show_sql=false hibernate.hbm2ddl.auto=update hibernate.cache.use_query_cache=true diff --git a/config/alfresco/extension/custom-hibernate-dialect.properties.sample b/config/alfresco/extension/custom-hibernate-dialect.properties.sample index c78650c3e8..d7fe86dd5e 100644 --- a/config/alfresco/extension/custom-hibernate-dialect.properties.sample +++ b/config/alfresco/extension/custom-hibernate-dialect.properties.sample @@ -1,5 +1,10 @@ +# Sample Hibernate configuration to disnable Hibernate schema updates +# Values are "validate" or "update" (default) +#hibernate.hbm2ddl.auto=validate + # # Sample Hibernate configuration for changing Database dialect +# For a full list: http://www.hibernate.org/hib_docs/v3/reference/en/html_single/#configuration-optional-dialects # # diff --git a/config/alfresco/patch/patch-services-context.xml b/config/alfresco/patch/patch-services-context.xml index ad1eecf2c6..db0255e93e 100644 --- a/config/alfresco/patch/patch-services-context.xml +++ b/config/alfresco/patch/patch-services-context.xml @@ -279,8 +279,8 @@ patch.forumsIcons patch.forumsIcons.description 0 - 7 - 8 + 12 + 13 @@ -289,8 +289,8 @@ patch.emailTemplatesFolder patch.emailTemplatesFolder.description 0 - 8 - 9 + 12 + 13 @@ -303,8 +303,8 @@ patch.emailTemplatesContent patch.emailTemplatesContent.description 0 - 9 - 10 + 12 + 13 alfresco/templates/email_templates.acp diff --git a/config/alfresco/scheduled-jobs-context.xml b/config/alfresco/scheduled-jobs-context.xml index 2e9db77be8..0bf0735d19 100644 --- a/config/alfresco/scheduled-jobs-context.xml +++ b/config/alfresco/scheduled-jobs-context.xml @@ -134,7 +134,7 @@ - + @@ -143,6 +143,11 @@ + 3600000 diff --git a/source/java/org/alfresco/repo/cache/EhCacheTracerJob.java 
b/source/java/org/alfresco/repo/cache/EhCacheTracerJob.java index 6f6840865b..e9717bee1f 100644 --- a/source/java/org/alfresco/repo/cache/EhCacheTracerJob.java +++ b/source/java/org/alfresco/repo/cache/EhCacheTracerJob.java @@ -82,6 +82,7 @@ public class EhCacheTracerJob implements Job long maxHeapSize = Runtime.getRuntime().maxMemory(); long totalSize = 0L; + double estimatedMaxSize = 0L; // get all the caches String[] cacheNames = cacheManager.getCacheNames(); logger.debug("Dumping EHCache info:"); @@ -97,13 +98,18 @@ public class EhCacheTracerJob implements Job logger.debug(analysis); // get the size totalSize += analysis.getSize(); + estimatedMaxSize += Double.isNaN(analysis.getEstimatedMaxSize()) ? 0.0 : analysis.getEstimatedMaxSize(); } // check the size double sizePercentage = (double)totalSize / (double)maxHeapSize * 100.0; + double maxSizePercentage = estimatedMaxSize / (double)maxHeapSize * 100.0; String msg = String.format( - "EHCaches currently consume %5.2f MB or %3.2f percent of system VM size", + "EHCaches currently consume %5.2f MB or %3.2f percent of system VM size. \n" + + "The estimated maximum size is %5.2f MB or %3.2f percent of system VM size.", (double)totalSize / 1024.0 / 1024.0, - sizePercentage); + sizePercentage, + estimatedMaxSize / 1024.0 / 1024.0, + maxSizePercentage); logger.debug(msg); } @@ -111,6 +117,13 @@ public class EhCacheTracerJob implements Job { private Cache cache; private long size = 0L; + double sizeMB; + long maxSize; + long currentSize; + long hitCount; + long missCount; + double percentageFull; + double estMaxSize; public CacheAnalysis(Cache cache) throws CacheException { @@ -133,6 +146,11 @@ public class EhCacheTracerJob implements Job return size; } + public synchronized double getEstimatedMaxSize() + { + return estMaxSize; + } + @SuppressWarnings("unchecked") private synchronized void calculateSize() throws CacheException { @@ -143,6 +161,13 @@ public class EhCacheTracerJob implements Job Element element = cache.get(key); size += getSize(element); } + sizeMB = (double)size/1024.0/1024.0; + maxSize = cache.getMaxElementsInMemory(); + currentSize = cache.getMemoryStoreSize(); + hitCount = cache.getHitCount(); + missCount = cache.getMissCountNotFound(); + percentageFull = (double)currentSize / (double)maxSize * 100.0; + estMaxSize = size / (double) currentSize * (double) maxSize; } private long getSize(Serializable obj) diff --git a/source/java/org/alfresco/repo/domain/DbAccessControlList.java b/source/java/org/alfresco/repo/domain/DbAccessControlList.java index dad02839a7..27ec08b1a6 100644 --- a/source/java/org/alfresco/repo/domain/DbAccessControlList.java +++ b/source/java/org/alfresco/repo/domain/DbAccessControlList.java @@ -30,9 +30,11 @@ public interface DbAccessControlList { public long getId(); - public Node getNode(); - - public void setNode(Node node); + /** + * + * @return Returns the access control entries for this access control list + */ + public Set getEntries(); /** * diff --git a/source/java/org/alfresco/repo/domain/hibernate/ChildAssocImpl.java b/source/java/org/alfresco/repo/domain/hibernate/ChildAssocImpl.java index 8c338c180e..4d54cb993f 100644 --- a/source/java/org/alfresco/repo/domain/hibernate/ChildAssocImpl.java +++ b/source/java/org/alfresco/repo/domain/hibernate/ChildAssocImpl.java @@ -57,11 +57,6 @@ public class ChildAssocImpl implements ChildAssoc // add the forward associations this.setParent(parentNode); this.setChild(childNode); - // Force initialization of the inverse collections - // so that we don't queue 
additions to them. - // This can go if we move to set-based collections - parentNode.getChildAssocs().size(); - childNode.getParentAssocs().size(); // add the inverse associations parentNode.getChildAssocs().add(this); childNode.getParentAssocs().add(this); diff --git a/source/java/org/alfresco/repo/domain/hibernate/DbAccessControlListImpl.java b/source/java/org/alfresco/repo/domain/hibernate/DbAccessControlListImpl.java index 4fa4f1725e..d761e6a5dd 100644 --- a/source/java/org/alfresco/repo/domain/hibernate/DbAccessControlListImpl.java +++ b/source/java/org/alfresco/repo/domain/hibernate/DbAccessControlListImpl.java @@ -26,8 +26,6 @@ import org.alfresco.repo.domain.DbAccessControlList; import org.alfresco.repo.domain.DbAuthority; import org.alfresco.repo.domain.DbPermission; import org.alfresco.repo.domain.DbPermissionKey; -import org.alfresco.repo.domain.Node; -import org.alfresco.util.EqualsHelper; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hibernate.Session; @@ -42,7 +40,6 @@ public class DbAccessControlListImpl extends LifecycleAdapter implements DbAcces private static Log logger = LogFactory.getLog(DbAccessControlListImpl.class); private long id; - private Node node; private Set entries; private boolean inherits; @@ -57,7 +54,6 @@ public class DbAccessControlListImpl extends LifecycleAdapter implements DbAcces StringBuilder sb = new StringBuilder(128); sb.append("DbAccessControlListImpl") .append("[ id=").append(id) - .append(", node=").append(node) .append(", entries=").append(entries.size()) .append(", inherits=").append(inherits) .append("]"); @@ -77,14 +73,13 @@ public class DbAccessControlListImpl extends LifecycleAdapter implements DbAcces } DbAccessControlList other = (DbAccessControlList) o; - return (this.inherits == other.getInherits()) - && (EqualsHelper.nullSafeEquals(this.node, other.getNode())); + return (this.inherits == other.getInherits()); } @Override public int hashCode() { - return (node == null ? 0 : node.hashCode()); + return (inherits == false ? 
0 : 17); } public long getId() @@ -101,16 +96,6 @@ public class DbAccessControlListImpl extends LifecycleAdapter implements DbAcces this.id = id; } - public Node getNode() - { - return node; - } - - public void setNode(Node node) - { - this.node = node; - } - public Set getEntries() { return entries; diff --git a/source/java/org/alfresco/repo/domain/hibernate/HibernateNodeTest.java b/source/java/org/alfresco/repo/domain/hibernate/HibernateNodeTest.java index f291218312..f4a8e650c1 100644 --- a/source/java/org/alfresco/repo/domain/hibernate/HibernateNodeTest.java +++ b/source/java/org/alfresco/repo/domain/hibernate/HibernateNodeTest.java @@ -28,6 +28,7 @@ import javax.transaction.UserTransaction; import org.alfresco.model.ContentModel; import org.alfresco.repo.domain.ChildAssoc; +import org.alfresco.repo.domain.DbAccessControlList; import org.alfresco.repo.domain.Node; import org.alfresco.repo.domain.NodeAssoc; import org.alfresco.repo.domain.NodeKey; @@ -35,12 +36,14 @@ import org.alfresco.repo.domain.NodeStatus; import org.alfresco.repo.domain.PropertyValue; import org.alfresco.repo.domain.Store; import org.alfresco.repo.domain.StoreKey; +import org.alfresco.repo.transaction.AlfrescoTransactionSupport; import org.alfresco.service.cmr.dictionary.DataTypeDefinition; import org.alfresco.service.cmr.repository.StoreRef; import org.alfresco.service.namespace.QName; import org.alfresco.service.transaction.TransactionService; import org.alfresco.util.BaseSpringTest; import org.alfresco.util.GUID; +import org.hibernate.CacheMode; import org.hibernate.exception.ConstraintViolationException; /** @@ -385,6 +388,133 @@ public class HibernateNodeTest extends BaseSpringTest { txn.rollback(); } + } + + /** + * Create some simple parent-child relationships and flush them. Then read them back in without + * using the L2 cache. 
+ */ + public void testQueryJoins() throws Exception + { + getSession().setCacheMode(CacheMode.IGNORE); + // make a container node + Node containerNode = new NodeImpl(); + containerNode.setStore(store); + containerNode.setUuid(GUID.generate()); + containerNode.setTypeQName(ContentModel.TYPE_CONTAINER); + containerNode.getProperties().put(ContentModel.PROP_AUTHOR, new PropertyValue(DataTypeDefinition.TEXT, "ABC")); + containerNode.getProperties().put(ContentModel.PROP_ARCHIVED_BY, new PropertyValue(DataTypeDefinition.TEXT, "ABC")); + containerNode.getAspects().add(ContentModel.ASPECT_AUDITABLE); + Serializable containerNodeId = getSession().save(containerNode); + NodeKey containerNodeKey = new NodeKey(containerNode.getNodeRef()); + NodeStatus containerNodeStatus = new NodeStatusImpl(); + containerNodeStatus.setKey(containerNodeKey); + containerNodeStatus.setNode(containerNode); + containerNodeStatus.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); + getSession().save(containerNodeStatus); + // make content node 1 + Node contentNode1 = new NodeImpl(); + contentNode1.setStore(store); + contentNode1.setUuid(GUID.generate()); + contentNode1.setTypeQName(ContentModel.TYPE_CONTENT); + contentNode1.getProperties().put(ContentModel.PROP_AUTHOR, new PropertyValue(DataTypeDefinition.TEXT, "ABC")); + contentNode1.getProperties().put(ContentModel.PROP_ARCHIVED_BY, new PropertyValue(DataTypeDefinition.TEXT, "ABC")); + contentNode1.getAspects().add(ContentModel.ASPECT_AUDITABLE); + Serializable contentNode1Id = getSession().save(contentNode1); + NodeKey contentNodeKey1 = new NodeKey(contentNode1.getNodeRef()); + NodeStatus contentNodeStatus1 = new NodeStatusImpl(); + contentNodeStatus1.setKey(contentNodeKey1); + contentNodeStatus1.setNode(contentNode1); + contentNodeStatus1.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); + getSession().save(contentNodeStatus1); + // make content node 2 + Node contentNode2 = new NodeImpl(); + contentNode2.setStore(store); + contentNode2.setUuid(GUID.generate()); + contentNode2.setTypeQName(ContentModel.TYPE_CONTENT); + Serializable contentNode2Id = getSession().save(contentNode2); + contentNode2.getProperties().put(ContentModel.PROP_AUTHOR, new PropertyValue(DataTypeDefinition.TEXT, "ABC")); + contentNode2.getProperties().put(ContentModel.PROP_ARCHIVED_BY, new PropertyValue(DataTypeDefinition.TEXT, "ABC")); + contentNode2.getAspects().add(ContentModel.ASPECT_AUDITABLE); + NodeKey contentNodeKey2 = new NodeKey(contentNode2.getNodeRef()); + NodeStatus contentNodeStatus2 = new NodeStatusImpl(); + contentNodeStatus2.setKey(contentNodeKey2); + contentNodeStatus2.setNode(contentNode2); + contentNodeStatus2.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); + getSession().save(contentNodeStatus2); + // create an association to content 1 + ChildAssoc assoc1 = new ChildAssocImpl(); + assoc1.setIsPrimary(true); + assoc1.setTypeQName(QName.createQName(null, "type1")); + assoc1.setQname(QName.createQName(null, "number1")); + assoc1.buildAssociation(containerNode, contentNode1); + getSession().save(assoc1); + // create an association to content 2 + ChildAssoc assoc2 = new ChildAssocImpl(); + assoc2.setIsPrimary(true); + assoc2.setTypeQName(QName.createQName(null, "type2")); + assoc2.setQname(QName.createQName(null, "number2")); + assoc2.buildAssociation(containerNode, contentNode2); + getSession().save(assoc2); + + // make sure that there are no entities cached in either L1 or L2 + getSession().flush(); + getSession().clear(); + + // now read 
the structure back in from the container down + containerNodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, containerNodeKey); + containerNode = containerNodeStatus.getNode(); + Collection assocs = containerNode.getChildAssocs(); + for (ChildAssoc assoc : assocs) + { + Node childNode = assoc.getChild(); + Store store = childNode.getStore(); + childNode.getAspects().size(); + childNode.getProperties().size(); + childNode.getParentAssocs().size(); + childNode.getChildAssocs().size(); + childNode.getSourceNodeAssocs().size(); + childNode.getTargetNodeAssocs().size(); + DbAccessControlList acl = childNode.getAccessControlList(); + if (acl != null) + { + acl.getEntries().size(); + } + } + + // clear out again + getSession().clear(); + + // now remove a property from each child + containerNodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, containerNodeKey); + containerNode = containerNodeStatus.getNode(); + assocs = containerNode.getChildAssocs(); + for (ChildAssoc assoc : assocs) + { + Node childNode = assoc.getChild(); + PropertyValue removed = childNode.getProperties().remove(ContentModel.PROP_ARCHIVED_BY); + assertNotNull("Property was not present", removed); + } + // expect that just the specific property gets removed in the delete statement + getSession().flush(); + getSession().clear(); + + // Create a second association to content 2 + // create an association to content 2 + containerNodeStatus = (NodeStatus) getSession().get(NodeStatusImpl.class, containerNodeKey); + containerNode = containerNodeStatus.getNode(); + contentNodeStatus2 = (NodeStatus) getSession().get(NodeStatusImpl.class, contentNodeKey2); + contentNode2 = contentNodeStatus2.getNode(); + ChildAssoc assoc3 = new ChildAssocImpl(); + assoc3.setIsPrimary(false); + assoc3.setTypeQName(QName.createQName(null, "type3")); + assoc3.setQname(QName.createQName(null, "number3")); + assoc3.buildAssociation(containerNode, contentNode2); // check whether the children are pulled in for this + getSession().save(assoc3); + + // flush it + getSession().flush(); + getSession().clear(); } } \ No newline at end of file diff --git a/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml index 6c7b1d4f58..c832657637 100644 --- a/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml +++ b/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml @@ -38,14 +38,16 @@ - - + + unique="false" + not-null="false" + cascade="delete" /> @@ -98,8 +100,8 @@ @@ -173,8 +175,8 @@ @@ -182,8 +184,8 @@ diff --git a/source/java/org/alfresco/repo/domain/hibernate/NodeAssocImpl.java b/source/java/org/alfresco/repo/domain/hibernate/NodeAssocImpl.java index 881226e691..b577094017 100644 --- a/source/java/org/alfresco/repo/domain/hibernate/NodeAssocImpl.java +++ b/source/java/org/alfresco/repo/domain/hibernate/NodeAssocImpl.java @@ -54,11 +54,6 @@ public class NodeAssocImpl implements NodeAssoc // add the forward associations this.setTarget(targetNode); this.setSource(sourceNode); - // Force initialization of the inverse collections - // so that we don't queue additions to them. 
- // This can go if we move to set-based collections - sourceNode.getSourceNodeAssocs().size(); - targetNode.getTargetNodeAssocs().size(); // add the inverse associations sourceNode.getTargetNodeAssocs().add(this); targetNode.getSourceNodeAssocs().add(this); diff --git a/source/java/org/alfresco/repo/domain/hibernate/Permission.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/Permission.hbm.xml index 9d72a4fe85..cc50940a3c 100644 --- a/source/java/org/alfresco/repo/domain/hibernate/Permission.hbm.xml +++ b/source/java/org/alfresco/repo/domain/hibernate/Permission.hbm.xml @@ -19,13 +19,15 @@ - - - + + + + emptySet()); + return snpe; + } + else + { + Set entries = acl.getEntries(); + SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry( + node.getNodeRef(), + acl.getInherits(), + createSimplePermissionEntries(node, entries)); + return snpe; } - Set entries = acl.getEntries(); - SimpleNodePermissionEntry snpe = new SimpleNodePermissionEntry( - acl.getNode().getNodeRef(), - acl.getInherits(), - createSimplePermissionEntries(entries)); - return snpe; } /** * @param entries access control entries * @return Returns a unique set of entries that can be given back to the outside world */ - private Set createSimplePermissionEntries(Collection entries) + private Set createSimplePermissionEntries(Node node, Collection entries) { if (entries == null) { @@ -481,20 +488,20 @@ public class PermissionsDaoComponentImpl extends HibernateDaoSupport implements { for (DbAccessControlEntry entry : entries) { - spes.add(createSimplePermissionEntry(entry)); + spes.add(createSimplePermissionEntry(node, entry)); } } return spes; } - private static SimplePermissionEntry createSimplePermissionEntry(DbAccessControlEntry ace) + private static SimplePermissionEntry createSimplePermissionEntry(Node node, DbAccessControlEntry ace) { if (ace == null) { return null; } return new SimplePermissionEntry( - ace.getAccessControlList().getNode().getNodeRef(), + node.getNodeRef(), createSimplePermissionReference(ace.getPermission()), ace.getAuthority().getRecipient(), ace.isAllowed() ? 
AccessStatus.ALLOWED : AccessStatus.DENIED); diff --git a/source/java/org/alfresco/repo/importer/FileImporterImpl.java b/source/java/org/alfresco/repo/importer/FileImporterImpl.java index c510ca9bae..aef5fe5ea4 100644 --- a/source/java/org/alfresco/repo/importer/FileImporterImpl.java +++ b/source/java/org/alfresco/repo/importer/FileImporterImpl.java @@ -159,11 +159,18 @@ public class FileImporterImpl implements FileImporter final File file, FileFilter filter, boolean recurse, - String containerName) throws Exception + final String containerName) throws Exception { if (containerName != null) { - NodeRef newContainer = createDirectory(container, containerName, containerName); + TransactionWork createDirectoryWork = new TransactionWork() + { + public NodeRef doWork() throws Exception + { + return createDirectory(container, containerName, containerName); + } + }; + NodeRef newContainer = TransactionUtil.executeInUserTransaction(transactionService, createDirectoryWork); return create(counter, newContainer, file, filter, recurse, null); } diff --git a/source/java/org/alfresco/repo/jscript/ScriptableHashMap.java b/source/java/org/alfresco/repo/jscript/ScriptableHashMap.java index bf4afec1ee..95b2a72d6d 100644 --- a/source/java/org/alfresco/repo/jscript/ScriptableHashMap.java +++ b/source/java/org/alfresco/repo/jscript/ScriptableHashMap.java @@ -40,8 +40,15 @@ public class ScriptableHashMap extends HashMap implements Scriptable */ public Object get(String name, Scriptable start) { - // get the property from the underlying map - return get(name); + // get the property from the underlying QName map + if ("length".equals(name)) + { + return this.size(); + } + else + { + return get(name); + } } /** diff --git a/source/java/org/alfresco/repo/model/filefolder/FileFolderPerformanceTester.java b/source/java/org/alfresco/repo/model/filefolder/FileFolderPerformanceTester.java new file mode 100644 index 0000000000..fd99ea8596 --- /dev/null +++ b/source/java/org/alfresco/repo/model/filefolder/FileFolderPerformanceTester.java @@ -0,0 +1,193 @@ +/* + * Copyright (C) 2005 Alfresco, Inc. + * + * Licensed under the Mozilla Public License version 1.1 + * with a permitted attribution clause. You may obtain a + * copy of the License at + * + * http://www.alfresco.org/legal/license.txt + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + * either express or implied. See the License for the specific + * language governing permissions and limitations under the + * License. 
+ */ +package org.alfresco.repo.model.filefolder; + +import java.io.File; +import java.util.ArrayList; +import java.util.List; + +import junit.framework.TestCase; + +import org.alfresco.model.ContentModel; +import org.alfresco.repo.content.transform.AbstractContentTransformerTest; +import org.alfresco.repo.transaction.TransactionUtil; +import org.alfresco.repo.transaction.TransactionUtil.TransactionWork; +import org.alfresco.service.ServiceRegistry; +import org.alfresco.service.cmr.model.FileFolderService; +import org.alfresco.service.cmr.model.FileInfo; +import org.alfresco.service.cmr.repository.ContentWriter; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.repository.NodeService; +import org.alfresco.service.cmr.repository.StoreRef; +import org.alfresco.service.namespace.NamespaceService; +import org.alfresco.service.namespace.QName; +import org.alfresco.service.transaction.TransactionService; +import org.alfresco.util.ApplicationContextHelper; +import org.alfresco.util.GUID; +import org.springframework.context.ApplicationContext; + +/** + * Tests around some of the data structures that lead to performance + * degradation. We use the {@link org.alfresco.service.cmr.model.FileFolderService FileFolderService} + * as it provides the most convenient and most common test scenarios. + *

+ * Note that this test is not designed to validate performance figures, but is + * rather a handy tool for doing benchmarking. It is therefore not named *Test as is the + * pattern for getting tests run by the continuous build. + * + * @author Derek Hulley + */ +public class FileFolderPerformanceTester extends TestCase +{ + private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext(); + + private TransactionService transactionService; + private NodeService nodeService; + private FileFolderService fileFolderService; + private StoreRef storeRef; + private NodeRef rootFolderRef; + private File dataFile; + + @Override + public void setUp() throws Exception + { + ServiceRegistry serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY); + transactionService = serviceRegistry.getTransactionService(); + nodeService = serviceRegistry.getNodeService(); + fileFolderService = serviceRegistry.getFileFolderService(); + + // create a folder root to work in + storeRef = nodeService.createStore(StoreRef.PROTOCOL_WORKSPACE, getName() + "_" + System.currentTimeMillis()); + NodeRef rootNodeRef = nodeService.getRootNode(storeRef); + rootFolderRef = nodeService.createNode( + rootNodeRef, + ContentModel.ASSOC_CHILDREN, + QName.createQName(NamespaceService.ALFRESCO_URI, getName()), + ContentModel.TYPE_FOLDER).getChildRef(); + dataFile = AbstractContentTransformerTest.loadQuickTestFile("txt"); + } + + public void testSetUp() throws Exception + { + assertNotNull(dataFile); + } + + /** + * Creates folderCount folders below the given parent and populates each folder with + * fileCount files. The folders will be created as siblings in one go, but the files + * are added one to each folder until each folder has the presribed number of files within it. + * This can therefore be used to test the performance when the L2 cache sizes are exceeded. + *

+ * Each creation (file or folder) uses the REQUIRES_NEW transaction declaration. + * + * @param parentNodeRef the level zero parent + * @return Returns the average time (ms) to create the files only + */ + private double buildStructure(final NodeRef parentNodeRef, final int folderCount, final int fileCount) + { + List folders = new ArrayList(folderCount); + for (int i = 0; i < folderCount; i++) + { + TransactionWork createFolderWork = new TransactionWork() + { + public FileInfo doWork() throws Exception + { + FileInfo folderInfo = fileFolderService.create( + parentNodeRef, + GUID.generate(), + ContentModel.TYPE_FOLDER); + // done + return folderInfo; + } + }; + FileInfo folderInfo = TransactionUtil.executeInUserTransaction(transactionService, createFolderWork); + // keep the reference + folders.add(folderInfo.getNodeRef()); + } + // now progress around the folders until they have been populated + long start = System.currentTimeMillis(); + for (int i = 0; i < fileCount; i++) + { + for (final NodeRef folderRef : folders) + { + TransactionWork createFileWork = new TransactionWork() + { + public FileInfo doWork() throws Exception + { + FileInfo fileInfo = fileFolderService.create( + folderRef, + GUID.generate(), + ContentModel.TYPE_CONTENT); + NodeRef nodeRef = fileInfo.getNodeRef(); + // write the content + ContentWriter writer = fileFolderService.getWriter(nodeRef); + writer.putContent(dataFile); + // done + return fileInfo; + } + }; + TransactionUtil.executeInUserTransaction(transactionService, createFileWork); + } + } + long end = System.currentTimeMillis(); + long time = (end - start); + double average = (double) time / (double) (folderCount * fileCount); + // done + return average; + } + + private void timeBuildStructure(NodeRef parentNodeRef, int folderCount, int fileCount) + { + System.out.println("Starting load of " + fileCount + " files in each of " + folderCount + " folders"); + double average = buildStructure(parentNodeRef, folderCount, fileCount); + System.out.println( + "[" + getName() + "] \n" + + " Created " + fileCount + " files in each of " + folderCount + " folders: \n" + + " Average: " + String.format("%10.2f", average) + "ms per file \n" + + " Average: " + String.format("%10.2f", 1000.0/average) + " files per second"); + } + + public void test1Folder10Children() throws Exception + { + timeBuildStructure(rootFolderRef, 1, 10); + } + + public void test10Folders100ChildrenMultiTxn() throws Exception + { + timeBuildStructure(rootFolderRef, 10, 100); + } +// +// public void test100Folders1Child() throws Exception +// { +// timeBuildStructure(rootFolderRef, 100, 1); +// } +// +// public void test1000Folders10Children() throws Exception +// { +// timeBuildStructure(rootFolderRef, 1000, 10); +// } +// +// public void test1000Folders100Children() throws Exception +// { +// timeBuildStructure(rootFolderRef, 5, 100); +// } +// +// public void test1000Folders1000Children() throws Exception +// { +// timeBuildStructure(rootFolderRef, 1000, 1000); +// } +} diff --git a/source/java/org/alfresco/repo/security/permissions/impl/hibernate/HibernatePermissionTest.java b/source/java/org/alfresco/repo/security/permissions/impl/hibernate/HibernatePermissionTest.java index 87933c23d7..f156694ea8 100644 --- a/source/java/org/alfresco/repo/security/permissions/impl/hibernate/HibernatePermissionTest.java +++ b/source/java/org/alfresco/repo/security/permissions/impl/hibernate/HibernatePermissionTest.java @@ -87,10 +87,9 @@ public class HibernatePermissionTest extends BaseSpringTest { // create a new 
Node DbAccessControlList accessControlList = new DbAccessControlListImpl(); - accessControlList.setNode(node); accessControlList.setInherits(true); - Serializable id = getSession().save(accessControlList); + node.setAccessControlList(accessControlList); // throw the reference away and get the a new one for the id accessControlList = (DbAccessControlList) getSession().load(DbAccessControlListImpl.class, id); @@ -166,9 +165,9 @@ public class HibernatePermissionTest extends BaseSpringTest { // create a new access control list for the node DbAccessControlList accessControlList = new DbAccessControlListImpl(); - accessControlList.setNode(node); accessControlList.setInherits(true); Serializable nodeAclId = getSession().save(accessControlList); + node.setAccessControlList(accessControlList); DbAuthority recipient = new DbAuthorityImpl(); recipient.setRecipient("Test"); @@ -196,6 +195,7 @@ public class HibernatePermissionTest extends BaseSpringTest assertEquals(1, accessControlEntry.getAuthority().getExternalKeys().size()); // Check that deletion of the list cascades + node.setAccessControlList(null); getSession().delete(accessControlList); try {
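For reference, the new "estimated maximum size" figure reported by EhCacheTracerJob above is a straight proportional extrapolation: the bytes measured for the elements currently held in the memory store, scaled up to the configured getMaxElementsInMemory() limit, with NaN results (nothing measured from an empty memory store) treated as zero before summing. A minimal standalone sketch of that arithmetic, using illustrative names that are not part of the patch:

// Sketch only: reproduces the calculateSize() extrapolation in EhCacheTracerJob,
// estMaxSize = size / currentSize * maxSize, without depending on EHCache itself.
public class CacheSizeEstimatorSketch
{
    /**
     * @param measuredBytes   bytes counted by walking the elements currently in memory
     * @param currentElements elements currently held in the memory store
     * @param maxElements     configured maximum elements for the memory store
     * @return projected bytes if the cache were full; NaN when nothing was measured (0/0)
     */
    public static double estimateMaxBytes(long measuredBytes, long currentElements, long maxElements)
    {
        // 0 bytes over 0 elements gives NaN; the aggregating loop in the patch
        // guards with Double.isNaN(...) and counts such caches as 0.0.
        return (double) measuredBytes / (double) currentElements * (double) maxElements;
    }

    public static void main(String[] args)
    {
        long measuredBytes = 5L * 1024L * 1024L;   // 5 MB sampled from the memory store
        double estimated = estimateMaxBytes(measuredBytes, 1000L, 5000L);
        double percentOfHeap = estimated / (double) Runtime.getRuntime().maxMemory() * 100.0;
        System.out.printf("Estimated maximum size: %5.2f MB (%3.2f%% of VM size)%n",
                estimated / 1024.0 / 1024.0, percentOfHeap);
    }
}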
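The permission changes in this patch invert the ACL ownership: DbAccessControlList no longer carries a Node, the node now references its list (with cascade="delete" added in Node.hbm.xml), and PermissionsDaoComponentImpl passes the Node down so the NodeRef for each SimplePermissionEntry comes from the node rather than from the removed acl.getNode(). The resulting lifecycle, condensed from the updated HibernatePermissionTest (a fragment, not a complete test; session and node come from the test fixture):

// Create a list and attach it to the node -- the node owns the list now.
DbAccessControlList acl = new DbAccessControlListImpl();
acl.setInherits(true);
Serializable aclId = getSession().save(acl);
node.setAccessControlList(acl);

// ... populate acl.getEntries() with DbAccessControlEntry rows as before ...

// Deletion: the test detaches the list from the node before deleting it.
node.setAccessControlList(null);
getSession().delete(acl);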