From 472fde85093e20021217768d3b39754d39497f71 Mon Sep 17 00:00:00 2001 From: Derek Hulley Date: Tue, 22 Jun 2010 13:44:03 +0000 Subject: [PATCH] SAIL-294: DAO refactoring - Ripping out Hibernate entity classes and refactor patches git-svn-id: https://svn.alfresco.com/repos/alfresco-enterprise/alfresco/HEAD/root@20753 c4b6b30b-aa2e-2d43-bbcb-ca4b014f7261 --- config/alfresco/avm-services-context.xml | 28 - .../bm-remote-loader-context.xml.sample | 3 - config/alfresco/hibernate-context.xml | 49 - .../patch-common-SqlMap.xml | 86 +- .../messages/patch-service.properties | 4 +- config/alfresco/node-services-context.xml | 3 - .../alfresco/patch/patch-services-context.xml | 35 +- config/alfresco/repository.properties | 5 - .../impl/AuthorityDefaultZonesPatch.java | 18 +- .../patch/impl/FixNameCrcValuesPatch.java | 333 +-- .../patch/impl/InvalidNameEndingPatch.java | 213 -- .../impl/LinkNodeFileExtensionPatch.java | 147 +- .../org/alfresco/repo/domain/ChildAssoc.java | 181 -- .../java/org/alfresco/repo/domain/Node.java | 101 - .../org/alfresco/repo/domain/NodeAssoc.java | 90 - .../java/org/alfresco/repo/domain/Server.java | 37 - .../java/org/alfresco/repo/domain/Store.java | 75 - .../domain/hibernate/AppliedPatch.hbm.xml | 50 - .../repo/domain/hibernate/ChildAssocImpl.java | 548 ---- .../hibernate/DirtySessionAnnotation.java | 41 - .../DirtySessionMethodInterceptor.java | 444 ---- .../hibernate/HibernateSessionHelper.java | 114 - .../HibernateSessionHelperResource.java | 261 -- ...bernateSessionHelperResourceException.java | 55 - ...ibernateSessionHelperResourceProvider.java | 46 - .../hibernate/HibernateSessionHelperTest.java | 638 ----- .../hibernate/HibernateSessionSupport.java | 44 - .../domain/hibernate/LifecycleAdapter.java | 77 - .../repo/domain/hibernate/LocaleUserType.java | 125 - .../repo/domain/hibernate/Node.hbm.xml | 241 -- .../repo/domain/hibernate/NodeImpl.java | 336 --- .../repo/domain/hibernate/ServerImpl.java | 93 - .../hibernate/SessionSizeResourceManager.java | 261 -- .../repo/domain/hibernate/StoreImpl.java | 204 -- .../repo/domain/hibernate/Transaction.hbm.xml | 215 -- .../domain/hibernate/TransactionImpl.java | 120 - .../repo/domain/node/ChildAssocEntity.java | 2 +- .../alfresco/repo/domain/patch/PatchDAO.java | 40 + .../domain/patch/ibatis/PatchDAOImpl.java | 85 +- .../repo/node/BaseNodeServiceTest.java | 67 +- .../repo/node/FullNodeServiceTest.java | 5 - .../node/archive/ArchiveAndRestoreTest.java | 8 - .../repo/node/db/DbNodeServiceImpl.java | 11 +- .../alfresco/repo/node/db/NodeDaoService.java | 39 - .../HibernateNodeDaoServiceImpl.java | 2339 ----------------- .../hibernate/SessionSizeManagementTest.java | 130 - .../index/IndexTransactionTrackerTest.java | 1 - .../repo/remote/LoaderRemoteServer.java | 31 +- .../search/impl/lucene/ADMLuceneTest.java | 6 - .../impl/AbstractPermissionTest.java | 1 - .../impl/model/PermissionModelTest.java | 8 - .../AlfrescoTransactionSupport.java | 8 +- .../repo/version/VersionMigrator.java | 5 - 53 files changed, 420 insertions(+), 7687 deletions(-) delete mode 100644 source/java/org/alfresco/repo/admin/patch/impl/InvalidNameEndingPatch.java delete mode 100644 source/java/org/alfresco/repo/domain/ChildAssoc.java delete mode 100644 source/java/org/alfresco/repo/domain/Node.java delete mode 100644 source/java/org/alfresco/repo/domain/NodeAssoc.java delete mode 100644 source/java/org/alfresco/repo/domain/Server.java delete mode 100644 source/java/org/alfresco/repo/domain/Store.java delete mode 100644 
source/java/org/alfresco/repo/domain/hibernate/AppliedPatch.hbm.xml delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/ChildAssocImpl.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/DirtySessionAnnotation.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/DirtySessionMethodInterceptor.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelper.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperResource.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperResourceException.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperResourceProvider.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperTest.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/HibernateSessionSupport.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/LifecycleAdapter.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/LocaleUserType.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/NodeImpl.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/ServerImpl.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/SessionSizeResourceManager.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/StoreImpl.java delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml delete mode 100644 source/java/org/alfresco/repo/domain/hibernate/TransactionImpl.java delete mode 100644 source/java/org/alfresco/repo/node/db/NodeDaoService.java delete mode 100644 source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java delete mode 100644 source/java/org/alfresco/repo/node/db/hibernate/SessionSizeManagementTest.java diff --git a/config/alfresco/avm-services-context.xml b/config/alfresco/avm-services-context.xml index beabe68e52..7770ef2da2 100644 --- a/config/alfresco/avm-services-context.xml +++ b/config/alfresco/avm-services-context.xml @@ -252,32 +252,4 @@ - - - - - - - - 500 - - - 250 - - - - - - - - - 100 - - - 100 - - - 0 - - diff --git a/config/alfresco/extension/bm-remote-loader-context.xml.sample b/config/alfresco/extension/bm-remote-loader-context.xml.sample index 50d48a5741..3bcaebce65 100644 --- a/config/alfresco/extension/bm-remote-loader-context.xml.sample +++ b/config/alfresco/extension/bm-remote-loader-context.xml.sample @@ -48,9 +48,6 @@ - - - diff --git a/config/alfresco/hibernate-context.xml b/config/alfresco/hibernate-context.xml index ca4971330d..266403e976 100644 --- a/config/alfresco/hibernate-context.xml +++ b/config/alfresco/hibernate-context.xml @@ -52,13 +52,6 @@ - - - - - org/alfresco/repo/domain/hibernate/Node.hbm.xml - org/alfresco/repo/domain/hibernate/Transaction.hbm.xml - org/alfresco/repo/audit/hibernate/Audit.hbm.xml @@ -160,11 +153,6 @@ - ${cache.strategy} - ${cache.strategy} - ${cache.strategy} - ${cache.strategy} - ${cache.strategy} ${cache.strategy} ${cache.strategy} ${cache.strategy} @@ -172,7 +160,6 @@ - ${cache.strategy} @@ -205,36 +192,6 @@ - - - - - - - - 10000 - - - 5000 - - - - - - - - 2000 - - - 50000 - - - 3 - - - - - @@ -247,10 +204,4 @@ - - - - - - \ No newline at end of file diff --git a/config/alfresco/ibatis/org.hibernate.dialect.Dialect/patch-common-SqlMap.xml 
b/config/alfresco/ibatis/org.hibernate.dialect.Dialect/patch-common-SqlMap.xml index 2383cd560b..6ef08ee713 100644 --- a/config/alfresco/ibatis/org.hibernate.dialect.Dialect/patch-common-SqlMap.xml +++ b/config/alfresco/ibatis/org.hibernate.dialect.Dialect/patch-common-SqlMap.xml @@ -16,7 +16,7 @@ - + @@ -24,16 +24,32 @@ - + + + + + + + + + + + + + + + + + - + @@ -41,11 +57,23 @@ - + + + + + + + + + + + + + @@ -102,6 +130,46 @@ alf_authority + + + + + + @@ -129,5 +197,15 @@ where content_mimetype_id = ? + + + update + alf_child_assoc + set + qname_crc = ?, + child_node_name_crc = ? + where + id = ? + \ No newline at end of file diff --git a/config/alfresco/messages/patch-service.properties b/config/alfresco/messages/patch-service.properties index adc93c6ca2..3ab8208ef5 100644 --- a/config/alfresco/messages/patch-service.properties +++ b/config/alfresco/messages/patch-service.properties @@ -298,8 +298,8 @@ patch.authorityDefaultZonesPatch.groups= Adding groups to zones ... patch.fixNameCrcValues.description=Fixes name and qname CRC32 values to match UTF-8 encoding. patch.fixNameCrcValues.result=Fixed CRC32 values for UTF-8 encoding for {0} node child associations. See file {1} for details. -patch.fixNameCrcValues.fixed=Updated CRC32 values for node ID {0}, name ''{1}'': {2} -> {3}, qname ''{4}'': {5} -> {6}. -patch.fixNameCrcValues.unableToChange=Failed to update the CRC32 value for node ID {0}: \n Node name: {1} \n name CRC old: {2} \n name CRC new: {3} \n Node qname: {4} \n qname CRC old: {5} \n qname CRC new: {6} \n Error: {7} +patch.fixNameCrcValues.fixed=Updated CRC32 values for association ID {0}, name ''{1}'': {2} -> {3}, qname ''{4}'': {5} -> {6}. +patch.fixNameCrcValues.unableToChange=Failed to update the CRC32 value for association ID {0}: \n Node name: {1} \n name CRC old: {2} \n name CRC new: {3} \n qname: {4} \n qname CRC old: {5} \n qname CRC new: {6} \n Error: {7} patch.personUsagePatch.description=Add person 'cm:sizeCurrent' property (if missing). patch.personUsagePatch.result1=Added 'cm:sizeCurrent' property to {0} people that were missing this property. diff --git a/config/alfresco/node-services-context.xml b/config/alfresco/node-services-context.xml index 089230a65a..75f999cc6e 100644 --- a/config/alfresco/node-services-context.xml +++ b/config/alfresco/node-services-context.xml @@ -233,9 +233,6 @@ - - ${system.cascadeDeleteInTransaction} - diff --git a/config/alfresco/patch/patch-services-context.xml b/config/alfresco/patch/patch-services-context.xml index 1eb2039b0d..78468cd02f 100644 --- a/config/alfresco/patch/patch-services-context.xml +++ b/config/alfresco/patch/patch-services-context.xml @@ -468,15 +468,14 @@ 3.3.x - + patch.InvalidNameEnding patch.invalidNameEnding.description 0 21 22 - - - + + 3.3.x @@ -530,9 +529,11 @@ 0 33 34 - - - + + + + + @@ -1810,15 +1811,12 @@ - + - + - - - @@ -1958,11 +1956,8 @@ - - - - - + + @@ -1970,12 +1965,6 @@ - - - - - - diff --git a/config/alfresco/repository.properties b/config/alfresco/repository.properties index c70e28b25c..5ac66c6cc2 100644 --- a/config/alfresco/repository.properties +++ b/config/alfresco/repository.properties @@ -126,11 +126,6 @@ system.maximumStringLength=-1 # events may not group if there are post action listener registered (this is not the case with the default distribution) system.hibernateMaxExecutions=20000 -# -# Determine if document deletion and archival must cascade delete in the same -# transaction that triggers the operation. 
-system.cascadeDeleteInTransaction=true - # # Determine if modification timestamp propagation from child to parent nodes is respected or not. # Even if 'true', the functionality is only supported for child associations that declare the diff --git a/source/java/org/alfresco/repo/admin/patch/impl/AuthorityDefaultZonesPatch.java b/source/java/org/alfresco/repo/admin/patch/impl/AuthorityDefaultZonesPatch.java index d525aae9cf..33134be336 100644 --- a/source/java/org/alfresco/repo/admin/patch/impl/AuthorityDefaultZonesPatch.java +++ b/source/java/org/alfresco/repo/admin/patch/impl/AuthorityDefaultZonesPatch.java @@ -23,10 +23,8 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import org.springframework.extensions.surf.util.I18NUtil; import org.alfresco.repo.admin.patch.AbstractPatch; import org.alfresco.repo.admin.patch.PatchExecuter; -import org.alfresco.repo.domain.hibernate.HibernateSessionHelper; import org.alfresco.service.cmr.avm.AVMService; import org.alfresco.service.cmr.avm.AVMStoreDescriptor; import org.alfresco.service.cmr.security.AuthorityService; @@ -35,6 +33,7 @@ import org.alfresco.service.cmr.site.SiteInfo; import org.alfresco.service.cmr.site.SiteService; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.springframework.extensions.surf.util.I18NUtil; /** * Patch to assign users and groups to default zones @@ -51,13 +50,9 @@ public class AuthorityDefaultZonesPatch extends AbstractPatch /** The authority service. */ private AuthorityService authorityService; - private AVMService avmService; - private SiteService siteService; - private HibernateSessionHelper hibernateSessionHelper; - /** * Sets the authority service. * @@ -87,14 +82,6 @@ public class AuthorityDefaultZonesPatch extends AbstractPatch this.siteService = siteService; } - /** - * @param hibernateSessionHelper - */ - public void setHibernateSessionHelper(HibernateSessionHelper hibernateSessionHelper) - { - this.hibernateSessionHelper = hibernateSessionHelper; - } - @Override protected String applyInternal() throws Exception { @@ -167,9 +154,7 @@ public class AuthorityDefaultZonesPatch extends AbstractPatch private int setZones(AuthorityType authorityType, List actions, int total, int start) { int count = start; - hibernateSessionHelper.mark(); Set authorities = authorityService.getAllAuthorities(authorityType); - hibernateSessionHelper.reset(); for (String authority : authorities) { for (Action action : actions) @@ -188,7 +173,6 @@ public class AuthorityDefaultZonesPatch extends AbstractPatch break; } } - hibernateSessionHelper.reset(); count++; reportProgress(total, count); } diff --git a/source/java/org/alfresco/repo/admin/patch/impl/FixNameCrcValuesPatch.java b/source/java/org/alfresco/repo/admin/patch/impl/FixNameCrcValuesPatch.java index 12dfddda01..bbc1395df5 100644 --- a/source/java/org/alfresco/repo/admin/patch/impl/FixNameCrcValuesPatch.java +++ b/source/java/org/alfresco/repo/admin/patch/impl/FixNameCrcValuesPatch.java @@ -21,45 +21,31 @@ package org.alfresco.repo.admin.patch.impl; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; -import java.io.UnsupportedEncodingException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.sql.Savepoint; -import java.util.AbstractCollection; import java.util.Collection; import java.util.Date; -import java.util.Iterator; -import java.util.zip.CRC32; +import java.util.List; +import java.util.Map; -import org.alfresco.model.ContentModel; import 
org.alfresco.repo.admin.patch.AbstractPatch; import org.alfresco.repo.admin.patch.PatchExecuter; +import org.alfresco.repo.batch.BatchProcessWorkProvider; import org.alfresco.repo.batch.BatchProcessor; import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorker; -import org.alfresco.repo.domain.ChildAssoc; -import org.alfresco.repo.domain.Node; import org.alfresco.repo.domain.control.ControlDAO; -import org.alfresco.repo.domain.hibernate.ChildAssocImpl; -import org.alfresco.repo.domain.node.NodeDAO; +import org.alfresco.repo.domain.node.ChildAssocEntity; +import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.repo.domain.qname.QNameDAO; -import org.alfresco.repo.security.authentication.AuthenticationUtil; -import org.alfresco.service.cmr.admin.PatchException; -import org.alfresco.service.cmr.rule.RuleService; +import org.alfresco.service.cmr.dictionary.AssociationDefinition; +import org.alfresco.service.cmr.dictionary.DictionaryException; +import org.alfresco.service.cmr.dictionary.DictionaryService; import org.alfresco.service.namespace.QName; import org.alfresco.util.TempFileProvider; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.hibernate.SQLQuery; -import org.hibernate.ScrollMode; -import org.hibernate.ScrollableResults; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.dialect.Dialect; -import org.hibernate.dialect.MySQLDialect; -import org.hibernate.type.LongType; import org.springframework.extensions.surf.util.I18NUtil; -import org.springframework.orm.hibernate3.HibernateCallback; -import org.springframework.orm.hibernate3.support.HibernateDaoSupport; /** * Fixes ETWOTWO-1133. @@ -74,27 +60,21 @@ public class FixNameCrcValuesPatch extends AbstractPatch private static final String MSG_REWRITTEN = "patch.fixNameCrcValues.fixed"; private static final String MSG_UNABLE_TO_CHANGE = "patch.fixNameCrcValues.unableToChange"; - private SessionFactory sessionFactory; - private NodeDAO nodeDAO; + private PatchDAO patchDAO; private QNameDAO qnameDAO; private ControlDAO controlDAO; - private RuleService ruleService; - private Dialect dialect; + private DictionaryService dictionaryService; + private static Log logger = LogFactory.getLog(FixNameCrcValuesPatch.class); private static Log progress_logger = LogFactory.getLog(PatchExecuter.class); public FixNameCrcValuesPatch() { } - public void setSessionFactory(SessionFactory sessionFactory) + public void setPatchDAO(PatchDAO patchDAO) { - this.sessionFactory = sessionFactory; - } - - public void setNodeDAO(NodeDAO nodeDAO) - { - this.nodeDAO = nodeDAO; + this.patchDAO = patchDAO; } /** @@ -112,26 +92,20 @@ public class FixNameCrcValuesPatch extends AbstractPatch { this.controlDAO = controlDAO; } - - /** - * @param ruleService the rule service - */ - public void setRuleService(RuleService ruleService) - { - this.ruleService = ruleService; - } - public void setDialect(Dialect dialect) + /** + * @param dictionaryService used to check the child associations for unique checking + */ + public void setDictionaryService(DictionaryService dictionaryService) { - this.dialect = dialect; + this.dictionaryService = dictionaryService; } @Override protected void checkProperties() { super.checkProperties(); - checkPropertyNotNull(sessionFactory, "sessionFactory"); - checkPropertyNotNull(nodeDAO, "nodeDAO"); + checkPropertyNotNull(patchDAO, "patchDAO"); checkPropertyNotNull(qnameDAO, "qnameDAO"); checkPropertyNotNull(applicationEventPublisher, 
"applicationEventPublisher"); } @@ -140,9 +114,7 @@ public class FixNameCrcValuesPatch extends AbstractPatch protected String applyInternal() throws Exception { // initialise the helper - HibernateHelper helper = new HibernateHelper(); - helper.setSessionFactory(sessionFactory); - + FixNameCrcValuesHelper helper = new FixNameCrcValuesHelper(); try { String msg = helper.fixCrcValues(); @@ -155,12 +127,14 @@ public class FixNameCrcValuesPatch extends AbstractPatch } } - private class HibernateHelper extends HibernateDaoSupport + private class FixNameCrcValuesHelper { private File logFile; private FileChannel channel; + private Integer assocCount; + private Long minAssocId = 0L; - private HibernateHelper() throws IOException + private FixNameCrcValuesHelper() throws IOException { // put the log file into a long life temp directory File tempDir = TempFileProvider.getLongLifeTempDir("patches"); @@ -176,12 +150,12 @@ public class FixNameCrcValuesPatch extends AbstractPatch writeLine("FixNameCrcValuesPatch executing on " + new Date()); } - private HibernateHelper write(Object obj) throws IOException + private FixNameCrcValuesHelper write(Object obj) throws IOException { channel.write(ByteBuffer.wrap(obj.toString().getBytes("UTF-8"))); return this; } - private HibernateHelper writeLine(Object obj) throws IOException + private FixNameCrcValuesHelper writeLine(Object obj) throws IOException { write(obj); write("\n"); @@ -194,76 +168,118 @@ public class FixNameCrcValuesPatch extends AbstractPatch public String fixCrcValues() throws Exception { + BatchProcessWorkProvider> workProvider = new BatchProcessWorkProvider>() + { + public synchronized int getTotalEstimatedWorkSize() + { + if (assocCount == null) + { + assocCount = patchDAO.getChildAssocCount(); + } + return assocCount.intValue(); + } + + public Collection> getNextWork() + { + // Get the next collection + List> results = patchDAO.getChildAssocsForCrcFix(minAssocId, 1000); + // Find out what the last ID is + int resultsSize = results.size(); + if (resultsSize > 0) + { + Map lastResult = results.get(resultsSize - 1); + Long id = (Long) lastResult.get("id"); + minAssocId = id + 1L; + } + // Hand back the results + return results; + } + }; + // get the association types to check - BatchProcessor batchProcessor = new BatchProcessor( + BatchProcessor> batchProcessor = new BatchProcessor>( "FixNameCrcValuesPatch", transactionService.getRetryingTransactionHelper(), - getChildAssocIdCollection(), + workProvider, 2, 20, applicationEventPublisher, - logger, 1000); + progress_logger, 1000); - // Precautionary flush and clear so that we have an empty session - getSession().flush(); - getSession().clear(); - - int updated = batchProcessor.process(new BatchProcessWorker() + BatchProcessWorker> worker = new BatchProcessWorker>() { - public String getIdentifier(Long entry) + public String getIdentifier(Map entry) { return entry.toString(); } public void beforeProcess() throws Throwable { - // Switch rules off - ruleService.disableRules(); - // Authenticate as system - String systemUsername = AuthenticationUtil.getSystemUserName(); - AuthenticationUtil.setFullyAuthenticatedUser(systemUsername); } - public void process(Long childAssocId) throws Throwable + public void process(Map row) throws Throwable { - ChildAssoc assoc = (ChildAssoc) getHibernateTemplate().get(ChildAssocImpl.class, childAssocId); - if (assoc == null) + Long assocId = (Long) row.get("id"); + Long typeQNameId = (Long) row.get("typeQNameId"); + Long qnameNamespaceId = (Long) 
row.get("qnameNamespaceId"); + String qnameLocalName = (String) row.get("qnameLocalName"); + Long childNodeNameCrc = (Long) row.get("childNodeNameCrc"); + Long qnameCrc = (Long) row.get("qnameCrc"); + String childNodeUuid = (String) row.get("childNodeUuid"); + String childNodeName = (String) row.get("childNodeName"); + // Use the UUID if there is no cm:name + childNodeName = (childNodeName == null) ? childNodeUuid : childNodeName; + // Resolve QNames + QName typeQName = qnameDAO.getQName(typeQNameId).getSecond(); + String namespace = qnameDAO.getNamespace(qnameNamespaceId).getSecond(); + QName qname = QName.createQName(namespace, qnameLocalName); + + ChildAssocEntity entity = new ChildAssocEntity(); + entity.setChildNodeNameAll(dictionaryService, typeQName, childNodeName); + entity.setQNameAll(qnameDAO, qname, false); + // Check the CRC values for cm:name + if (entity.getChildNodeNameCrc().equals(childNodeNameCrc)) { - // Missing now ... - return; + // Check the CRC for the QName + if (entity.getQnameCrc().equals(qnameCrc)) + { + // This child assoc is good + return; + } } - // Get the old CRCs - long oldChildCrc = assoc.getChildNodeNameCrc(); - long oldQNameCrc = assoc.getQnameCrc(); - // Get the child node - Node childNode = assoc.getChild(); - // Get the name - String childName = (String) nodeDAO.getNodeProperty(childNode.getId(), ContentModel.PROP_NAME); - if (childName == null) - { - childName = childNode.getUuid(); - } - // Update the CRCs - long childCrc = getCrc(childName); - QName qname = assoc.getQName(qnameDAO); - long qnameCrc = ChildAssocImpl.getCrc(qname); - - // Update the assoc - assoc.setChildNodeNameCrc(childCrc); - assoc.setQnameCrc(qnameCrc); - // Persist - Savepoint savepoint = controlDAO.createSavepoint("FixNameCrcValuesPatch"); + Savepoint savepoint = null; try { - getSession().flush(); + AssociationDefinition assocDef = dictionaryService.getAssociation(typeQName); + if (assocDef == null) + { + throw new DictionaryException("Association type not defined: " + typeQName); + } + + // Being here indicates that the association needs to be updated + savepoint = controlDAO.createSavepoint("FixNameCrcValuesPatch"); + patchDAO.updateChildAssocCrc(assocId, childNodeNameCrc, qnameCrc); controlDAO.releaseSavepoint(savepoint); + + String msg = I18NUtil.getMessage( + MSG_REWRITTEN, + assocId, + childNodeName, childNodeNameCrc, entity.getChildNodeNameCrc(), + qname, qnameCrc, entity.getQnameCrc()); + writeLine(msg); } catch (Throwable e) { - controlDAO.rollbackToSavepoint(savepoint); - - String msg = I18NUtil.getMessage(MSG_UNABLE_TO_CHANGE, childNode.getId(), childName, oldChildCrc, - childCrc, qname, oldQNameCrc, qnameCrc, e.getMessage()); + if (savepoint != null) + { + controlDAO.rollbackToSavepoint(savepoint); + } + String msg = I18NUtil.getMessage( + MSG_UNABLE_TO_CHANGE, + assocId, + childNodeName, childNodeNameCrc, entity.getChildNodeNameCrc(), + qname, qnameCrc, entity.getQnameCrc(), + e.getMessage()); // We just log this and add details to the message file if (logger.isDebugEnabled()) { @@ -275,134 +291,17 @@ public class FixNameCrcValuesPatch extends AbstractPatch } writeLine(msg); } - getSession().clear(); - // Record - writeLine(I18NUtil.getMessage(MSG_REWRITTEN, childNode.getId(), childName, oldChildCrc, childCrc, - qname, oldQNameCrc, qnameCrc)); } public void afterProcess() throws Throwable { - ruleService.enableRules(); } - }, true); - + }; + + int updated = batchProcessor.process(worker, true); String msg = I18NUtil.getMessage(MSG_SUCCESS, updated, logFile); return 
msg; } - - private Collection getChildAssocIdCollection() throws Exception - { - HibernateCallback callback = new HibernateCallback() - { - public ScrollableResults doInHibernate(Session session) - { - SQLQuery query = session - .createSQLQuery( - "SELECT ca.id AS child_assoc_id FROM alf_child_assoc ca"); - - // For MySQL databases we must set this unusual fetch size to force result set paging. See - // http://dev.mysql.com/doc/refman/5.0/en/connector-j-reference-implementation-notes.html - if (dialect instanceof MySQLDialect) - { - query.setFetchSize(Integer.MIN_VALUE); - } - query.addScalar("child_assoc_id", new LongType()); - return query.scroll(ScrollMode.FORWARD_ONLY); - } - }; - final ScrollableResults rs; - try - { - final int sizeEstimate = getHibernateTemplate().execute(new HibernateCallback() - { - public Integer doInHibernate(Session session) - { - SQLQuery query = session.createSQLQuery("SELECT COUNT(*) FROM alf_child_assoc"); - return ((Number) query.uniqueResult()).intValue(); - } - }); - - rs = getHibernateTemplate().execute(callback); - return new AbstractCollection() - { - @Override - public Iterator iterator() - { - return new Iterator(){ - - private Long next = fetchNext(); - - private Long fetchNext() - { - Long next; - if (rs.next()) - { - next = rs.getLong(0); - } - else - { - next = null; - rs.close(); - } - return next; - } - - public boolean hasNext() - { - return next != null; - } - - public Long next() - { - if (!hasNext()) - { - throw new IllegalStateException(); - } - Long oldNext = next; - next = fetchNext(); - return oldNext; - } - - public void remove() - { - throw new UnsupportedOperationException(); - }}; - } - - @Override - public int size() - { - return sizeEstimate; - } - - }; - } - catch (Throwable e) - { - logger.error("Failed to query for child association IDs", e); - writeLine("Failed to query for child association IDs: " + e.getMessage()); - throw new PatchException("Failed to query for child association IDs", e); - } - } - - /** - * @param str the name that will be converted to lowercase - * @return the CRC32 calcualted on the lowercase version of the string - */ - private long getCrc(String str) - { - CRC32 crc = new CRC32(); - try - { - crc.update(str.toLowerCase().getBytes("UTF-8")); // https://issues.alfresco.com/jira/browse/ALFCOM-1335 - } - catch (UnsupportedEncodingException e) - { - throw new RuntimeException("UTF-8 encoding is not supported"); - } - return crc.getValue(); - } } } diff --git a/source/java/org/alfresco/repo/admin/patch/impl/InvalidNameEndingPatch.java b/source/java/org/alfresco/repo/admin/patch/impl/InvalidNameEndingPatch.java deleted file mode 100644 index a31d7f29bd..0000000000 --- a/source/java/org/alfresco/repo/admin/patch/impl/InvalidNameEndingPatch.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . 
- */ -package org.alfresco.repo.admin.patch.impl; - -import java.io.File; -import java.io.IOException; -import java.io.RandomAccessFile; -import java.nio.ByteBuffer; -import java.nio.channels.FileChannel; -import java.util.Date; -import java.util.List; - -import org.alfresco.model.ContentModel; -import org.alfresco.repo.admin.patch.AbstractPatch; -import org.alfresco.repo.domain.Node; -import org.alfresco.service.cmr.admin.PatchException; -import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException; -import org.alfresco.service.cmr.repository.NodeRef; -import org.alfresco.util.TempFileProvider; -import org.hibernate.Query; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.springframework.extensions.surf.util.I18NUtil; -import org.springframework.orm.hibernate3.HibernateCallback; -import org.springframework.orm.hibernate3.support.HibernateDaoSupport; - -/** - * Checks that all names do not end with ' ' or '.' - * - * @author David Caruana - */ -public class InvalidNameEndingPatch extends AbstractPatch -{ - private static final String MSG_SUCCESS = "patch.invalidNameEnding.result"; - private static final String MSG_REWRITTEN = "patch.invalidNameEnding.rewritten"; - private static final String ERR_UNABLE_TO_FIX = "patch.invalidNameEnding.err.unable_to_fix"; - - private SessionFactory sessionFactory; - - public static void main(String[] args) - { - String name = "fred. ... "; - - int i = (name.length() == 0) ? 0 : name.length() - 1; - while (i >= 0 && (name.charAt(i) == '.' || name.charAt(i) == ' ')) - { - i--; - } - - String updatedName = (i == 0) ? "unnamed" : name.substring(0, i + 1); - System.out.println(updatedName); - } - - - public InvalidNameEndingPatch() - { - } - - public void setSessionFactory(SessionFactory sessionFactory) - { - this.sessionFactory = sessionFactory; - } - - @Override - protected void checkProperties() - { - super.checkProperties(); - checkPropertyNotNull(sessionFactory, "sessionFactory"); - } - - @Override - protected String applyInternal() throws Exception - { - // initialise the helper - HibernateHelper helper = new HibernateHelper(); - helper.setSessionFactory(sessionFactory); - - try - { - String msg = helper.fixNames(); - // done - return msg; - } - finally - { - helper.closeWriter(); - } - } - - private class HibernateHelper extends HibernateDaoSupport - { - private File logFile; - private FileChannel channel; - - private HibernateHelper() throws IOException - { - File tempDir = TempFileProvider.getLongLifeTempDir("patches"); - logFile = new File(tempDir, "InvalidNameEndingPatch.log"); - - // open the file for appending - RandomAccessFile outputFile = new RandomAccessFile(logFile, "rw"); - channel = outputFile.getChannel(); - // move to the end of the file - channel.position(channel.size()); - // add a newline and it's ready - writeLine("").writeLine(""); - writeLine("InvalidNameEndingPatch executing on " + new Date()); - } - - private HibernateHelper write(Object obj) throws IOException - { - channel.write(ByteBuffer.wrap(obj.toString().getBytes())); - return this; - } - private HibernateHelper writeLine(Object obj) throws IOException - { - write(obj); - write("\n"); - return this; - } - private void closeWriter() - { - try { channel.close(); } catch (Throwable e) {} - } - - public String fixNames() throws Exception - { - // get the association types to check - List nodes = getInvalidNames(); - - int updated = 0; - for (Node node : nodes) - { - NodeRef nodeRef = node.getNodeRef(); - String name = 
(String)nodeService.getProperty(nodeRef, ContentModel.PROP_NAME); - if (name != null && (name.endsWith(".") || name.endsWith(" "))) - { - int i = (name.length() == 0) ? 0 : name.length() - 1; - while (i >= 0 && (name.charAt(i) == '.' || name.charAt(i) == ' ')) - { - i--; - } - - String updatedName = (i == 0) ? "unnamed" : name.substring(0, i + 1); - int idx = 0; - boolean applied = false; - while (!applied) - { - try - { - nodeService.setProperty(nodeRef, ContentModel.PROP_NAME, updatedName); - applied = true; - } - catch(DuplicateChildNodeNameException e) - { - idx++; - if (idx > 10) - { - writeLine(I18NUtil.getMessage(ERR_UNABLE_TO_FIX, name ,updatedName)); - throw new PatchException(ERR_UNABLE_TO_FIX, logFile); - } - updatedName += "_" + idx; - } - } - writeLine(I18NUtil.getMessage(MSG_REWRITTEN, name ,updatedName)); - updated++; - getSession().flush(); - getSession().clear(); - } - } - - String msg = I18NUtil.getMessage(MSG_SUCCESS, updated, logFile); - return msg; - } - - @SuppressWarnings("unchecked") - private List getInvalidNames() - { - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session - .createQuery( - "select node from org.alfresco.repo.domain.hibernate.NodeImpl as node " + - "join node.properties prop where " + - " prop.stringValue like '%.' or " + - " prop.stringValue like '% ' "); - return query.list(); - } - }; - List results = (List) getHibernateTemplate().execute(callback); - return results; - } - - } -} diff --git a/source/java/org/alfresco/repo/admin/patch/impl/LinkNodeFileExtensionPatch.java b/source/java/org/alfresco/repo/admin/patch/impl/LinkNodeFileExtensionPatch.java index a3a9f62dab..e5f6d6f5cf 100644 --- a/source/java/org/alfresco/repo/admin/patch/impl/LinkNodeFileExtensionPatch.java +++ b/source/java/org/alfresco/repo/admin/patch/impl/LinkNodeFileExtensionPatch.java @@ -22,21 +22,20 @@ import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; +import java.sql.Savepoint; import java.util.Date; import java.util.List; import org.alfresco.model.ContentModel; import org.alfresco.repo.admin.patch.AbstractPatch; -import org.alfresco.repo.domain.hibernate.NodeImpl; -import org.alfresco.service.cmr.admin.PatchException; +import org.alfresco.repo.domain.control.ControlDAO; +import org.alfresco.repo.domain.patch.PatchDAO; import org.alfresco.service.cmr.repository.DuplicateChildNodeNameException; import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.repository.NodeService; +import org.alfresco.util.Pair; import org.alfresco.util.TempFileProvider; -import org.hibernate.Query; -import org.hibernate.Session; -import org.hibernate.SessionFactory; import org.springframework.extensions.surf.util.I18NUtil; -import org.springframework.orm.hibernate3.HibernateCallback; import org.springframework.orm.hibernate3.support.HibernateDaoSupport; /** @@ -50,45 +49,47 @@ public class LinkNodeFileExtensionPatch extends AbstractPatch private static final String MSG_REWRITTEN = "patch.linkNodeExtension.rewritten"; private static final String ERR_UNABLE_TO_FIX = "patch.linkNodeExtension.err.unable_to_fix"; - private SessionFactory sessionFactory; + private PatchDAO patchDAO; + private ControlDAO controlDAO; + private NodeService nodeService; /** * Default constructor - * */ public LinkNodeFileExtensionPatch() { } - /** - * Set the session factory - * - * @param sessionFactory SessionFactory - */ - public void 
setSessionFactory(SessionFactory sessionFactory) + public void setPatchDAO(PatchDAO patchDAO) { - this.sessionFactory = sessionFactory; + this.patchDAO = patchDAO; + } + + public void setControlDAO(ControlDAO controlDAO) + { + this.controlDAO = controlDAO; + } + + public void setNodeService(NodeService nodeService) + { + this.nodeService = nodeService; } @Override protected void checkProperties() { super.checkProperties(); - checkPropertyNotNull(sessionFactory, "sessionFactory"); + checkPropertyNotNull(patchDAO, "patchDAO"); + checkPropertyNotNull(nodeService, "nodeService"); } @Override protected String applyInternal() throws Exception { - // Initialise the helper - - HibernateHelper helper = new HibernateHelper(); - helper.setSessionFactory(sessionFactory); + LinkNodeFileExtensionHelper helper = new LinkNodeFileExtensionHelper(); try { - // Fix the link node file names - return helper.fixNames(); } finally @@ -97,12 +98,12 @@ public class LinkNodeFileExtensionPatch extends AbstractPatch } } - private class HibernateHelper extends HibernateDaoSupport + private class LinkNodeFileExtensionHelper extends HibernateDaoSupport { private File logFile; private FileChannel channel; - private HibernateHelper() throws IOException + private LinkNodeFileExtensionHelper() throws IOException { // Open a log file File tempDir = TempFileProvider.getLongLifeTempDir("patches"); @@ -119,12 +120,12 @@ public class LinkNodeFileExtensionPatch extends AbstractPatch writeLine("LinkNodeExtensionPatch executing on " + new Date()); } - private HibernateHelper write(Object obj) throws IOException + private LinkNodeFileExtensionHelper write(Object obj) throws IOException { channel.write(ByteBuffer.wrap(obj.toString().getBytes())); return this; } - private HibernateHelper writeLine(Object obj) throws IOException + private LinkNodeFileExtensionHelper writeLine(Object obj) throws IOException { write(obj); write("\n"); @@ -137,78 +138,46 @@ public class LinkNodeFileExtensionPatch extends AbstractPatch public String fixNames() throws Exception { - // Get the list of nodes to be updated - - List nodes = getInvalidNames(); + List> names = patchDAO.getNodesOfTypeWithNamePattern(ContentModel.TYPE_LINK, "%.lnk"); int updated = 0; - for (NodeImpl node : nodes) + for (Pair pair : names) { - // Check that the node is a link node - - NodeRef nodeRef = node.getNodeRef(); - - if ( nodeService.getProperty(nodeRef, ContentModel.PROP_LINK_DESTINATION) != null) + NodeRef nodeRef = pair.getFirst(); + String name = pair.getSecond(); + // Update the name string, replace '.lnk' with '.url' + String updatedName = name.substring(0, name.length() - 4) + ".url"; + int idx = 0; + boolean applied = false; + while (!applied && idx < 10) { - // Get the current file name - - String name = (String)nodeService.getProperty(nodeRef, ContentModel.PROP_NAME); - if (name != null && name.length() >= 4 && name.endsWith(".lnk")) - { - // Update the name string, replace '.lnk' with '.url' - - String updatedName = name.substring(0, name.length() - 4) + ".url"; - - int idx = 0; - boolean applied = false; - while (!applied) - { - try - { - nodeService.setProperty(nodeRef, ContentModel.PROP_NAME, updatedName); - applied = true; - } - catch(DuplicateChildNodeNameException e) - { - idx++; - if (idx > 10) - { - writeLine(I18NUtil.getMessage(ERR_UNABLE_TO_FIX, name, updatedName)); - throw new PatchException(ERR_UNABLE_TO_FIX, logFile); - } - updatedName += "_" + idx; - } - } - writeLine(I18NUtil.getMessage(MSG_REWRITTEN, name ,updatedName)); - updated++; - 
getSession().flush(); - getSession().clear(); - } + Savepoint savepoint = controlDAO.createSavepoint("LinkNodeFileExtensionsFix"); + try + { + nodeService.setProperty(nodeRef, ContentModel.PROP_NAME, updatedName); + controlDAO.releaseSavepoint(savepoint); + applied = true; + } + catch(DuplicateChildNodeNameException e) + { + controlDAO.rollbackToSavepoint(savepoint); + idx++; + updatedName += "_" + idx; + } } + if (applied) + { + writeLine(I18NUtil.getMessage(MSG_REWRITTEN, name ,updatedName)); + } + else + { + writeLine(I18NUtil.getMessage(ERR_UNABLE_TO_FIX, name, updatedName)); + } + updated++; } String msg = I18NUtil.getMessage(MSG_SUCCESS, updated, logFile); return msg; } - - @SuppressWarnings("unchecked") - private List getInvalidNames() - { - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session - .createQuery( - "select node from org.alfresco.repo.domain.hibernate.NodeImpl as node " + - "join node.properties prop where " + - " prop.stringValue like '%.lnk' "); - return query.list(); - } - }; - List results = (List) getHibernateTemplate().execute(callback); - return results; - } - } } diff --git a/source/java/org/alfresco/repo/domain/ChildAssoc.java b/source/java/org/alfresco/repo/domain/ChildAssoc.java deleted file mode 100644 index 7773f70558..0000000000 --- a/source/java/org/alfresco/repo/domain/ChildAssoc.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain; - -import org.alfresco.repo.domain.qname.QNameDAO; -import org.alfresco.service.cmr.repository.ChildAssociationRef; -import org.alfresco.service.namespace.QName; - -/** - * Represents a special type of association between nodes, that of the - * parent-child relationship. - * - * @author Derek Hulley - */ -public interface ChildAssoc extends Comparable -{ - /** - * Performs the necessary work on the provided nodes to ensure that a bidirectional - * association is properly set up. - *
<p/>
- * The association attributes still have to be set up. - * - * @param parentNode - * @param childNode - * - * @see #setName(String) - * @see #setIsPrimary(boolean) - */ - public void buildAssociation(Node parentNode, Node childNode); - - /** - * Performs the necessary work on the {@link #getParent() parent} and - * {@link #getChild() child} nodes to maintain the inverse association sets - */ - public void removeAssociation(); - - /** - * A convenience method to get a reference to this association. - * - * @param qnameDAO the DAO to resolve the qname references - * @return Returns a reference to this association - */ - public ChildAssociationRef getChildAssocRef(QNameDAO qnameDAO); - - /** - * Convenience method to get the association's type - * - * @param qnameDAO the helper DAO - * @return the association's type QName - */ - public QName getTypeQName(QNameDAO qnameDAO); - - /** - * Convenience method to set the association's type - * - * @param qnameDAO the helper DAO - * @param typeQName the association's type QName - */ - public void setTypeQName(QNameDAO qnameDAO, QName typeQName); - - /** - * A convenience method to aggregate the qualified name's namespace and localname - * into a single qualified name. - * - * @return Returns the qualified name of the association - */ - public QName getQName(QNameDAO qnameDAO); - - /** - * Convenience method to set the association's qname - * - * @param qnameDAO the helper DAO - * @param qname the association's QName - */ - public void setQName(QNameDAO qnameDAO, QName qname); - - public Long getId(); - - /** - * @return Return the current version number - */ - public Long getVersion(); - - public Node getParent(); - - public Node getChild(); - - /** - * @return Returns the type of the association - */ - public Long getTypeQNameId(); - - /** - * @param typeQNameId the association's dictionary type - */ - public void setTypeQNameId(Long typeQNameId); - - /** - * @return Returns the child node name. This may be truncated, in which case it - * will end with ... - */ - public String getChildNodeName(); - - /** - * @param childNodeName the name of the child node, which may be truncated and - * terminated with ... in order to not exceed 50 characters. - */ - public void setChildNodeName(String childNodeName); - - /** - * @return Returns the crc value for the child node name. 
- */ - public long getChildNodeNameCrc(); - - /** - * @param crc the crc value - */ - public void setChildNodeNameCrc(long crc); - - /** - * @return Returns the namespace of the association's local QName - */ - public Long getQnameNamespaceId(); - - /** - * @param namespaceId the namespace of the association's local QName - */ - public void setQnameNamespaceId(Long namespaceId); - - /** - * @return Returns the localname of the association's local QName - */ - public String getQnameLocalName(); - - /** - * @param localName the localname of the association's local QName - */ - public void setQnameLocalName(String localName); - - /** - * @return Returns the crc value for the association's local QName - */ - public long getQnameCrc(); - - /** - * @param crc the crc value - */ - public void setQnameCrc(long crc); - - public boolean getIsPrimary(); - - public void setIsPrimary(boolean isPrimary); - - /** - * @return Returns the user-assigned index - */ - public int getIndex(); - - /** - * Set the index of this association - * - * @param index the association index - */ - public void setIndex(int index); -} diff --git a/source/java/org/alfresco/repo/domain/Node.java b/source/java/org/alfresco/repo/domain/Node.java deleted file mode 100644 index 6367f9e70f..0000000000 --- a/source/java/org/alfresco/repo/domain/Node.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain; - -import java.util.Map; - -import org.alfresco.repo.domain.node.NodePropertyKey; -import org.alfresco.repo.domain.node.NodePropertyValue; -import org.alfresco.repo.domain.node.Transaction; -import org.alfresco.repo.domain.qname.QNameDAO; -import org.alfresco.service.cmr.repository.NodeRef; -import org.alfresco.service.namespace.QName; - -/** - * Interface for persistent node objects. - *
<p/>
- * Specific instances of nodes are unique, but may share GUIDs across stores. - * - * @author Derek Hulley - */ -public interface Node -{ - /** - * Convenience method to get the reference to the node - * - * @return Returns the reference to this node - */ - public NodeRef getNodeRef(); - - /** - * Convenience method to get the node's type - * - * @param qnameDAO the helper DAO - * @return the node's type QName - */ - public QName getTypeQName(QNameDAO qnameDAO); - - /** - * Convenience method to set the node's type - * - * @param qnameDAO the helper DAO - * @param qname the node's type QName - */ - public void setTypeQName(QNameDAO qnameDAO, QName qname); - - /** - * @return Returns the auto-generated ID - */ - public Long getId(); - - /** - * @return Returns the current version number - */ - public Long getVersion(); - - public Store getStore(); - - public void setStore(Store store); - - public String getUuid(); - - public void setUuid(String uuid); - - public Transaction getTransaction(); - - public void setTransaction(Transaction transaction); - - public boolean getDeleted(); - - public void setDeleted(boolean deleted); - - public Long getTypeQNameId(); - - public void setTypeQNameId(Long typeQNameId); - - public Long getAclId(); - - public void setAclId(Long aclId); - - public Map getProperties(); - - public AuditableProperties getAuditableProperties(); - - public void setAuditableProperties(AuditableProperties auditableProperties); -} diff --git a/source/java/org/alfresco/repo/domain/NodeAssoc.java b/source/java/org/alfresco/repo/domain/NodeAssoc.java deleted file mode 100644 index 9aeace7492..0000000000 --- a/source/java/org/alfresco/repo/domain/NodeAssoc.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain; - -import org.alfresco.repo.domain.qname.QNameDAO; -import org.alfresco.service.cmr.repository.AssociationRef; -import org.alfresco.service.namespace.QName; - -/** - * Represents a generic association between two nodes. The association is named - * and bidirectional by default. - * - * @author Derek Hulley - */ -public interface NodeAssoc -{ - /** - * Wires up the necessary bits on the source and target nodes so that the association - * is immediately bidirectional. - *
<p/>
- * The association attributes still have to be set. - * - * @param sourceNode - * @param targetNode - * - * @see #setName(String) - */ - public void buildAssociation(Node sourceNode, Node targetNode); - - /** - * Convenience method to retrieve the association's reference - * - * @param qnameDAO helper DAO - * @return the association's reference - */ - public AssociationRef getNodeAssocRef(QNameDAO qnameDAO); - - /** - * Convenience method to retrieve the association's type QName - * - * @param qnameDAO helper DAO - * @return the association's type QName - */ - public QName getTypeQName(QNameDAO qnameDAO); - - /** - * Convenience method to set the association's type - * - * @param qnameDAO the helper DAO - * @param typeQName the association's type QName - */ - public void setTypeQName(QNameDAO qnameDAO, QName typeQName); - - public Long getId(); - - /** - * @return Returns the current version number - */ - public Long getVersion(); - - public Node getSource(); - - public Node getTarget(); - - /** - * @return Returns the type of the association - */ - public Long getTypeQNameId(); - - /** - * @param typeQNameId the association's dictionary type - */ - public void setTypeQNameId(Long typeQNameId); -} diff --git a/source/java/org/alfresco/repo/domain/Server.java b/source/java/org/alfresco/repo/domain/Server.java deleted file mode 100644 index bc6d6073b4..0000000000 --- a/source/java/org/alfresco/repo/domain/Server.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain; - -/** - * Interface for persistent server objects. These persist - * details of the servers that have committed transactions to the - * database, for instance. - * - * @author Derek Hulley - */ -public interface Server -{ - public Long getId(); - - public Long getVersion(); - - public String getIpAddress(); - - public void setIpAddress(String ipAddress); -} diff --git a/source/java/org/alfresco/repo/domain/Store.java b/source/java/org/alfresco/repo/domain/Store.java deleted file mode 100644 index 7a7005a14f..0000000000 --- a/source/java/org/alfresco/repo/domain/Store.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain; - -import org.alfresco.service.cmr.repository.StoreRef; - -/** - * Represents a store entity - * - * @author Derek Hulley - */ -public interface Store -{ - /** - * @return Returns the current version number used for optimistic locking - */ - public Long getVersion(); - - /** - * @return Returns the unique ID of the object - */ - public Long getId(); - - /** - * @return the store protocol - */ - public String getProtocol(); - - /** - * @param protocol the store protocol - */ - public void setProtocol(String protocol); - - /** - * @return the store identifier - */ - public String getIdentifier(); - - /** - * @param identifier the store identifier - */ - public void setIdentifier(String identifier); - - /** - * @return Returns the root of the store - */ - public Node getRootNode(); - - /** - * @param rootNode mandatory association to the root of the store - */ - public void setRootNode(Node rootNode); - - /** - * Convenience method to access the reference - * @return Returns the reference to the store - */ - public StoreRef getStoreRef(); -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/AppliedPatch.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/AppliedPatch.hbm.xml deleted file mode 100644 index 427b826b3c..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/AppliedPatch.hbm.xml +++ /dev/null @@ -1,50 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - select - appliedPatch - from - org.alfresco.repo.domain.hibernate.AppliedPatchImpl as appliedPatch - - - - = :fromDate and - appliedPatch.appliedOnDate <= :toDate - order by appliedPatch.appliedOnDate - ]]> - - - diff --git a/source/java/org/alfresco/repo/domain/hibernate/ChildAssocImpl.java b/source/java/org/alfresco/repo/domain/hibernate/ChildAssocImpl.java deleted file mode 100644 index 2d6e06069d..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/ChildAssocImpl.java +++ /dev/null @@ -1,548 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . 
- */ -package org.alfresco.repo.domain.hibernate; - -import java.io.Serializable; -import java.io.UnsupportedEncodingException; -import java.util.concurrent.locks.ReentrantReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; -import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; -import java.util.zip.CRC32; - -import org.alfresco.repo.domain.ChildAssoc; -import org.alfresco.repo.domain.Node; -import org.alfresco.repo.domain.qname.QNameDAO; -import org.alfresco.service.cmr.repository.ChildAssociationRef; -import org.alfresco.service.namespace.QName; -import org.alfresco.util.EqualsHelper; - -/** - * @author Derek Hulley - */ -public class ChildAssocImpl implements ChildAssoc, Serializable -{ - private static final long serialVersionUID = -8993272236626580410L; - - private Long id; - private Long version; - private Node parent; - private Node child; - private Long typeQNameId; - private Long qnameNamespaceId; - private String qnameLocalName; - private long qnameCrc; - private String childNodeName; - private long childNodeNameCrc; - private boolean isPrimary; - private int index; - - private transient ReadLock refReadLock; - private transient WriteLock refWriteLock; - private transient ChildAssociationRef childAssocRef; - private transient QName typeQName; - private transient QName qname; - - public ChildAssocImpl() - { - ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); - refReadLock = lock.readLock(); - refWriteLock = lock.writeLock(); - - index = -1; // The index is irrelevant - } - - public void buildAssociation(Node parentNode, Node childNode) - { - // add the forward associations - this.setParent(parentNode); - this.setChild(childNode); - } - - public void removeAssociation() - { - } - - /** - * {@inheritDoc} - *
- * This method is thread-safe and lazily creates the required references, if required. - */ - public ChildAssociationRef getChildAssocRef(QNameDAO qnameDAO) - { - boolean trashReference = false; - // first check if it is available - refReadLock.lock(); - try - { - if (childAssocRef != null) - { - // double check that the parent and child node references match those of our reference - if (childAssocRef.getParentRef() != parent.getNodeRef() || - childAssocRef.getChildRef() != child.getNodeRef()) - { - trashReference = true; - } - else - { - // we are sure that the reference is correct - return childAssocRef; - } - } - } - finally - { - refReadLock.unlock(); - } - // get write lock - refWriteLock.lock(); - try - { - // double check - if (childAssocRef == null || trashReference) - { - if (typeQName == null) - { - typeQName = qnameDAO.getQName(this.typeQNameId).getSecond(); - } - if (qname == null ) - { - String qnameNamespace = qnameDAO.getNamespace(qnameNamespaceId).getSecond(); - qname = QName.createQName(qnameNamespace, qnameLocalName); - } - childAssocRef = new ChildAssociationRef( - typeQName, - parent.getNodeRef(), - qname, - child.getNodeRef(), - this.isPrimary, - index); - } - return childAssocRef; - } - finally - { - refWriteLock.unlock(); - } - } - - /** - * {@inheritDoc} - */ - public QName getTypeQName(QNameDAO qnameDAO) - { - refReadLock.lock(); - try - { - if (typeQName != null) - { - return typeQName; - } - } - finally - { - refReadLock.unlock(); - } - refWriteLock.lock(); - try - { - typeQName = qnameDAO.getQName(typeQNameId).getSecond(); - return typeQName; - } - finally - { - refWriteLock.unlock(); - } - } - - public void setTypeQName(QNameDAO qnameDAO, QName typeQName) - { - Long typeQNameId = qnameDAO.getOrCreateQName(typeQName).getFirst(); - refWriteLock.lock(); - try - { - setTypeQNameId(typeQNameId); - } - finally - { - refWriteLock.unlock(); - } - } - - /** - * {@inheritDoc} - *
- * This method is thread-safe and lazily creates the required references, if required. - */ - public QName getQName(QNameDAO qnameDAO) - { - // first check if it is available - refReadLock.lock(); - try - { - if (qname != null) - { - return qname; - } - } - finally - { - refReadLock.unlock(); - } - // get write lock - refWriteLock.lock(); - try - { - // double check - if (qname == null ) - { - String qnameNamespace = qnameDAO.getNamespace(qnameNamespaceId).getSecond(); - qname = QName.createQName(qnameNamespace, qnameLocalName); - } - return qname; - } - finally - { - refWriteLock.unlock(); - } - } - - public void setQName(QNameDAO qnameDAO, QName qname) - { - String assocQNameNamespace = qname.getNamespaceURI(); - String assocQNameLocalName = qname.getLocalName(); - Long assocQNameNamespaceId = qnameDAO.getOrCreateNamespace(assocQNameNamespace).getFirst(); - Long assocQNameCrc = getCrc(qname); - // get write lock - refWriteLock.lock(); - try - { - setQnameNamespaceId(assocQNameNamespaceId); - setQnameLocalName(assocQNameLocalName); - setQnameCrc(assocQNameCrc); - } - finally - { - refWriteLock.unlock(); - } - } - - public static long getCrc(QName qname) - { - CRC32 crc = new CRC32(); - try - { - crc.update(qname.getNamespaceURI().getBytes("UTF-8")); - crc.update(qname.getLocalName().getBytes("UTF-8")); - } - catch (UnsupportedEncodingException e) - { - throw new RuntimeException("UTF-8 encoding is not supported"); - } - return crc.getValue(); - - } - - public boolean equals(Object obj) - { - if (obj == null) - { - return false; - } - else if (obj == this) - { - return true; - } - else if (!(obj instanceof ChildAssoc)) - { - return false; - } - ChildAssoc that = (ChildAssoc) obj; - if (EqualsHelper.nullSafeEquals(id, that.getId())) - { - return true; - } - else - { - return (EqualsHelper.nullSafeEquals(this.getParent(), that.getParent()) - && EqualsHelper.nullSafeEquals(this.typeQNameId, that.getTypeQNameId()) - && EqualsHelper.nullSafeEquals(this.getChild(), that.getChild()) - && EqualsHelper.nullSafeEquals(this.qnameLocalName, that.getQnameLocalName()) - && EqualsHelper.nullSafeEquals(this.qnameNamespaceId, that.getQnameNamespaceId()) - ); - } - } - - public int hashCode() - { - return (child == null ? 0 : child.hashCode()); - } - - public String toString() - { - StringBuffer sb = new StringBuffer(32); - sb.append("ChildAssoc") - .append("[ id=").append(id) - .append(", parent=").append(parent.getId()) - .append(", child=").append(child.getId()) - .append(", child name=").append(childNodeName) - .append(", child name crc=").append(childNodeNameCrc) - .append(", assoc type=").append(typeQNameId) - .append(", assoc qname ns=").append(qnameNamespaceId) - .append(", assoc qname localname=").append(qnameLocalName) - .append(", assoc qname crc=").append(qnameCrc) - .append(", isPrimary=").append(isPrimary) - .append("]"); - return sb.toString(); - } - - /** - * Orders the child associations by ID. A smaller ID has a higher priority. - * This may change once we introduce a changeable index against which to order. 
- */ - public int compareTo(ChildAssoc another) - { - if (this == another) - { - return 0; - } - - int thisIndex = this.getIndex(); - int anotherIndex = another.getIndex(); - - Long thisId = this.getId(); - Long anotherId = another.getId(); - - if (thisId == null) // this ID has not been set, make this instance greater - { - return -1; - } - else if (anotherId == null) // other ID has not been set, make this instance lesser - { - return 1; - } - else if (thisIndex == anotherIndex) // use the explicit index - { - return thisId.compareTo(anotherId); - } - else // fallback on order of creation - { - return (thisIndex > anotherIndex) ? 1 : -1; // a lower index, make this instance lesser - } - } - - public Long getId() - { - return id; - } - - /** - * For Hibernate use - */ - @SuppressWarnings("unused") - private void setId(Long id) - { - this.id = id; - } - - public Long getVersion() - { - return version; - } - - /** - * For Hibernate use - */ - @SuppressWarnings("unused") - private void setVersion(Long version) - { - this.version = version; - } - - public Node getParent() - { - return parent; - } - - /** - * For Hibernate use - */ - private void setParent(Node parentNode) - { - refWriteLock.lock(); - try - { - this.parent = parentNode; - this.childAssocRef = null; - } - finally - { - refWriteLock.unlock(); - } - } - - public Node getChild() - { - return child; - } - - /** - * For Hibernate use - */ - private void setChild(Node node) - { - refWriteLock.lock(); - try - { - child = node; - this.childAssocRef = null; - } - finally - { - refWriteLock.unlock(); - } - } - - public Long getTypeQNameId() - { - return typeQNameId; - } - - public void setTypeQNameId(Long typeQNameId) - { - refWriteLock.lock(); - try - { - this.typeQNameId = typeQNameId; - this.childAssocRef = null; - this.typeQName = null; - } - finally - { - refWriteLock.unlock(); - } - } - - public Long getQnameNamespaceId() - { - return qnameNamespaceId; - } - - public void setQnameNamespaceId(Long qnameNamespaceId) - { - refWriteLock.lock(); - try - { - this.qnameNamespaceId = qnameNamespaceId; - this.childAssocRef = null; - this.qname = null; - } - finally - { - refWriteLock.unlock(); - } - } - - public String getQnameLocalName() - { - return qnameLocalName; - } - - public void setQnameLocalName(String qnameLocalName) - { - refWriteLock.lock(); - try - { - this.qnameLocalName = qnameLocalName; - this.childAssocRef = null; - this.qname = null; - } - finally - { - refWriteLock.unlock(); - } - } - - public long getQnameCrc() - { - return qnameCrc; - } - - public void setQnameCrc(long crc) - { - this.qnameCrc = crc; - } - - public String getChildNodeName() - { - return childNodeName; - } - - public void setChildNodeName(String childNodeName) - { - this.childNodeName = childNodeName; - } - - public long getChildNodeNameCrc() - { - return childNodeNameCrc; - } - - public void setChildNodeNameCrc(long crc) - { - this.childNodeNameCrc = crc; - } - - public boolean getIsPrimary() - { - return isPrimary; - } - - public void setIsPrimary(boolean isPrimary) - { - refWriteLock.lock(); - try - { - this.isPrimary = isPrimary; - this.childAssocRef = null; - } - finally - { - refWriteLock.unlock(); - } - } - - public int getIndex() - { - return index; - } - - public void setIndex(int index) - { - refWriteLock.lock(); - try - { - this.index = index; - this.childAssocRef = null; - } - finally - { - refWriteLock.unlock(); - } - } -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/DirtySessionAnnotation.java 
b/source/java/org/alfresco/repo/domain/hibernate/DirtySessionAnnotation.java deleted file mode 100644 index f9fd7d9534..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/DirtySessionAnnotation.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.hibernate; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * Annotation defining Hibernate session flushing and dirty marking - * - * @since 2.1.5 - * @author Derek Hulley - */ -@Target(ElementType.METHOD) -@Retention(RetentionPolicy.RUNTIME) -public @interface DirtySessionAnnotation -{ - /** - * The session must be flagged as dirty after execution.
- * Default: false - */ - boolean markDirty() default false; -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/DirtySessionMethodInterceptor.java b/source/java/org/alfresco/repo/domain/hibernate/DirtySessionMethodInterceptor.java deleted file mode 100644 index d72b003936..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/DirtySessionMethodInterceptor.java +++ /dev/null @@ -1,444 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.hibernate; - -import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.Stack; - -import org.alfresco.error.StackTraceUtil; -import org.alfresco.repo.transaction.AlfrescoTransactionSupport; -import org.alfresco.util.Pair; -import org.aopalliance.intercept.MethodInterceptor; -import org.aopalliance.intercept.MethodInvocation; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.hibernate.Criteria; -import org.hibernate.FlushMode; -import org.hibernate.Query; -import org.hibernate.Session; - -/** - * This method interceptor determines if a Hibernate flush is required and performs the - * flush if necessary. The primary purpose is to avoid the Hibernate "flush if required" checks - * that occur every time a query is made to the database - whether or not any actual modifications - * have been made to the session. - *
- * Write methods (methods that modify the Hibernate Session) will flag the transaction as dirty. - * Methods that query the database can {@link #setQueryFlushMode(Session, Query) set the flush mode} - * without knowing whether the session is dirty or not. - *
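Illustrative sketch only, not part of this patch: a read-only DAO method written against the contract above might look roughly like this. The class name, named query and parameter are hypothetical; DirtySessionAnnotation and DirtySessionMethodInterceptor.setQueryFlushMode are the members defined in this file.

    import java.util.List;
    import org.alfresco.repo.domain.hibernate.DirtySessionAnnotation;
    import org.alfresco.repo.domain.hibernate.DirtySessionMethodInterceptor;
    import org.hibernate.Query;
    import org.hibernate.Session;

    // Hypothetical read-only DAO fragment; only the annotation and the interceptor helper are real.
    class ExampleQueryDao
    {
        @DirtySessionAnnotation(markDirty = false)
        @SuppressWarnings("unchecked")
        public List<Object[]> findByStore(Session session, Long storeId)
        {
            Query query = session.getNamedQuery("example.FindByStore");   // hypothetical named query
            query.setLong("storeId", storeId);
            // Flush only if an earlier annotated method marked the session dirty,
            // then run this query with FlushMode.MANUAL.
            DirtySessionMethodInterceptor.setQueryFlushMode(session, query);
            return (List<Object[]>) query.list();
        }
    }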
- * The interceptor uses the {@link DirtySessionAnnotation}. If the annotation is not used, then - * no session dirty checks will be done but a WARN message will be output. - *
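Again purely illustrative: a write method under this scheme carries the annotation with markDirty = true so that the interceptor counts the change (an unannotated method would only trigger the WARN described above). The DAO class and the rename operation are hypothetical.

    import org.alfresco.repo.domain.hibernate.ChildAssocImpl;
    import org.alfresco.repo.domain.hibernate.DirtySessionAnnotation;
    import org.hibernate.Session;

    // Hypothetical write-side DAO fragment.
    class ExampleWriteDao
    {
        @DirtySessionAnnotation(markDirty = true)
        public void renameChildAssoc(Session session, ChildAssocImpl assoc, String newName)
        {
            assoc.setChildNodeName(newName);
            session.update(assoc);
            // No explicit flush here: the interceptor increments the dirty count and the
            // next query (or a manual flushSession call) decides whether to flush.
        }
    }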
- * The flush data is kept as a transaction-local resource. For this reason, all calls must be made - * in the context of a transaction. For the same reason, the methods on the FlushData are - * not synchronized as access is only available to one thread. - *
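The transaction-local binding reads roughly as sketched below; it mirrors the getFlushData() method further down in this class, with a hypothetical resource key and a plain int[] standing in for FlushData.

    import org.alfresco.repo.transaction.AlfrescoTransactionSupport;

    // Sketch of the transaction-local resource idiom; the key and holder type are illustrative.
    class TransactionLocalSketch
    {
        private static final String KEY_DIRTY_COUNT = "Example.DirtyCount";

        static int[] getDirtyCount()
        {
            // Requires an active transaction, which is why callers must be transactional.
            int[] count = (int[]) AlfrescoTransactionSupport.getResource(KEY_DIRTY_COUNT);
            if (count == null)
            {
                count = new int[1];
                AlfrescoTransactionSupport.bindResource(KEY_DIRTY_COUNT, count);
            }
            return count;
        }
    }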
- * It is also possible to {@link #flushSession(Session) flush the session} manually. Using this method - * allows the dirty count to be updated properly, thus avoiding unnecessary flushing. - *
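A minimal sketch of the manual path, under the same assumptions: flushSession(Session) flushes only when the dirty count is non-zero, while the boolean overload forces the flush. The surrounding DAO method is hypothetical.

    import org.alfresco.repo.domain.hibernate.DirtySessionAnnotation;
    import org.alfresco.repo.domain.hibernate.DirtySessionMethodInterceptor;
    import org.hibernate.Session;

    // Hypothetical caller of the manual flush helpers declared in this class.
    class ExampleManualFlush
    {
        @DirtySessionAnnotation(markDirty = true)
        public void applyBulkChange(Session session)
        {
            // ... updates to already-loaded entities happen here ...
            DirtySessionMethodInterceptor.flushSession(session);
            // or, if changes were made outside the annotated DAO methods:
            // DirtySessionMethodInterceptor.flushSession(session, true);
        }
    }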
- * To trace failures caused by data flushes, it is necessary to track methods called and values passed - * that lead to the session being marked as dirty. If the flush fails or if a method call fails then - * the stacks and method values will be dumped. Turn on trace debugging for this:
- * <pre>
- *    log4j.logger.org.alfresco.repo.domain.hibernate.DirtySessionMethodInterceptor.trace=DEBUG
- * </pre>
- * - * @see #setQueryFlushMode(Session, Query) - * @see #flushSession(Session) - * - * @author Derek Hulley - * @since 2.1.5 - */ -public class DirtySessionMethodInterceptor implements MethodInterceptor -{ - private static final String KEY_FLUSH_DATA = "FlushIfRequiredMethodInterceptor.FlushData"; - - private static Log logger = LogFactory.getLog(DirtySessionMethodInterceptor.class); - private static final boolean loggerDebugEnabled; - private static Log traceLogger = LogFactory.getLog(DirtySessionMethodInterceptor.class.getName() + ".trace"); - private static final boolean traceLoggerDebugEnabled; - static - { - loggerDebugEnabled = logger.isDebugEnabled(); - traceLoggerDebugEnabled = traceLogger.isDebugEnabled(); - if (traceLoggerDebugEnabled) - { - traceLogger.warn("Trace logging is enabled and will affect performance"); - } - } - - /** - * Keep track of methods that have been warned about, i.e. methods that are not annotated. - */ - private static Set unannotatedMethodNames; - static - { - unannotatedMethodNames = Collections.synchronizedSet(new HashSet(0)); - } - - /** - * Data on whether the session is dirty or not. - * - * @author Derek Hulley - */ - private static class FlushData - { - private int dirtyCount; - private Stack> methodStack; - private List traceStacks; - private FlushData() - { - dirtyCount = 0; - methodStack = new Stack>(); - } - @Override - public String toString() - { - StringBuilder sb = new StringBuilder(64); - sb.append("FlushData") - .append("[dirtyCount=").append(dirtyCount) - .append(", methodStack=").append(methodStack) - .append("]"); - return sb.toString(); - } - public void incrementDirtyCount() - { - dirtyCount++; - } - public boolean isDirty() - { - return dirtyCount > 0; - } - public void resetDirtyCount() - { - dirtyCount = 0; - traceStacks = null; - } - public void pushMethod(String methodName, boolean isAnnotated) - { - methodStack.push(new Pair(methodName, Boolean.valueOf(isAnnotated))); - } - public Pair popMethod() - { - return methodStack.pop(); - } - public Pair currentMethod() - { - return methodStack.peek(); - } - /** - * @return Returns true if all the methods in the method stack are annotated, - * otherwise false - */ - public boolean isStackAnnotated() - { - for (Pair stackElement : methodStack) - { - if (stackElement.getSecond().equals(Boolean.FALSE)) - { - // Found one that was not annotated - return false; - } - } - // All were annotated - return true; - } - public void addTraceStack(MethodInvocation method) - { - Exception e = new Exception(); - e.fillInStackTrace(); - StringBuilder sb = new StringBuilder(2048); - sb.append(" Method: ").append(method.getMethod().getName()); - Object[] arguments = method.getArguments(); - for (int i = 0; i < arguments.length; i++) - { - String argumentStr; - try - { - argumentStr = arguments[i] == null ? "NULL" : arguments[i].toString(); - } - catch (Throwable ee) - { - argumentStr = "(10); - } - traceStacks.add(sb.toString()); - } - public List getTraceStacks() - { - return (traceStacks == null) ? Collections.emptyList() : traceStacks; - } - } - - /** - * @return Returns the transaction-local flush data - */ - private static FlushData getFlushData() - { - FlushData flushData = (FlushData) AlfrescoTransactionSupport.getResource(KEY_FLUSH_DATA); - if (flushData == null) - { - flushData = new FlushData(); - AlfrescoTransactionSupport.bindResource(KEY_FLUSH_DATA, flushData); - } - return flushData; - } - - /** - * Set the query flush mode according to whether the session is dirty or not. 
- * - * @param session the Hibernate session - * @param query the Hibernate query that will be issued - */ - public static void setQueryFlushMode(Session session, Query query) - { - FlushData flushData = DirtySessionMethodInterceptor.getFlushData(); - - // If all the methods in the method stack are annotated, then we can adjust the query and - // play with the session - if (!flushData.isStackAnnotated()) - { - if (loggerDebugEnabled) - { - logger.debug( - "Method stack is not annotated. Not setting query flush mode: \n" + - " Flush Data: " + flushData); - } - return; - } - - // The stack is fully annotated, so flush if required and set the flush mode on the query - if (loggerDebugEnabled) - { - logger.debug( - "Setting query flush mode: \n" + - " Query: " + query.getQueryString() + "\n" + - " Dirty: " + flushData); - } - - if (flushData.isDirty()) - { - // Flush the session - session.flush(); - // Reset the dirty state - flushData.resetDirtyCount(); - } - // Adjust the query flush mode - query.setFlushMode(FlushMode.MANUAL); - } - - public static void setCriteriaFlushMode(Session session, Criteria criteria) - { - FlushData flushData = DirtySessionMethodInterceptor.getFlushData(); - - // If all the methods in the method stack are annotated, then we can adjust the query and - // play with the session - if (!flushData.isStackAnnotated()) - { - if (loggerDebugEnabled) - { - logger.debug( - "Method stack is not annotated. Not setting query flush mode: \n" + - " Flush Data: " + flushData); - } - return; - } - - // The stack is fully annotated, so flush if required and set the flush mode on the query - if (loggerDebugEnabled) - { - logger.debug( - "Setting query flush mode: \n" + - " Criteria: " + criteria.toString() + "\n" + - " Dirty: " + flushData); - } - - if (flushData.isDirty()) - { - // Flush the session - session.flush(); - // Reset the dirty state - flushData.resetDirtyCount(); - } - // Adjust the query flush mode - criteria.setFlushMode(FlushMode.MANUAL); - } - - /** - * Manually mark the session as dirty. - */ - public static void setSessionDirty() - { - FlushData flushData = DirtySessionMethodInterceptor.getFlushData(); - flushData.incrementDirtyCount(); - } - - /** - * Flush and reset the dirty count for the current transaction. The session is - * only flushed if it currently dirty. - * - * @param session the Hibernate session - */ - public static void flushSession(Session session) - { - flushSession(session, false); - } - - /** - * Flush and reset the dirty count for the current transaction. - * Use this one if you know that the session has changeds that might not - * have been recorded by the DAO interceptors. - * - * @param session the Hibernate session - * @param force true to force a flush. 
- */ - public static void flushSession(Session session, boolean force) - { - FlushData flushData = DirtySessionMethodInterceptor.getFlushData(); - if (force) - { - if (loggerDebugEnabled) - { - logger.debug( - "Flushing session forcefully: \n" + - " Dirty: " + flushData); - } - session.flush(); - flushData.resetDirtyCount(); - } - else - { - if (flushData.isDirty()) - { - if (loggerDebugEnabled) - { - logger.debug( - "Flushing dirty session: \n" + - " Dirty: " + flushData); - } - session.flush(); - flushData.resetDirtyCount(); - } - else - { - if (loggerDebugEnabled) - { - logger.debug( - "Session is not dirty - no flush: \n" + - " Dirty: " + flushData); - } - } - } - } - - /** Default constructor */ - public DirtySessionMethodInterceptor() - { - } - - public Object invoke(MethodInvocation invocation) throws Throwable - { - Method method = invocation.getMethod(); - String methodName = method.getName(); - - // Get the flush and dirty mark requirements for the call - DirtySessionAnnotation annotation = method.getAnnotation(DirtySessionAnnotation.class); - boolean markDirty = false; - if (annotation != null) - { - markDirty = annotation.markDirty(); - } - else if (unannotatedMethodNames.add(methodName)) - { - logger.warn("Method has not been annotated with the DirtySessionAnnotation: " + method); - } - - FlushData flushData = DirtySessionMethodInterceptor.getFlushData(); - - // If we are to mark it dirty and we are tracing, then record the stacks - if (markDirty && traceLoggerDebugEnabled) - { - flushData.addTraceStack(invocation); - } - - boolean isAnnotated = (annotation != null); - Object ret = null; - try - { - // Push the method onto the stack - flushData.pushMethod(methodName, isAnnotated); - - if (loggerDebugEnabled) - { - logger.debug( - "Flush state and parameters for DirtySessionInterceptor: \n" + - " Method: " + methodName + "\n" + - " Annotated: MARK-DIRTY=" + markDirty + "\n" + - " Session State: " + flushData); - } - - // Do the call - try - { - ret = invocation.proceed(); - } - catch (Throwable e) - { - // If we are tracing, then dump the current dirty stack - if (traceLoggerDebugEnabled) - { - traceLogger.debug("Dumping stack traces after exception: " + e.getMessage()); - for (String stackTrace : flushData.getTraceStacks()) - { - traceLogger.debug("\n" + stackTrace); - } - } - // Rethrow - throw e; - } - - if (markDirty) - { - flushData.incrementDirtyCount(); - } - } - finally - { - // Restore the dirty session awareness state - flushData.popMethod(); - } - - // Done - return ret; - } -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelper.java b/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelper.java deleted file mode 100644 index d4bedef17a..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelper.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.hibernate; - -import java.util.List; - -import org.alfresco.repo.transaction.AlfrescoTransactionSupport; -import org.springframework.orm.hibernate3.support.HibernateDaoSupport; - -/** - * Utililty support against hibernate sessions. Supported by a super event listener which is registered on the even - * listener of the hibernate session. - * - * @author andyh - */ -public class HibernateSessionHelper extends HibernateDaoSupport implements HibernateSessionSupport -{ - /** - * - */ - private static final long serialVersionUID = -2532286150392812816L; - private static final String HIBERNATE_SESSION_EVENT_LISTENER = "HibernateSessionEventListener"; - - public void mark() - { - HibernateSessionHelperResourceProvider resource = getResource(); - resource.mark(getSession()); - } - - public void mark(String label) - { - HibernateSessionHelperResourceProvider resource = getResource(); - resource.mark(getSession(), label); - } - - public void reset() - { - HibernateSessionHelperResourceProvider resource = getResource(); - resource.reset(getSession()); - } - - public void reset(String label) - { - HibernateSessionHelperResourceProvider resource = getResource(); - resource.reset(getSession(), label); - } - - public void removeMark() - { - HibernateSessionHelperResourceProvider resource = getResource(); - resource.removeMark(getSession()); - } - - public void removeMark(String label) - { - HibernateSessionHelperResourceProvider resource = getResource(); - resource.removeMark(getSession(), label); - } - - public void resetAndRemoveMark() - { - HibernateSessionHelperResourceProvider resource = getResource(); - resource.resetAndRemoveMark(getSession()); - } - - public void resetAndRemoveMark(String label) - { - HibernateSessionHelperResourceProvider resource = getResource(); - resource.resetAndRemoveMark(getSession(), label); - } - - public List getMarks() - { - HibernateSessionHelperResourceProvider resource = getResource(); - return resource.getMarks(getSession()); - } - - public String getCurrentMark() - { - HibernateSessionHelperResourceProvider resource = getResource(); - return resource.getCurrentMark(); - } - - public static HibernateSessionHelperResourceProvider getResource() - { - HibernateSessionHelperResourceProvider listener = (HibernateSessionHelperResourceProvider) AlfrescoTransactionSupport.getResource(HIBERNATE_SESSION_EVENT_LISTENER); - if (listener == null) - { - listener = new HibernateSessionHelperResource(); - AlfrescoTransactionSupport.bindResource(HIBERNATE_SESSION_EVENT_LISTENER, listener); - } - return listener; - } - - - - -} \ No newline at end of file diff --git a/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperResource.java b/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperResource.java deleted file mode 100644 index fdbf15c8a1..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperResource.java +++ /dev/null @@ -1,261 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.hibernate; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Set; - -import org.alfresco.util.GUID; -import org.hibernate.Session; -import org.hibernate.engine.EntityKey; - -/** - * Support to (optionally) listen to hibernate events generated by a hibernate session. The tracking is bound to a - * transaction resource - * - * @author andyh - */ -public class HibernateSessionHelperResource implements HibernateSessionHelperResourceProvider -{ - LinkedHashMap> marks = new LinkedHashMap>(); - - String currentMark = null; - - HibernateSessionHelperResource() - { - - } - - public String getCurrentMark() - { - return currentMark; - } - - public List getMarks(Session session) - { - ArrayList answer = new ArrayList(marks.size()); - for (String key : marks.keySet()) - { - answer.add(key); - } - return answer; - } - - public void mark(Session session) - { - String guid = GUID.generate(); - mark(session, guid); - } - - @SuppressWarnings("unchecked") - public void mark(Session session, String label) - { - if (label == null) - { - throw new HibernateSessionHelperResourceException("Null key is not supported"); - - } - if (marks.containsKey(label)) - { - throw new HibernateSessionHelperResourceException("Key already exists - " + label); - } - - if (marks.size() == 0) - { - SessionSizeResourceManager.setDisableInTransaction(); - } - - HashSet mark = new HashSet((Set) session.getStatistics().getEntityKeys()); - - // If the mark is too large, then the flush process will be excessive. - if (mark.size() > 1000) - { - // The session is to big. Use the mark to as a basis for cleaning out the session. 
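                // The snapshot taken above already exceeds 1000 entity keys, so keeping it as a
                // mark would make later resets expensive. With no earlier mark the whole session is
                // cleared; otherwise the session is rewound to the previous mark, and the snapshot
                // is rebuilt from the now smaller session before being stored under the new label.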
- if (currentMark == null) - { - // The session is just too big - SessionSizeResourceManager.clear(session); - } - else - { - reset(session); - } - // Get the mark list again - mark = new HashSet((Set) session.getStatistics().getEntityKeys()); - } - - marks.put(label, mark); - currentMark = label; - } - - public void removeMark(Session session) - { - if (currentMark != null) - { - removeMark(session, currentMark); - } - else - { - throw new HibernateSessionHelperResourceException("No current mark"); - } - - } - - public void removeMark(Session session, String label) - { - if (label == null) - { - throw new HibernateSessionHelperResourceException("Null key is not supported"); - - } - if (!marks.containsKey(label)) - { - throw new HibernateSessionHelperResourceException("Key does not exist - " + label); - } - - if (marks.size() > 0) - { - marks.remove(label); - - if (label.equals(currentMark)) - { - currentMark = getLastMarkOrNull(); - } - } - - if (marks.size() == 0) - { - SessionSizeResourceManager.setEnableInTransaction(); - } - } - - public void reset(Session session) - { - if (currentMark != null) - { - doResetAndRemove(session, currentMark, false); - } - else - { - throw new HibernateSessionHelperResourceException("No current mark"); - } - } - - public void reset(Session session, String label) - { - doResetAndRemove(session, label, false); - } - - public void resetAndRemoveMark(Session session) - { - if (currentMark != null) - { - doResetAndRemove(session, currentMark, true); - } - else - { - throw new HibernateSessionHelperResourceException("No current mark"); - } - } - - public void resetAndRemoveMark(Session session, String label) - { - doResetAndRemove(session, label, true); - } - - @SuppressWarnings("unchecked") - private void doResetAndRemove(Session session, String label, boolean remove) - { - if (label == null) - { - throw new HibernateSessionHelperResourceException("Null key is not supported"); - - } - if (!marks.containsKey(label)) - { - throw new HibernateSessionHelperResourceException("Key does not exist - " + label); - } - if (marks.size() > 0) - { - session.flush(); - - Set check = marks.get(label); - Set current = new HashSet((Set) session.getStatistics().getEntityKeys()); - - Set toEvict = new HashSet(Math.max((int) (current.size() / .75f) + 1, 16)); - - for (EntityKey key : current) - { - if (!check.contains(key)) - { - Object val = session.get(key.getEntityName(), key.getIdentifier()); - if (val != null) - { - toEvict.add(val); - } - } - } - - for (Object evitee : toEvict) - { - session.evict(evitee); - } - - String last; - while ((last = getLastMarkOrNull()) != null) - { - if (!label.equals(last)) - { - marks.remove(last); - } - else - { - if (remove) - { - marks.remove(last); - } - break; - } - } - - currentMark = getLastMarkOrNull(); - - if (marks.size() == 0) - { - SessionSizeResourceManager.setEnableInTransaction(); - } - - //System.out.println("Removed "+marks.size()+" "+label); - } - } - - private String getLastMarkOrNull() - { - String mark = null; - for (String key : marks.keySet()) - { - mark = key; - } - return mark; - } - -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperResourceException.java b/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperResourceException.java deleted file mode 100644 index 6269654b2d..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperResourceException.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software 
Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.hibernate; - -import org.alfresco.error.AlfrescoRuntimeException; - -public class HibernateSessionHelperResourceException extends AlfrescoRuntimeException -{ - - /** - * - */ - private static final long serialVersionUID = 2935681199033295625L; - - public HibernateSessionHelperResourceException(String msgId, Object[] msgParams, Throwable cause) - { - super(msgId, msgParams, cause); - // TODO Auto-generated constructor stub - } - - public HibernateSessionHelperResourceException(String msgId, Object[] msgParams) - { - super(msgId, msgParams); - // TODO Auto-generated constructor stub - } - - public HibernateSessionHelperResourceException(String msgId, Throwable cause) - { - super(msgId, cause); - // TODO Auto-generated constructor stub - } - - public HibernateSessionHelperResourceException(String msgId) - { - super(msgId); - // TODO Auto-generated constructor stub - } - -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperResourceProvider.java b/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperResourceProvider.java deleted file mode 100644 index 397f78c5e0..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperResourceProvider.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . 
- */ -package org.alfresco.repo.domain.hibernate; - -import java.util.List; - -import org.hibernate.Session; - -public interface HibernateSessionHelperResourceProvider -{ - public void mark(Session session); - - public void mark(Session session, String label); - - public void reset(Session session); - - public void reset(Session session, String label); - - public void removeMark(Session session); - - public void removeMark(Session session, String label); - - public void resetAndRemoveMark(Session session); - - public void resetAndRemoveMark(Session session, String label); - - public List getMarks(Session session); - - public String getCurrentMark(); -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperTest.java b/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperTest.java deleted file mode 100644 index 1442426d5c..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionHelperTest.java +++ /dev/null @@ -1,638 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.hibernate; - -import java.io.Serializable; -import java.util.Date; -import java.util.Set; - -import javax.transaction.UserTransaction; - -import junit.framework.TestCase; - -import org.alfresco.model.ContentModel; -import org.alfresco.repo.domain.AuditableProperties; -import org.alfresco.repo.domain.Node; -import org.alfresco.repo.domain.qname.QNameDAO; -import org.alfresco.repo.domain.Server; -import org.alfresco.repo.domain.Store; -import org.alfresco.repo.transaction.AlfrescoTransactionSupport; -import org.alfresco.service.ServiceRegistry; -import org.alfresco.service.cmr.repository.StoreRef; -import org.alfresco.service.transaction.TransactionService; -import org.alfresco.util.ApplicationContextHelper; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.engine.EntityKey; -import org.springframework.context.ApplicationContext; -import org.springframework.orm.hibernate3.SessionFactoryUtils; - -public class HibernateSessionHelperTest extends TestCase -{ - private static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext(); - - private UserTransaction txn; - private SessionFactory sessionFactory; - - @Override - protected void setUp() throws Exception - { - sessionFactory = (SessionFactory) ctx.getBean("sessionFactory"); - ServiceRegistry serviceRegistry = (ServiceRegistry) ctx.getBean(ServiceRegistry.SERVICE_REGISTRY); - TransactionService transactionService = serviceRegistry.getTransactionService(); - txn = transactionService.getUserTransaction(); - txn.begin(); - - // force a flush to ensure that the database updates succeed - try - { - getSession().flush(); - getSession().clear(); - } - catch (Throwable e) - { - e.printStackTrace(); - } - } - - private Session getSession() - { - 
return SessionFactoryUtils.getSession(sessionFactory, true); - } - - @Override - protected void tearDown() - { - if (txn != null) - { - try - { - txn.rollback(); - } - catch (Throwable e) - { - // Don't let this hide errors coming from the tests - e.printStackTrace(); - } - } - } - - public void testSimpleMark() - { - assertEquals(0, getSession().getStatistics().getEntityCount()); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - - StoreImpl store = new StoreImpl(); - store.setProtocol(StoreRef.PROTOCOL_WORKSPACE); - store.setIdentifier("TestWorkspace@" + getName() + " - " + System.currentTimeMillis()); - // persist so that it is present in the hibernate cache - getSession().save(store); - - assertEquals(1, getSession().getStatistics().getEntityCount()); - - Server server = (Server) getSession().get(ServerImpl.class, new Long(1)); - if (server == null) - { - server = new ServerImpl(); - server.setIpAddress("" + "i_" + System.currentTimeMillis()); - getSession().save(server); - } - - assertEquals(2, getSession().getStatistics().getEntityCount()); - - HibernateSessionHelper helper = (HibernateSessionHelper) ctx.getBean("hibernateSessionHelper"); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - helper.mark(); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(1, helper.getMarks().size()); - - TransactionImpl transaction = new TransactionImpl(); - transaction.setServer(server); - transaction.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); - Serializable txID = getSession().save(transaction); - - assertEquals(3, getSession().getStatistics().getEntityCount()); - - helper.reset(); - - assertEquals(2, getSession().getStatistics().getEntityCount()); - - getSession().get(TransactionImpl.class, txID); - - assertEquals(3, getSession().getStatistics().getEntityCount()); - - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - helper.resetAndRemoveMark(); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - - assertEquals(0, helper.getMarks().size()); - assertEquals(2, getSession().getStatistics().getEntityCount()); - - } - - public void testNestedMarks() - { - - assertEquals(0, getSession().getStatistics().getEntityCount()); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - - QNameDAO qnameDAO = (QNameDAO) ctx.getBean("qnameDAO"); - Long baseQNameId = qnameDAO.getOrCreateQName(ContentModel.TYPE_BASE).getFirst(); - - StoreImpl store = new StoreImpl(); - store.setProtocol(StoreRef.PROTOCOL_WORKSPACE); - store.setIdentifier("TestWorkspace@" + getName() + " - " + System.currentTimeMillis()); - // persist so that it is present in the hibernate cache - getSession().save(store); - - assertEquals(1, getSession().getStatistics().getEntityCount()); - - Server server = (Server) getSession().get(ServerImpl.class, new Long(1)); - if (server == null) - { - server = new ServerImpl(); - server.setIpAddress("" + "i_" + System.currentTimeMillis()); - getSession().save(server); - } - - TransactionImpl transaction = new TransactionImpl(); - transaction.setServer(server); - transaction.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); - getSession().save(transaction); - - HibernateSessionHelper helper = (HibernateSessionHelper)ctx.getBean("hibernateSessionHelper"); - - assertEquals(3, getSession().getStatistics().getEntityCount()); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - helper.mark(); - 
assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(1, helper.getMarks().size()); - - Node n1 = createNode(transaction, store, "1", baseQNameId); - - assertEquals(4, getSession().getStatistics().getEntityCount()); - helper.mark(); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(2, helper.getMarks().size()); - - Node n2 = createNode(transaction, store, "2", baseQNameId); - - assertEquals(5, getSession().getStatistics().getEntityCount()); - helper.mark(); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(3, helper.getMarks().size()); - - Node n3 = createNode(transaction, store, "3", baseQNameId); - - assertEquals(6, getSession().getStatistics().getEntityCount()); - helper.mark(); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(4, helper.getMarks().size()); - - Node n4 = createNode(transaction, store, "4", baseQNameId); - - assertEquals(7, getSession().getStatistics().getEntityCount()); - helper.mark(); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(5, helper.getMarks().size()); - - Node n5 = createNode(transaction, store, "5", baseQNameId); - - assertEquals(8, getSession().getStatistics().getEntityCount()); - - helper.reset(); - assertEquals(8, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(5, helper.getMarks().size()); - assertFalse(sessionContainsNode(n5)); - assertTrue(sessionContainsNode(n4)); - assertTrue(sessionContainsNode(n3)); - assertTrue(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - getSession().get(NodeImpl.class, n5.getId()); - assertEquals(9, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(5, helper.getMarks().size()); - assertTrue(sessionContainsNode(n5)); - assertTrue(sessionContainsNode(n4)); - assertTrue(sessionContainsNode(n3)); - assertTrue(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - - helper.reset(); - assertEquals(8, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(5, helper.getMarks().size()); - assertFalse(sessionContainsNode(n5)); - assertTrue(sessionContainsNode(n4)); - assertTrue(sessionContainsNode(n3)); - assertTrue(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - getSession().get(NodeImpl.class, n5.getId()); - assertEquals(9, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(5, helper.getMarks().size()); - assertTrue(sessionContainsNode(n5)); - assertTrue(sessionContainsNode(n4)); - assertTrue(sessionContainsNode(n3)); - assertTrue(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - - helper.resetAndRemoveMark(); - - assertEquals(8, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(4, helper.getMarks().size()); - assertFalse(sessionContainsNode(n5)); - assertTrue(sessionContainsNode(n4)); - assertTrue(sessionContainsNode(n3)); - assertTrue(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - - helper.resetAndRemoveMark(); - - assertEquals(7, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(3, helper.getMarks().size()); - 
assertFalse(sessionContainsNode(n5)); - assertFalse(sessionContainsNode(n4)); - assertTrue(sessionContainsNode(n3)); - assertTrue(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - - - helper.resetAndRemoveMark(); - - assertEquals(6, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(2, helper.getMarks().size()); - assertFalse(sessionContainsNode(n5)); - assertFalse(sessionContainsNode(n4)); - assertFalse(sessionContainsNode(n3)); - assertTrue(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - - helper.resetAndRemoveMark(); - - assertEquals(5, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(1, helper.getMarks().size()); - assertFalse(sessionContainsNode(n5)); - assertFalse(sessionContainsNode(n4)); - assertFalse(sessionContainsNode(n3)); - assertFalse(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - - helper.resetAndRemoveMark(); - - assertEquals(4, getSession().getStatistics().getEntityCount()); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(0, helper.getMarks().size()); - assertFalse(sessionContainsNode(n5)); - assertFalse(sessionContainsNode(n4)); - assertFalse(sessionContainsNode(n3)); - assertFalse(sessionContainsNode(n2)); - assertFalse(sessionContainsNode(n1)); - - try - { - helper.reset(); - fail("can not reset"); - } - catch(HibernateSessionHelperResourceException hshre) - { - - } - } - - public void testNamedMarks() - { - assertEquals(0, getSession().getStatistics().getEntityCount()); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - - StoreImpl store = new StoreImpl(); - store.setProtocol(StoreRef.PROTOCOL_WORKSPACE); - store.setIdentifier("TestWorkspace@" + getName() + " - " + System.currentTimeMillis()); - // persist so that it is present in the hibernate cache - getSession().save(store); - - assertEquals(1, getSession().getStatistics().getEntityCount()); - - Server server = (Server) getSession().get(ServerImpl.class, new Long(1)); - if (server == null) - { - server = new ServerImpl(); - server.setIpAddress("" + "i_" + System.currentTimeMillis()); - getSession().save(server); - } - - assertEquals(2, getSession().getStatistics().getEntityCount()); - - HibernateSessionHelper helper = (HibernateSessionHelper)ctx.getBean("hibernateSessionHelper"); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - helper.mark("One"); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(1, helper.getMarks().size()); - - TransactionImpl transaction = new TransactionImpl(); - transaction.setServer(server); - transaction.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); - Serializable txID = getSession().save(transaction); - - assertEquals(3, getSession().getStatistics().getEntityCount()); - - helper.reset("One"); - - assertEquals(2, getSession().getStatistics().getEntityCount()); - - getSession().get(TransactionImpl.class, txID); - - assertEquals(3, getSession().getStatistics().getEntityCount()); - - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - helper.resetAndRemoveMark("One"); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - - assertEquals(0, helper.getMarks().size()); - assertEquals(2, getSession().getStatistics().getEntityCount()); - - } - - - public void testNestedNamedMarks() - { - assertEquals(0, 
getSession().getStatistics().getEntityCount()); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - - QNameDAO qnameDAO = (QNameDAO) ctx.getBean("qnameDAO"); - Long baseQNameId = qnameDAO.getOrCreateQName(ContentModel.TYPE_BASE).getFirst(); - - StoreImpl store = new StoreImpl(); - store.setProtocol(StoreRef.PROTOCOL_WORKSPACE); - store.setIdentifier("TestWorkspace@" + getName() + " - " + System.currentTimeMillis()); - // persist so that it is present in the hibernate cache - getSession().save(store); - - assertEquals(1, getSession().getStatistics().getEntityCount()); - - Server server = (Server) getSession().get(ServerImpl.class, new Long(1)); - if (server == null) - { - server = new ServerImpl(); - server.setIpAddress("" + "i_" + System.currentTimeMillis()); - getSession().save(server); - } - - TransactionImpl transaction = new TransactionImpl(); - transaction.setServer(server); - transaction.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); - getSession().save(transaction); - - assertEquals(3, getSession().getStatistics().getEntityCount()); - - HibernateSessionHelper helper = (HibernateSessionHelper)ctx.getBean("hibernateSessionHelper"); - assertNull(helper.getCurrentMark()); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - helper.mark("One"); - assertEquals("One", helper.getCurrentMark()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(1, helper.getMarks().size()); - - Node n1 = createNode(transaction, store, "1", baseQNameId); - - assertEquals(4, getSession().getStatistics().getEntityCount()); - helper.mark("Two"); - assertEquals("Two", helper.getCurrentMark()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(2, helper.getMarks().size()); - - Node n2 = createNode(transaction, store, "2", baseQNameId); - - assertEquals(5, getSession().getStatistics().getEntityCount()); - helper.mark("Three"); - assertEquals("Three", helper.getCurrentMark()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(3, helper.getMarks().size()); - - Node n3 = createNode(transaction, store, "3", baseQNameId); - - assertEquals(6, getSession().getStatistics().getEntityCount()); - helper.mark("Four"); - assertEquals("Four", helper.getCurrentMark()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(4, helper.getMarks().size()); - - Node n4 = createNode(transaction, store, "4", baseQNameId); - - assertEquals(7, getSession().getStatistics().getEntityCount()); - helper.mark("Five"); - assertEquals("Five", helper.getCurrentMark()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(5, helper.getMarks().size()); - - Node n5 = createNode(transaction, store, "5", baseQNameId); - - assertEquals(9, getSession().getStatistics().getEntityCount()); - - helper.reset("Five"); - assertEquals(8, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(5, helper.getMarks().size()); - assertFalse(sessionContainsNode(n5)); - assertTrue(sessionContainsNode(n4)); - assertTrue(sessionContainsNode(n3)); - assertTrue(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - getSession().get(NodeImpl.class, n5.getId()); - assertEquals(9, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(5, helper.getMarks().size()); - assertTrue(sessionContainsNode(n5)); - 
assertTrue(sessionContainsNode(n4)); - assertTrue(sessionContainsNode(n3)); - assertTrue(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - - helper.reset("Five"); - assertEquals(8, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(5, helper.getMarks().size()); - assertFalse(sessionContainsNode(n5)); - assertTrue(sessionContainsNode(n4)); - assertTrue(sessionContainsNode(n3)); - assertTrue(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - getSession().get(NodeImpl.class, n5.getId()); - assertEquals(9, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(5, helper.getMarks().size()); - assertTrue(sessionContainsNode(n5)); - assertTrue(sessionContainsNode(n4)); - assertTrue(sessionContainsNode(n3)); - assertTrue(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - - assertEquals("Five", helper.getCurrentMark()); - helper.resetAndRemoveMark("Five"); - assertEquals("Four", helper.getCurrentMark()); - - assertEquals(8, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(4, helper.getMarks().size()); - assertFalse(sessionContainsNode(n5)); - assertTrue(sessionContainsNode(n4)); - assertTrue(sessionContainsNode(n3)); - assertTrue(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - - helper.resetAndRemoveMark("Three"); - assertEquals("Two", helper.getCurrentMark()); - - assertEquals(6, getSession().getStatistics().getEntityCount()); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(2, helper.getMarks().size()); - assertFalse(sessionContainsNode(n5)); - assertFalse(sessionContainsNode(n4)); - assertFalse(sessionContainsNode(n3)); - assertTrue(sessionContainsNode(n2)); - assertTrue(sessionContainsNode(n1)); - - helper.resetAndRemoveMark("One"); - assertNull(helper.getCurrentMark()); - - assertEquals(4, getSession().getStatistics().getEntityCount()); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(0, helper.getMarks().size()); - assertFalse(sessionContainsNode(n5)); - assertFalse(sessionContainsNode(n4)); - assertFalse(sessionContainsNode(n3)); - assertFalse(sessionContainsNode(n2)); - assertFalse(sessionContainsNode(n1)); - - try - { - helper.reset("One"); - fail("can not reset"); - } - catch(HibernateSessionHelperResourceException hshre) - { - - } - } - - public void voidTestRemove() - { - assertEquals(0, getSession().getStatistics().getEntityCount()); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - - StoreImpl store = new StoreImpl(); - store.setProtocol(StoreRef.PROTOCOL_WORKSPACE); - store.setIdentifier("TestWorkspace@" + getName() + " - " + System.currentTimeMillis()); - // persist so that it is present in the hibernate cache - getSession().save(store); - - assertEquals(1, getSession().getStatistics().getEntityCount()); - - Server server = (Server) getSession().get(ServerImpl.class, new Long(1)); - if (server == null) - { - server = new ServerImpl(); - server.setIpAddress("" + "i_" + System.currentTimeMillis()); - getSession().save(server); - } - - TransactionImpl transaction = new TransactionImpl(); - transaction.setServer(server); - transaction.setChangeTxnId(AlfrescoTransactionSupport.getTransactionId()); - getSession().save(transaction); - - assertEquals(3, getSession().getStatistics().getEntityCount()); - - 
HibernateSessionHelper helper = (HibernateSessionHelper)ctx.getBean("hibernateSessionHelper"); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - helper.mark("One"); - helper.mark("Two"); - helper.mark("Three"); - helper.mark("Four"); - helper.mark("Five"); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(5, helper.getMarks().size()); - assertEquals("Five", helper.getCurrentMark()); - - helper.removeMark(); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(4, helper.getMarks().size()); - assertEquals("Four", helper.getCurrentMark()); - - helper.removeMark("One"); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(3, helper.getMarks().size()); - assertEquals("Four", helper.getCurrentMark()); - - helper.removeMark(); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(2, helper.getMarks().size()); - assertEquals("Three", helper.getCurrentMark()); - - helper.removeMark("Two"); - assertTrue(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(1, helper.getMarks().size()); - assertEquals("Three", helper.getCurrentMark()); - - helper.removeMark("Three"); - assertFalse(SessionSizeResourceManager.isDisableInTransaction()); - assertEquals(0, helper.getMarks().size()); - assertNull(helper.getCurrentMark()); - } - - private Node createNode(TransactionImpl transaction, Store store, String uuid, Long typeQNameId) - { - // Create the Node - Node node = new NodeImpl(); - node.setStore(store); - node.setUuid(uuid); - node.setTypeQNameId(typeQNameId); - node.setTransaction(transaction); - node.setDeleted(false); - AuditableProperties ap = new AuditableProperties(); - node.setAuditableProperties(ap); - ap.setAuditValues("system", new Date(), false); - getSession().save(node); - - return node; - } - - @SuppressWarnings("unchecked") - private boolean sessionContainsNode(Node node) - { - Long nodeId = node.getId(); - Set keys = (Set)getSession().getStatistics().getEntityKeys(); - for(EntityKey key : keys) - { - if(key.getEntityName().equals(NodeImpl.class.getName())) - { - if(key.getIdentifier().equals(nodeId)) - { - return true; - } - } - } - return false; - } -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionSupport.java b/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionSupport.java deleted file mode 100644 index d3c2154a40..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/HibernateSessionSupport.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . 
- */ -package org.alfresco.repo.domain.hibernate; - -import java.util.List; - -public interface HibernateSessionSupport -{ - public void mark(); - - public void mark(String label); - - public void reset(); - - public void reset(String label); - - public void removeMark(); - - public void removeMark(String label); - - public void resetAndRemoveMark(); - - public void resetAndRemoveMark(String label); - - public List getMarks(); - - public String getCurrentMark(); -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/LifecycleAdapter.java b/source/java/org/alfresco/repo/domain/hibernate/LifecycleAdapter.java deleted file mode 100644 index 85f8b6491b..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/LifecycleAdapter.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.hibernate; - -import java.io.Serializable; - -import org.alfresco.error.AlfrescoRuntimeException; -import org.hibernate.CallbackException; -import org.hibernate.Session; -import org.hibernate.classic.Lifecycle; - -/** - * Helper base class providing lifecycle and other support - * - * @author Derek Hulley - */ -public abstract class LifecycleAdapter implements Lifecycle -{ - /** Helper */ - private Session session; - - /** - * @return Returns the session that this object was used in - */ - protected Session getSession() - { - if (session == null) - { - throw new AlfrescoRuntimeException("Hibernate entity is not part of a session: " + this); - } - return session; - } - - /** - * @return Returns NO_VETO always - */ - public boolean onDelete(Session session) throws CallbackException - { - return NO_VETO; - } - - /** NO OP */ - public void onLoad(Session session, Serializable id) - { - this.session = session; - } - - /** @return Returns NO_VETO always */ - public boolean onSave(Session session) throws CallbackException - { - this.session = session; - return NO_VETO; - } - - /** @return Returns NO_VETO always */ - public boolean onUpdate(Session session) throws CallbackException - { - this.session = session; - return NO_VETO; - } -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/LocaleUserType.java b/source/java/org/alfresco/repo/domain/hibernate/LocaleUserType.java deleted file mode 100644 index 34b79c3a8a..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/LocaleUserType.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.hibernate; - -import java.io.Serializable; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Types; -import java.util.Locale; - -import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter; -import org.alfresco.util.EqualsHelper; -import org.hibernate.HibernateException; -import org.hibernate.usertype.UserType; - -/** - * Custom type to hide the persistence of {@link java.util.Locale locale} instances. - * - * @author Derek Hulley - */ -public class LocaleUserType implements UserType -{ - private static int[] SQL_TYPES = new int[] {Types.VARCHAR}; - - public Class returnedClass() - { - return Locale.class; - } - - /** - * @see #SQL_TYPES - */ - public int[] sqlTypes() - { - return SQL_TYPES; - } - - public boolean isMutable() - { - return false; - } - - public boolean equals(Object x, Object y) throws HibernateException - { - return EqualsHelper.nullSafeEquals(x, y); - } - - public int hashCode(Object x) throws HibernateException - { - return x.hashCode(); - } - - public Object deepCopy(Object value) throws HibernateException - { - // the qname is immutable - return value; - } - - public Object nullSafeGet(ResultSet rs, String[] names, Object owner) throws HibernateException, SQLException - { - String localeStr = rs.getString(names[0]); - if (localeStr == null) - { - return null; - } - else - { - Locale locale = DefaultTypeConverter.INSTANCE.convert(Locale.class, localeStr); - return locale; - } - } - - public void nullSafeSet(PreparedStatement stmt, Object value, int index) throws HibernateException, SQLException - { - // we want to ensure that the value is consistent w.r.t. 
the use of '_' - if (value == null) - { - stmt.setNull(index, Types.VARCHAR); - } - else - { - String localeStr = value.toString(); - if (localeStr.length() < 6) - { - localeStr += "_"; - } - stmt.setString(index, localeStr); - } - } - - public Object replace(Object original, Object target, Object owner) throws HibernateException - { - // qname is immutable - return original; - } - - public Object assemble(Serializable cached, Object owner) throws HibernateException - { - // qname is serializable - return cached; - } - - public Serializable disassemble(Object value) throws HibernateException - { - // locale is serializable - return (Locale) value; - } -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml deleted file mode 100644 index f978bcb2d0..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/Node.hbm.xml +++ /dev/null @@ -1,241 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - select - store - from - org.alfresco.repo.domain.hibernate.StoreImpl as store - join store.rootNode - where - store.protocol = :protocol and - store.identifier = :identifier - - - - select - assoc, - parent - from - org.alfresco.repo.domain.hibernate.ChildAssocImpl as assoc - join assoc.parent as parent - where - assoc.child.id = :childId - - - - delete - from - org.alfresco.repo.domain.hibernate.ChildAssocImpl as assoc - where - assoc.child.id = :childId - - - - delete - from - org.alfresco.repo.domain.hibernate.ChildAssocImpl as assoc - where - assoc.parent.id = :parentId - - - - select - assoc - from - org.alfresco.repo.domain.hibernate.ChildAssocImpl as assoc - where - assoc.parent.id = :parentId - order by - assoc.index, - assoc.id - - - - = :minNodeId and - node.deleted = true and - txn.id <= :maxTxnId - order by - node.id asc - ]]> - - - diff --git a/source/java/org/alfresco/repo/domain/hibernate/NodeImpl.java b/source/java/org/alfresco/repo/domain/hibernate/NodeImpl.java deleted file mode 100644 index 251247f6a9..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/NodeImpl.java +++ /dev/null @@ -1,336 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . 
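The LocaleUserType removed above stores a java.util.Locale as a plain VARCHAR, appending a trailing '_' to short values (for example a bare language code) so stored locale strings keep a consistent shape. A minimal self-contained sketch of that round trip; the helper class and the parsing code are illustrative stand-ins, not Alfresco's DefaultTypeConverter.

import java.util.Locale;

public class LocaleStrings
{
    /** Convert a Locale to its stored form, e.g. Locale.ENGLISH -> "en_". */
    public static String toDbValue(Locale locale)
    {
        String localeStr = locale.toString();
        if (localeStr.length() < 6)
        {
            localeStr += "_";
        }
        return localeStr;
    }

    /** Convert a stored value back to a Locale, e.g. "en_GB" or "en_". */
    public static Locale fromDbValue(String dbValue)
    {
        String[] parts = dbValue.split("_", -1);
        String language = parts.length > 0 ? parts[0] : "";
        String country = parts.length > 1 ? parts[1] : "";
        String variant = parts.length > 2 ? parts[2] : "";
        return new Locale(language, country, variant);
    }

    public static void main(String[] args)
    {
        System.out.println(toDbValue(Locale.ENGLISH));                 // "en_"
        System.out.println(fromDbValue("en_GB").getDisplayName());     // "English (United Kingdom)"
    }
}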
- */ -package org.alfresco.repo.domain.hibernate; - -import java.io.Serializable; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.locks.ReentrantReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; -import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; - -import org.alfresco.repo.domain.AuditableProperties; -import org.alfresco.repo.domain.Node; -import org.alfresco.repo.domain.Store; -import org.alfresco.repo.domain.node.NodePropertyKey; -import org.alfresco.repo.domain.node.NodePropertyValue; -import org.alfresco.repo.domain.node.Transaction; -import org.alfresco.repo.domain.qname.QNameDAO; -import org.alfresco.service.cmr.repository.NodeRef; -import org.alfresco.service.namespace.QName; -import org.alfresco.util.EqualsHelper; - -/** - * Bean containing all the persistence data representing a node. - *

- * This implementation of the {@link org.alfresco.repo.domain.Node Node} interface is - * Hibernate specific. - * - * @author Derek Hulley - */ -public class NodeImpl extends LifecycleAdapter implements Node, Serializable -{ - private static final long serialVersionUID = -2101330674810283053L; - - private Long id; - private Long version; - private Store store; - private String uuid; - private Long typeQNameId; - private Long aclId; - private Transaction transaction; - private boolean deleted; - private Map properties; - private AuditableProperties auditableProperties; - - private transient ReadLock refReadLock; - private transient WriteLock refWriteLock; - private transient NodeRef nodeRef; - private transient QName typeQName; - - public NodeImpl() - { - ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); - refReadLock = lock.readLock(); - refWriteLock = lock.writeLock(); - - properties = new HashMap(5); - // Note auditableProperties starts null, as hibernate maps a component containing nulls to null and this would - // cause a lot of dirty checks to fail! - } - - /** - * Thread-safe caching of the reference is provided - */ - public NodeRef getNodeRef() - { - // first check if it is available - refReadLock.lock(); - try - { - if (nodeRef != null) - { - return nodeRef; - } - } - finally - { - refReadLock.unlock(); - } - // get write lock - refWriteLock.lock(); - try - { - // double check - if (nodeRef == null ) - { - nodeRef = new NodeRef(getStore().getStoreRef(), getUuid()); - } - return nodeRef; - } - finally - { - refWriteLock.unlock(); - } - } - - public QName getTypeQName(QNameDAO qnameDAO) - { - refReadLock.lock(); - try - { - if (typeQName != null) - { - return typeQName; - } - } - finally - { - refReadLock.unlock(); - } - refWriteLock.lock(); - try - { - typeQName = qnameDAO.getQName(typeQNameId).getSecond(); - return typeQName; - } - finally - { - refWriteLock.unlock(); - } - } - - public void setTypeQName(QNameDAO qnameDAO, QName qname) - { - refWriteLock.lock(); - try - { - Long typeQNameId = qnameDAO.getOrCreateQName(qname).getFirst(); - setTypeQNameId(typeQNameId); - } - finally - { - refWriteLock.unlock(); - } - } - - /** - * @see #getNodeRef() - */ - public String toString() - { - StringBuilder sb = new StringBuilder(50); - sb.append("Node") - .append("[id=").append(id) - .append(", ref=").append(getNodeRef()) - .append(", txn=").append(transaction) - .append(", deleted=").append(deleted) - .append("]"); - return sb.toString(); - } - - public boolean equals(Object obj) - { - if (obj == null) - { - return false; - } - else if (obj == this) - { - return true; - } - else if (!(obj instanceof Node)) - { - return false; - } - Node that = (Node) obj; - if (EqualsHelper.nullSafeEquals(id, that.getId())) - { - return true; - } - else - { - return (this.getNodeRef().equals(that.getNodeRef())); - } - } - - public int hashCode() - { - return getUuid().hashCode(); - } - - public Long getId() - { - return id; - } - - /** - * For Hibernate use - */ - @SuppressWarnings("unused") - private void setId(Long id) - { - this.id = id; - } - - public Store getStore() - { - return store; - } - - public void setStore(Store store) - { - refWriteLock.lock(); - try - { - this.store = store; - this.nodeRef = null; - } - finally - { - refWriteLock.unlock(); - } - } - - public String getUuid() - { - return uuid; - } - - public void setUuid(String uuid) - { - refWriteLock.lock(); - try - { - this.uuid = uuid; - this.nodeRef = null; - } - finally - { - refWriteLock.unlock(); - } - } - - public Long 
getVersion() - { - return version; - } - - /** - * For Hibernate use - */ - @SuppressWarnings("unused") - private void setVersion(Long version) - { - this.version = version; - } - - public Transaction getTransaction() - { - return transaction; - } - - public void setTransaction(Transaction transaction) - { - this.transaction = transaction; - } - - public boolean getDeleted() - { - return deleted; - } - - public void setDeleted(boolean deleted) - { - this.deleted = deleted; - } - - public Long getTypeQNameId() - { - return typeQNameId; - } - - public void setTypeQNameId(Long typeQNameId) - { - refWriteLock.lock(); - try - { - this.typeQNameId = typeQNameId; - this.typeQName = null; - } - finally - { - refWriteLock.unlock(); - } - } - - public Long getAclId() - { - return aclId; - } - - public void setAclId(Long aclId) - { - this.aclId = aclId; - } - - public Map getProperties() - { - return properties; - } - - /** - * For Hibernate use - */ - @SuppressWarnings("unused") - private void setProperties(Map properties) - { - this.properties = properties; - } - - public AuditableProperties getAuditableProperties() - { - return auditableProperties; - } - - public void setAuditableProperties(AuditableProperties auditableProperties) - { - this.auditableProperties = auditableProperties; - } -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/ServerImpl.java b/source/java/org/alfresco/repo/domain/hibernate/ServerImpl.java deleted file mode 100644 index 912376340c..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/ServerImpl.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.hibernate; - -import java.io.Serializable; - -import org.alfresco.repo.domain.Server; - -/** - * Bean containing all the persistence data representing a Server. - *
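NodeImpl above lazily derives and caches its NodeRef behind a ReentrantReadWriteLock: a read-lock fast path, a double check under the write lock, and invalidation of the cached value whenever a contributing field changes. The same pattern in isolation, sketched with an illustrative value type rather than the real Store/NodeRef classes:

import java.util.concurrent.locks.ReentrantReadWriteLock;

public class CachedNodeRef
{
    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    private String storeRef;
    private String uuid;
    private String nodeRef;             // derived value, rebuilt on demand

    public String getNodeRef()
    {
        lock.readLock().lock();         // fast path: value already built
        try
        {
            if (nodeRef != null)
            {
                return nodeRef;
            }
        }
        finally
        {
            lock.readLock().unlock();
        }
        lock.writeLock().lock();        // slow path: build the value
        try
        {
            if (nodeRef == null)        // double-check after acquiring the write lock
            {
                nodeRef = storeRef + "/" + uuid;
            }
            return nodeRef;
        }
        finally
        {
            lock.writeLock().unlock();
        }
    }

    public void setUuid(String uuid)
    {
        lock.writeLock().lock();
        try
        {
            this.uuid = uuid;
            this.nodeRef = null;        // invalidate the cached derived value
        }
        finally
        {
            lock.writeLock().unlock();
        }
    }
}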

- * This implementation of the {@link org.alfresco.repo.domain.Service Service} interface is - * Hibernate specific. - * - * @author Derek Hulley - */ -public class ServerImpl extends LifecycleAdapter implements Server, Serializable -{ - private static final long serialVersionUID = 8063452519040344479L; - - private Long id; - private Long version; - private String ipAddress; - - public ServerImpl() - { - } - - @Override - public String toString() - { - StringBuilder sb = new StringBuilder(50); - sb.append("Server") - .append("[id=").append(id) - .append(", ipAddress=").append(ipAddress) - .append("]"); - return sb.toString(); - } - - public Long getId() - { - return id; - } - - /** - * For Hibernate use - */ - @SuppressWarnings("unused") - private void setId(Long id) - { - this.id = id; - } - - public String getIpAddress() - { - return ipAddress; - } - - public Long getVersion() - { - return version; - } - - /** - * For Hibernate use - */ - @SuppressWarnings("unused") - private void setVersion(Long version) - { - this.version = version; - } - - public void setIpAddress(String ipAddress) - { - this.ipAddress = ipAddress; - } -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/SessionSizeResourceManager.java b/source/java/org/alfresco/repo/domain/hibernate/SessionSizeResourceManager.java deleted file mode 100644 index 706292b4ba..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/SessionSizeResourceManager.java +++ /dev/null @@ -1,261 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.hibernate; - -import java.lang.reflect.Method; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.alfresco.repo.transaction.AlfrescoTransactionSupport; -import org.alfresco.repo.transaction.AlfrescoTransactionSupport.TxnReadState; -import org.alfresco.util.resource.MethodResourceManager; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.hibernate.Session; -import org.hibernate.engine.EntityKey; -import org.hibernate.stat.SessionStatistics; -import org.springframework.orm.hibernate3.support.HibernateDaoSupport; - -/** - * A Hibernate-specific resource manager that ensures that the current Session's - * entity count doesn't exceed a given threshold. - *

- * NOTE: VERY IMPORTANT
- * Do not, under any circumstances, attach an instance of this class to an API that - * passes stateful objects back and forth. There must be no Session-linked - * objects up the stack from where this instance resides. Failure to observe this will - * most likely result in data loss of a sporadic nature. - * - * @see org.alfresco.repo.domain.hibernate.HibernateNodeTest#testPostCommitClearIssue() - * - * @author Derek Hulley - */ -public class SessionSizeResourceManager extends HibernateDaoSupport implements MethodResourceManager -{ - /** key to store the local flag to disable resource control during the current transaction */ - private static final String KEY_DISABLE_IN_TRANSACTION = "SessionSizeResourceManager.DisableInTransaction"; - - private static Log logger = LogFactory.getLog(SessionSizeResourceManager.class); - - /** Default 1000 */ - private int writeThreshold; - /** Default 10000 */ - private int readThreshold; - /** Default 3 */ - private int retentionFactor; - - /** - * Disable resource management for the duration of the current transaction. This is temporary - * and relies on an active transaction. - */ - public static void setDisableInTransaction() - { - AlfrescoTransactionSupport.bindResource(KEY_DISABLE_IN_TRANSACTION, Boolean.TRUE); - } - - /** - * Enable resource management for the duration of the current transaction. This is temporary - * and relies on an active transaction. - */ - public static void setEnableInTransaction() - { - AlfrescoTransactionSupport.bindResource(KEY_DISABLE_IN_TRANSACTION, Boolean.FALSE); - } - - /** - * @return Returns true if the resource management must be ignored in the current transaction. - * If false, the global setting will take effect. - * - * @see #setDisableInTransaction() - */ - public static boolean isDisableInTransaction() - { - Boolean disableInTransaction = (Boolean) AlfrescoTransactionSupport.getResource(KEY_DISABLE_IN_TRANSACTION); - if (disableInTransaction == null || disableInTransaction == Boolean.FALSE) - { - return false; - } - else - { - return true; - } - } - - /** - * Default public constructor required for bean instantiation. - */ - public SessionSizeResourceManager() - { - this.writeThreshold = 1000; - this.readThreshold = 10000; - this.retentionFactor = 3; - } - - /** - * Set the number of entities retained in the session for each one flushed; default 3. - * Set this to zero to remove all entities when the session is trimmed. - * - * @param retentionFactor the number of entities to keep for each entity removed - */ - public void setRetentionFactor(int retentionFactor) - { - this.retentionFactor = retentionFactor; - } - - /** - * Set the {@link Session#clear()} threshold for read-only transactions. - * If the number of entities and collections in the current session exceeds this number, - * then the session will be cleared. - *

- * Have you read the disclaimer? - * - * @param threshold the maximum number of entities and associations to keep in memory during read-only operations - * - * @see #writeThreshold - */ - public void setReadThreshold(int threshold) - { - this.readThreshold = threshold; - } - - /** - * Set the {@link Session#clear()} threshold for read-write transactions. - * If the number of entities and collections in the current session exceeds this number, - * then the session will be cleared. - *

- * Have you read the disclaimer? - * - * @param threshold the maximum number of entities and associations to keep in memory during write operations - * - * @see #writeThreshold - */ - public void setWriteThreshold(int threshold) - { - this.writeThreshold = threshold; - } - - public static final String KEY_COMMIT_STARTED = "SessionSizeResourceManager.commitStarted"; - public static void setCommitStarted() - { - AlfrescoTransactionSupport.bindResource(KEY_COMMIT_STARTED, Boolean.TRUE); - } - - public void manageResources( - Map methodStatsByMethod, - long transactionElapsedTimeNs, - Method currentMethod) - { - if (isDisableInTransaction()) - { - // Don't do anything - return; - } - int threshold = writeThreshold; - int retentionFactor = 0; - Boolean commitStarted = (Boolean) AlfrescoTransactionSupport.getResource(KEY_COMMIT_STARTED); - if (commitStarted != null || - AlfrescoTransactionSupport.getTransactionReadState() == TxnReadState.TXN_READ_ONLY) - { - threshold = readThreshold; - retentionFactor = this.retentionFactor; // Retain objects during read-only phase only - } - // We are go for interfering - Session session = getSession(false); - SessionStatistics stats = session.getStatistics(); - int entityCount = stats.getEntityCount(); - int collectionCount = stats.getCollectionCount(); - if ((entityCount + collectionCount) > threshold) - { - DirtySessionMethodInterceptor.flushSession(session, true); - selectivelyClear(session, stats, retentionFactor); - // session.clear(); - if (logger.isDebugEnabled()) - { - String msg = String.format( - "Cleared %5d entities and %5d collections from Hibernate Session (threshold %5d)", - entityCount, - collectionCount, - threshold); - logger.debug(msg); - } - } - } - - /** - * Clear the session now. - * - * @param session - */ - public static void clear(Session session) - { - SessionStatistics stats = session.getStatistics(); - selectivelyClear(session, stats, 0); - } - - @SuppressWarnings("unchecked") - private static void selectivelyClear(Session session, SessionStatistics stats, int retentionFactor) - { - if (logger.isDebugEnabled()) - { - logger.debug(stats); - } - Set keys = new HashSet((Set)stats.getEntityKeys()); - int retentionCount = 0; - for (EntityKey key : keys) - { - // This should probably be configurable but frankly the nauseous extrusion of Gavin King's - // programmatic alimentary tract (hibernate) will go away before this could make a difference. - String entityName = key.getEntityName(); - if (!entityName.startsWith("org.alfresco")) - { - // Leave non-Alfresco entities alone. JBPM bugs arise due to inconsistent flushing here. - continue; - } - else if (entityName.startsWith("org.alfresco.repo.workflow.jbpm")) - { - // Once again, JBPM flushing issue prevent us from throwing related entities away - continue; - } - else if (entityName.startsWith("org.alfresco.repo.domain.hibernate.QName")) - { - // QNames are heavily used - continue; - } - else if (entityName.startsWith("org.alfresco.repo.domain.hibernate.Store")) - { - // So are Stores - continue; - } - // Do we evict or retain? 
- if (retentionCount < retentionFactor) - { - retentionCount++; - continue; - } - retentionCount = 0; - // Flush every other instance - Object val = session.get(key.getEntityName(), key.getIdentifier()); - if (val != null) - { - session.evict(val); - } - } - } -} diff --git a/source/java/org/alfresco/repo/domain/hibernate/StoreImpl.java b/source/java/org/alfresco/repo/domain/hibernate/StoreImpl.java deleted file mode 100644 index a2e88da1e4..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/StoreImpl.java +++ /dev/null @@ -1,204 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.hibernate; - -import java.io.Serializable; -import java.util.concurrent.locks.ReentrantReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; -import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; - -import org.alfresco.repo.domain.Node; -import org.alfresco.repo.domain.Store; -import org.alfresco.service.cmr.repository.StoreRef; -import org.alfresco.util.EqualsHelper; - -/** - * Hibernate-specific implementation of the domain entity store. 
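The selectivelyClear() logic above trims the Hibernate session once the entity-and-collection count passes a threshold, skipping entity types that must stay resident and keeping one entity for every retentionFactor inspected. A condensed sketch of that eviction loop; it assumes the same Hibernate 3 Session/SessionStatistics/EntityKey API used above, and the skip-prefix handling is reduced to an illustrative parameter.

import java.util.HashSet;
import java.util.Set;

import org.hibernate.Session;
import org.hibernate.engine.EntityKey;
import org.hibernate.stat.SessionStatistics;

public class SelectiveEviction
{
    /**
     * Evict cached entities from the session, retaining one entity for every
     * 'retentionFactor' inspected and leaving the given entity-name prefixes alone.
     */
    @SuppressWarnings("unchecked")
    public static void evict(Session session, int retentionFactor, Set<String> skipPrefixes)
    {
        SessionStatistics stats = session.getStatistics();
        // Copy the keys: evicting while iterating the live key set would fail
        Set<EntityKey> keys = new HashSet<EntityKey>((Set<EntityKey>) stats.getEntityKeys());
        int retained = 0;
        for (EntityKey key : keys)
        {
            String entityName = key.getEntityName();
            boolean skip = false;
            for (String prefix : skipPrefixes)
            {
                if (entityName.startsWith(prefix))
                {
                    skip = true;        // e.g. heavily reused entities such as QNames and Stores
                    break;
                }
            }
            if (skip)
            {
                continue;
            }
            if (retained < retentionFactor)
            {
                retained++;             // keep this one
                continue;
            }
            retained = 0;
            Object entity = session.get(entityName, key.getIdentifier());
            if (entity != null)
            {
                session.evict(entity);
            }
        }
    }
}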
- * - * @author Derek Hulley - */ -public class StoreImpl implements Store, Serializable -{ - private static final long serialVersionUID = -5501292033972362796L; - - private Long id; - private String protocol; - private String identifier; - private Long version; - private Node rootNode; - - private transient ReadLock refReadLock; - private transient WriteLock refWriteLock; - private transient StoreRef storeRef; - - public StoreImpl() - { - ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); - refReadLock = lock.readLock(); - refWriteLock = lock.writeLock(); - } - - /** - * Lazily constructs StoreRef instance referencing this entity - */ - public StoreRef getStoreRef() - { - // first check if it is available - refReadLock.lock(); - try - { - if (storeRef != null) - { - return storeRef; - } - } - finally - { - refReadLock.unlock(); - } - // get write lock - refWriteLock.lock(); - try - { - // double check - if (storeRef == null ) - { - storeRef = new StoreRef(protocol, identifier); - } - return storeRef; - } - finally - { - refWriteLock.unlock(); - } - } - - /** - * @see #getStoreRef()() - */ - public String toString() - { - return getStoreRef().toString(); - } - - /** - * @see #getKey() - */ - public boolean equals(Object obj) - { - if (obj == null) - { - return false; - } - else if (obj == this) - { - return true; - } - else if (!(obj instanceof Store)) - { - return false; - } - Store that = (Store) obj; - return EqualsHelper.nullSafeEquals(this.getStoreRef(), that.getStoreRef()); - } - - /** - * @see #getKey() - */ - public int hashCode() - { - return protocol.hashCode() + identifier.hashCode(); - } - - public Long getId() - { - return id; - } - - /** - * For Hibernate use - */ - @SuppressWarnings("unused") - private void setId(Long id) - { - this.id = id; - } - - public String getProtocol() - { - return protocol; - } - - public void setProtocol(String protocol) - { - refWriteLock.lock(); - try - { - this.protocol = protocol; - this.storeRef = null; - } - finally - { - refWriteLock.unlock(); - } - } - - public String getIdentifier() - { - return identifier; - } - - public void setIdentifier(String identifier) - { - refWriteLock.lock(); - try - { - this.identifier = identifier; - this.storeRef = null; - } - finally - { - refWriteLock.unlock(); - } - } - - public Long getVersion() - { - return version; - } - - /** - * For Hibernate use - */ - @SuppressWarnings("unused") - private void setVersion(Long version) - { - this.version = version; - } - - public Node getRootNode() - { - return rootNode; - } - - public void setRootNode(Node rootNode) - { - this.rootNode = rootNode; - } -} \ No newline at end of file diff --git a/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml b/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml deleted file mode 100644 index cabca7af72..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/Transaction.hbm.xml +++ /dev/null @@ -1,215 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - select - server - from - org.alfresco.repo.domain.hibernate.ServerImpl as server - where - server.ipAddress = :ipAddress - - - - select - txn - from - org.alfresco.repo.domain.hibernate.TransactionImpl as txn - where - txn.id = :txnId - - - - select - min(txn.commitTimeMs) - from - org.alfresco.repo.domain.hibernate.TransactionImpl as txn - - - - select - max(txn.commitTimeMs) - from - org.alfresco.repo.domain.hibernate.TransactionImpl as txn - - - - - - - - = :fromTimeInclusive and - txn.commitTimeMs < 
:toTimeExclusive and - txn.id not in (:excludeTxnIds) and - txn.server.id not in (:excludeServerIds) - order by - txn.commitTimeMs asc, - txn.id asc - ]]> - - - - = :fromTimeInclusive and - txn.commitTimeMs < :toTimeExclusive and - txn.id not in (:excludeTxnIds) and - txn.server.id not in (:excludeServerIds) - order by - txn.commitTimeMs desc, - txn.id desc - ]]> - - - - - - - - select - count(txn.id) - from - org.alfresco.repo.domain.hibernate.TransactionImpl as txn - - - - select - count(node.uuid) - from - org.alfresco.repo.domain.hibernate.NodeImpl as node - join node.transaction as txn - where - txn.id = :txnId and - node.deleted = false - - - - select - count(node.uuid) - from - org.alfresco.repo.domain.hibernate.NodeImpl as node - join node.transaction as txn - where - txn.id = :txnId and - node.deleted = true - - - - select - node - from - org.alfresco.repo.domain.hibernate.NodeImpl as node - where - node.transaction.id = :txnId and - node.store.protocol = :protocol and - node.store.identifier = :identifier - - - - select - node - from - org.alfresco.repo.domain.hibernate.NodeImpl as node - where - node.transaction.id = :txnId - - - - = :minTxnId and - txn.commitTimeMs <= :maxCommitTime - order by - txn.id asc - ]]> - - - diff --git a/source/java/org/alfresco/repo/domain/hibernate/TransactionImpl.java b/source/java/org/alfresco/repo/domain/hibernate/TransactionImpl.java deleted file mode 100644 index fc0f05a72e..0000000000 --- a/source/java/org/alfresco/repo/domain/hibernate/TransactionImpl.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.domain.hibernate; - -import java.io.Serializable; -import java.util.Date; - -import org.alfresco.repo.domain.Server; -import org.alfresco.repo.domain.node.Transaction; -import org.springframework.extensions.surf.util.ISO8601DateFormat; - -/** - * Bean containing all the persistence data representing a Transaction. - *

- * This implementation of the {@link org.alfresco.repo.domain.Transaction Transaction} interface is - * Hibernate specific. - * - * @author Derek Hulley - */ -public class TransactionImpl extends LifecycleAdapter implements Transaction, Serializable -{ - private static final long serialVersionUID = -8264339795578077552L; - - private Long id; - private Long version; - private String changeTxnId; - private Long commitTimeMs; - private Server server; - - public TransactionImpl() - { - this.commitTimeMs = Long.valueOf(0); - } - - @Override - public String toString() - { - StringBuilder sb = new StringBuilder(50); - sb.append("Transaction") - .append("[id=").append(id) - .append(", txnTimeMs=").append(commitTimeMs == null ? "---" : ISO8601DateFormat.format(new Date(commitTimeMs))) - .append(", changeTxnId=").append(changeTxnId) - .append("]"); - return sb.toString(); - } - - public Long getId() - { - return id; - } - - /** - * For Hibernate use - */ - @SuppressWarnings("unused") - private void setId(Long id) - { - this.id = id; - } - - public Long getVersion() - { - return version; - } - - /** - * For Hibernate use - */ - @SuppressWarnings("unused") - private void setVersion(Long version) - { - this.version = version; - } - - public String getChangeTxnId() - { - return changeTxnId; - } - - public void setChangeTxnId(String changeTransactionId) - { - this.changeTxnId = changeTransactionId; - } - - public Long getCommitTimeMs() - { - return commitTimeMs; - } - - public void setCommitTimeMs(Long commitTimeMs) - { - this.commitTimeMs = commitTimeMs; - } - - public Server getServer() - { - return server; - } - - public void setServer(Server server) - { - this.server = server; - } -} diff --git a/source/java/org/alfresco/repo/domain/node/ChildAssocEntity.java b/source/java/org/alfresco/repo/domain/node/ChildAssocEntity.java index f6765a42e1..75a6e0110d 100644 --- a/source/java/org/alfresco/repo/domain/node/ChildAssocEntity.java +++ b/source/java/org/alfresco/repo/domain/node/ChildAssocEntity.java @@ -63,7 +63,7 @@ public class ChildAssocEntity private Boolean isPrimary; private int assocIndex; - // Suplemental query-related parameters + // Supplemental query-related parameters private List typeQNameIds; private List childNodeNameCrcs; private List childNodeTypeQNameIds; diff --git a/source/java/org/alfresco/repo/domain/patch/PatchDAO.java b/source/java/org/alfresco/repo/domain/patch/PatchDAO.java index 1df0005037..bc456d3842 100644 --- a/source/java/org/alfresco/repo/domain/patch/PatchDAO.java +++ b/source/java/org/alfresco/repo/domain/patch/PatchDAO.java @@ -19,11 +19,15 @@ package org.alfresco.repo.domain.patch; import java.util.List; +import java.util.Map; import org.alfresco.repo.domain.avm.AVMNodeEntity; import org.alfresco.repo.domain.contentdata.ContentDataDAO; import org.alfresco.service.cmr.repository.ContentData; +import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.StoreRef; +import org.alfresco.service.namespace.QName; +import org.alfresco.util.Pair; /** * Additional DAO services for patches @@ -136,4 +140,40 @@ public interface PatchDAO * @return Returns the names of authorities with incorrect CRC values */ public List getAuthoritiesWithNonUtf8Crcs(); + + /** + * @return Returns the number child association rows + */ + public int getChildAssocCount(); + + /** + * The results map contains: + *

+     * the child association and child node data needed to recalculate and fix
+     * the child node name CRC and QName CRC values.
+     * 
+ * @param minAssocId the minimum child assoc ID + * @param maxResults the number of child associations to fetch + * @return Returns child associations that need fixing + */ + public List> getChildAssocsForCrcFix(Long minAssocId, int maxResults); + + public int updateChildAssocCrc(Long assocId, Long childNodeNameCrc, Long qnameCrc); + + /** + * Query for a list of nodes that have a given type and share the same name pattern (SQL LIKE syntax) + * + * @param typeQName the node type + * @param namePattern the SQL LIKE pattern + * @return Returns the node ID and node name + */ + public List> getNodesOfTypeWithNamePattern(QName typeQName, String namePattern); } diff --git a/source/java/org/alfresco/repo/domain/patch/ibatis/PatchDAOImpl.java b/source/java/org/alfresco/repo/domain/patch/ibatis/PatchDAOImpl.java index 271fc1076f..88d0abf1b9 100644 --- a/source/java/org/alfresco/repo/domain/patch/ibatis/PatchDAOImpl.java +++ b/source/java/org/alfresco/repo/domain/patch/ibatis/PatchDAOImpl.java @@ -21,6 +21,7 @@ package org.alfresco.repo.domain.patch.ibatis; import java.sql.Connection; import java.sql.SQLException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -32,7 +33,10 @@ import org.alfresco.repo.domain.CrcHelper; import org.alfresco.repo.domain.avm.AVMNodeEntity; import org.alfresco.repo.domain.patch.AbstractPatchDAOImpl; import org.alfresco.repo.domain.qname.QNameDAO; +import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.cmr.repository.StoreRef; +import org.alfresco.service.namespace.QName; +import org.alfresco.util.Pair; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.orm.ibatis.SqlMapClientTemplate; @@ -59,20 +63,24 @@ public class PatchDAOImpl extends AbstractPatchDAOImpl private static final String SELECT_ADM_MAX_NODE_ID = "alfresco.patch.select_admMaxNodeId"; private static final String SELECT_AVM_NODES_WITH_OLD_CONTENT_PROPERTIES = "alfresco.patch.select_avmNodesWithOldContentProperties"; private static final String SELECT_ADM_OLD_CONTENT_PROPERTIES = "alfresco.patch.select_admOldContentProperties"; + private static final String SELECT_USERS_WITHOUT_USAGE_PROP = "alfresco.usage.select_GetUsersWithoutUsageProp"; private static final String SELECT_AUTHORITIES_AND_CRC = "alfresco.patch.select_authoritiesAndCrc"; + private static final String SELECT_PERMISSIONS_ALL_ACL_IDS = "alfresco.permissions.select_AllAclIds"; + private static final String SELECT_PERMISSIONS_USED_ACL_IDS = "alfresco.permissions.select_UsedAclIds"; + private static final String SELECT_CHILD_ASSOCS_COUNT = "alfresco.patch.select_allChildAssocsCount"; + private static final String SELECT_CHILD_ASSOCS_FOR_CRCS = "alfresco.patch.select_allChildAssocsForCrcs"; + private static final String SELECT_NODES_BY_TYPE_AND_NAME_PATTERN = "alfresco.patch.select_nodesByTypeAndNamePattern"; + private static final String UPDATE_ADM_OLD_CONTENT_PROPERTY = "alfresco.patch.update_admOldContentProperty"; private static final String UPDATE_CONTENT_MIMETYPE_ID = "alfresco.patch.update_contentMimetypeId"; private static final String UPDATE_AVM_NODE_LIST_NULLIFY_ACL = "alfresco.avm.update_AVMNodeList_nullifyAcl"; private static final String UPDATE_AVM_NODE_LIST_SET_ACL = "alfresco.avm.update_AVMNodeList_setAcl"; - - private static final String SELECT_USERS_WITHOUT_USAGE_PROP = "alfresco.usage.select_GetUsersWithoutUsageProp"; + private static final String 
UPDATE_CHILD_ASSOC_CRC = "alfresco.patch.update_childAssocCrc"; private static final String SELECT_PERMISSIONS_MAX_ACL_ID = "alfresco.permissions.select_MaxAclId"; private static final String SELECT_PERMISSIONS_DM_NODE_COUNT = "alfresco.permissions.select_DmNodeCount"; private static final String SELECT_PERMISSIONS_DM_NODE_COUNT_WITH_NEW_ACLS = "alfresco.permissions.select_DmNodeCountWherePermissionsHaveChanged"; - private static final String SELECT_PERMISSIONS_ALL_ACL_IDS = "alfresco.permissions.select_AllAclIds"; - private static final String SELECT_PERMISSIONS_USED_ACL_IDS = "alfresco.permissions.select_UsedAclIds"; private static final String DELETE_PERMISSIONS_UNUSED_ACES = "alfresco.permissions.delete_UnusedAces"; private static final String DELETE_PERMISSIONS_ACL_LIST = "alfresco.permissions.delete_AclList"; private static final String DELETE_PERMISSIONS_ACL_MEMBERS_FOR_ACL_LIST = "alfresco.permissions.delete_AclMembersForAclList"; @@ -438,4 +446,73 @@ public class PatchDAOImpl extends AbstractPatchDAOImpl // Done return results; } + + public int getChildAssocCount() + { + return (Integer) template.queryForObject(SELECT_CHILD_ASSOCS_COUNT); + } + + @SuppressWarnings("unchecked") + public List> getChildAssocsForCrcFix(Long minAssocId, int maxResults) + { + Long qnameId = qnameDAO.getOrCreateQName(ContentModel.PROP_NAME).getFirst(); + + IdsEntity entity = new IdsEntity(); + entity.setIdOne(qnameId); + entity.setIdTwo(minAssocId); + List> results = template.queryForList(SELECT_CHILD_ASSOCS_FOR_CRCS, entity, 0, maxResults); + // Done + return results; + } + + public int updateChildAssocCrc(Long assocId, Long childNodeNameCrc, Long qnameCrc) + { + Map params = new HashMap(); + params.put("id", assocId); + params.put("childNodeNameCrc", childNodeNameCrc); + params.put("qnameCrc", qnameCrc); + return template.update(UPDATE_CHILD_ASSOC_CRC, params); + } + + public List> getNodesOfTypeWithNamePattern(QName typeQName, String namePattern) + { + Pair typeQNamePair = qnameDAO.getQName(typeQName); + if (typeQNamePair == null) + { + // No point querying + return Collections.emptyList(); + } + Long typeQNameId = typeQNamePair.getFirst(); + + Pair propQNamePair = qnameDAO.getQName(ContentModel.PROP_NAME); + if (propQNamePair == null) + { + return Collections.emptyList(); + } + Long propQNameId = propQNamePair.getFirst(); + + Map params = new HashMap(); + params.put("typeQNameId", typeQNameId); + params.put("propQNameId", propQNameId); + params.put("namePattern", namePattern); + + final List> results = new ArrayList>(500); + RowHandler rowHandler = new RowHandler() + { + @SuppressWarnings("unchecked") + public void handleRow(Object rowObject) + { + Map row = (Map) rowObject; + String protocol = (String) row.get("protocol"); + String identifier = (String) row.get("identifier"); + String uuid = (String) row.get("uuid"); + NodeRef nodeRef = new NodeRef(new StoreRef(protocol, identifier), uuid); + String name = (String) row.get("name"); + Pair pair = new Pair(nodeRef, name); + results.add(pair); + } + }; + template.queryWithRowHandler(SELECT_NODES_BY_TYPE_AND_NAME_PATTERN, params, rowHandler); + return results; + } } diff --git a/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java b/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java index f889238d9b..43b5cd8b2c 100644 --- a/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java +++ b/source/java/org/alfresco/repo/node/BaseNodeServiceTest.java @@ -40,9 +40,6 @@ import org.alfresco.repo.content.MimetypeMap; import 
org.alfresco.repo.dictionary.DictionaryComponent; import org.alfresco.repo.dictionary.DictionaryDAO; import org.alfresco.repo.dictionary.M2Model; -import org.alfresco.repo.domain.hibernate.ChildAssocImpl; -import org.alfresco.repo.domain.hibernate.NodeImpl; -import org.alfresco.repo.node.db.NodeDaoService; import org.alfresco.repo.node.integrity.IntegrityChecker; import org.alfresco.repo.policy.JavaBehaviour; import org.alfresco.repo.policy.PolicyComponent; @@ -76,7 +73,6 @@ import org.alfresco.util.BaseSpringTest; import org.alfresco.util.GUID; import org.alfresco.util.PropertyMap; import org.apache.commons.collections.map.SingletonMap; -import org.hibernate.Session; import org.springframework.context.ApplicationContext; import sun.security.action.GetBooleanAction; @@ -156,7 +152,6 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest protected TransactionService transactionService; protected RetryingTransactionHelper retryingTransactionHelper; protected AuthenticationComponent authenticationComponent; - protected NodeDaoService nodeDaoService; protected NodeService nodeService; /** populated during setup */ protected NodeRef rootNodeRef; @@ -426,28 +421,6 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest return ret; } - @SuppressWarnings("unchecked") - private int countNodesByReference(NodeRef nodeRef) - { - String query = - "select count(node.uuid)" + - " from " + - NodeImpl.class.getName() + " node" + - " where" + - " node.uuid = ? and" + - " node.deleted = false and" + - " node.store.protocol = ? and" + - " node.store.identifier = ?"; - Session session = getSession(); - List results = session.createQuery(query) - .setString(0, nodeRef.getId()) - .setString(1, nodeRef.getStoreRef().getProtocol()) - .setString(2, nodeRef.getStoreRef().getIdentifier()) - .list(); - Long count = (Long) results.get(0); - return count.intValue(); - } - /** * @return Returns a reference to the created store */ @@ -916,9 +889,7 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest QName.createQName("path1"), ContentModel.TYPE_CONTAINER); NodeRef nodeRef = assocRef.getChildRef(); - // count the nodes with the given id - int count = countNodesByReference(nodeRef); - assertEquals("Unexpected number of nodes present", 1, count); + assertTrue(nodeService.exists(nodeRef)); } public void testLargeStrings() throws Exception @@ -983,8 +954,8 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest NodeRef n8Ref = assocRefs.get(QName.createQName(BaseNodeServiceTest.NAMESPACE, "n6_p_n8")).getChildRef(); // control checks - assertEquals("n6 not present", 1, countNodesByReference(n6Ref)); - assertEquals("n8 not present", 1, countNodesByReference(n8Ref)); + assertTrue("n6 not present", nodeService.exists(n6Ref)); + assertTrue("n8 not present", nodeService.exists(n8Ref)); assertTrue("n8 exists failure", nodeService.exists(n8Ref)); assertEquals("n6 primary parent association not present on n3", 1, countChildrenOfNode(n3Ref)); assertEquals("n6 secondary parent association not present on n4", 1, countChildrenOfNode(n4Ref)); @@ -1000,8 +971,8 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest setComplete(); endTransaction(); - assertEquals("n6 not directly deleted", 0, countNodesByReference(n6Ref)); - assertEquals("n8 not cascade deleted", 0, countNodesByReference(n8Ref)); + assertFalse("n6 not directly deleted", nodeService.exists(n6Ref)); + assertFalse("n8 not cascade deleted", nodeService.exists(n8Ref)); assertEquals("n6 primary parent association not 
removed from n3", 0, countChildrenOfNode(n3Ref)); assertEquals("n6 secondary parent association not removed from n4", 0, countChildrenOfNode(n4Ref)); assertEquals("n8 secondary parent association not removed from n7", 0, countChildrenOfNode(n7Ref)); @@ -1123,11 +1094,11 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest // delete n1 nodeService.deleteNode(n1Ref); - assertEquals("Node not directly deleted", 0, countNodesByReference(n1Ref)); - assertEquals("Node not cascade deleted", 0, countNodesByReference(n3Ref)); - assertEquals("Node incorrectly cascade deleted", 1, countNodesByReference(n4Ref)); - assertEquals("Node not cascade deleted", 0, countNodesByReference(n6Ref)); - assertEquals("Node not cascade deleted", 0, countNodesByReference(n8Ref)); + assertFalse("Node not directly deleted", nodeService.exists(n1Ref)); + assertFalse("Node not cascade deleted", nodeService.exists(n3Ref)); + assertTrue("Node incorrectly cascade deleted", nodeService.exists(n4Ref)); + assertFalse("Node not cascade deleted", nodeService.exists(n6Ref)); + assertFalse("Node not cascade deleted", nodeService.exists(n8Ref)); // check before delete delete policy has been called assertTrue("n1Ref before delete policy not called", beforeDeleteNodeRefs.contains(n1Ref)); @@ -1263,24 +1234,10 @@ public abstract class BaseNodeServiceTest extends BaseSpringTest } } - - @SuppressWarnings("unchecked") private int countChildrenOfNode(NodeRef nodeRef) { - String query = - "select childAssoc" + - " from " + - ChildAssocImpl.class.getName() + " childAssoc" + - " join childAssoc.parent node" + - " where node.uuid = ? and node.store.protocol = ? and node.store.identifier = ?"; - Session session = getSession(); - List results = session.createQuery(query) - .setString(0, nodeRef.getId()) - .setString(1, nodeRef.getStoreRef().getProtocol()) - .setString(2, nodeRef.getStoreRef().getIdentifier()) - .list(); - int count = results.size(); - return count; + List children = nodeService.getChildAssocs(nodeRef); + return children.size(); } public void testAddBogusChild() throws Exception diff --git a/source/java/org/alfresco/repo/node/FullNodeServiceTest.java b/source/java/org/alfresco/repo/node/FullNodeServiceTest.java index beb912d88d..fc184269b9 100644 --- a/source/java/org/alfresco/repo/node/FullNodeServiceTest.java +++ b/source/java/org/alfresco/repo/node/FullNodeServiceTest.java @@ -30,7 +30,6 @@ import java.util.Map; import javax.transaction.UserTransaction; -import org.alfresco.repo.node.db.DbNodeServiceImpl; import org.alfresco.service.cmr.dictionary.DictionaryException; import org.alfresco.service.cmr.repository.MLText; import org.alfresco.service.cmr.repository.NodeRef; @@ -50,10 +49,6 @@ public class FullNodeServiceTest extends BaseNodeServiceTest { protected NodeService getNodeService() { - // Force cascading - DbNodeServiceImpl dbNodeServiceImpl = (DbNodeServiceImpl) applicationContext.getBean("dbNodeServiceImpl"); - dbNodeServiceImpl.setCascadeInTransaction(true); - return (NodeService) applicationContext.getBean("NodeService"); } diff --git a/source/java/org/alfresco/repo/node/archive/ArchiveAndRestoreTest.java b/source/java/org/alfresco/repo/node/archive/ArchiveAndRestoreTest.java index 239d902b3c..e08bd53076 100644 --- a/source/java/org/alfresco/repo/node/archive/ArchiveAndRestoreTest.java +++ b/source/java/org/alfresco/repo/node/archive/ArchiveAndRestoreTest.java @@ -29,10 +29,8 @@ import javax.transaction.UserTransaction; import junit.framework.TestCase; import org.alfresco.model.ContentModel; -import 
org.alfresco.repo.domain.hibernate.SessionSizeResourceManager; import org.alfresco.repo.node.StoreArchiveMap; import org.alfresco.repo.node.archive.RestoreNodeReport.RestoreStatus; -import org.alfresco.repo.node.db.DbNodeServiceImpl; import org.alfresco.repo.node.integrity.IntegrityChecker; import org.alfresco.repo.security.authentication.AuthenticationComponent; import org.alfresco.service.ServiceRegistry; @@ -104,10 +102,6 @@ public class ArchiveAndRestoreTest extends TestCase @Override public void setUp() throws Exception { - // Force cascading - DbNodeServiceImpl dbNodeServiceImpl = (DbNodeServiceImpl) ctx.getBean("dbNodeServiceImpl"); - dbNodeServiceImpl.setCascadeInTransaction(true); - ServiceRegistry serviceRegistry = (ServiceRegistry) ctx.getBean("ServiceRegistry"); nodeArchiveService = (NodeArchiveService) ctx.getBean("nodeArchiveService"); nodeService = serviceRegistry.getNodeService(); @@ -471,8 +465,6 @@ public class ArchiveAndRestoreTest extends TestCase */ public void testArchiveVsDeletePerformance() throws Exception { - // Disable the in-transaction flushing - SessionSizeResourceManager.setDisableInTransaction(); // Start by deleting the node structure and then recreating it. // Only measure the delete speed int iterations = 100; diff --git a/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java b/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java index 511586776c..89eba3b924 100644 --- a/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java +++ b/source/java/org/alfresco/repo/node/db/DbNodeServiceImpl.java @@ -33,7 +33,6 @@ import java.util.Stack; import org.alfresco.error.AlfrescoRuntimeException; import org.alfresco.model.ContentModel; -import org.alfresco.repo.domain.Node; import org.alfresco.repo.domain.node.ChildAssocEntity; import org.alfresco.repo.domain.node.NodeDAO; import org.alfresco.repo.domain.node.NodeDAO.ChildAssocRefQueryCallback; @@ -124,14 +123,6 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl this.nodeIndexer = nodeIndexer; } - /** - * @deprecated the functionality did not see wide enough usage to warrant the maintenance - */ - public void setCascadeInTransaction(boolean cascadeInTransaction) - { - logger.warn("NodeService property 'cascadeInTransaction' is no longer available."); - } - /** * Performs a null-safe get of the node * @@ -170,7 +161,7 @@ public class DbNodeServiceImpl extends AbstractNodeServiceImpl } /** - * @see NodeDaoService#getStores() + * {@inheritDoc} */ public List getStores() { diff --git a/source/java/org/alfresco/repo/node/db/NodeDaoService.java b/source/java/org/alfresco/repo/node/db/NodeDaoService.java deleted file mode 100644 index 7f503716cf..0000000000 --- a/source/java/org/alfresco/repo/node/db/NodeDaoService.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (C) 2005-2009 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . 
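The getChildAssocsForCrcFix / updateChildAssocCrc pair added to PatchDAO earlier in this patch is shaped for batched fix-ups: read a page of associations in ID order, recompute the CRCs, write them back, then continue from the last ID seen. A sketch of such a driver loop is shown below; the local DAO interface mirrors the new PatchDAO signatures, while the result-map keys, the crc32() helper and the exact CRC convention are assumptions for illustration and do not reproduce the real FixNameCrcValuesPatch.

import java.nio.charset.Charset;
import java.util.List;
import java.util.Map;
import java.util.zip.CRC32;

public class ChildAssocCrcFixer
{
    /** Local stand-in mirroring the two PatchDAO methods added in this patch. */
    public interface ChildAssocCrcDao
    {
        List<Map<String, Object>> getChildAssocsForCrcFix(Long minAssocId, int maxResults);
        int updateChildAssocCrc(Long assocId, Long childNodeNameCrc, Long qnameCrc);
    }

    private static final int BATCH_SIZE = 1000;

    /** Illustrative CRC convention: CRC-32 over the UTF-8 bytes of the value. */
    private static long crc32(String value)
    {
        CRC32 crc = new CRC32();
        crc.update(value.getBytes(Charset.forName("UTF-8")));
        return crc.getValue();
    }

    /** Walk the child associations in batches and rewrite their CRC columns. */
    public static void fixAll(ChildAssocCrcDao dao)
    {
        Long minAssocId = Long.valueOf(0);
        while (true)
        {
            List<Map<String, Object>> rows = dao.getChildAssocsForCrcFix(minAssocId, BATCH_SIZE);
            if (rows.isEmpty())
            {
                break;
            }
            for (Map<String, Object> row : rows)
            {
                // The key names here are hypothetical; the real SQL map defines them
                Long assocId = (Long) row.get("id");
                String childName = (String) row.get("childNodeName");
                String qname = (String) row.get("qname");
                dao.updateChildAssocCrc(assocId, Long.valueOf(crc32(childName)), Long.valueOf(crc32(qname)));
                minAssocId = Long.valueOf(assocId.longValue() + 1);    // next batch starts after this row
            }
        }
    }
}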
- */ -package org.alfresco.repo.node.db; - -/** - * Service layer accessing persistent node entities directly - * - * @author Derek Hulley - */ -public interface NodeDaoService -{ - /** - * Are there any pending changes which must be synchronized with the store? - * - * @return true => changes are pending - */ - public boolean isDirty(); - - /** - * Flush the data changes to the persistence layer. - */ - public void flush(); -} diff --git a/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java b/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java deleted file mode 100644 index 1165cb873a..0000000000 --- a/source/java/org/alfresco/repo/node/db/hibernate/HibernateNodeDaoServiceImpl.java +++ /dev/null @@ -1,2339 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. - - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - - * You should have received a copy of the GNU General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - - * As a special exception to the terms and conditions of version 2.0 of - * the GPL, you may redistribute this Program in connection with Free/Libre - * and Open Source Software ("FLOSS") applications as described in Alfresco's - * FLOSS exception. 
You should have recieved a copy of the text describing - * the FLOSS exception, and it is also available here: - * http://www.alfresco.com/legal/licensing" - */ -package org.alfresco.repo.node.db.hibernate; - -import java.io.Serializable; -import java.io.UnsupportedEncodingException; -import java.lang.reflect.Array; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Set; -import java.util.SortedMap; -import java.util.TreeMap; -import java.util.zip.CRC32; - -import org.alfresco.error.AlfrescoRuntimeException; -import org.alfresco.error.ExceptionStackUtil; -import org.alfresco.model.ContentModel; -import org.alfresco.repo.cache.SimpleCache; -import org.alfresco.repo.domain.AuditableProperties; -import org.alfresco.repo.domain.ChildAssoc; -import org.alfresco.repo.domain.Node; -import org.alfresco.repo.domain.Server; -import org.alfresco.repo.domain.Store; -import org.alfresco.repo.domain.contentdata.ContentDataDAO; -import org.alfresco.repo.domain.hibernate.ChildAssocImpl; -import org.alfresco.repo.domain.hibernate.DirtySessionMethodInterceptor; -import org.alfresco.repo.domain.hibernate.NodeImpl; -import org.alfresco.repo.domain.hibernate.SessionSizeResourceManager; -import org.alfresco.repo.domain.hibernate.StoreImpl; -import org.alfresco.repo.domain.hibernate.TransactionImpl; -import org.alfresco.repo.domain.locale.LocaleDAO; -import org.alfresco.repo.domain.node.NodeDAO; -import org.alfresco.repo.domain.node.NodePropertyKey; -import org.alfresco.repo.domain.node.NodePropertyValue; -import org.alfresco.repo.domain.node.Transaction; -import org.alfresco.repo.domain.node.NodeDAO.NodeRefQueryCallback; -import org.alfresco.repo.domain.permissions.AclDAO; -import org.alfresco.repo.domain.qname.QNameDAO; -import org.alfresco.repo.node.NodeBulkLoader; -import org.alfresco.repo.node.db.NodeDaoService; -import org.alfresco.repo.policy.BehaviourFilter; -import org.alfresco.repo.security.authentication.AuthenticationUtil; -import org.alfresco.repo.security.permissions.ACLType; -import org.alfresco.repo.security.permissions.AccessControlListProperties; -import org.alfresco.repo.security.permissions.impl.AclChange; -import org.alfresco.repo.transaction.AlfrescoTransactionSupport; -import org.alfresco.repo.transaction.RetryingTransactionHelper; -import org.alfresco.repo.transaction.TransactionAwareSingleton; -import org.alfresco.repo.transaction.TransactionListenerAdapter; -import org.alfresco.repo.transaction.TransactionalDao; -import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; -import org.alfresco.service.cmr.dictionary.AssociationDefinition; -import org.alfresco.service.cmr.dictionary.ChildAssociationDefinition; -import org.alfresco.service.cmr.dictionary.DataTypeDefinition; -import org.alfresco.service.cmr.dictionary.DictionaryException; -import org.alfresco.service.cmr.dictionary.DictionaryService; -import org.alfresco.service.cmr.dictionary.PropertyDefinition; -import org.alfresco.service.cmr.repository.ChildAssociationRef; -import org.alfresco.service.cmr.repository.ContentData; -import org.alfresco.service.cmr.repository.CyclicChildRelationshipException; -import 
org.alfresco.service.cmr.repository.DuplicateChildNodeNameException; -import org.alfresco.service.cmr.repository.EntityRef; -import org.alfresco.service.cmr.repository.MLText; -import org.alfresco.service.cmr.repository.NodeRef; -import org.alfresco.service.cmr.repository.StoreRef; -import org.alfresco.service.cmr.repository.datatype.TypeConversionException; -import org.alfresco.service.namespace.QName; -import org.alfresco.util.GUID; -import org.alfresco.util.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.hibernate.CacheMode; -import org.hibernate.Criteria; -import org.hibernate.FlushMode; -import org.hibernate.HibernateException; -import org.hibernate.LockMode; -import org.hibernate.ObjectNotFoundException; -import org.hibernate.Query; -import org.hibernate.ScrollMode; -import org.hibernate.ScrollableResults; -import org.hibernate.Session; -import org.hibernate.criterion.Restrictions; -import org.hibernate.exception.ConstraintViolationException; -import org.hibernate.exception.SQLGrammarException; -import org.springframework.dao.DataIntegrityViolationException; -import org.springframework.orm.hibernate3.HibernateCallback; -import org.springframework.orm.hibernate3.support.HibernateDaoSupport; - -/** - * Hibernate-specific implementation of the persistence-independent node DAO interface - * - * @author Derek Hulley - */ -public class HibernateNodeDaoServiceImpl extends HibernateDaoSupport implements NodeDaoService, TransactionalDao, - NodeBulkLoader -{ - private static final String QUERY_GET_STORE_BY_ALL = "store.GetStoreByAll"; - private static final String QUERY_GET_PARENT_ASSOCS = "node.GetParentAssocs"; - private static final String QUERY_GET_DELETED_NODES_BY_MAX_TXNID = "node.GetDeletedNodesByMaxTxnId"; - private static final String QUERY_GET_SERVER_BY_IPADDRESS = "server.getServerByIpAddress"; - - private static final Long NULL_CACHE_VALUE = new Long(-1); - - private static Log logger = LogFactory.getLog(HibernateNodeDaoServiceImpl.class); - /** Log to trace parent association caching: classname + .ParentAssocsCache */ - private static Log loggerParentAssocsCache = LogFactory.getLog(HibernateNodeDaoServiceImpl.class.getName() - + ".ParentAssocsCache"); - - /** - * Exceptions that indicate duplicate child names violations. 
- */ - @SuppressWarnings("unchecked") - public static final Class[] DUPLICATE_CHILD_NAME_EXCEPTIONS; - static - { - DUPLICATE_CHILD_NAME_EXCEPTIONS = new Class[] - { ConstraintViolationException.class, DataIntegrityViolationException.class, SQLGrammarException.class // Hibernate - // misinterprets - // a MS - // SQL - // Server - // exception - }; - } - - /** Used for refactoring of DAO */ - private QNameDAO qnameDAO; - private ContentDataDAO contentDataDAO; - private AclDAO aclDaoComponent; - private LocaleDAO localeDAO; - private DictionaryService dictionaryService; - private boolean enableTimestampPropagation; - private RetryingTransactionHelper auditableTransactionHelper; - private BehaviourFilter behaviourFilter; - /** A cache mapping StoreRef and NodeRef instances to the entity IDs (primary key) */ - private SimpleCache storeAndNodeIdCache; - /** A cache for more performant lookups of the parent associations */ - private SimpleCache parentAssocsCache; - private boolean isDebugEnabled = logger.isDebugEnabled(); - private boolean isDebugParentAssocCacheEnabled = loggerParentAssocsCache.isDebugEnabled(); - - /** a uuid identifying this unique instance */ - private final String uuid; - - private static TransactionAwareSingleton serverIdSingleton = new TransactionAwareSingleton(); - private final String ipAddress; - - /** used for debugging */ - private Set changeTxnIdSet; - - /** - * - */ - public HibernateNodeDaoServiceImpl() - { - this.uuid = GUID.generate(); - try - { - ipAddress = InetAddress.getLocalHost().getHostAddress(); - } - catch (UnknownHostException e) - { - throw new AlfrescoRuntimeException("Failed to get server IP address", e); - } - - changeTxnIdSet = new HashSet(0); - enableTimestampPropagation = true; - } - - /** - * Checks equality by type and uuid - */ - public boolean equals(Object obj) - { - if (obj == null) - { - return false; - } - else if (!(obj instanceof HibernateNodeDaoServiceImpl)) - { - return false; - } - HibernateNodeDaoServiceImpl that = (HibernateNodeDaoServiceImpl) obj; - return this.uuid.equals(that.uuid); - } - - /** - * @see #uuid - */ - public int hashCode() - { - return uuid.hashCode(); - } - - /** - * Set the component for creating QName entities. - */ - public void setQnameDAO(QNameDAO qnameDAO) - { - this.qnameDAO = qnameDAO; - } - - /** - * Set the component for storing and retrieving ContentData - */ - public void setContentDataDAO(ContentDataDAO contentDataDAO) - { - this.contentDataDAO = contentDataDAO; - } - - public void setAclDAO(AclDAO aclDaoComponent) - { - this.aclDaoComponent = aclDaoComponent; - } - - /** - * Set the component for creating Locale entities - */ - public void setLocaleDAO(LocaleDAO localeDAO) - { - this.localeDAO = localeDAO; - } - - /** - * Set the component for querying the dictionary model - */ - public void setDictionaryService(DictionaryService dictionaryService) - { - this.dictionaryService = dictionaryService; - } - - /** - * Enable/disable propagation of timestamps from child to parent nodes.
- * Note: This only has an effect on child associations that use the propagateTimestamps element. - */ - public void setEnableTimestampPropagation(boolean enableTimestampPropagation) - { - this.enableTimestampPropagation = enableTimestampPropagation; - } - - /** - * Set the component to start new transactions when setting auditable properties (timestamps) in the - * post-transaction phase. - */ - public void setAuditableTransactionHelper(RetryingTransactionHelper auditableTransactionHelper) - { - this.auditableTransactionHelper = auditableTransactionHelper; - } - - /** - * Set the component to determine the correct aspect behaviours. This applies particularly to the - * cm:auditable case, where the setting of values is done automatically except when the behaviour is - * disabled. - */ - public void setBehaviourFilter(BehaviourFilter behaviourFilter) - { - this.behaviourFilter = behaviourFilter; - } - - /** - * Ste the transaction-aware cache to store Store and Root Node IDs by Store Reference - * - * @param storeAndNodeIdCache the cache - */ - public void setStoreAndNodeIdCache(SimpleCache storeAndNodeIdCache) - { - this.storeAndNodeIdCache = storeAndNodeIdCache; - } - - /** - * Set the transaction-aware cache to store parent associations by child node id - * - * @param parentAssocsCache the cache - */ - public void setParentAssocsCache(SimpleCache parentAssocsCache) - { - this.parentAssocsCache = parentAssocsCache; - } - - /** - * @return Returns the ID of this instance's server instance or null - */ - private Long getServerIdOrNull() - { - Long serverId = serverIdSingleton.get(); - if (serverId != null) - { - return serverId; - } - // Query for it - // The server already exists, so get it - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_SERVER_BY_IPADDRESS) - .setString("ipAddress", ipAddress); - return query.uniqueResult(); - } - }; - Server server = (Server) getHibernateTemplate().execute(callback); - if (server != null) - { - // It exists, so just return the ID - return server.getId(); - } - else - { - return null; - } - } - - public void beforeCommit() - { - throw new UnsupportedOperationException(); - } - - public Long getCurrentTransactionId() - { - throw new UnsupportedOperationException(); - } - - /** - * Does this Session contain any changes which must be synchronized with the store? - * - * @return true => changes are pending - */ - public boolean isDirty() - { - // create a callback for the task - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - return session.isDirty(); - } - }; - // execute the callback - return ((Boolean) getHibernateTemplate().execute(callback)).booleanValue(); - } - - /** - * Just flushes the session - */ - public void flush() - { - getSession().flush(); - } - - /** - * @return Returns the Store entity or null - */ - private Store getStore(final StoreRef storeRef) - { - // Look it up in the cache - Long storeId = storeAndNodeIdCache.get(storeRef); - // Load it - if (storeId != null) - { - // Check for null persistence (previously missed value) - if (storeId.equals(NULL_CACHE_VALUE)) - { - // There is no such value matching - return null; - } - // Don't use the method that throws an exception as the cache might be invalid. 
- Store store = (Store) getSession().get(StoreImpl.class, storeId); - if (store == null) - { - // It is not available, so we need to go the query route. - // But first remove the cache entry - storeAndNodeIdCache.remove(storeRef); - // Recurse, but this time there is no cache entry - return getStore(storeRef); - } - else - { - return store; - } - } - // Query for it - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_STORE_BY_ALL).setString( - "protocol", storeRef.getProtocol()).setString("identifier", storeRef.getIdentifier()); - return query.uniqueResult(); - } - }; - Store store = (Store) getHibernateTemplate().execute(callback); - if (store == null) - { - // Persist the null entry - storeAndNodeIdCache.put(storeRef, NULL_CACHE_VALUE); - } - else - { - storeAndNodeIdCache.put(storeRef, store.getId()); - } - // done - return store; - } - - /** - * Fetch the node. If the ID is invalid, we assume that the state of the current session is invalid i.e. the data is - * stale - * - * @param nodeId the node's ID - * @return the node - * @throws ObjectNotFoundException if the ID doesn't refer to a node. - */ - private Node getNodeNotNull(Long nodeId) - { - Node node = (Node) getHibernateTemplate().get(NodeImpl.class, nodeId); - if (node == null) - { - throw new ObjectNotFoundException(nodeId, NodeImpl.class.getName()); - } - return node; - } - - /** - * Fetch the node. If the ID is invalid, null is returned. - * - * @param nodeId the node's ID - * @return the node - * @throws ObjectNotFoundException if the ID doesn't refer to a node. - */ - private Node getNodeOrNull(Long nodeId) - { - Node node = (Node) getHibernateTemplate().get(NodeImpl.class, nodeId); - return node; - } - - /** - * Fetch the child assoc. If the ID is invalid, we assume that the state of the current session is invalid i.e. the - * data is stale - * - * @param childAssocId the assoc's ID - * @return the assoc - * @throws AlfrescoRuntimeException if the ID doesn't refer to an assoc. - */ - private ChildAssoc getChildAssocNotNull(Long childAssocId) - { - ChildAssoc assoc = (ChildAssoc) getHibernateTemplate().get(ChildAssocImpl.class, childAssocId); - if (assoc == null) - { - throw new ObjectNotFoundException(childAssocId, ChildAssocImpl.class.getName()); - } - return assoc; - } - - private static final String UNKNOWN_USER = "unknown"; - - private String getCurrentUser() - { - String user = AuthenticationUtil.getFullyAuthenticatedUser(); - return (user == null) ? UNKNOWN_USER : user; - } - - /** - * Sets the timestamps for nodes set during the transaction. - *

- * The implementation attempts to propagate the timestamps in the same transaction, but during periods of high - * concurrent modification to children of a particular parent node, the contention-resolution at the database can - * lead to delays in the processes. When this occurs, the process is pushed to after the transaction for an - * arbitrary period of time, after which the server will again attempt to do the work in the transaction. - * - * @author Derek Hulley - */ - private class TimestampPropagator extends TransactionListenerAdapter implements - RetryingTransactionCallback - { - private final Set nodeIds; - - private TimestampPropagator() - { - this.nodeIds = new HashSet(23); - } - - public void addNode(Long nodeId) - { - nodeIds.add(nodeId); - } - - @Override - public void afterCommit() - { - if (nodeIds.size() == 0) - { - return; - } - // Execute using the explicit transaction attributes - try - { - auditableTransactionHelper.doInTransaction(this, false, true); - } - catch (Throwable e) - { - logger.info("Failed to update auditable properties for nodes: " + nodeIds); - } - } - - public Integer execute() throws Throwable - { - long now = System.currentTimeMillis(); - return executeImpl(now, true); - } - - private Integer executeImpl(long now, boolean isPostTransaction) throws Throwable - { - if (logger.isDebugEnabled()) - { - logger.debug("Updating timestamps for nodes: " + nodeIds); - } - Session session = getSession(); - final Date modifiedDate = new Date(now); - final String modifier = getCurrentUser(); - int count = 0; - for (final Long nodeId : nodeIds) - { - Node node = getNodeOrNull(nodeId); - if (node == null) - { - continue; - } - AuditableProperties auditableProperties = node.getAuditableProperties(); - if (auditableProperties == null) - { - // Don't bother setting anything if there are no values - continue; - } - // Only set the value if our modified date is later - Date currentModifiedDate = (Date) auditableProperties.getAuditableProperty(ContentModel.PROP_MODIFIED); - if (currentModifiedDate != null && currentModifiedDate.compareTo(modifiedDate) >= 0) - { - // The value on the node is greater - continue; - } - // Lock it - session.lock(node, LockMode.UPGRADE_NOWAIT); // Might fail immediately, but that is better than waiting - auditableProperties.setAuditValues(modifier, modifiedDate, false); - count++; - if (count % 1000 == 0) - { - DirtySessionMethodInterceptor.flushSession(session); - SessionSizeResourceManager.clear(session); - } - } - return new Integer(count); - } - } - - private static final String RESOURCE_KEY_TIMESTAMP_PROPAGATOR = "hibernate.timestamp.propagator"; - - /** - * Ensures that the timestamps are propogated to the parent node of the association, but only if the association - * requires it. - */ - private void propagateTimestamps(ParentAssocInfo parentAssocPair) - { - // Shortcut - if (!enableTimestampPropagation) - { - return; - } - QName assocTypeQName = parentAssocPair.getChildAssociationRef().getTypeQName(); - AssociationDefinition assocDef = dictionaryService.getAssociation(assocTypeQName); - if (assocDef == null) - { - // Not found, so just ignore - return; - } - else if (!assocDef.isChild()) - { - // Unexpected, but not our immediate concern - return; - } - ChildAssociationDefinition childAssocDef = (ChildAssociationDefinition) assocDef; - // Do we send timestamps up? 
- if (!childAssocDef.getPropagateTimestamps()) - { - return; - } - // We have to update the parent - TimestampPropagator propagator = (TimestampPropagator) AlfrescoTransactionSupport - .getResource(RESOURCE_KEY_TIMESTAMP_PROPAGATOR); - if (propagator == null) - { - propagator = new TimestampPropagator(); - AlfrescoTransactionSupport.bindListener(propagator); - } - propagator.addNode(parentAssocPair.getParentNodeId()); - } - - private long getCrc(String str) - { - CRC32 crc = new CRC32(); - try - { - crc.update(str.getBytes("UTF-8")); // https://issues.alfresco.com/jira/browse/ALFCOM-1335 - } - catch (UnsupportedEncodingException e) - { - throw new RuntimeException("UTF-8 encoding is not supported"); - } - return crc.getValue(); - } - - private static final String TRUNCATED_NAME_INDICATOR = "~~~"; - - private String getShortName(String str) - { - int length = str.length(); - if (length <= 50) - { - return str; - } - else - { - StringBuilder ret = new StringBuilder(50); - ret.append(str.substring(0, 47)).append(TRUNCATED_NAME_INDICATOR); - return ret.toString(); - } - } - - /** - * Explicitly flushes the session looking out for {@link #DUPLICATE_CHILD_NAME_EXCEPTIONS exceptions} indicating - * that the child association name constraint has been violated. - *

- * NOTE: The Hibernate session will be flushed prior to calling the callback. This is necessary to prevent - * legitimate other contstraint violations from being dressed up as {@link DuplicateChildNodeNameException}. - * - * @param childAssocChangingCallback the callback in which the child assoc is modified - * @return Returns the callback's result - */ - private Object writeChildAssocChanges( - HibernateCallback childAssocChangingCallback, - NodeRef parentNodeRef, - QName assocTypeQName, - String childName) - { - // Make sure there are no outstanding changes to flush - DirtySessionMethodInterceptor.flushSession(getSession(false)); - // Call the callback and dig into any exception - try - { - Object ret = getHibernateTemplate().execute(childAssocChangingCallback); - // Now flush. Note that we *force* it to flush as the dirty flag will not have been set. - DirtySessionMethodInterceptor.flushSession(getSession(false), true); - // No clashes - return ret; - } - catch (Throwable e) - { - Throwable constraintViolation = (Throwable) ExceptionStackUtil.getCause(e, DUPLICATE_CHILD_NAME_EXCEPTIONS); - if (constraintViolation == null) - { - // It was something else - RuntimeException ee = AlfrescoRuntimeException.makeRuntimeException(e, - "Exception while flushing child assoc to database"); - throw ee; - } - else if (constraintViolation instanceof SQLGrammarException) - { - SQLGrammarException sqlge = (SQLGrammarException) constraintViolation; - if (sqlge.getMessage().contains("isolation") || sqlge.getCause().getMessage().contains("isolation")) - { - // This will do to cover ETHREEOH-3170 - } - else - { - // It was something else - RuntimeException ee = AlfrescoRuntimeException.makeRuntimeException(e, - "Exception while flushing child assoc to database"); - throw ee; - } - } - // We caught an exception that indicates a duplicate child - if (isDebugEnabled) - { - logger.debug("Duplicate child association detected: \n" + " Parent node: " + parentNodeRef + "\n" - + " Child node name: " + childName, e); - } - throw new DuplicateChildNodeNameException(parentNodeRef, assocTypeQName, childName); - } - } - - /** - * Apply the cm:name to the child association. If the child name is null then a GUID is generated as - * a substitute. - * - * @param childName the cm:name applying to the association. 
- */ - private Pair getChildNameUnique(QName assocTypeQName, String childName) - { - String childNameNewShort; // - long childNameNewCrc = -1L; // By default, they don't compete - - if (childName == null) - { - childNameNewShort = GUID.generate(); - childNameNewCrc = -1L * getCrc(childNameNewShort); - } - else - { - AssociationDefinition assocDef = dictionaryService.getAssociation(assocTypeQName); - if (!assocDef.isChild()) - { - childNameNewShort = GUID.generate(); - childNameNewCrc = -1L * getCrc(childNameNewShort); - } - else - { - ChildAssociationDefinition childAssocDef = (ChildAssociationDefinition) assocDef; - if (childAssocDef.getDuplicateChildNamesAllowed()) - { - childNameNewShort = GUID.generate(); - childNameNewCrc = -1L * getCrc(childNameNewShort); - } - else - { - String childNameNewLower = childName.toLowerCase(); - childNameNewShort = getShortName(childNameNewLower); - childNameNewCrc = getCrc(childNameNewLower); - } - } - } - return new Pair(childNameNewShort, childNameNewCrc); - } - - private Pair updateChildAssoc( - Long childAssocId, - Long parentNodeId, - Long childNodeId, - final QName assocTypeQName, - final QName assocQName, - final int index, - String childName) - { - final ChildAssoc childAssoc = getChildAssocNotNull(childAssocId); - final boolean isPrimary = childAssoc.getIsPrimary(); - final Node oldParentNode = childAssoc.getParent(); - final Node oldChildNode = childAssoc.getChild(); - final NodeRef oldChildNodeRef = childAssoc.getChild().getNodeRef(); - final Node newParentNode = getNodeNotNull(parentNodeId); - final Node newChildNode = getNodeNotNull(childNodeId); - final NodeRef newChildNodeRef = newChildNode.getNodeRef(); - final Pair childNameUnique = getChildNameUnique(assocTypeQName, childName); - - // Reset the cm:name duplicate handling. This has to be redone, if required. 
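For reference, the (short name, CRC) reduction performed by getShortName, getCrc and getChildNameUnique above can be reproduced with plain JDK classes. The class and method names below are illustrative only, and the GUID substitution used for null names and duplicate-allowed associations is omitted; the truncation and CRC rules mirror the deleted code:

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

/** Illustrative sketch: reduce a cm:name to the (shortName, crc) pair used for uniqueness checks. */
public class ChildNameKeySketch
{
    private static final String TRUNCATED_NAME_INDICATOR = "~~~";

    static long crc(String str)
    {
        // CRC32 of the full (lowercased) name in UTF-8, as in getCrc
        CRC32 crc = new CRC32();
        crc.update(str.getBytes(StandardCharsets.UTF_8));
        return crc.getValue();
    }

    static String shortName(String str)
    {
        // Names longer than 50 chars keep the first 47 chars plus a truncation marker, as in getShortName
        return str.length() <= 50 ? str : str.substring(0, 47) + TRUNCATED_NAME_INDICATOR;
    }

    public static void main(String[] args)
    {
        String name = "A Very Long Folder Name That Certainly Exceeds Fifty Characters.txt".toLowerCase();
        // Prints the truncated key alongside the CRC of the full lowercased name
        System.out.println(shortName(name) + " / " + crc(name));
    }
}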
- HibernateCallback updateChildAssocCallback = new HibernateCallback() - { - public Object doInHibernate(Session session) throws HibernateException, SQLException - { - childAssoc.setChildNodeName(childNameUnique.getFirst()); - childAssoc.setChildNodeNameCrc(childNameUnique.getSecond()); - - childAssoc.buildAssociation(newParentNode, newChildNode); - childAssoc.setTypeQName(qnameDAO, assocTypeQName); - childAssoc.setQName(qnameDAO, assocQName); - if (index >= 0) - { - childAssoc.setIndex(index); - } - return null; - } - }; - writeChildAssocChanges(updateChildAssocCallback, newParentNode.getNodeRef(), assocTypeQName, childNameUnique - .getFirst()); - - // Record change ID - if (oldChildNodeRef.equals(newChildNodeRef)) - { -// recordNodeUpdate(newChildNode); - } - else - { -// recordNodeUpdate(newChildNode); - } - - // Update the inherited associations if either the parent or child nodes have changed and - // the association is primary - if (isPrimary && (!oldParentNode.getId().equals(parentNodeId) || !oldChildNode.getId().equals(childNodeId))) - { - Long newChildNodeAclId = newChildNode.getAclId(); - if (newChildNodeAclId != null) - { - Long targetAclId = newChildNodeAclId; - AccessControlListProperties aclProperties = aclDaoComponent.getAccessControlListProperties(targetAclId); - Boolean targetAclInherits = aclProperties.getInherits(); - if ((targetAclInherits != null) && (targetAclInherits.booleanValue())) - { - Long newParentNodeAclId = newParentNode.getAclId(); - if (newParentNodeAclId != null) - { - Long parentAclId = newParentNodeAclId; - Long inheritedAclId = aclDaoComponent.getInheritedAccessControlList(parentAclId); - if (aclProperties.getAclType() == ACLType.DEFINING) - { - aclDaoComponent.enableInheritance(targetAclId, parentAclId); - } - else if (aclProperties.getAclType() == ACLType.SHARED) - { - setFixedAcls(childNodeId, inheritedAclId, true, null); - } - } - else - { - if (aclProperties.getAclType() == ACLType.DEFINING) - { - // there is nothing to inherit from so clear out any inherited aces - aclDaoComponent.deleteInheritedAccessControlEntries(targetAclId); - } - else if (aclProperties.getAclType() == ACLType.SHARED) - { - // there is nothing to inherit - newChildNode.setAclId(null); - - // TODO - will be refactored out (ensure node.aclId change is flushed) - flush(); - } - - // throw new IllegalStateException("Share bug"); - } - } - } - else - { - // FIXME: dead code ? - if (newChildNodeAclId != null) - { - Long parentAcl = newChildNodeAclId; - Long inheritedAcl = aclDaoComponent.getInheritedAccessControlList(parentAcl); - setFixedAcls(childNodeId, inheritedAcl, true, null); - } - } - } - - // Done - parentAssocsCache.remove(oldChildNode.getId()); - parentAssocsCache.remove(childNodeId); - ParentAssocInfo parentAssocInfo = new ParentAssocInfo(childAssoc, qnameDAO); - return new Pair(childAssocId, parentAssocInfo.getChildAssociationRef()); - } - - /** - * This code is here, and not in another DAO, in order to avoid unnecessary circular callbacks and cyclical - * dependencies. It would be nice if the ACL code could be separated (or combined) but the node tree walking code is - * best done right here. 
- */ - private void setFixedAcls(final Long nodeId, final Long mergeFromAclId, final boolean set, Set processedNodes) - { - // ETHREEOH-3088: Cut/Paste into same hierarchy - if (processedNodes == null) - { - processedNodes = new HashSet(3); - } - if (!processedNodes.add(nodeId)) - { - logger.error("Cyclic parent-child relationship detected: \n" + " current node: " + nodeId); - throw new CyclicChildRelationshipException("Node has been pasted into its own tree.", null); - } - - Node mergeFromNode = getNodeNotNull(nodeId); - - if (set) - { - AccessControlListProperties mergeFromAcl = aclDaoComponent.getAccessControlListProperties(mergeFromAclId); - mergeFromNode.setAclId(mergeFromAcl.getId()); - - // TODO - will be refactored out (ensure node.aclId change is flushed) - flush(); - } - - final List childNodeIds = new ArrayList(100); - NodeDAO.ChildAssocRefQueryCallback callback = new NodeDAO.ChildAssocRefQueryCallback() - { - public boolean handle( - Pair childAssocPair, - Pair parentNodePair, - Pair childNodePair) - { - // Ignore non-primary nodes - if (!childAssocPair.getSecond().isPrimary()) - { - return false; - } - childNodeIds.add(childNodePair.getFirst()); - return false; - } - - public boolean preLoadNodes() - { - return true; - } - }; - // Get all child associations with the specific qualified name -// nodeDAO.getChildAssocs(nodeId, null, (QName) null, (QName) null, Boolean.TRUE, null, callback); - for (Long childNodeId : childNodeIds) - { - Node childNode = getNodeNotNull(childNodeId); - AccessControlListProperties acl = aclDaoComponent.getAccessControlListProperties(childNode.getAclId()); - - if (acl == null) - { - setFixedAcls(childNodeId, mergeFromAclId, true, processedNodes); - } - else if (acl.getAclType() == ACLType.LAYERED) - { - logger.error("LAYERED ACL present on ADM node: " + childNode); - continue; - } - else if (acl.getAclType() == ACLType.DEFINING) - { - @SuppressWarnings("unused") - List newChanges = aclDaoComponent.mergeInheritedAccessControlList(mergeFromAclId, acl - .getId()); - } - else - { - setFixedAcls(childNodeId, mergeFromAclId, true, processedNodes); - } - } - } - - /** - * {@inheritDoc} - *

- * Clears the L1 cache, the parentAssocsCache and storeAndNodeIdCache - */ - public void clear() - { - Session session = getSession(); - DirtySessionMethodInterceptor.flushSession(session, true); - session.clear(); - parentAssocsCache.clear(); - storeAndNodeIdCache.clear(); - } - - /** - * {@inheritDoc} - *

- * Loads properties, aspects, parent associations and the ID-noderef cache - */ - public void cacheNodes(List nodeRefs) - { - if (nodeRefs.size() == 0) - { - // Nothing to cache - return; - } - // Group the nodes by store so that we don't *have* to eagerly join to store to get query performance - Map> uuidsByStore = new HashMap>(3); - for (NodeRef nodeRef : nodeRefs) - { - StoreRef storeRef = nodeRef.getStoreRef(); - List uuids = (List) uuidsByStore.get(storeRef); - if (uuids == null) - { - uuids = new ArrayList(nodeRefs.size()); - uuidsByStore.put(storeRef, uuids); - } - uuids.add(nodeRef.getId()); - } - int size = nodeRefs.size(); - nodeRefs = null; - // Now load all the nodes - for (Map.Entry> entry : uuidsByStore.entrySet()) - { - StoreRef storeRef = entry.getKey(); - List uuids = entry.getValue(); - cacheNodes(storeRef, uuids); - } - if (logger.isDebugEnabled()) - { - logger.debug("Pre-loaded " + size + " nodes."); - } - } - - /** - * Loads the nodes into cache using batching. - */ - private void cacheNodes(StoreRef storeRef, List uuids) - { - Store store = getStore(storeRef); // Be fetched from local caches - - int batchSize = 256; - List batch = new ArrayList(128); - for (String uuid : uuids) - { - batch.add(uuid); - if (batch.size() >= batchSize) - { - // Preload - cacheNodesNoBatch(store, batch); - batch.clear(); - } - } - // Load any remaining nodes - if (batch.size() > 0) - { - cacheNodesNoBatch(store, batch); - } - } - - /** - * Uses a Critera to preload the nodes without batching - */ - @SuppressWarnings("unchecked") - private void cacheNodesNoBatch(Store store, List uuids) - { - Criteria criteria = getSession().createCriteria(NodeImpl.class, "node"); - criteria.setResultTransformer(Criteria.ROOT_ENTITY); - criteria.add(Restrictions.eq("store.id", store.getId())); - criteria.add(Restrictions.in("uuid", uuids)); - criteria.setCacheMode(CacheMode.PUT); - criteria.setFlushMode(FlushMode.MANUAL); - - List nodeList = criteria.list(); - Set nodeIds = new HashSet(nodeList.size() * 2); - for (Node node : nodeList) - { - // We have duplicate nodes, so make sure we only process each node once - Long nodeId = node.getId(); - if (!nodeIds.add(nodeId)) - { - // Already processed - continue; - } - storeAndNodeIdCache.put(node.getNodeRef(), nodeId); - } - - if (nodeIds.size() == 0) - { - // Can't query - return; - } - - criteria = getSession().createCriteria(ChildAssocImpl.class, "parentAssoc"); - criteria.setResultTransformer(Criteria.ROOT_ENTITY); - criteria.add(Restrictions.in("child.id", nodeIds)); - criteria.setCacheMode(CacheMode.PUT); - criteria.setFlushMode(FlushMode.MANUAL); - List parentAssocs = criteria.list(); - Map> parentAssocMap = new HashMap>(nodeIds.size() * 2); - for (ChildAssoc parentAssoc : parentAssocs) - { - Long nodeId = parentAssoc.getChild().getId(); - List parentAssocsOfNode = parentAssocMap.get(nodeId); - if (parentAssocsOfNode == null) - { - parentAssocsOfNode = new ArrayList(3); - parentAssocMap.put(nodeId, parentAssocsOfNode); - } - parentAssocsOfNode.add(parentAssoc); - if (isDebugParentAssocCacheEnabled) - { - loggerParentAssocsCache.debug("\n" + "Parent associations cache - Adding entry: \n" + " Node: " - + nodeId + "\n" + " Assocs: " + parentAssocsOfNode); - } - } - // Cache NodeInfo for each node - for (Node node : nodeList) - { - Long nodeId = node.getId(); - List parentAsssocsOfNode = parentAssocMap.get(nodeId); - if (parentAsssocsOfNode == null) - { - parentAsssocsOfNode = Collections.emptyList(); - } - parentAssocsCache.put(nodeId, new NodeInfo(node, 
null, qnameDAO, parentAsssocsOfNode)); - } - } - /** - * <pre>
-     * Node ID = (Long) row[0];
-     * Node Protocol = (String) row[1];
-     * Node Identifier = (String) row[2];
-     * Node Uuid = (String) row[3];
-     * </pre>
- */ - private void processNodeResults(ScrollableResults queryResults, NodeRefQueryCallback resultsCallback) - { - while (queryResults.next()) - { - Object[] row = queryResults.get(); - Long parentId = (Long) row[0]; - String parentProtocol = (String) row[1]; - String parentIdentifier = (String) row[2]; - String parentUuid = (String) row[3]; - NodeRef parentNodeRef = new NodeRef(parentProtocol, parentIdentifier, parentUuid); - Pair parentNodePair = new Pair(parentId, parentNodeRef); - // Call back - boolean moreRequired = resultsCallback.handle(parentNodePair); - if (!moreRequired) - { - break; - } - } - } - - /** - * @param childNode the child node - * @return Returns the parent associations without any interpretation - */ - @SuppressWarnings("unchecked") - private NodeInfo getParentAssocsInternal(final Long childNodeId) - { - // First check the cache - NodeInfo nodeInfo = parentAssocsCache.get(childNodeId); - if (nodeInfo != null) - { - // Let's ensure this ref hasn't become stale due to a concurrent cascade delete - try - { - for (Long assocId : nodeInfo.getParentAssocs().keySet()) - { - getChildAssocNotNull(assocId); - } - if (isDebugParentAssocCacheEnabled) - { - loggerParentAssocsCache.debug("\n" + "Parent associations cache - Hit: \n" + " Node: " - + childNodeId + "\n" + " Assocs: " + nodeInfo.getParentAssocs().keySet()); - } - } - catch (ObjectNotFoundException e) - { - parentAssocsCache.remove(childNodeId); - nodeInfo = null; - } - } - // Did we manage to get the parent assocs - if (nodeInfo == null) - { - // Assume stale data if the node has been deleted - Node node = getNodeNotNull(childNodeId); - if (node.getDeleted()) - { - throw new ObjectNotFoundException(childNodeId, NodeImpl.class.getName()); - } - - if (isDebugParentAssocCacheEnabled) - { - loggerParentAssocsCache.debug("\n" + "Parent associations cache - Miss: \n" + " Node: " - + childNodeId + "\n" + " Assocs: null"); - } - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(HibernateNodeDaoServiceImpl.QUERY_GET_PARENT_ASSOCS).setLong( - "childId", childNodeId); - DirtySessionMethodInterceptor.setQueryFlushMode(session, query); - return query.list(); - } - }; - List rows = (List) getHibernateTemplate().execute(callback); - - nodeInfo = new NodeInfo(node, null, qnameDAO, rows); - // Populate the cache - parentAssocsCache.put(childNodeId, nodeInfo); - if (isDebugParentAssocCacheEnabled) - { - loggerParentAssocsCache.debug("\n" + "Parent associations cache - Adding entry: \n" + " Node: " - + childNodeId + "\n" + " Assocs: " + nodeInfo.getParentAssocs().keySet()); - } - } - - // Done - return nodeInfo; - } - public void getNodesDeletedInOldTxns( - final Long minNodeId, - long maxCommitTime, - final int count, - NodeRefQueryCallback resultsCallback) - { - // Get the max transaction ID - final Long maxTxnId = getMaxTxnIdByCommitTime(maxCommitTime); - - // Shortcut - if (maxTxnId == null) - { - return; - } - - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_GET_DELETED_NODES_BY_MAX_TXNID); - query.setLong("minNodeId", minNodeId); - query.setLong("maxTxnId", maxTxnId); - query.setMaxResults(count); - query.setReadOnly(true); - return query.scroll(ScrollMode.FORWARD_ONLY); - } - }; - ScrollableResults queryResults = null; - try - { - queryResults = (ScrollableResults) getHibernateTemplate().execute(callback); - 
processNodeResults(queryResults, resultsCallback); - } - finally - { - if (queryResults != null) - { - queryResults.close(); - } - } - // Done - } - - /* - * Queries for transactions - */ - private static final String QUERY_GET_TXN_BY_ID = "txn.GetTxnById"; - private static final String QUERY_GET_MIN_COMMIT_TIME = "txn.GetMinCommitTime"; - private static final String QUERY_GET_MAX_COMMIT_TIME = "txn.GetMaxCommitTime"; - private static final String QUERY_GET_MAX_ID_BY_COMMIT_TIME = "txn.GetMaxIdByCommitTime"; - private static final String QUERY_GET_TXNS_BY_COMMIT_TIME_ASC = "txn.GetTxnsByCommitTimeAsc"; - private static final String QUERY_GET_TXNS_BY_COMMIT_TIME_DESC = "txn.GetTxnsByCommitTimeDesc"; - private static final String QUERY_GET_SELECTED_TXNS_BY_COMMIT_TIME_ASC = "txn.GetSelectedTxnsByCommitAsc"; - private static final String QUERY_GET_TXN_UPDATE_COUNT_FOR_STORE = "txn.GetTxnUpdateCountForStore"; - private static final String QUERY_GET_TXN_DELETE_COUNT_FOR_STORE = "txn.GetTxnDeleteCountForStore"; - private static final String QUERY_COUNT_TRANSACTIONS = "txn.CountTransactions"; - private static final String QUERY_GET_TXN_CHANGES_FOR_STORE = "txn.GetTxnChangesForStore"; - private static final String QUERY_GET_TXN_CHANGES = "txn.GetTxnChanges"; - private static final String QUERY_GET_TXNS_UNUSED = "txn.GetTxnsUnused"; - - public Transaction getTxnById(final long txnId) - { - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_GET_TXN_BY_ID); - query.setLong("txnId", txnId).setReadOnly(true); - DirtySessionMethodInterceptor.setQueryFlushMode(session, query); - return query.uniqueResult(); - } - }; - Transaction txn = (Transaction) getHibernateTemplate().execute(callback); - // done - return txn; - } - - public Long getMinTxnCommitTime() - { - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_GET_MIN_COMMIT_TIME); - query.setReadOnly(true); - return query.uniqueResult(); - } - }; - Long commitTime = (Long) getHibernateTemplate().execute(callback); - // done - return (commitTime == null) ? 0L : commitTime; - } - - public Long getMaxTxnCommitTime() - { - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_GET_MAX_COMMIT_TIME); - query.setReadOnly(true); - return query.uniqueResult(); - } - }; - Long commitTime = (Long) getHibernateTemplate().execute(callback); - // done - return (commitTime == null) ? 
0L : commitTime; - } - - public Long getMaxTxnIdByCommitTime(final long maxCommitTime) - { - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_GET_MAX_ID_BY_COMMIT_TIME); - query.setLong("maxCommitTime", maxCommitTime); - query.setReadOnly(true); - return query.uniqueResult(); - } - }; - Long txnId = (Long) getHibernateTemplate().execute(callback); - // done - return txnId; - } - - @SuppressWarnings("unchecked") - public List getTxnsByMinCommitTime(final List includeTxnIds) - { - if (includeTxnIds.size() == 0) - { - return null; - } - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_GET_SELECTED_TXNS_BY_COMMIT_TIME_ASC); - query.setParameterList("includeTxnIds", includeTxnIds).setReadOnly(true); - DirtySessionMethodInterceptor.setQueryFlushMode(session, query); - return query.list(); - } - }; - List txns = (List) getHibernateTemplate().execute(callback); - // done - return txns; - } - - public int getTxnUpdateCount(final long txnId) - { - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_GET_TXN_UPDATE_COUNT_FOR_STORE); - query.setLong("txnId", txnId).setReadOnly(true); - DirtySessionMethodInterceptor.setQueryFlushMode(session, query); - return query.uniqueResult(); - } - }; - Long count = (Long) getHibernateTemplate().execute(callback); - // done - return count.intValue(); - } - - public int getTxnDeleteCount(final long txnId) - { - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_GET_TXN_DELETE_COUNT_FOR_STORE); - query.setLong("txnId", txnId).setReadOnly(true); - DirtySessionMethodInterceptor.setQueryFlushMode(session, query); - return query.uniqueResult(); - } - }; - Long count = (Long) getHibernateTemplate().execute(callback); - // done - return count.intValue(); - } - - public int getTransactionCount() - { - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_COUNT_TRANSACTIONS); - query.setMaxResults(1).setReadOnly(true); - DirtySessionMethodInterceptor.setQueryFlushMode(session, query); - return query.uniqueResult(); - } - }; - Long count = (Long) getHibernateTemplate().execute(callback); - // done - return count.intValue(); - } - - private static final Long TXN_ID_DUD = Long.valueOf(-1L); - private static final Long SERVER_ID_DUD = Long.valueOf(-1L); - private static final long MIN_TIME_QUERY_RANGE = 10L * 60L * 1000L; // 10 minutes - - @SuppressWarnings("unchecked") - public List getTxnsByCommitTimeAscending( - long fromTimeInclusive, - long toTimeExclusive, - int count, - List excludeTxnIds, - boolean remoteOnly) - { - // Start with some sane defaults - if (fromTimeInclusive < 0L) - { - fromTimeInclusive = getMinTxnCommitTime(); - } - if (toTimeExclusive < 0L || toTimeExclusive == Long.MAX_VALUE) - { - toTimeExclusive = ((long) getMaxTxnCommitTime()) + 1L; - } - // Get the time difference required - long diffTime = toTimeExclusive - fromTimeInclusive; - if (diffTime <= 0) - { - // There can be no results - return Collections.emptyList(); - } - - // Make sure that we have at least one entry in the exclude list - final List excludeTxnIdsInner = new ArrayList(excludeTxnIds == null 
? 1 : excludeTxnIds.size()); - if (excludeTxnIds == null || excludeTxnIds.isEmpty()) - { - excludeTxnIdsInner.add(TXN_ID_DUD); - } - else - { - excludeTxnIdsInner.addAll(excludeTxnIds); - } - final List excludeServerIds = new ArrayList(1); - if (remoteOnly) - { - // Get the current server ID. This can be null if no transactions have been written by - // a server with this IP address. - Long serverId = getServerIdOrNull(); - if (serverId == null) - { - excludeServerIds.add(SERVER_ID_DUD); - } - else - { - excludeServerIds.add(serverId); - } - } - else - { - excludeServerIds.add(SERVER_ID_DUD); - } - - List results = new ArrayList(count); - // Each query must be constrained in the time range, - // so query larger and larger sets until enough results are retrieved. - long iteration = 0L; - long queryFromTimeInclusive = fromTimeInclusive; - long queryToTimeExclusive = fromTimeInclusive; - int queryCount = count; - while ((results.size() < count) && (queryToTimeExclusive <= toTimeExclusive)) - { - iteration++; - queryFromTimeInclusive = queryToTimeExclusive; - queryToTimeExclusive += (iteration * MIN_TIME_QUERY_RANGE); - queryCount = count - results.size(); - - final long innerQueryFromTimeInclusive = queryFromTimeInclusive; - final long innerQueryToTimeExclusive = queryToTimeExclusive; - final int innerQueryCount = queryCount; - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_GET_TXNS_BY_COMMIT_TIME_ASC); - query.setLong("fromTimeInclusive", innerQueryFromTimeInclusive).setLong("toTimeExclusive", - innerQueryToTimeExclusive).setParameterList("excludeTxnIds", excludeTxnIdsInner) - .setParameterList("excludeServerIds", excludeServerIds).setMaxResults(innerQueryCount) - .setReadOnly(true); - return query.list(); - } - }; - List queryResults = (List) getHibernateTemplate().execute(callback); - // Copy results over - results.addAll(queryResults); - } - // done - return results; - } - - @SuppressWarnings("unchecked") - public List getTxnsByCommitTimeDescending( - long fromTimeInclusive, - long toTimeExclusive, - int count, - List excludeTxnIds, - boolean remoteOnly) - { - // Start with some sane defaults - if (fromTimeInclusive < 0L) - { - fromTimeInclusive = getMinTxnCommitTime(); - } - if (toTimeExclusive < 0L || toTimeExclusive == Long.MAX_VALUE) - { - toTimeExclusive = ((long) getMaxTxnCommitTime()) + 1L; - } - // Get the time difference required - long diffTime = toTimeExclusive - fromTimeInclusive; - if (diffTime <= 0) - { - // There can be no results - return Collections.emptyList(); - } - - // Make sure that we have at least one entry in the exclude list - final List excludeTxnIdsInner = new ArrayList(excludeTxnIds == null ? 1 : excludeTxnIds.size()); - if (excludeTxnIds == null || excludeTxnIds.isEmpty()) - { - excludeTxnIdsInner.add(TXN_ID_DUD); - } - else - { - excludeTxnIdsInner.addAll(excludeTxnIds); - } - final List excludeServerIds = new ArrayList(1); - if (remoteOnly) - { - // Get the current server ID. This can be null if no transactions have been written by - // a server with this IP address. - Long serverId = getServerIdOrNull(); - if (serverId == null) - { - excludeServerIds.add(SERVER_ID_DUD); - } - else - { - excludeServerIds.add(serverId); - } - } - else - { - excludeServerIds.add(SERVER_ID_DUD); - } - - List results = new ArrayList(count); - // Each query must be constrained in the time range, - // so query larger and larger sets until enough results are retrieved. 
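The growth of the query window described in the comment above (and implemented in the loop that follows) works out as triangular multiples of MIN_TIME_QUERY_RANGE: each pass adds iteration * 10 minutes to the upper bound. A minimal, runnable illustration, with an illustrative class name and times expressed as offsets from fromTimeInclusive:

/** Illustrative sketch: how the commit-time windows widen on each pass of the query loop. */
public class TxnQueryWindowSketch
{
    private static final long MIN_TIME_QUERY_RANGE = 10L * 60L * 1000L;   // 10 minutes, as above

    public static void main(String[] args)
    {
        long from = 0L;       // offset of queryFromTimeInclusive
        long to = from;       // offset of queryToTimeExclusive
        for (long iteration = 1; iteration <= 4; iteration++)
        {
            from = to;
            to += iteration * MIN_TIME_QUERY_RANGE;
            // Prints [0 min, 10 min), [10 min, 30 min), [30 min, 60 min), [60 min, 100 min)
            System.out.println("[" + from / 60000 + " min, " + to / 60000 + " min)");
        }
    }
}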
- long iteration = 0L; - long queryFromTimeInclusive = toTimeExclusive; - long queryToTimeExclusive = toTimeExclusive; - int queryCount = count; - while ((results.size() < count) && (queryFromTimeInclusive >= fromTimeInclusive)) - { - iteration++; - queryToTimeExclusive = queryFromTimeInclusive; - queryFromTimeInclusive -= (iteration * MIN_TIME_QUERY_RANGE); - queryCount = count - results.size(); - - final long innerQueryFromTimeInclusive = queryFromTimeInclusive; - final long innerQueryToTimeExclusive = queryToTimeExclusive; - final int innerQueryCount = queryCount; - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_GET_TXNS_BY_COMMIT_TIME_DESC); - query.setLong("fromTimeInclusive", innerQueryFromTimeInclusive).setLong("toTimeExclusive", - innerQueryToTimeExclusive).setParameterList("excludeTxnIds", excludeTxnIdsInner) - .setParameterList("excludeServerIds", excludeServerIds).setMaxResults(innerQueryCount) - .setReadOnly(true); - return query.list(); - } - }; - List queryResults = (List) getHibernateTemplate().execute(callback); - // Copy results over - results.addAll(queryResults); - } - // done - return results; - } - - @SuppressWarnings("unchecked") - public List getTxnChangesForStore(final StoreRef storeRef, final long txnId) - { - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_GET_TXN_CHANGES_FOR_STORE); - query.setLong("txnId", txnId).setString("protocol", storeRef.getProtocol()).setString("identifier", - storeRef.getIdentifier()).setReadOnly(true); - DirtySessionMethodInterceptor.setQueryFlushMode(session, query); - return query.list(); - } - }; - List results = (List) getHibernateTemplate().execute(callback); - // transform into a simpler form - List nodeRefs = new ArrayList(results.size()); - for (Node node : results) - { - NodeRef nodeRef = node.getNodeRef(); - nodeRefs.add(nodeRef); - } - // done - return nodeRefs; - } - - @SuppressWarnings("unchecked") - public List getTxnChanges(final long txnId) - { - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_GET_TXN_CHANGES); - query.setLong("txnId", txnId).setReadOnly(true); - DirtySessionMethodInterceptor.setQueryFlushMode(session, query); - return query.list(); - } - }; - List results = (List) getHibernateTemplate().execute(callback); - // transform into a simpler form - List nodeRefs = new ArrayList(results.size()); - for (Node node : results) - { - NodeRef nodeRef = node.getNodeRef(); - nodeRefs.add(nodeRef); - } - // done - return nodeRefs; - } - - @SuppressWarnings("unchecked") - public List getTxnsUnused(final Long minTxnId, final long maxCommitTime, final int count) - { - HibernateCallback callback = new HibernateCallback() - { - public Object doInHibernate(Session session) - { - Query query = session.getNamedQuery(QUERY_GET_TXNS_UNUSED); - query.setReadOnly(true).setMaxResults(count).setLong("minTxnId", minTxnId).setLong("maxCommitTime", - maxCommitTime); - DirtySessionMethodInterceptor.setQueryFlushMode(session, query); - return query.list(); - } - }; - List results = (List) getHibernateTemplate().execute(callback); - // done - return results; - } - - public void purgeTxn(Long txnId) - { - Transaction txn = (Transaction) getSession().get(TransactionImpl.class, txnId); - if (txn != null) - { - 
getHibernateTemplate().delete(txn); - } - } - - // ============ PROPERTY HELPER METHODS =================// - - public static Map convertToPersistentProperties( - Map in, - QNameDAO qnameDAO, - LocaleDAO localeDAO, - ContentDataDAO contentDataDAO, - DictionaryService dictionaryService) - { - Map propertyMap = new HashMap( - in.size() + 5); - for (Map.Entry entry : in.entrySet()) - { - Serializable value = entry.getValue(); - // Get the qname ID - QName propertyQName = entry.getKey(); - Long propertyQNameId = qnameDAO.getOrCreateQName(propertyQName).getFirst(); - // Get the locale ID - Long propertylocaleId = localeDAO.getOrCreateDefaultLocalePair().getFirst(); - // Get the property definition, if available - PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName); - // Add it to the map - HibernateNodeDaoServiceImpl.addValueToPersistedProperties(propertyMap, propertyDef, - HibernateNodeDaoServiceImpl.IDX_NO_COLLECTION, propertyQNameId, propertylocaleId, value, localeDAO, - contentDataDAO); - } - // Done - return propertyMap; - } - - /** - * The collection index used to indicate that the value is not part of a collection. All values from zero up are - * used for real collection indexes. - */ - private static final int IDX_NO_COLLECTION = -1; - - /** - * A method that adds properties to the given map. It copes with collections. - * - * @param propertyDef the property definition (null is allowed) - * @param collectionIndex the index of the property in the collection or -1 if we are not yet processing a - * collection - */ - private static void addValueToPersistedProperties( - Map propertyMap, - PropertyDefinition propertyDef, - int collectionIndex, - Long propertyQNameId, - Long propertyLocaleId, - Serializable value, - LocaleDAO localeDAO, - ContentDataDAO contentDataDAO) - { - if (value == null) - { - // The property is null. Null is null and cannot be massaged any other way. - NodePropertyValue npValue = HibernateNodeDaoServiceImpl.makeNodePropertyValue(propertyDef, null); - NodePropertyKey npKey = new NodePropertyKey(); - npKey.setListIndex(collectionIndex); - npKey.setQnameId(propertyQNameId); - npKey.setLocaleId(propertyLocaleId); - // Add it to the map - propertyMap.put(npKey, npValue); - // Done - return; - } - - // Get or spoof the property datatype - QName propertyTypeQName; - if (propertyDef == null) // property not recognised - { - // allow it for now - persisting excess properties can be useful sometimes - propertyTypeQName = DataTypeDefinition.ANY; - } - else - { - propertyTypeQName = propertyDef.getDataType().getName(); - } - - // A property may appear to be multi-valued if the model definition is loose and - // an unexploded collection is passed in. Otherwise, use the model-defined behaviour - // strictly. - boolean isMultiValued; - if (propertyTypeQName.equals(DataTypeDefinition.ANY)) - { - // It is multi-valued if required (we are not in a collection and the property is a new collection) - isMultiValued = (value != null) && (value instanceof Collection) - && (collectionIndex == IDX_NO_COLLECTION); - } - else - { - isMultiValued = propertyDef.isMultiValued(); - } - - // Handle different scenarios. - // - Do we need to explode a collection? - // - Does the property allow collections? 
- if (collectionIndex == IDX_NO_COLLECTION && isMultiValued && !(value instanceof Collection)) - { - // We are not (yet) processing a collection but the property should be part of a collection - HibernateNodeDaoServiceImpl.addValueToPersistedProperties(propertyMap, propertyDef, 0, propertyQNameId, - propertyLocaleId, value, localeDAO, contentDataDAO); - } - else if (collectionIndex == IDX_NO_COLLECTION && value instanceof Collection) - { - // We are not (yet) processing a collection and the property is a collection i.e. needs exploding - // Check that multi-valued properties are supported if the property is a collection - if (!isMultiValued) - { - throw new DictionaryException("A single-valued property of this type may not be a collection: \n" - + " Property: " + propertyDef + "\n" + " Type: " + propertyTypeQName + "\n" + " Value: " - + value); - } - // We have an allowable collection. - @SuppressWarnings("unchecked") - Collection collectionValues = (Collection) value; - // Persist empty collections directly. This is handled by the NodePropertyValue. - if (collectionValues.size() == 0) - { - NodePropertyValue npValue = HibernateNodeDaoServiceImpl.makeNodePropertyValue(null, - (Serializable) collectionValues); - NodePropertyKey npKey = new NodePropertyKey(); - npKey.setListIndex(HibernateNodeDaoServiceImpl.IDX_NO_COLLECTION); - npKey.setQnameId(propertyQNameId); - npKey.setLocaleId(propertyLocaleId); - // Add it to the map - propertyMap.put(npKey, npValue); - } - // Break it up and recurse to persist the values. - collectionIndex = -1; - for (Object collectionValueObj : collectionValues) - { - collectionIndex++; - if (collectionValueObj != null && !(collectionValueObj instanceof Serializable)) - { - throw new IllegalArgumentException("Node properties must be fully serializable, " - + "including values contained in collections. 
\n" + " Property: " + propertyDef + "\n" - + " Index: " + collectionIndex + "\n" + " Value: " + collectionValueObj); - } - Serializable collectionValue = (Serializable) collectionValueObj; - try - { - HibernateNodeDaoServiceImpl.addValueToPersistedProperties(propertyMap, propertyDef, - collectionIndex, propertyQNameId, propertyLocaleId, collectionValue, localeDAO, - contentDataDAO); - } - catch (Throwable e) - { - throw new AlfrescoRuntimeException("Failed to persist collection entry: \n" + " Property: " - + propertyDef + "\n" + " Index: " + collectionIndex + "\n" + " Value: " - + collectionValue, e); - } - } - } - else - { - // We are either processing collection elements OR the property is not a collection - // Collections of collections are only supported by type d:any - if (value instanceof Collection && !propertyTypeQName.equals(DataTypeDefinition.ANY)) - { - throw new DictionaryException( - "Collections of collections (Serializable) are only supported by type 'd:any': \n" - + " Property: " + propertyDef + "\n" + " Type: " + propertyTypeQName + "\n" - + " Value: " + value); - } - // Handle ContentData - if (value instanceof ContentData && propertyTypeQName.equals(DataTypeDefinition.CONTENT)) - { - // Needs converting to an ID - ContentData contentData = (ContentData) value; - value = contentDataDAO.createContentData(contentData).getFirst(); - } - // Handle MLText - if (value instanceof MLText) - { - // This needs to be split up into individual strings - MLText mlTextValue = (MLText) value; - for (Map.Entry mlTextEntry : mlTextValue.entrySet()) - { - Locale mlTextLocale = mlTextEntry.getKey(); - String mlTextStr = mlTextEntry.getValue(); - // Get the Locale ID for the text - Long mlTextLocaleId = localeDAO.getOrCreateLocalePair(mlTextLocale).getFirst(); - // This is persisted against the current locale, but as a d:text instance - NodePropertyValue npValue = new NodePropertyValue(DataTypeDefinition.TEXT, mlTextStr); - NodePropertyKey npKey = new NodePropertyKey(); - npKey.setListIndex(collectionIndex); - npKey.setQnameId(propertyQNameId); - npKey.setLocaleId(mlTextLocaleId); - // Add it to the map - propertyMap.put(npKey, npValue); - } - } - else - { - NodePropertyValue npValue = HibernateNodeDaoServiceImpl.makeNodePropertyValue(propertyDef, value); - NodePropertyKey npKey = new NodePropertyKey(); - npKey.setListIndex(collectionIndex); - npKey.setQnameId(propertyQNameId); - npKey.setLocaleId(propertyLocaleId); - // Add it to the map - propertyMap.put(npKey, npValue); - } - } - } - - /** - * Helper method to convert the Serializable value into a full, persistable {@link NodePropertyValue}. - *

- * Where the property definition is null, the value will take on the {@link DataTypeDefinition#ANY generic ANY} - * value. - *

- * Collections are NOT supported. These must be split up by the calling code before calling this method. Map - * instances are supported as plain serializable instances. - * - * @param propertyDef the property dictionary definition, may be null - * @param value the value, which will be converted according to the definition - may be null - * @return Returns the persistable property value - */ - private static NodePropertyValue makeNodePropertyValue(PropertyDefinition propertyDef, Serializable value) - { - // get property attributes - final QName propertyTypeQName; - if (propertyDef == null) // property not recognised - { - // allow it for now - persisting excess properties can be useful sometimes - propertyTypeQName = DataTypeDefinition.ANY; - } - else - { - propertyTypeQName = propertyDef.getDataType().getName(); - } - try - { - NodePropertyValue propertyValue = new NodePropertyValue(propertyTypeQName, value); - // done - return propertyValue; - } - catch (TypeConversionException e) - { - throw new TypeConversionException( - "The property value is not compatible with the type defined for the property: \n" + " property: " - + (propertyDef == null ? "unknown" : propertyDef) + "\n" + " value: " + value + "\n" - + " value type: " + value.getClass(), e); - } - } - - public static Serializable getPublicProperty( - Map propertyValues, - QName propertyQName, - QNameDAO qnameDAO, - LocaleDAO localeDAO, - ContentDataDAO contentDataDAO, - DictionaryService dictionaryService) - { - // Get the qname ID - Pair qnamePair = qnameDAO.getQName(propertyQName); - if (qnamePair == null) - { - // There is no persisted property with that QName, so we can't match anything - return null; - } - Long qnameId = qnamePair.getFirst(); - // Now loop over the properties and extract those with the given qname ID - SortedMap scratch = new TreeMap(); - for (Map.Entry entry : propertyValues.entrySet()) - { - NodePropertyKey propertyKey = entry.getKey(); - if (propertyKey.getQnameId().equals(qnameId)) - { - scratch.put(propertyKey, entry.getValue()); - } - } - // If we found anything, then collapse the properties to a Serializable - if (scratch.size() > 0) - { - PropertyDefinition propertyDef = dictionaryService.getProperty(propertyQName); - Serializable collapsedValue = HibernateNodeDaoServiceImpl.collapsePropertiesWithSameQName(propertyDef, - scratch, localeDAO, contentDataDAO); - return collapsedValue; - } - else - { - return null; - } - } - - public static Map convertToPublicProperties( - Map propertyValues, - QNameDAO qnameDAO, - LocaleDAO localeDAO, - ContentDataDAO contentDataDAO, - DictionaryService dictionaryService) - { - Map propertyMap = new HashMap(propertyValues.size(), 1.0F); - // Shortcut - if (propertyValues.size() == 0) - { - return propertyMap; - } - // We need to process the properties in order - SortedMap sortedPropertyValues = new TreeMap( - propertyValues); - // A working map. Ordering is important. 
- SortedMap scratch = new TreeMap(); - // Iterate (sorted) over the map entries and extract values with the same qname - Long currentQNameId = Long.MIN_VALUE; - Iterator> iterator = sortedPropertyValues.entrySet().iterator(); - while (true) - { - Long nextQNameId = null; - NodePropertyKey nextPropertyKey = null; - NodePropertyValue nextPropertyValue = null; - // Record the next entry's values - if (iterator.hasNext()) - { - Map.Entry entry = iterator.next(); - nextPropertyKey = entry.getKey(); - nextPropertyValue = entry.getValue(); - nextQNameId = nextPropertyKey.getQnameId(); - } - // If the QName is going to change, and we have some entries to process, then process them. - if (scratch.size() > 0 && (nextQNameId == null || !nextQNameId.equals(currentQNameId))) - { - QName currentQName = qnameDAO.getQName(currentQNameId).getSecond(); - PropertyDefinition currentPropertyDef = dictionaryService.getProperty(currentQName); - // We have added something to the scratch properties but the qname has just changed - Serializable collapsedValue = null; - // We can shortcut if there is only one value - if (scratch.size() == 1) - { - // There is no need to collapse list indexes - collapsedValue = HibernateNodeDaoServiceImpl.collapsePropertiesWithSameQNameAndListIndex( - currentPropertyDef, scratch, localeDAO, contentDataDAO); - } - else - { - // There is more than one value so the list indexes need to be collapsed - collapsedValue = HibernateNodeDaoServiceImpl.collapsePropertiesWithSameQName(currentPropertyDef, - scratch, localeDAO, contentDataDAO); - } - // If the property is multi-valued then the output property must be a collection - if (currentPropertyDef != null && currentPropertyDef.isMultiValued()) - { - if (collapsedValue != null && !(collapsedValue instanceof Collection)) - { - // Can't use Collections.singletonList: ETHREEOH-1172 - ArrayList collection = new ArrayList(1); - collection.add(collapsedValue); - collapsedValue = collection; - } - } - // Store the value - propertyMap.put(currentQName, collapsedValue); - // Reset - scratch.clear(); - } - if (nextQNameId != null) - { - // Add to the current entries - scratch.put(nextPropertyKey, nextPropertyValue); - currentQNameId = nextQNameId; - } - else - { - // There is no next value to process - break; - } - } - // Done - return propertyMap; - } - - private static Serializable collapsePropertiesWithSameQName( - PropertyDefinition propertyDef, - SortedMap sortedPropertyValues, - LocaleDAO localeDAO, - ContentDataDAO contentDataDAO) - { - Serializable result = null; - Collection collectionResult = null; - // A working map. Ordering is not important for this map. - Map scratch = new HashMap(3); - // Iterate (sorted) over the map entries and extract values with the same list index - Integer currentListIndex = Integer.MIN_VALUE; - Iterator> iterator = sortedPropertyValues.entrySet().iterator(); - while (true) - { - Integer nextListIndex = null; - NodePropertyKey nextPropertyKey = null; - NodePropertyValue nextPropertyValue = null; - // Record the next entry's values - if (iterator.hasNext()) - { - Map.Entry entry = iterator.next(); - nextPropertyKey = entry.getKey(); - nextPropertyValue = entry.getValue(); - nextListIndex = nextPropertyKey.getListIndex(); - } - // If the list index is going to change, and we have some entries to process, then process them. 
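Both convertToPublicProperties and the collapse helper it delegates to use the same single-pass idiom: walk a sorted map, accumulate entries into a "scratch" map while one component of the key stays the same, and flush the batch when that component changes or the input runs out. A compact, self-contained illustration of just that idiom follows; it uses hypothetical string keys of the form "<groupId>:<suffix>" and nothing Alfresco-specific:

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    // Groups consecutive entries of a sorted map by the integer group id in the
    // key, flushing the accumulated "scratch" batch whenever the id changes or
    // the input runs out: the same single-pass shape as the deleted loops.
    public class GroupConsecutiveSketch
    {
        public static Map<Integer, List<String>> groupSorted(TreeMap<String, String> sorted)
        {
            Map<Integer, List<String>> result = new LinkedHashMap<Integer, List<String>>();
            List<String> scratch = new ArrayList<String>();
            Integer currentGroup = null;
            Iterator<Map.Entry<String, String>> it = sorted.entrySet().iterator();
            while (true)
            {
                Integer nextGroup = null;
                String nextValue = null;
                if (it.hasNext())
                {
                    Map.Entry<String, String> entry = it.next();
                    // Key convention for the sketch: "<groupId>:<suffix>"
                    nextGroup = Integer.valueOf(entry.getKey().split(":")[0]);
                    nextValue = entry.getValue();
                }
                // Flush when the group is about to change or the input is exhausted
                if (!scratch.isEmpty() && (nextGroup == null || !nextGroup.equals(currentGroup)))
                {
                    result.put(currentGroup, new ArrayList<String>(scratch));
                    scratch.clear();
                }
                if (nextGroup == null)
                {
                    break;                      // nothing left to process
                }
                scratch.add(nextValue);         // accumulate into the current batch
                currentGroup = nextGroup;
            }
            return result;
        }

        public static void main(String[] args)
        {
            TreeMap<String, String> sorted = new TreeMap<String, String>();
            sorted.put("1:a", "red");
            sorted.put("1:b", "green");
            sorted.put("2:a", "blue");
            System.out.println(groupSorted(sorted)); // {1=[red, green], 2=[blue]}
        }
    }

The real loops differ only in that the flush step calls into the collapse helpers and adds the extra handling for multi-valued property definitions.
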
- if (scratch.size() > 0 && (nextListIndex == null || !nextListIndex.equals(currentListIndex))) - { - // We have added something to the scratch properties but the index has just changed - Serializable collapsedValue = HibernateNodeDaoServiceImpl.collapsePropertiesWithSameQNameAndListIndex( - propertyDef, scratch, localeDAO, contentDataDAO); - // Store. If there is a value already, then we must build a collection. - if (result == null) - { - result = collapsedValue; - } - else if (collectionResult != null) - { - // We have started a collection, so just add the value to it. - collectionResult.add(collapsedValue); - } - else - { - // We already had a result, and now have another. A collection has not been - // started. We start a collection and explicitly keep track of it so that - // we don't get mixed up with collections of collections (ETHREEOH-2064). - collectionResult = new ArrayList(20); - collectionResult.add(result); // Add the first result - collectionResult.add(collapsedValue); // Add the new value - result = (Serializable) collectionResult; - } - // Reset - scratch.clear(); - } - if (nextListIndex != null) - { - // Add to the current entries - scratch.put(nextPropertyKey, nextPropertyValue); - currentListIndex = nextListIndex; - } - else - { - // There is no next value to process - break; - } - } - // Make sure that multi-valued properties are returned as a collection - if (propertyDef != null && propertyDef.isMultiValued() && result != null && !(result instanceof Collection)) - { - // Can't use Collections.singletonList: ETHREEOH-1172 - ArrayList collection = new ArrayList(1); - collection.add(result); - result = collection; - } - // Done - return result; - } - - /** - * At this level, the properties have the same qname and list index. They can only be separated by locale. - * Typically, MLText will fall into this category as only. - *
<p>
- * If there are multiple values then they can only be separated by locale. If they are separated by locale, then - * they have to be text-based. This means that the only way to store them is via MLText. Any other multi-locale - * properties cannot be deserialized. - */ - private static Serializable collapsePropertiesWithSameQNameAndListIndex( - PropertyDefinition propertyDef, - Map propertyValues, - LocaleDAO localeDAO, - ContentDataDAO contentDataDAO) - { - int propertyValuesSize = propertyValues.size(); - Serializable value = null; - if (propertyValuesSize == 0) - { - // Nothing to do - } - for (Map.Entry entry : propertyValues.entrySet()) - { - NodePropertyKey propertyKey = entry.getKey(); - NodePropertyValue propertyValue = entry.getValue(); - - if (propertyValuesSize == 1 - && (propertyDef == null || !propertyDef.getDataType().getName().equals(DataTypeDefinition.MLTEXT))) - { - // This is the only value and it is NOT to be converted to MLText - value = HibernateNodeDaoServiceImpl.makeSerializableValue(propertyDef, propertyValue, contentDataDAO); - } - else - { - // There are multiple values, so add them to MLText - MLText mltext = (value == null) ? new MLText() : (MLText) value; - try - { - String mlString = (String) propertyValue.getValue(DataTypeDefinition.TEXT); - // Get the locale - Long localeId = propertyKey.getLocaleId(); - Locale locale = localeDAO.getLocalePair(localeId).getSecond(); - // Add to the MLText object - mltext.addValue(locale, mlString); - } - catch (TypeConversionException e) - { - // Ignore - logger.warn("Unable to add property value to MLText instance: " + propertyValue); - } - value = mltext; - } - } - // Done - return value; - } - - /** - * Extracts the externally-visible property from the persistable value. - * - * @param propertyDef the model property definition - may be null - * @param propertyValue the persisted property - * @param contentDataDAO component that handles ContentData persistence - * @return Returns the value of the property in the format dictated by the property definition, or null if the - * property value is null - */ - private static Serializable makeSerializableValue( - PropertyDefinition propertyDef, - NodePropertyValue propertyValue, - ContentDataDAO contentDataDAO) - { - if (propertyValue == null) - { - return null; - } - // get property attributes - final QName propertyTypeQName; - if (propertyDef == null) - { - // allow this for now - propertyTypeQName = DataTypeDefinition.ANY; - } - else - { - propertyTypeQName = propertyDef.getDataType().getName(); - } - try - { - Serializable value = propertyValue.getValue(propertyTypeQName); - // Handle conversions to and from ContentData - if (propertyTypeQName.equals(DataTypeDefinition.CONTENT) && (value instanceof Long)) - { - Pair contentDataPair = contentDataDAO.getContentData((Long) value); - if (contentDataPair == null) - { - // It is invalid - value = null; - } - else - { - value = contentDataPair.getSecond(); - } - } - // done - return value; - } - catch (TypeConversionException e) - { - throw new TypeConversionException( - "The property value is not compatible with the type defined for the property: \n" + " property: " - + (propertyDef == null ? 
"unknown" : propertyDef) + "\n" + " property value: " - + propertyValue, e); - } - } - - private static class NodeInfo implements Serializable - { - private static final long serialVersionUID = -2167221525380802365L; - private final boolean isRoot; - private final boolean isStoreRoot; - private final Map parentAssocInfo; - - public NodeInfo(Node node, NodeDAO nodeDAO, QNameDAO qnameDAO, List parents) - { - this.isRoot = nodeDAO.hasNodeAspect(node.getId(), ContentModel.ASPECT_ROOT); - this.isStoreRoot = node.getTypeQName(qnameDAO).equals(ContentModel.TYPE_STOREROOT); - this.parentAssocInfo = new HashMap(5); - for (Object parent : parents) - { - ChildAssoc parentAssoc = null; - if (parent instanceof ChildAssoc) - { - parentAssoc = (ChildAssoc) parent; - } - else if (parent.getClass().isArray()) - { - parentAssoc = (ChildAssoc) Array.get(parent, 0); - } - if (parentAssoc != null) - { - // Populate the results - parentAssocInfo.put(parentAssoc.getId(), new ParentAssocInfo(parentAssoc, qnameDAO)); - } - } - } - - private NodeInfo(NodeInfo copy) - { - this.isRoot = copy.isRoot; - this.isStoreRoot = copy.isStoreRoot; - this.parentAssocInfo = new HashMap(copy.parentAssocInfo); - } - - public boolean isRoot() - { - return isRoot; - } - - public boolean isStoreRoot() - { - return isStoreRoot; - } - - public Map getParentAssocs() - { - return parentAssocInfo; - } - - public NodeInfo addAssoc(Long assocId, ChildAssoc parentAssoc, QNameDAO qnameDAO) - { - return addAssoc(assocId, new ParentAssocInfo(parentAssoc, qnameDAO)); - } - - public NodeInfo addAssoc(Long assocId, ParentAssocInfo parentAssocInfo) - { - NodeInfo copy = new NodeInfo(this); - copy.parentAssocInfo.put(assocId, parentAssocInfo); - return copy; - } - - public NodeInfo removeAssoc(Long assocId) - { - NodeInfo copy = new NodeInfo(this); - copy.parentAssocInfo.remove(assocId); - return copy; - } - - } - - private static class ParentAssocInfo implements Serializable - { - private static final long serialVersionUID = -3888870827401574704L; - private final ChildAssociationRef childAssociationRef; - private final Long parentNodeId; - - public ParentAssocInfo(ChildAssoc parentAssoc, QNameDAO qnameDAO) - { - this.childAssociationRef = parentAssoc.getChildAssocRef(qnameDAO); - this.parentNodeId = parentAssoc.getParent().getId(); - } - - public ChildAssociationRef getChildAssociationRef() - { - // Return a copy, as it's mutated by prependPaths - return new ChildAssociationRef(childAssociationRef.getTypeQName(), childAssociationRef.getParentRef(), - childAssociationRef.getQName(), childAssociationRef.getChildRef(), childAssociationRef.isPrimary(), - childAssociationRef.getNthSibling()); - } - - public Long getParentNodeId() - { - return parentNodeId; - } - } -} \ No newline at end of file diff --git a/source/java/org/alfresco/repo/node/db/hibernate/SessionSizeManagementTest.java b/source/java/org/alfresco/repo/node/db/hibernate/SessionSizeManagementTest.java deleted file mode 100644 index e95bc0645b..0000000000 --- a/source/java/org/alfresco/repo/node/db/hibernate/SessionSizeManagementTest.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright (C) 2005-2010 Alfresco Software Limited. - * - * This file is part of Alfresco - * - * Alfresco is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * Alfresco is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with Alfresco. If not, see . - */ -package org.alfresco.repo.node.db.hibernate; - -import java.lang.reflect.Method; - -import org.alfresco.model.ContentModel; -import org.alfresco.repo.node.BaseNodeServiceTest; -import org.alfresco.repo.node.db.DbNodeServiceImpl; -import org.alfresco.repo.transaction.SingleEntryTransactionResourceInterceptor; -import org.alfresco.service.cmr.repository.NodeService; -import org.alfresco.service.namespace.NamespaceService; -import org.alfresco.service.namespace.QName; - -/** - * Tests the session size limiters in the context of a full stack. - * - * @see org.alfresco.util.resource.MethodResourceManager - * @see org.alfresco.repo.transaction.TransactionResourceInterceptor - * @see org.alfresco.repo.domain.hibernate.SessionSizeResourceManager - * - * @author Derek Hulley - */ -public class SessionSizeManagementTest extends BaseNodeServiceTest -{ - private SingleEntryTransactionResourceInterceptor interceptor; - private Method createNodesMethod; - - public SessionSizeManagementTest() - { - try - { - Class clazz = SessionSizeManagementTest.class; - createNodesMethod = clazz.getMethod( - "createNodes", - new Class[] {NodeService.class, Integer.TYPE, Boolean.TYPE}); - } - catch (Exception e) - { - throw new RuntimeException("Instantiation failed", e); - } - } - - /** - * Get the config locations - * - * @return an array containing the config locations - */ - protected String[] getConfigLocations() - { - return new String[] {"session-size-test-context.xml"}; - } - - @Override - protected NodeService getNodeService() - { - NodeService nodeService = (NodeService) applicationContext.getBean("testSessionSizeDbNodeService"); - return nodeService; - } - - @Override - protected void onSetUpInTransaction() throws Exception - { - super.onSetUpInTransaction(); - // Get the interceptor for manual testing - interceptor = (SingleEntryTransactionResourceInterceptor) applicationContext.getBean("testSessionSizeResourceInterceptor"); - } - - /** Helper to create a given number of nodes using the provided service */ - public void createNodes(NodeService nodeService, int count, boolean manualFlush) - { - for (int i = 0; i < count; i++) - { - long beforeNs = System.nanoTime(); - nodeService.createNode( - rootNodeRef, - ContentModel.ASSOC_CHILDREN, - QName.createQName(NamespaceService.ALFRESCO_URI, "child-" + i), - ContentModel.TYPE_FOLDER); - long deltaNs = System.nanoTime() - beforeNs; - // Perform manual flush if necessary - if (manualFlush) - { - interceptor.performManualCheck(createNodesMethod, deltaNs); - } - } - } - - private static final int LOAD_COUNT = 1000; - /** - * Create a bunch of nodes and see that the auto-clear is working - */ - public synchronized void testBulkLoad() throws Exception - { - NodeService nodeService = getNodeService(); - createNodes(nodeService, LOAD_COUNT, false); - // We can't check the session size as this is dependent on machine speed - } - - /** - * Create a bunch of nodes and see that the manual clearing is working. The - * original node service is used for this. 
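The createNodes helper in the deleted test above wraps every createNode call in a System.nanoTime() measurement and, when manual flushing is requested, passes the elapsed time to the interceptor's performManualCheck. A small self-contained sketch of that measure-then-report loop, with a hypothetical NanoTimeListener standing in for SingleEntryTransactionResourceInterceptor:

    // Sketch of the timing pattern used by the deleted createNodes helper:
    // measure each unit of work with System.nanoTime() and hand the delta to a
    // listener that may decide to flush or clear resources. NanoTimeListener is
    // a hypothetical stand-in, not an Alfresco interface.
    public class TimedBatchSketch
    {
        interface NanoTimeListener
        {
            void afterUnitOfWork(int index, long elapsedNanos);
        }

        public static void runBatch(int count, Runnable unitOfWork, NanoTimeListener listener)
        {
            for (int i = 0; i < count; i++)
            {
                long beforeNs = System.nanoTime();
                unitOfWork.run();                           // e.g. nodeService.createNode(...)
                long deltaNs = System.nanoTime() - beforeNs;
                if (listener != null)
                {
                    listener.afterUnitOfWork(i, deltaNs);   // the "manual check" hook
                }
            }
        }

        public static void main(String[] args)
        {
            runBatch(3,
                    new Runnable() { public void run() { Math.sqrt(42); } },
                    new NanoTimeListener()
                    {
                        public void afterUnitOfWork(int index, long elapsedNanos)
                        {
                            System.out.println("unit " + index + " took " + elapsedNanos + " ns");
                        }
                    });
        }
    }
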
- */ - public synchronized void testManualOperation() throws Exception - { - NodeService nodeService = (NodeService) applicationContext.getBean("dbNodeServiceImpl"); - if (!(nodeService instanceof DbNodeServiceImpl)) - { - fail("This test requires the unwrapped raw DbNodeServiceImpl"); - } - - createNodes(nodeService, LOAD_COUNT, true); - } -} diff --git a/source/java/org/alfresco/repo/node/index/IndexTransactionTrackerTest.java b/source/java/org/alfresco/repo/node/index/IndexTransactionTrackerTest.java index b8885f76fd..819036894d 100644 --- a/source/java/org/alfresco/repo/node/index/IndexTransactionTrackerTest.java +++ b/source/java/org/alfresco/repo/node/index/IndexTransactionTrackerTest.java @@ -25,7 +25,6 @@ import junit.framework.TestCase; import org.alfresco.model.ContentModel; import org.alfresco.repo.content.ContentStore; import org.alfresco.repo.domain.node.NodeDAO; -import org.alfresco.repo.node.db.NodeDaoService; import org.alfresco.repo.search.Indexer; import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer; import org.alfresco.repo.security.authentication.AuthenticationComponent; diff --git a/source/java/org/alfresco/repo/remote/LoaderRemoteServer.java b/source/java/org/alfresco/repo/remote/LoaderRemoteServer.java index c6c466c3ec..87b777f1d9 100644 --- a/source/java/org/alfresco/repo/remote/LoaderRemoteServer.java +++ b/source/java/org/alfresco/repo/remote/LoaderRemoteServer.java @@ -18,9 +18,15 @@ */ package org.alfresco.repo.remote; +import java.io.ByteArrayInputStream; +import java.io.Serializable; +import java.nio.charset.Charset; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + import org.alfresco.model.ContentModel; import org.alfresco.repo.content.encoding.ContentCharsetFinder; -import org.alfresco.repo.node.db.NodeDaoService; import org.alfresco.repo.security.authentication.AuthenticationUtil; import org.alfresco.repo.transaction.RetryingTransactionHelper; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; @@ -29,7 +35,12 @@ import org.alfresco.service.cmr.model.FileFolderService; import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.remote.FileFolderRemote; import org.alfresco.service.cmr.remote.LoaderRemote; -import org.alfresco.service.cmr.repository.*; +import org.alfresco.service.cmr.repository.ChildAssociationRef; +import org.alfresco.service.cmr.repository.ContentWriter; +import org.alfresco.service.cmr.repository.MimetypeService; +import org.alfresco.service.cmr.repository.NodeRef; +import org.alfresco.service.cmr.repository.NodeService; +import org.alfresco.service.cmr.repository.StoreRef; import org.alfresco.service.cmr.security.AuthenticationService; import org.alfresco.service.namespace.NamespaceService; import org.alfresco.service.namespace.QName; @@ -38,13 +49,6 @@ import org.alfresco.util.PropertyMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import java.io.ByteArrayInputStream; -import java.io.Serializable; -import java.nio.charset.Charset; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - /** * Server side implementation of the LoaderServiceTransport transport * layer. 
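With NodeDaoService gone, LoaderRemoteServer's repository work continues to run inside retried transactional callbacks (the RetryingTransactionHelper and RetryingTransactionCallback imports survive the import reshuffle above). The sketch below only illustrates that callback-and-retry shape; it deliberately defines its own tiny Callback interface and retry loop instead of the real RetryingTransactionHelper.doInTransaction API:

    // Illustration of the "retrying callback" shape only, not the Alfresco API.
    // Real code passes a RetryingTransactionCallback to the transaction helper.
    public class RetryingCallbackSketch
    {
        interface Callback<T>
        {
            T execute() throws Exception;
        }

        /** Run the callback, retrying up to maxAttempts times on failure. */
        public static <T> T runWithRetry(Callback<T> callback, int maxAttempts) throws Exception
        {
            if (maxAttempts < 1)
            {
                throw new IllegalArgumentException("maxAttempts must be at least 1");
            }
            Exception lastFailure = null;
            for (int attempt = 1; attempt <= maxAttempts; attempt++)
            {
                try
                {
                    return callback.execute();
                }
                catch (Exception e)
                {
                    lastFailure = e;        // remember the failure and retry
                }
            }
            throw lastFailure;
        }

        public static void main(String[] args) throws Exception
        {
            final int[] calls = new int[1];
            String result = runWithRetry(new Callback<String>()
            {
                public String execute() throws Exception
                {
                    if (++calls[0] < 2)
                    {
                        throw new IllegalStateException("transient failure");
                    }
                    return "done after " + calls[0] + " attempts";
                }
            }, 3);
            System.out.println(result);     // done after 2 attempts
        }
    }
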
This is the class that gets exported remotely as it contains the @@ -65,7 +69,6 @@ public class LoaderRemoteServer implements LoaderRemote private RetryingTransactionHelper retryingTransactionHelper; private AuthenticationService authenticationService; private NodeService nodeService; - private NodeDaoService nodeDaoService; private FileFolderService fileFolderService; private FileFolderRemote fileFolderRemote; private MimetypeService mimetypeService; @@ -95,14 +98,6 @@ public class LoaderRemoteServer implements LoaderRemote this.nodeService = nodeService; } - /** - * @param nodeDaoService the DAO for node queries - */ - public void setNodeDaoService(NodeDaoService nodeDaoService) - { - this.nodeDaoService = nodeDaoService; - } - /** * @param fileFolderService the file-specific service */ diff --git a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneTest.java b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneTest.java index a4383c0e30..1a5a5689c9 100644 --- a/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneTest.java +++ b/source/java/org/alfresco/repo/search/impl/lucene/ADMLuceneTest.java @@ -49,7 +49,6 @@ import org.alfresco.repo.dictionary.DictionaryListener; import org.alfresco.repo.dictionary.DictionaryNamespaceComponent; import org.alfresco.repo.dictionary.M2Model; import org.alfresco.repo.dictionary.NamespaceDAOImpl; -import org.alfresco.repo.domain.hibernate.SessionSizeResourceManager; import org.alfresco.repo.node.BaseNodeServiceTest; import org.alfresco.repo.node.NodeBulkLoader; import org.alfresco.repo.search.MLAnalysisMode; @@ -631,7 +630,6 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener { public Object execute() throws Throwable { - SessionSizeResourceManager.setDisableInTransaction(); for (int i = 0; i < 100; i++) { ResultSet results = searcher.query(rootNodeRef.getStoreRef(), "lucene", query); @@ -1369,8 +1367,6 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener private void doBulkTest(int n) throws Exception { - SessionSizeResourceManager.setDisableInTransaction(); - Map testProperties = new HashMap(); testProperties.put(QName.createQName(TEST_NAMESPACE, "text-indexed-stored-tokenised-atomic"), "BULK"); for (int i = 0; i < n; i++) @@ -2202,7 +2198,6 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener { public Object execute() throws Throwable { - SessionSizeResourceManager.setDisableInTransaction(); for (int i = 0; i < 100; i+=10) { HashSet refs = new HashSet(); @@ -2310,7 +2305,6 @@ public class ADMLuceneTest extends TestCase implements DictionaryListener { public Object execute() throws Throwable { - SessionSizeResourceManager.setDisableInTransaction(); for (int i = 0; i < 20; i++) { HashSet refs = new HashSet(); diff --git a/source/java/org/alfresco/repo/security/permissions/impl/AbstractPermissionTest.java b/source/java/org/alfresco/repo/security/permissions/impl/AbstractPermissionTest.java index 848f71ec35..92c5c0d444 100644 --- a/source/java/org/alfresco/repo/security/permissions/impl/AbstractPermissionTest.java +++ b/source/java/org/alfresco/repo/security/permissions/impl/AbstractPermissionTest.java @@ -30,7 +30,6 @@ import junit.framework.TestCase; import org.alfresco.model.ContentModel; import org.alfresco.repo.domain.node.NodeDAO; import org.alfresco.repo.domain.permissions.AclDAO; -import org.alfresco.repo.node.db.NodeDaoService; import org.alfresco.repo.security.authentication.AuthenticationComponent; import 
org.alfresco.repo.security.authentication.AuthenticationUtil; import org.alfresco.repo.security.authentication.MutableAuthenticationDao; diff --git a/source/java/org/alfresco/repo/security/permissions/impl/model/PermissionModelTest.java b/source/java/org/alfresco/repo/security/permissions/impl/model/PermissionModelTest.java index 4425101a56..65c7201576 100644 --- a/source/java/org/alfresco/repo/security/permissions/impl/model/PermissionModelTest.java +++ b/source/java/org/alfresco/repo/security/permissions/impl/model/PermissionModelTest.java @@ -19,19 +19,15 @@ package org.alfresco.repo.security.permissions.impl.model; import java.util.Collections; -import java.util.HashSet; import java.util.Random; import java.util.Set; -import org.alfresco.model.ContentModel; -import org.alfresco.repo.domain.hibernate.SessionSizeResourceManager; import org.alfresco.repo.security.permissions.PermissionEntry; import org.alfresco.repo.security.permissions.PermissionReference; import org.alfresco.repo.security.permissions.impl.AbstractPermissionTest; import org.alfresco.repo.security.permissions.impl.SimplePermissionReference; import org.alfresco.repo.security.permissions.impl.RequiredPermission.On; import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback; -import org.alfresco.service.cmr.repository.ChildAssociationRef; import org.alfresco.service.namespace.QName; public class PermissionModelTest extends AbstractPermissionTest @@ -247,12 +243,8 @@ public class PermissionModelTest extends AbstractPermissionTest namespacePrefixResolver), "Read"),null, Collections. emptySet(), On.NODE); permissionModelDAO.getRequiredPermissions(null, null, Collections. emptySet(), On.NODE); - permissionModelDAO.getGranteePermissions(null); - - Set granters = permissionModelDAO.getGrantingPermissions(null); - permissionModelDAO.getGlobalPermissionEntries().contains(null); } diff --git a/source/java/org/alfresco/repo/transaction/AlfrescoTransactionSupport.java b/source/java/org/alfresco/repo/transaction/AlfrescoTransactionSupport.java index b3d9531a4a..775cf0652f 100644 --- a/source/java/org/alfresco/repo/transaction/AlfrescoTransactionSupport.java +++ b/source/java/org/alfresco/repo/transaction/AlfrescoTransactionSupport.java @@ -29,14 +29,12 @@ import java.util.Set; import org.alfresco.error.AlfrescoRuntimeException; import org.alfresco.repo.cache.TransactionalCache; -import org.alfresco.repo.domain.hibernate.DirtySessionMethodInterceptor; -import org.alfresco.repo.domain.hibernate.SessionSizeResourceManager; import org.alfresco.repo.node.integrity.IntegrityChecker; import org.alfresco.repo.search.impl.lucene.LuceneIndexerAndSearcher; import org.alfresco.util.GUID; -import org.springframework.extensions.surf.util.ParameterCheck; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.springframework.extensions.surf.util.ParameterCheck; import org.springframework.orm.hibernate3.SessionFactoryUtils; import org.springframework.transaction.support.TransactionSynchronization; import org.springframework.transaction.support.TransactionSynchronizationAdapter; @@ -673,10 +671,6 @@ public abstract class AlfrescoTransactionSupport // These are still considered part of the transaction so are executed here doBeforeCommit(readOnly); - // HACK: In order to control Hibernate's flush behaviour, we mark the point at which - // we start read-only operations during a commit - SessionSizeResourceManager.setCommitStarted(); - // Check integrity for (IntegrityChecker 
integrityChecker : integrityCheckers) { diff --git a/source/java/org/alfresco/repo/version/VersionMigrator.java b/source/java/org/alfresco/repo/version/VersionMigrator.java index c2950d8f91..2f70472f5b 100644 --- a/source/java/org/alfresco/repo/version/VersionMigrator.java +++ b/source/java/org/alfresco/repo/version/VersionMigrator.java @@ -31,7 +31,6 @@ import org.alfresco.model.ContentModel; import org.alfresco.repo.admin.patch.impl.MigrateVersionStorePatch; import org.alfresco.repo.batch.BatchProcessor; import org.alfresco.repo.batch.BatchProcessor.BatchProcessWorkerAdaptor; -import org.alfresco.repo.domain.hibernate.SessionSizeResourceManager; import org.alfresco.repo.lock.JobLockService; import org.alfresco.repo.node.MLPropertyInterceptor; import org.alfresco.repo.policy.BehaviourFilter; @@ -439,7 +438,6 @@ public class VersionMigrator implements ApplicationEventPublisherAware int batchCount = 0; boolean wasMLAware = MLPropertyInterceptor.setMLAware(true); - SessionSizeResourceManager.setDisableInTransaction(); try { @@ -623,7 +621,6 @@ public class VersionMigrator implements ApplicationEventPublisherAware finally { MLPropertyInterceptor.setMLAware(wasMLAware); - SessionSizeResourceManager.setEnableInTransaction(); } return migrationComplete; @@ -674,7 +671,6 @@ public class VersionMigrator implements ApplicationEventPublisherAware int batchCount = 0; boolean wasMLAware = MLPropertyInterceptor.setMLAware(true); - SessionSizeResourceManager.setDisableInTransaction(); try { @@ -760,7 +756,6 @@ public class VersionMigrator implements ApplicationEventPublisherAware finally { MLPropertyInterceptor.setMLAware(wasMLAware); - SessionSizeResourceManager.setEnableInTransaction(); } if (notMigratedCount > 0)
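The VersionMigrator hunks above drop the SessionSizeResourceManager toggles but keep the surrounding discipline for the one thread-bound switch that remains: capture the previous value returned by MLPropertyInterceptor.setMLAware(true) and put it back in a finally block. A generic, standalone sketch of that save-and-restore pattern, using a hypothetical ThreadLocal-backed Flag in place of MLPropertyInterceptor:

    // Sketch of the set-and-restore pattern kept in VersionMigrator:
    //   boolean was = Flag.set(true); try { ... } finally { Flag.set(was); }
    // "Flag" is a hypothetical stand-in, not MLPropertyInterceptor itself.
    public class FlagRestoreSketch
    {
        static final class Flag
        {
            private static final ThreadLocal<Boolean> VALUE = new ThreadLocal<Boolean>()
            {
                protected Boolean initialValue()
                {
                    return Boolean.FALSE;
                }
            };

            /** Sets the flag for the current thread and returns the previous value. */
            static boolean set(boolean newValue)
            {
                boolean previous = VALUE.get().booleanValue();
                VALUE.set(Boolean.valueOf(newValue));
                return previous;
            }

            static boolean get()
            {
                return VALUE.get().booleanValue();
            }
        }

        public static void main(String[] args)
        {
            boolean wasAware = Flag.set(true);      // remember the previous setting
            try
            {
                System.out.println("inside batch, flag=" + Flag.get());   // true
            }
            finally
            {
                Flag.set(wasAware);                 // always restore, even on failure
            }
            System.out.println("after batch, flag=" + Flag.get());        // false
        }
    }

The try/finally guarantees the flag is reset on the calling thread even when the batch throws, which is the property the original code relies on.
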